/**
 * Execute statements from an include file
 */
void include(String content) throws Exception {
    InputStream input = new ByteArrayInputStream(content.getBytes("UTF-8"));
    HplsqlLexer lexer = new HplsqlLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    HplsqlParser parser = new HplsqlParser(tokens);
    ParseTree tree = parser.program();
    visit(tree);
}
private static SQLParser createSQLParser(final DatabaseType databaseType, final Lexer lexer) {
    TokenStream tokenStream = new CommonTokenStream(lexer);
    switch (databaseType) {
        case H2:
        case MySQL:
            return new MySQLParser(tokenStream);
        case PostgreSQL:
            return new PostgreSQLParser(tokenStream);
        case SQLServer:
            return new SQLServerParser(tokenStream);
        case Oracle:
            return new OracleParser(tokenStream);
        default:
            throw new UnsupportedOperationException(String.format("Unsupported database type [%s].", databaseType));
    }
}
@VisibleForTesting
static Expr parse(String in, ExprMacroTable macroTable, boolean withFlatten) {
    ExprLexer lexer = new ExprLexer(new ANTLRInputStream(in));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ExprParser parser = new ExprParser(tokens);
    parser.setBuildParseTree(true);
    ParseTree parseTree = parser.expr();
    ParseTreeWalker walker = new ParseTreeWalker();
    ExprListenerImpl listener = new ExprListenerImpl(parseTree, macroTable);
    walker.walk(listener, parseTree);
    return withFlatten ? flatten(listener.getAST()) : listener.getAST();
}
/** {@inheritDoc} */
@Override
public XGModelComposition parse(byte[] mdl) {
    try (ByteArrayInputStream bais = new ByteArrayInputStream(mdl)) {
        CharStream cStream = CharStreams.fromStream(bais);
        XGBoostModelLexer lexer = new XGBoostModelLexer(cStream);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        XGBoostModelParser parser = new XGBoostModelParser(tokens);
        XGModelVisitor visitor = new XGModelVisitor();
        return visitor.visit(parser.xgModel());
    }
    catch (IOException e) {
        throw new RuntimeException(e);
    }
}
private static void smali0(DexFileVisitor dcv, CharStream is) throws IOException {
    SmaliLexer lexer = new SmaliLexer(is);
    CommonTokenStream ts = new CommonTokenStream(lexer);
    SmaliParser parser = new SmaliParser(ts);
    for (SmaliParser.SFileContext ctx : parser.sFiles().sFile()) {
        AntlrSmaliUtil.acceptFile(ctx, dcv);
    }
}
/**
 * Runs SwiftLexer on input file to generate token stream.
 *
 * @param input Lexer input
 * @return Token stream
 */
private Optional<CommonTokenStream> getTokenStream(File input) {
    try (FileInputStream inputStream = new FileInputStream(input)) {
        SwiftLexer lexer = new SwiftLexer(new ANTLRInputStream(inputStream));
        if (!configuration.debugFlagSet()) {
            lexer.removeErrorListeners();
            lexer.addErrorListener(new ErrorListener());
        }
        return Optional.of(new CommonTokenStream(lexer));
    } catch (IOException e) {
        handleIOException(e);
    } catch (CLIArgumentParserException e) {
        handleCLIException(e);
    }
    return Optional.empty();
}
private static List<SchemaChange> parseSQL(String currentDB, String sql) {
    ANTLRInputStream input = new ANTLRInputStream(sql);
    mysqlLexer lexer = new mysqlLexer(input);
    lexer.removeErrorListeners();
    TokenStream tokens = new CommonTokenStream(lexer);
    LOGGER.debug("SQL_PARSE <- \"{}\"", sql);
    mysqlParser parser = new mysqlParser(tokens);
    parser.removeErrorListeners();
    MysqlParserListener listener = new MysqlParserListener(currentDB, tokens);
    ParseTree tree = parser.parse();
    ParseTreeWalker.DEFAULT.walk(listener, tree);
    LOGGER.debug("SQL_PARSE -> {}", tree.toStringTree(parser));
    return listener.getSchemaChanges();
}
public Pipeline parsePipeline(String id, String source) {
    final ParseContext parseContext = new ParseContext(false);
    final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext);
    final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(source));
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);
    final RuleLangParser parser = new RuleLangParser(new CommonTokenStream(lexer));
    parser.setErrorHandler(new DefaultErrorStrategy());
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);
    final RuleLangParser.PipelineContext pipelineContext = parser.pipeline();
    WALKER.walk(new PipelineAstBuilder(parseContext), pipelineContext);
    if (parseContext.getErrors().isEmpty()) {
        final Pipeline pipeline = parseContext.pipelines.get(0);
        return pipeline.withId(id);
    }
    throw new ParseException(parseContext.getErrors());
}
public List<Pipeline> parsePipelines(String pipelines) throws ParseException {
    final ParseContext parseContext = new ParseContext(false);
    final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext);
    final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(pipelines));
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);
    final RuleLangParser parser = new RuleLangParser(new CommonTokenStream(lexer));
    parser.setErrorHandler(new DefaultErrorStrategy());
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);
    final RuleLangParser.PipelineDeclsContext pipelineDeclsContext = parser.pipelineDecls();
    WALKER.walk(new PipelineAstBuilder(parseContext), pipelineDeclsContext);
    if (parseContext.getErrors().isEmpty()) {
        return parseContext.pipelines;
    }
    throw new ParseException(parseContext.getErrors());
}
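// The SyntaxErrorListener used in the two Graylog methods above records errors in the
// ParseContext instead of throwing, so every problem can be reported together after the
// parse. A minimal sketch of such an accumulating listener; the class name and the
// plain-string error format are illustrative assumptions, not the original implementation.
import java.util.ArrayList;
import java.util.List;

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;

final class CollectingErrorListener extends BaseErrorListener {
    private final List<String> errors = new ArrayList<>();

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                            int line, int charPositionInLine, String msg, RecognitionException e) {
        // Record every error so the caller can inspect them all once parsing finishes.
        errors.add("line " + line + ":" + charPositionInLine + " " + msg);
    }

    List<String> getErrors() {
        return errors;
    }
}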
// Fragment: assumes an InfluxLineProtocolLexer (the generated companion of the parser
// below) built from the input; the variable name "line" is illustrative.
InfluxLineProtocolLexer lexer = new InfluxLineProtocolLexer(CharStreams.fromString(line));
TokenStream tokenStream = new CommonTokenStream(lexer);
InfluxLineProtocolParser parser = new InfluxLineProtocolParser(tokenStream);
public List<DocItem> collect(Path file) throws IOException {
    this.file = file;
    doc = new ArrayList<>();
    CharStream input = CharStreams.fromPath(file, StandardCharsets.UTF_8);
    FuzzyDocLexer lexer = new FuzzyDocLexer(input);
    ANTLRErrorListener error = errorListener(LoggerFactory.getLogger(DocCollector.class), file);
    lexer.removeErrorListeners();
    lexer.addErrorListener(error);
    TokenStream tokens = new CommonTokenStream(lexer);
    FuzzyDocParser parser = new FuzzyDocParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(error);
    ParseTreeWalker.DEFAULT.walk(this, parser.source());
    return doc;
}
private static JavadocParser.DocumentationContext parseJavadoc(Path file, String doc) {
    ANTLRErrorListener error = errorListener(LoggerFactory.getLogger(DocCollector.class), file);
    CharStream stream = CharStreams.fromString(doc == null ? "" : doc);
    JavadocLexer lexer = new JavadocLexer(stream);
    lexer.removeErrorListeners();
    lexer.addErrorListener(error);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    JavadocParser parser = new JavadocParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(error);
    return parser.documentation();
}
private static ParserRuleContext parseTypeCalculation(String calculation) {
    TypeCalculationLexer lexer = new TypeCalculationLexer(new CaseInsensitiveStream(new ANTLRInputStream(calculation)));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    TypeCalculationParser parser = new TypeCalculationParser(tokenStream);
    lexer.removeErrorListeners();
    lexer.addErrorListener(ERROR_LISTENER);
    parser.removeErrorListeners();
    parser.addErrorListener(ERROR_LISTENER);
    ParserRuleContext tree;
    try {
        // first, try parsing with potentially faster SLL mode
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        tree = parser.typeCalculation();
    }
    catch (ParseCancellationException ex) {
        // if we fail, parse with LL mode
        tokenStream.reset(); // rewind input stream
        parser.reset();
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        tree = parser.typeCalculation();
    }
    return tree;
}
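// The try/catch above is ANTLR's documented two-stage parsing strategy: attempt the faster
// SLL prediction mode first and fall back to full LL only when it fails. A minimal generic
// sketch of that pattern follows; the helper class and the use of BailErrorStrategy (which
// surfaces SLL failures as ParseCancellationException) are assumptions, not the original code.
import java.util.function.Function;

import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.misc.ParseCancellationException;

final class TwoStageParser {
    // Hypothetical helper: try fast SLL prediction first, retry with full LL on failure.
    static <P extends Parser, C extends ParserRuleContext> C parse(
            P parser, TokenStream tokens, Function<P, C> startRule) {
        parser.setErrorHandler(new BailErrorStrategy()); // bail instead of attempting recovery
        try {
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            return startRule.apply(parser);
        }
        catch (ParseCancellationException e) {
            tokens.seek(0);  // rewind the token stream
            parser.reset();  // clear state left over from the failed attempt
            // Full LL prediction is slower but exact; if it also fails,
            // the input genuinely contains a syntax error.
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            return startRule.apply(parser);
        }
    }
}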
/**
 * Parse TOML content into an ANTLR parse tree.
 *
 * @param stream char stream containing the TOML content
 * @param path path to the TOML file, used for error reporting
 * @return the resulting parse tree
 */
public static ParseTree parseTomlContent(CharStream stream, String path) {
    TomlLexer lexer = new TomlLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    TomlParser parser = new TomlParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(TomlErrorListener.getErrorListener(path));
    return parser.toml();
}
/**
 * Creates a parser for block comment content treated as a javadoc comment.
 *
 * @param blockComment block comment content.
 * @return configured javadoc parser
 */
private JavadocParser createJavadocParser(String blockComment) {
    final JavadocLexer lexer = new JavadocLexer(CharStreams.fromString(blockComment));
    final CommonTokenStream tokens = new CommonTokenStream(lexer);
    final JavadocParser parser = new JavadocParser(tokens);
    // remove default error listeners
    parser.removeErrorListeners();
    // add custom error listener that logs syntax errors
    parser.addErrorListener(errorListener);
    // JavadocParserErrorStrategy stops parsing on the first parse error encountered, unlike
    // the DefaultErrorStrategy used by ANTLR, which rather attempts error recovery.
    parser.setErrorHandler(new JavadocParserErrorStrategy());
    return parser;
}
/**
 * Initializes the logical expression lexer and parser, adds an error listener that converts
 * all syntax errors into {@link org.apache.drill.common.exceptions.ExpressionParsingException},
 * and parses the given expression into a logical expression instance.
 *
 * @param expr expression to be parsed
 * @return logical expression instance
 */
public static LogicalExpression parse(String expr) {
    ExprLexer lexer = new ExprLexer(CharStreams.fromString(expr));
    lexer.removeErrorListeners(); // need to remove since default listener will output warning
    lexer.addErrorListener(ErrorListener.INSTANCE);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ExprParser parser = new ExprParser(tokens);
    parser.removeErrorListeners(); // need to remove since default listener will output warning
    parser.addErrorListener(ErrorListener.INSTANCE);
    ExprParser.ParseContext parseContext = parser.parse();
    logger.trace("Tokens: [{}]. Parsing details: [{}].", tokens.getText(), parseContext.toInfoString(parser));
    return parseContext.e;
}
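// The ErrorListener.INSTANCE used above is Drill-specific. A typical fail-fast listener
// overrides syntaxError and throws instead of printing to the console; the class name and
// exception type in this sketch are illustrative assumptions.
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;

final class FailFastErrorListener extends BaseErrorListener {
    static final FailFastErrorListener INSTANCE = new FailFastErrorListener();

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                            int line, int charPositionInLine, String msg, RecognitionException e) {
        // Surface the first syntax error immediately rather than letting ANTLR recover.
        throw new IllegalStateException("line " + line + ":" + charPositionInLine + " " + msg, e);
    }
}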
private ParserRuleContext getParseTree(final String sql) {
    final SqlBaseLexer sqlBaseLexer = new SqlBaseLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
    final CommonTokenStream tokenStream = new CommonTokenStream(sqlBaseLexer);
    final SqlBaseParser sqlBaseParser = new SqlBaseParser(tokenStream);
    sqlBaseLexer.removeErrorListeners();
    sqlBaseLexer.addErrorListener(ERROR_LISTENER);
    sqlBaseParser.removeErrorListeners();
    sqlBaseParser.addErrorListener(ERROR_LISTENER);
    final Function<SqlBaseParser, ParserRuleContext> parseFunction = SqlBaseParser::statements;
    try {
        // first, try parsing with potentially faster SLL mode
        sqlBaseParser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        return parseFunction.apply(sqlBaseParser);
    } catch (final ParseCancellationException ex) {
        // if we fail, parse with LL mode
        tokenStream.seek(0); // rewind input stream
        sqlBaseParser.reset();
        sqlBaseParser.getInterpreter().setPredictionMode(PredictionMode.LL);
        return parseFunction.apply(sqlBaseParser);
    }
}
CommonTokenStream tokens = new CommonTokenStream(lexer);
CommonTokenStream tokenStream = new CommonTokenStream(lexer);
SqlBaseParser parser = new SqlBaseParser(tokenStream);
@Override
public void parse(String ddlContent, Tables databaseTables) {
    this.databaseTables = databaseTables;
    CodePointCharStream ddlContentCharStream = CharStreams.fromString(ddlContent);
    L lexer = createNewLexerInstance(new CaseChangingCharStream(ddlContentCharStream, isGrammarInUpperCase()));
    P parser = createNewParserInstance(new CommonTokenStream(lexer));
    dataTypeResolver = initializeDataTypeResolver();
    // remove the default console-printing error listener
    parser.removeErrorListener(ConsoleErrorListener.INSTANCE);
    ParsingErrorListener parsingErrorListener = new ParsingErrorListener(AbstractDdlParser::accumulateParsingFailure);
    parser.addErrorListener(parsingErrorListener);
    ParseTree parseTree = parseTree(parser);
    if (parsingErrorListener.getErrors().isEmpty()) {
        antlrDdlParserListener = createParseTreeWalkerListener();
        if (antlrDdlParserListener != null) {
            ParseTreeWalker.DEFAULT.walk(antlrDdlParserListener, parseTree);
            if (throwErrorsFromTreeWalk && !antlrDdlParserListener.getErrors().isEmpty()) {
                throwParsingException(antlrDdlParserListener.getErrors());
            }
        }
    } else {
        throwParsingException(parsingErrorListener.getErrors());
    }
}
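// Several snippets above (CaseInsensitiveStream, CaseChangingCharStream) wrap the input so
// that keyword matching ignores case while getText() still returns the original characters.
// A minimal sketch of that wrapper, assuming a grammar written with upper-case keywords;
// the class name is illustrative.
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.misc.Interval;

final class UpperCaseCharStream implements CharStream {
    private final CharStream delegate;

    UpperCaseCharStream(CharStream delegate) {
        this.delegate = delegate;
    }

    @Override
    public int LA(int i) {
        // Only the lookahead the lexer matches against is upper-cased;
        // the underlying text is left untouched.
        int c = delegate.LA(i);
        return c <= 0 ? c : Character.toUpperCase(c);
    }

    @Override public String getText(Interval interval) { return delegate.getText(interval); }
    @Override public void consume() { delegate.consume(); }
    @Override public int mark() { return delegate.mark(); }
    @Override public void release(int marker) { delegate.release(marker); }
    @Override public int index() { return delegate.index(); }
    @Override public void seek(int index) { delegate.seek(index); }
    @Override public int size() { return delegate.size(); }
    @Override public String getSourceName() { return delegate.getSourceName(); }
}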