Refine search
/**
 * Parses the given expression text into an {@link Expr} AST, optionally
 * flattening the result.
 *
 * @param in          expression source text
 * @param macroTable  macro definitions made available to the AST listener
 * @param withFlatten if true, the parsed AST is passed through {@code flatten}
 * @return the parsed (and possibly flattened) expression tree
 */
@VisibleForTesting
static Expr parse(String in, ExprMacroTable macroTable, boolean withFlatten)
{
  ExprLexer exprLexer = new ExprLexer(new ANTLRInputStream(in));
  ExprParser exprParser = new ExprParser(new CommonTokenStream(exprLexer));
  exprParser.setBuildParseTree(true);

  ParseTree tree = exprParser.expr();
  ExprListenerImpl astBuilder = new ExprListenerImpl(tree, macroTable);
  new ParseTreeWalker().walk(astBuilder, tree);

  if (withFlatten) {
    return flatten(astBuilder.getAST());
  }
  return astBuilder.getAST();
}
/**
 * Walks the provided parse tree once per listener, in list order.
 *
 * @param listeners parse tree listeners to invoke
 * @param tree      top-level parse tree to walk
 */
private void walkParseTree(List<SwiftBaseListener> listeners, TopLevelContext tree) {
  ParseTreeWalker treeWalker = new ParseTreeWalker();
  for (SwiftBaseListener listener : listeners) {
    treeWalker.walk(listener, tree);
  }
}
@Override public TransformExpressionTree compileToExpressionTree(String expression) { CharStream charStream = new ANTLRInputStream(expression); PQL2Lexer lexer = new PQL2Lexer(charStream); lexer.setTokenFactory(new CommonTokenFactory(true)); TokenStream tokenStream = new UnbufferedTokenStream<CommonToken>(lexer); PQL2Parser parser = new PQL2Parser(tokenStream); parser.setErrorHandler(new BailErrorStrategy()); // Parse ParseTree parseTree = parser.expression(); ParseTreeWalker walker = new ParseTreeWalker(); Pql2AstListener listener = new Pql2AstListener(expression); walker.walk(listener, parseTree); return new TransformExpressionTree(listener.getRootNode()); }
/**
 * Parses a raw SQL statement and extracts the schema changes it describes.
 *
 * @param currentDB database against which unqualified table names resolve
 * @param sql       raw SQL text to parse
 * @return the schema changes collected by the listener
 */
private static List<SchemaChange> parseSQL(String currentDB, String sql) {
  mysqlLexer lexer = new mysqlLexer(new ANTLRInputStream(sql));
  lexer.removeErrorListeners();

  TokenStream tokens = new CommonTokenStream(lexer);
  LOGGER.debug("SQL_PARSE <- \"" + sql + "\"");

  mysqlParser parser = new mysqlParser(tokens);
  parser.removeErrorListeners();

  MysqlParserListener listener = new MysqlParserListener(currentDB, tokens);
  ParseTree tree = parser.parse();
  ParseTreeWalker.DEFAULT.walk(listener, tree);
  LOGGER.debug("SQL_PARSE -> " + tree.toStringTree(parser));

  return listener.getSchemaChanges();
}
/**
 * Parses the raw EQL query text and applies the resulting expressions to the
 * supplied query, simplifying the expression tree afterwards.
 *
 * @param raw   raw EQL source text
 * @param query query that receives the parsed expressions
 */
public static <T> void parse(String raw, SpiQuery<T> query) {
  EQLLexer eqlLexer = new EQLLexer(CharStreams.fromString(raw));
  EQLParser eqlParser = new EQLParser(new CommonTokenStream(eqlLexer));
  eqlParser.addErrorListener(errorListener);

  EQLParser.Select_statementContext selectStatement = eqlParser.select_statement();
  EqlAdapter<T> adapter = new EqlAdapter<>(query);
  new ParseTreeWalker().walk(adapter, selectStatement);

  query.simplifyExpressions();
}
CommonTokenStream tokens = new CommonTokenStream(lexer); StatusCodeParser parser = new StatusCodeParser(tokens); Map<Integer, String> statusCodes = new LinkedHashMap<>(); ParseTreeWalker.DEFAULT.walk(listener, parser.text()); return statusCodes;
/**
 * Creates a listener wired to an initially empty input stream; the parse
 * methods re-point the lexer at real input before each walk.
 */
public FormatListener() {
  lexer = new FeatureResultsLexer(new ANTLRInputStream(""));
  parser = new FeatureResultsParser(new CommonTokenStream(lexer));
  walker = new ParseTreeWalker();
  parameterVerifier = new ParameterVerifier();
  resultsVerifier = new ResultsVerifier();
}
/**
 * Parses {@code value} as a parameter and reports whether it verified cleanly.
 *
 * @param value raw parameter text
 * @return true when the parameter verifier accepted the input
 */
public boolean parseParameter(String value) {
  resetListeners(parameterVerifier);
  lexer.setInputStream(new ANTLRInputStream(value));

  ParseTree valueTree = parser.value();
  walker.walk(parameterVerifier, valueTree);
  return parameterVerifier.getOkAndReset();
}
/**
 * Collects the description lines of a Javadoc documentation context into a
 * single newline-separated string.
 *
 * @param ctx documentation context to walk
 * @return concatenated description lines, each followed by a newline
 */
private static String cleanJavadoc(JavadocParser.DocumentationContext ctx) {
  final StringBuilder cleaned = new StringBuilder();
  JavadocParserBaseListener descriptionCollector = new JavadocParserBaseListener() {
    @Override
    public void enterDescriptionLine(final JavadocParser.DescriptionLineContext line) {
      cleaned.append(line.getText()).append("\n");
    }
  };
  ParseTreeWalker.DEFAULT.walk(descriptionCollector, ctx);
  return cleaned.toString();
}
/**
 * Parses a Gherkin feature by walking the parse tree with a feature-building
 * listener.
 *
 * @param inputStream ANTLR input holding the feature source
 * @return the feature assembled by the walker listener
 */
static Feature parseWithWalker(ANTLRInputStream inputStream) {
  GherkinParser.FeatureContext featureContext = newParser(inputStream).feature();
  FeatureWalker featureWalker = new FeatureWalker(inputStream.getSourceName());
  new ParseTreeWalker().walk(featureWalker, featureContext);
  return featureWalker.getFeature();
}
/**
 * Performs a depth-first traversal of {@code t}, firing listener callbacks:
 * error and terminal nodes are visited directly; rule nodes get an enter
 * callback, a recursive walk of each child in order, then an exit callback.
 *
 * @param listener callbacks to fire during the walk
 * @param t        root of the (sub)tree to traverse
 */
public void walk(ParseTreeListener listener, ParseTree t) {
  if (t instanceof ErrorNode) {
    listener.visitErrorNode((ErrorNode) t);
  } else if (t instanceof TerminalNode) {
    listener.visitTerminal((TerminalNode) t);
  } else {
    RuleNode ruleNode = (RuleNode) t;
    enterRule(listener, ruleNode);
    int childCount = ruleNode.getChildCount();
    for (int child = 0; child < childCount; child++) {
      walk(listener, ruleNode.getChild(child));
    }
    exitRule(listener, ruleNode);
  }
}
/**
 * Builds a filter expression: remembers the raw text, parses the expression
 * up front, and keeps a walker for later traversal.
 */
private FilterExpression(String expressionString, FilterParser parser) {
  this.expressionString = expressionString;
  this.expression = parser.exp();
  this.walker = new ParseTreeWalker();
}
/**
 * Parses pipeline rule-language source into a {@link Pipeline} carrying the
 * given id.
 *
 * @param id     identifier assigned to the resulting pipeline
 * @param source pipeline source text
 * @return the parsed pipeline with {@code id} applied
 * @throws ParseException if the lexer, parser, or AST builder reported errors
 */
public Pipeline parsePipeline(String id, String source) {
  final ParseContext parseContext = new ParseContext(false);
  final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext);

  final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(source));
  lexer.removeErrorListeners();
  lexer.addErrorListener(errorListener);

  final RuleLangParser parser = new RuleLangParser(new CommonTokenStream(lexer));
  parser.setErrorHandler(new DefaultErrorStrategy());
  parser.removeErrorListeners();
  parser.addErrorListener(errorListener);

  final RuleLangParser.PipelineContext pipelineContext = parser.pipeline();
  WALKER.walk(new PipelineAstBuilder(parseContext), pipelineContext);

  // Fail fast on any accumulated syntax or AST-building errors.
  if (!parseContext.getErrors().isEmpty()) {
    throw new ParseException(parseContext.getErrors());
  }
  return parseContext.pipelines.get(0).withId(id);
}
/**
 * Parses the CREATE TABLE statements in {@code sqlFile} and stores the
 * extracted table beans on this instance.
 *
 * @param sqlFile      SQL file containing the table definitions
 * @param globalConfig configuration handed to the extracting listener
 * @throws IOException if the file cannot be read
 */
private void parseBean(File sqlFile, GlobalConfig globalConfig) throws IOException {
  // NOTE(review): ANTLRFileStream is deprecated in newer ANTLR 4 releases in
  // favor of CharStreams.fromPath — consider migrating when upgrading.
  CharStream input = new ANTLRFileStream(sqlFile.getAbsolutePath());
  CreateTableLexer tableLexer = new CreateTableLexer(input);
  CreateTableParser tableParser = new CreateTableParser(new CommonTokenStream(tableLexer));

  CreateTableListenerImpl extractor = new CreateTableListenerImpl(globalConfig);
  new ParseTreeWalker().walk(extractor, tableParser.sql());
  beans = extractor.getTables();
}
/**
 * Finds the identifiers declared by {@code decl} within the given top-level
 * context.
 *
 * @param ctx  top-level parse context to search
 * @param decl declaration whose names are being looked up
 * @return the identifier contexts collected by the listener
 */
private static List<IdentifierContext> getDeclarationNames(TopLevelContext ctx, Declaration decl) {
  DeclarationListener declarationListener = new DeclarationListener(decl);
  new ParseTreeWalker().walk(declarationListener, ctx);
  return declarationListener.getDeclarations();
}
/**
 * Collects documentation items from the given file by lexing it with the
 * fuzzy-doc grammar and walking the parse tree with this collector acting as
 * the listener.
 *
 * @param file source file to scan (read as UTF-8)
 * @return the documentation items gathered during the walk
 * @throws IOException if the file cannot be read
 */
public List<DocItem> collect(Path file) throws IOException {
  this.file = file;
  doc = new ArrayList<>();

  ANTLRErrorListener error = errorListener(LoggerFactory.getLogger(DocCollector.class), file);

  FuzzyDocLexer lexer = new FuzzyDocLexer(CharStreams.fromPath(file, StandardCharsets.UTF_8));
  lexer.removeErrorListeners();
  lexer.addErrorListener(error);

  FuzzyDocParser parser = new FuzzyDocParser(new CommonTokenStream(lexer));
  parser.removeErrorListeners();
  parser.addErrorListener(error);

  ParseTreeWalker.DEFAULT.walk(this, parser.source());
  return doc;
}
try { CharStream charStream = new ANTLRInputStream(expression); PQL2Lexer lexer = new PQL2Lexer(charStream); lexer.setTokenFactory(new CommonTokenFactory(true)); ParseTreeWalker walker = new ParseTreeWalker(); Pql2AstListener listener = new Pql2AstListener(expression); walker.walk(listener, parseTree);
/**
 * Parses {@code value} as a results expression and reports whether it
 * verified cleanly.
 *
 * @param value raw results text
 * @return true when the results verifier accepted the input
 */
public boolean parseResults(String value) {
  resetListeners(resultsVerifier);
  lexer.setInputStream(new ANTLRInputStream(value));

  ParseTree valueTree = parser.value();
  walker.walk(resultsVerifier, valueTree);
  return resultsVerifier.getOkAndReset();
}
ParseTreeWalker.DEFAULT.walk(listener, parseJavadoc(file, doc)); return new DocItem(method, pattern, summary, text.toString(), params, returns.toString(), statusCodes(returns.toString()));