/**
 * Builds a {@link DelimiterLexer} over the given SQL text using a
 * case-insensitive character stream.
 *
 * @param sql         the SQL text to tokenize; must not be null
 * @param terminators statement terminator strings recognized by the lexer
 * @return a token source over {@code sql}
 */
private static TokenSource getLexer(String sql, Set<String> terminators) {
    requireNonNull(sql, "sql is null");
    return new DelimiterLexer(new CaseInsensitiveStream(new ANTLRInputStream(sql)), terminators);
}
/**
 * Parses smali source supplied as an input stream (decoded as UTF-8) and
 * feeds the result to the given visitor.
 *
 * @param name logical name attached to the ANTLR stream (used in diagnostics)
 * @param in   raw smali bytes; closed when parsing completes
 * @param dcv  visitor receiving the parsed dex file events
 * @throws IOException if the stream cannot be read
 */
public static void smaliFile(String name, InputStream in, DexFileVisitor dcv) throws IOException {
    try (InputStreamReader reader = new InputStreamReader(in, StandardCharsets.UTF_8)) {
        ANTLRInputStream stream = new ANTLRInputStream(reader);
        stream.name = name;
        smali0(dcv, stream);
    }
}
public static void smaliFile(String fileName, char[] data, DexFileVisitor dcv) throws IOException { // System.err.println("parsing " + f.getAbsoluteFile()); ANTLRInputStream is = new ANTLRInputStream(data, data.length); is.name = fileName; smali0(dcv, is); }
/**
 * Parses the smali file at {@code path}, decoding it as UTF-8, and feeds the
 * result to the given visitor.
 *
 * @param path file to read; its string form names the ANTLR stream
 * @param dcv  visitor receiving the parsed dex file events
 * @throws IOException if the file cannot be opened or read
 */
public static void smaliFile(Path path, DexFileVisitor dcv) throws IOException {
    try (Reader reader = Files.newBufferedReader(path, StandardCharsets.UTF_8)) {
        ANTLRInputStream stream = new ANTLRInputStream(reader);
        stream.name = path.toString();
        smali0(dcv, stream);
    }
}
/**
 * Parses smali source held in a string and feeds the result to the given
 * visitor.
 *
 * @param name logical name attached to the ANTLR stream (used in diagnostics)
 * @param buff the complete smali source text
 * @param dcv  visitor receiving the parsed dex file events
 * @throws IOException propagated from the parse helper
 */
public static void smaliFile(String name, String buff, DexFileVisitor dcv) throws IOException {
    ANTLRInputStream stream = new ANTLRInputStream(buff);
    stream.name = name;
    smali0(dcv, stream);
}
/**
 * Parses {@code text} as an expression and renders it via {@code MyParseRules}.
 * Default console error listeners are replaced so that any lexical or
 * syntactic error surfaces as an exception instead of being printed.
 *
 * @param text the expression source to parse
 * @return the visitor's rendering of the parse tree
 * @throws ParseCancellationException on any lexical or syntactic error
 */
public static String parse(String text) throws ParseCancellationException {
    MyLexer lexer = new MyLexer(new ANTLRInputStream(text));
    lexer.removeErrorListeners();
    lexer.addErrorListener(ThrowingErrorListener.INSTANCE);

    MyParser parser = new MyParser(new CommonTokenStream(lexer));
    parser.removeErrorListeners();
    parser.addErrorListener(ThrowingErrorListener.INSTANCE);

    return new MyParseRules().visit(parser.expr());
}
import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.tree.*; public class HelloRunner { public static void main( String[] args) throws Exception { ANTLRInputStream input = new ANTLRInputStream( System.in); HelloLexer lexer = new HelloLexer(input); CommonTokenStream tokens = new CommonTokenStream(lexer); HelloParser parser = new HelloParser(tokens); ParseTree tree = parser.r(); // begin parsing at rule 'r' System.out.println(tree.toStringTree(parser)); // print LISP-style tree } }
/**
 * Executes HPL/SQL statements from an include file's content.
 *
 * @param content the include file text to parse and execute
 * @throws Exception if parsing or execution fails
 */
void include(String content) throws Exception {
    // Use the charset constant rather than the literal "UTF-8": it avoids a
    // magic string and cannot raise UnsupportedEncodingException.
    InputStream input =
            new ByteArrayInputStream(content.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    HplsqlLexer lexer = new HplsqlLexer(new ANTLRInputStream(input));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    HplsqlParser parser = new HplsqlParser(tokens);
    ParseTree tree = parser.program();
    visit(tree);
}
/**
 * Parses an expression string into an {@link Expr} AST.
 *
 * @param in          expression text
 * @param macroTable  macros available during parsing
 * @param withFlatten whether to flatten the resulting AST
 * @return the parsed (and optionally flattened) expression
 */
@VisibleForTesting
static Expr parse(String in, ExprMacroTable macroTable, boolean withFlatten) {
    ExprLexer lexer = new ExprLexer(new ANTLRInputStream(in));
    ExprParser parser = new ExprParser(new CommonTokenStream(lexer));
    parser.setBuildParseTree(true);

    ParseTree parseTree = parser.expr();
    ExprListenerImpl listener = new ExprListenerImpl(parseTree, macroTable);
    new ParseTreeWalker().walk(listener, parseTree);

    Expr ast = listener.getAST();
    return withFlatten ? flatten(ast) : ast;
}
/**
 * Runs SwiftLexer on an input file to generate a token stream.
 *
 * @param input lexer input file
 * @return the token stream, or empty if reading or CLI-argument handling failed
 */
private Optional<CommonTokenStream> getTokenStream(File input) {
    try (FileInputStream inputStream = new FileInputStream(input)) {
        SwiftLexer lexer = new SwiftLexer(new ANTLRInputStream(inputStream));
        if (!configuration.debugFlagSet()) {
            // Outside debug mode, swap the default console listener for ours.
            lexer.removeErrorListeners();
            lexer.addErrorListener(new ErrorListener());
        }
        return Optional.of(new CommonTokenStream(lexer));
    } catch (IOException e) {
        handleIOException(e);
        return Optional.empty();
    } catch (CLIArgumentParserException e) {
        handleCLIException(e);
        return Optional.empty();
    }
}
private static ParserRuleContext parseTypeCalculation(String calculation) { TypeCalculationLexer lexer = new TypeCalculationLexer(new CaseInsensitiveStream(new ANTLRInputStream(calculation))); CommonTokenStream tokenStream = new CommonTokenStream(lexer); TypeCalculationParser parser = new TypeCalculationParser(tokenStream); lexer.removeErrorListeners(); lexer.addErrorListener(ERROR_LISTENER); parser.removeErrorListeners(); parser.addErrorListener(ERROR_LISTENER); ParserRuleContext tree; try { // first, try parsing with potentially faster SLL mode parser.getInterpreter().setPredictionMode(PredictionMode.SLL); tree = parser.typeCalculation(); } catch (ParseCancellationException ex) { // if we fail, parse with LL mode tokenStream.reset(); // rewind input stream parser.reset(); parser.getInterpreter().setPredictionMode(PredictionMode.LL); tree = parser.typeCalculation(); } return tree; }
/**
 * Parses a SQL statement and extracts the schema changes it implies.
 *
 * @param currentDB database that unqualified table names resolve against
 * @param sql       the SQL text to parse
 * @return schema changes discovered by the parse listener
 */
private static List<SchemaChange> parseSQL(String currentDB, String sql) {
    ANTLRInputStream input = new ANTLRInputStream(sql);
    mysqlLexer lexer = new mysqlLexer(input);
    lexer.removeErrorListeners();
    TokenStream tokens = new CommonTokenStream(lexer);
    // Guard debug output so the message string is not built when debug is off.
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("SQL_PARSE <- \"" + sql + "\"");
    }
    mysqlParser parser = new mysqlParser(tokens);
    parser.removeErrorListeners();
    MysqlParserListener listener = new MysqlParserListener(currentDB, tokens);
    ParseTree tree = parser.parse();
    ParseTreeWalker.DEFAULT.walk(listener, tree);
    if (LOGGER.isDebugEnabled()) {
        // toStringTree is expensive; only render it when debug logging is on.
        LOGGER.debug("SQL_PARSE -> " + tree.toStringTree(parser));
    }
    return listener.getSchemaChanges();
}
/**
 * Parses a single pipeline definition from {@code source} and assigns it the
 * given id.
 *
 * @param id     identifier attached to the resulting pipeline
 * @param source pipeline rule-language source text
 * @return the parsed pipeline with {@code id} applied
 * @throws ParseException if any syntax or semantic errors were collected
 */
public Pipeline parsePipeline(String id, String source) {
    final ParseContext parseContext = new ParseContext(false);
    final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext);

    final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(source));
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);

    final RuleLangParser parser = new RuleLangParser(new CommonTokenStream(lexer));
    parser.setErrorHandler(new DefaultErrorStrategy());
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);

    final RuleLangParser.PipelineContext pipelineContext = parser.pipeline();
    WALKER.walk(new PipelineAstBuilder(parseContext), pipelineContext);

    if (!parseContext.getErrors().isEmpty()) {
        throw new ParseException(parseContext.getErrors());
    }
    return parseContext.pipelines.get(0).withId(id);
}
/**
 * Parses a set of pipeline declarations from {@code pipelines}.
 *
 * @param pipelines pipeline rule-language source containing declarations
 * @return all pipelines built by the AST walker
 * @throws ParseException if any syntax or semantic errors were collected
 */
public List<Pipeline> parsePipelines(String pipelines) throws ParseException {
    final ParseContext parseContext = new ParseContext(false);
    final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext);

    final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(pipelines));
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);

    final RuleLangParser parser = new RuleLangParser(new CommonTokenStream(lexer));
    parser.setErrorHandler(new DefaultErrorStrategy());
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);

    final RuleLangParser.PipelineDeclsContext pipelineDeclsContext = parser.pipelineDecls();
    WALKER.walk(new PipelineAstBuilder(parseContext), pipelineDeclsContext);

    if (!parseContext.getErrors().isEmpty()) {
        throw new ParseException(parseContext.getErrors());
    }
    return parseContext.pipelines;
}
public Map<String, Object> parseToMap(String input) CharStream charStream = new ANTLRInputStream(input); InfluxLineProtocolLexer lexer = new InfluxLineProtocolLexer(charStream); TokenStream tokenStream = new CommonTokenStream(lexer);
public ScriptDefinition assemble(InputStream in) throws IOException rs2asmLexer lexer = new rs2asmLexer(new ANTLRInputStream(in));
private AutocompleteResult find(String txt, int cur, ClassLoader l, Imports imports) { registry = AutocompleteRegistryFactory.createRegistry(cps); ClassUtils cu = createClassUtils(l); setup(cu, registry); AutocompleteRegistryFactory.addDefaultImports(cu, registry, imports.toListOfStrings(), cps); Lexer lexer = new JavaLexer(new ANTLRInputStream(txt)); CommonTokenStream tokens = new CommonTokenStream(lexer); // Create a parser that reads from the scanner JavaParser parser = new JavaParser(tokens); parser.removeErrorListeners(); // start parsing at the compilationUnit rule ParserRuleContext t = parser.compilationUnit(); ParseTreeWalker walker = new ParseTreeWalker(); List<AutocompleteCandidate> q = new ArrayList<AutocompleteCandidate>(); JavaImportDeclarationCompletion extractor = new JavaImportDeclarationCompletion(txt, cur, registry, cps, cu); JavaNameBuilder extractor2 = new JavaNameBuilder(registry, cu); JavaNodeCompletion extractor3 = new JavaNodeCompletion(txt, cur, registry, cu); walker.walk(extractor, t); if (extractor.getQuery() != null) q.addAll(extractor.getQuery()); walker.walk(extractor2, t); walker.walk(extractor3, t); if (extractor3.getQuery() != null) q.addAll(extractor3.getQuery()); List<String> ret = registry.searchCandidates(q); if (!ret.isEmpty()) { return new AutocompleteResult(ret, getStartIndex(extractor, extractor2, extractor3)); } return findAutocompleteResult(txt, cur, cu); }
final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext); final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(rule)); lexer.removeErrorListeners(); lexer.addErrorListener(errorListener);
AutocompleteRegistryFactory.moreSetup(cu); Lexer lexer = new GroovyLexer(new ANTLRInputStream(txt)); lexer.removeErrorListeners(); CommonTokenStream tokens = new CommonTokenStream(lexer);
@Override public TransformExpressionTree compileToExpressionTree(String expression) { CharStream charStream = new ANTLRInputStream(expression); PQL2Lexer lexer = new PQL2Lexer(charStream); lexer.setTokenFactory(new CommonTokenFactory(true)); TokenStream tokenStream = new UnbufferedTokenStream<CommonToken>(lexer); PQL2Parser parser = new PQL2Parser(tokenStream); parser.setErrorHandler(new BailErrorStrategy()); // Parse ParseTree parseTree = parser.expression(); ParseTreeWalker walker = new ParseTreeWalker(); Pql2AstListener listener = new Pql2AstListener(expression); walker.walk(listener, parseTree); return new TransformExpressionTree(listener.getRootNode()); }