/** Count EOF just once. */
public int getNumberOfOnChannelTokens() {
    int n = 0;
    fill();
    for (int i = 0; i < tokens.size(); i++) {
        Token t = tokens.get(i);
        if (t.getChannel() == channel) n++;
        if (t.getType() == Token.EOF) break;
    }
    return n;
}
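// A minimal standalone variant of the same count (a sketch, assuming you do
// not want to touch the stream class: `countOnChannel` is a hypothetical
// helper, not part of the ANTLR runtime). It mirrors the loop above from the
// outside, using only public API: fill(), getTokens(), Token.DEFAULT_CHANNEL.
static int countOnChannel(CommonTokenStream tokens) {
    tokens.fill(); // buffer every token up to and including EOF
    int n = 0;
    for (Token t : tokens.getTokens()) {
        if (t.getChannel() == Token.DEFAULT_CHANNEL) n++;
        if (t.getType() == Token.EOF) break; // count EOF just once
    }
    return n;
}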
CommonTokenStream tokenStream = new CommonTokenStream(lexer);
try {
    tokenStream.fill();
    // ... (the rest of this example was truncated in the original)
} catch (RuntimeException e) {
    // hypothetical handler added so the truncated try block compiles
}
public static List<CommonToken> copy(CommonTokenStream tokens) {
    List<CommonToken> copy = new ArrayList<>();
    tokens.fill();
    for (Token t : tokens.getTokens()) {
        copy.add(new CommonToken(t));
    }
    return copy;
}
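// Usage sketch for copy() above (MyLexer is a hypothetical generated lexer;
// any org.antlr.v4.runtime.Lexer works). Because fill() has already buffered
// the whole stream, the snapshot is complete and stays independent of later
// mutations to the stream, e.g. by a TokenStreamRewriter.
CommonTokenStream stream = new CommonTokenStream(new MyLexer(CharStreams.fromString("x = 1;")));
List<CommonToken> snapshot = copy(stream);
System.out.println(snapshot.size() + " tokens copied, EOF included");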
public static void printLex(InputStream in) throws Exception {
    Java7Lexer lex = new Java7Lexer(new ANTLRInputStream(in));
    CommonTokenStream tokens = new CommonTokenStream(lex);
    tokens.fill();
    for (Token token : tokens.getTokens()) {
        System.out.println(token.getType() + " " + token.getText());
    }
}
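// Hypothetical driver for printLex() above; "Example.java" is a placeholder
// path, not from the original snippet.
public static void main(String[] args) throws Exception {
    try (InputStream in = new FileInputStream("Example.java")) {
        printLex(in);
    }
}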
public static CommonTokenStream tokenizeANTLRGrammar(String text) {
    ANTLRInputStream input = new ANTLRInputStream(text);
    ANTLRv4Lexer lexer = new ANTLRv4Lexer(input);
    CommonTokenStream tokens = new TokenStreamSubset(lexer);
    tokens.fill();
    return tokens;
}
protected Queue<Token> getQueue(String input) {
    DSWrapperLexer lexer = dsWrapperLexerThreadLocal.get();
    lexer.setInputStream(new ANTLRInputStream(input));
    CommonTokenStream commonTokenStream = new CommonTokenStream(lexer);
    commonTokenStream.fill();
    List<Token> tokens = commonTokenStream.getTokens();
    if (tokens.isEmpty()) {
        throw new RuntimeException("Could not parse: " + input);
    }
    // This is sensitive to the grammar in DSQuery.g4. We could use the visitor
    // instead, but that would mean building the AST, whereas the lexer alone is
    // enough here. In any case, we don't expect the graphite format to change
    // anytime soon.
    // Filter out all EOF tokens first.
    Queue<Token> queue = tokens.stream()
        .filter(t -> t.getType() != Lexer.EOF)
        .collect(Collectors.toCollection(ArrayDeque::new));
    return queue;
}
/**
 * Get the tokens for a string -- only for lexer grammars.
 * @param toParse string to be tokenized
 * @return a list of tokens
 * @throws IllegalWorkflowException in case no lexerName is provided
 */
public List<Token> lex(String toParse) throws IllegalWorkflowException {
    if (lexerName.isEmpty())
        throw new IllegalWorkflowException("lexerName must not be empty "
            + "-- did you already run compile?");
    InmemantlrErrorListener el = new InmemantlrErrorListener();
    listener.reset();
    //CodePointCharStream input = CharStreams.fromString(toParse);
    CharStream input = provider.getCharStream(toParse);
    Objects.requireNonNull(input, "char stream must not be null");
    LOGGER.debug("load lexer {}", lexerName);
    Lexer lex = sc.instanciateLexer(input, lexerName, useCached);
    lex.addErrorListener(el);
    Objects.requireNonNull(lex, "lex must not be null");
    CommonTokenStream tokens = new CommonTokenStream(lex);
    tokens.fill();
    return tokens.getTokens();
}
public ORExpression parse() throws EagleQueryParseException {
    try {
        EagleFilterLexer lexer = new EagleFilterLexer(new ANTLRInputStream(query));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.fill();
        EagleFilterParser p = new EagleFilterParser(tokens);
        p.setErrorHandler(new EagleANTLRErrorStrategy());
        p.setBuildParseTree(true);
        EagleQueryFilterListenerImpl listener = new EagleQueryFilterListenerImpl();
        p.addParseListener(listener);
        EagleFilterParser.FilterContext fc = p.filter();
        if (fc.exception != null) {
            LOG.error("Can not successfully parse the query:" + query, fc.exception);
            throw fc.exception;
        }
        return listener.result();
    } catch (Exception ex) {
        LOG.error("Can not successfully parse the query:", ex);
        throw new EagleQueryParseException("can not successfully parse the query:" + query);
    }
}
CommonTokenStream tokens = new CommonTokenStream(lexer);
tokens.fill();
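// End-to-end sketch of the fill-then-iterate pattern (MyLexer is again a
// hypothetical generated lexer). Without fill(), CommonTokenStream fetches
// tokens lazily on demand; fill() forces the whole input to be tokenized so
// that getTokens() returns the complete list, EOF included.
CharStream input = CharStreams.fromString("a + b");
MyLexer lexer = new MyLexer(input);
CommonTokenStream tokens = new CommonTokenStream(lexer);
tokens.fill();
for (Token t : tokens.getTokens()) {
    System.out.println(lexer.getVocabulary().getSymbolicName(t.getType()) + " '" + t.getText() + "'");
}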
@Override
public Tree parse(String slangCode) {
    SLangLexer lexer = new SLangLexer(CharStreams.fromString(slangCode));
    List<Comment> comments = new ArrayList<>();
    CommonTokenStream antlrTokens = new CommonTokenStream(lexer);
    antlrTokens.fill();
    List<org.sonarsource.slang.api.Token> tokens = new ArrayList<>();
    for (int index = 0; index < antlrTokens.size(); index++) {
        Token token = antlrTokens.get(index);
        TextRange textRange = getSlangTextRange(token);
        if (token.getChannel() == 1) { // channel 1 carries comments in this grammar
            comments.add(comment(token, textRange));
        } else {
            Type type = Type.OTHER;
            if (KEYWORD_TOKEN_TYPES.contains(token.getType())) {
                type = Type.KEYWORD;
            } else if (token.getType() == SLangParser.StringLiteral) {
                type = Type.STRING_LITERAL;
            }
            tokens.add(new TokenImpl(textRange, token.getText(), type));
        }
    }
    SLangParser parser = new SLangParser(antlrTokens);
    parser.setErrorHandler(new ErrorStrategy());
    SLangParseTreeVisitor slangVisitor = new SLangParseTreeVisitor(comments, tokens);
    return slangVisitor.visit(parser.slangFile());
}