/**
 * Builds an {@link ExpressionPosition} for the given token, anchored on the
 * full expression text held by this parser.
 *
 * @param token the token whose index marks the position
 * @return a position combining {@code fullExpression} and the token's index
 */
public ExpressionPosition pos(Token token) {
    int index = token.getTokenIndex();
    return new ExpressionPosition(fullExpression, index);
}
/**
 * Checks whether any hidden-channel token to the left of the token at the
 * given lookahead offset contains a line terminator.
 *
 * @param tokenOffset lookahead offset (as accepted by {@code LT}) of the
 *                    token whose left-hand hidden tokens are inspected
 * @return {@code true} when no newline occurs in those hidden tokens (or
 *         there are none at all)
 */
private boolean noTerminatorBetween(int tokenOffset) {
    BufferedTokenStream stream = (BufferedTokenStream) _input;
    int anchorIndex = stream.LT(tokenOffset).getTokenIndex();
    List<Token> hidden = stream.getHiddenTokensToLeft(anchorIndex);
    if (hidden == null) {
        return true;
    }
    for (Token hiddenToken : hidden) {
        if (hiddenToken.getText().contains("\n")) {
            return false;
        }
    }
    return true;
}
/**
 * Collects the comment tokens that appear immediately before the given rule
 * context on the comments channel.
 *
 * @param ctx the parsed rule whose leading comments are requested
 * @return the comments to the left of the context's start token, or an empty
 *         list when there is no start token or no hidden comment tokens
 */
protected List<Comment> getComments(ParserRuleContext ctx) {
    Token start = ctx.getStart();
    if (start == null) {
        return Collections.emptyList();
    }
    List<Token> channelTokens =
            tokens.getHiddenTokensToLeft(start.getTokenIndex(), CHANNEL_COMMENTS);
    if (channelTokens == null) {
        return Collections.emptyList();
    }
    return getCommentOnChannel(channelTokens);
}
// NOTE(review): fragment — relies on analyzer/currentState/currentToken/context
// being set up by the enclosing (not visible) method. Asks the analyzer for
// candidates at the current token's index; presumably keys are token types and
// values their textual forms — confirm against analyzer.process's contract.
Multimap<Integer, String> candidates = analyzer.process(currentState, currentToken.getTokenIndex(), context);
/**
 * Captures the hidden "ignored characters" tokens surrounding the given rule
 * context and records them on the node builder.
 *
 * @param ctx         the parsed rule whose surrounding ignored chars are collected
 * @param nodeBuilder the builder that receives the resulting {@link IgnoredChars}
 */
private void addIgnoredChars(ParserRuleContext ctx, NodeBuilder nodeBuilder) {
    Token startToken = ctx.getStart();
    List<IgnoredChar> before = mapTokenToIgnoredChar(
            tokens.getHiddenTokensToLeft(startToken.getTokenIndex(), CHANNEL_IGNORED_CHARS));
    Token stopToken = ctx.getStop();
    List<IgnoredChar> after = mapTokenToIgnoredChar(
            tokens.getHiddenTokensToRight(stopToken.getTokenIndex(), CHANNEL_IGNORED_CHARS));
    nodeBuilder.ignoredChars(new IgnoredChars(before, after));
}
/**
 * Aborts the parse with a {@link ReparseSQLException} when this skipped
 * parenthesized section has not already been elided; the exception carries
 * the statement with the parens spliced out so it can be re-parsed.
 */
@Override
public void enterSkip_parens(Skip_parensContext ctx) {
    if (ctx.MAXWELL_ELIDED_PARSE_ISSUE() != null) {
        return; // already elided on a previous pass — nothing to do
    }
    throw new ReparseSQLException(spliceParens(ctx.getStart().getTokenIndex(), 0));
}
/**
 * Aborts the parse with a {@link ReparseSQLException} when this skipped
 * parenthesized section inside partition definitions has not already been
 * elided; splices with a nesting offset of 1 rather than 0.
 */
@Override
public void enterSkip_parens_inside_partition_definitions(Skip_parens_inside_partition_definitionsContext ctx) {
    if (ctx.MAXWELL_ELIDED_PARSE_ISSUE() != null) {
        return; // already elided on a previous pass — nothing to do
    }
    throw new ReparseSQLException(spliceParens(ctx.getStart().getTokenIndex(), 1));
}
// NOTE(review): fragment with unbalanced braces — the enclosing method and the
// closing of this `if` are outside the visible region. Looks backwards from the
// current token for a candidate end-of-statement token: first tries the token
// directly before the current one; if that token is not on the hidden channel,
// steps back one further token instead — TODO confirm intent against the grammar.
int possibleIndexEosToken = this.getCurrentToken().getTokenIndex() - 1; Token ahead = _input.get(possibleIndexEosToken); if (ahead.getChannel() != Lexer.HIDDEN) { possibleIndexEosToken = this.getCurrentToken().getTokenIndex() - 2; ahead = _input.get(possibleIndexEosToken);
private boolean commentLeftOfCloseBrace(Token closeBraceToken) { Location closeBraceLocation = ListenerUtil.getTokenLocation(closeBraceToken); List<Token> tokens = tokenStream.getHiddenTokensToLeft(closeBraceToken.getTokenIndex()); // if comments are to the left of } if (tokens != null) { Token commentToken = getLastCommentToken(tokens); if (commentToken != null) { int commentEndLine = ListenerUtil.getEndLineOfToken(commentToken); if (commentEndLine == closeBraceLocation.line) { return true; } } } return false; }
// NOTE(review): fragment — the `if` body and the definitions of last/stop/notEOF
// are outside the visible region. Tests whether `last` lies beyond the stop
// token and on the same channel, i.e. trailing content exists after `stop`.
boolean lastGreaterThanDocument = last.getTokenIndex() > stop.getTokenIndex(); boolean sameChannel = last.getChannel() == stop.getChannel(); if (notEOF && lastGreaterThanDocument && sameChannel) {
private void verifyBlankLinesAroundFunction(SwiftParser.FunctionDeclarationContext ctx) { SwiftParser.DeclarationContext declCtx = (SwiftParser.DeclarationContext) ctx.getParent(); ParseTree left = ParseTreeUtil.getLeftNode(declCtx); if (left != null) { Token start = declCtx.getStart(); List<Token> tokens = tokenStream.getHiddenTokensToLeft(start.getTokenIndex()); if (getNumberOfBlankLines(tokens) < 1) { printer.error(Rules.FUNCTION_WHITESPACE, Messages.FUNCTION + Messages.BLANK_LINE_BEFORE, ListenerUtil.getTokenLocation(start)); } } ParseTree right = ParseTreeUtil.getRightNode(declCtx); if (right != null) { if (right.getText().equals("<EOF>")) { // function is at the end of the file return; } Token end = declCtx.getStop(); List<Token> tokens = tokenStream.getHiddenTokensToRight(end.getTokenIndex()); if (getNumberOfBlankLines(tokens) < 1) { printer.error(Rules.FUNCTION_WHITESPACE, Messages.FUNCTION + Messages.BLANK_LINE_AFTER, ListenerUtil.getTokenEndLocation(end)); } } }
/**
 * Entry rule: parses one expression followed by EOF. ANTLR-generated code —
 * do not hand-edit the statement sequence. As side effects it copies the
 * sub-expression's result into {@code _localctx.e}, lazily captures the full
 * matched source text into {@code fullExpression} (first parse only), and
 * records the expression's starting token index in {@code tokenPos}.
 * Recognition errors are reported and recovered via {@code _errHandler}
 * rather than propagated.
 */
public final ParseContext parse() throws RecognitionException { ParseContext _localctx = new ParseContext(_ctx, getState()); enterRule(_localctx, 0, RULE_parse); try { enterOuterAlt(_localctx, 1); { setState(72); ((ParseContext)_localctx).expression = expression(); setState(73); match(EOF); ((ParseContext)_localctx).e = ((ParseContext)_localctx).expression.e; if (fullExpression == null) fullExpression = (((ParseContext)_localctx).expression!=null?_input.getText(((ParseContext)_localctx).expression.start,((ParseContext)_localctx).expression.stop):null); tokenPos = (((ParseContext)_localctx).expression!=null?(((ParseContext)_localctx).expression.start):null).getTokenIndex(); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; }
/**
 * Gets the token matched against the expected token type, consuming it on
 * success.
 *
 * <p>A token whose type differs from {@code tokenType} is still accepted when
 * the expected type is the identifier token type and the current token's type
 * is numerically smaller — presumably keyword tokens that may double as
 * identifiers (TODO confirm token-type ordering guarantees this). In that case
 * the token's type is rewritten to the identifier type in place (only for
 * {@link CommonToken} instances, and never for EOF) before consuming.
 * On a mismatch, inline error recovery produces the returned token; a
 * recovery token with index -1 is additionally registered as an error node
 * when a parse tree is being built.</p>
 *
 * @param tokenType expected token type
 * @return the matched token, or the token produced by inline error recovery
 * @throws RecognitionException when inline recovery fails
 */
public Token getMatchedToken(final int tokenType) throws RecognitionException { Token result = parser.getCurrentToken(); boolean isIdentifierCompatible = false; if (identifierTokenIndex == tokenType && identifierTokenIndex > result.getType()) { isIdentifierCompatible = true; } if (result.getType() == tokenType || isIdentifierCompatible) { if (Token.EOF != tokenType && isIdentifierCompatible && result instanceof CommonToken) { ((CommonToken) result).setType(identifierTokenIndex); } parser.getErrorHandler().reportMatch(parser); parser.consume(); } else { result = parser.getErrorHandler().recoverInline(parser); if (parser.getBuildParseTree() && -1 == result.getTokenIndex()) { parser.getContext().addErrorNode(parser.createErrorNode(parser.getContext(), result)); } } return result; } }
@Override public Interval getSourceInterval() { if ( start == null ) { return Interval.INVALID; } if ( stop==null || stop.getTokenIndex()<start.getTokenIndex() ) { return Interval.of(start.getTokenIndex(), start.getTokenIndex()-1); // empty } return Interval.of(start.getTokenIndex(), stop.getTokenIndex()); }
/**
 * Replaces the text spanned by the two tokens (inclusive) in the named
 * rewrite program, delegating to the index-based overload.
 *
 * @param programName rewrite program to record the operation in
 * @param from        first token of the replaced span
 * @param to          last token of the replaced span
 * @param text        replacement text
 */
public void replace(String programName, Token from, Token to, Object text) {
    int fromIndex = from.getTokenIndex();
    int toIndex = to.getTokenIndex();
    replace(programName, fromIndex, toIndex, text);
}
/**
 * Returns the single-token interval for this terminal node.
 *
 * @return {@code Interval.INVALID} when no symbol is attached; otherwise the
 *         degenerate interval [i, i] at the symbol's token index
 */
@Override
public Interval getSourceInterval() {
    if (symbol == null) {
        return Interval.INVALID;
    }
    int index = symbol.getTokenIndex();
    return new Interval(index, index);
}
/**
 * Returns the source text between the two tokens, inclusive.
 *
 * @param start first token of the range (may be {@code null})
 * @param stop  last token of the range (may be {@code null})
 * @return the covered text, or the empty string when either token is null
 */
@Override
public String getText(Token start, Token stop) {
    if (start == null || stop == null) {
        return "";
    }
    return getText(Interval.of(start.getTokenIndex(), stop.getTokenIndex()));
}
/**
 * Returns the source text between the two tokens, inclusive.
 *
 * <p>Fix: previously this dereferenced both tokens unconditionally and threw
 * a {@link NullPointerException} when either was {@code null}; it now returns
 * the empty string in that case, matching the null-guarded
 * {@code getText(Token, Token)} overload elsewhere in this codebase.</p>
 *
 * @param start first token of the range (may be {@code null})
 * @param stop  last token of the range (may be {@code null})
 * @return the covered text, or the empty string when either token is null
 */
@Override
public String getText(Token start, Token stop) {
    if (start == null || stop == null) {
        return "";
    }
    return getText(Interval.of(start.getTokenIndex(), stop.getTokenIndex()));
}
/**
 * Inserts text immediately after the given token in the named rewrite
 * program, delegating to the index-based overload.
 *
 * @param programName rewrite program to record the operation in
 * @param t           token after which the text is inserted
 * @param text        text to insert
 */
public void insertAfter(String programName, Token t, Object text) {
    int index = t.getTokenIndex();
    insertAfter(programName, index, text);
}
// NOTE(review): fragment — the enclosing method and the closing of this `if`
// are outside the visible region. This indexes the compilation unit's child
// list with the terminal's *token* index; token indices and parse-tree child
// indices are different coordinate systems, so this is only correct if the
// grammar guarantees a 1:1 token-to-child mapping at this level — TODO confirm
// with the grammar, otherwise `children.get(tokenIndex - 2)` may pick the
// wrong node or throw.
CompilationUnitContext cuc = (CompilationUnitContext) arg0.getParent(); List<ParseTree> children = cuc.children; int tokenIndex = arg0.getSymbol().getTokenIndex(); if (tokenIndex - 2 >= 0 && tokenIndex + 1 <= children.size()) { ParseTree variablePT = children.get(tokenIndex - 2);