/** Creates a non-flyweight token of the given id spanning the text read so far from the lexer input. */
private Token<RubyCommentTokenId> token(RubyCommentTokenId id) { return tokenFactory.createToken(id); }
/** Creates a non-flyweight token of the given id spanning the text read so far from the lexer input. */
private Token<RubyStringTokenId> token(RubyStringTokenId id) { return tokenFactory.createToken(id); }
/** Creates a non-flyweight token of the given id spanning the text read so far from the lexer input. */
private Token<DoxygenTokenId> token(DoxygenTokenId id) { return tokenFactory.createToken(id); }
/**
 * Creates a JSP token of the given id. When the LOG flag is enabled, first runs
 * checkToken(tokenId) as a debug-time sanity check before creating the token.
 */
private Token<JspTokenId> token(JspTokenId tokenId) { if(LOG) { checkToken(tokenId); } return tokenFactory.createToken(tokenId); }
/**
 * Creates an RHTML token of the given id.
 * A zero-length lexer input at this point indicates a lexer bug, so a stack
 * trace (including the current state) is dumped as a diagnostic aid before
 * the token is created anyway.
 */
private Token<RhtmlTokenId> token(RhtmlTokenId id) {
    if (input.readLength() == 0) {
        new Exception("Error - token length is zero!; state = " + state).printStackTrace();
    }
    return tokenFactory.createToken(id);
}
/**
 * Produces the next PHP top-level token from the underlying scanner,
 * or {@code null} when the scanner reports no further token id.
 */
@Override
public Token<PHPTopTokenId> nextToken() {
    final PHPTopTokenId id = scanner.nextToken();
    return (id == null) ? null : tokenFactory.createToken(id);
}
/**
 * Produces the next PHP token from the underlying scanner, or {@code null}
 * when the scanner is exhausted. An {@link IOException} from the scanner is
 * logged at SEVERE and reported to the caller as {@code null}.
 */
@Override
public Token<PHPTokenId> nextToken() {
    try {
        final PHPTokenId id = scanner.nextToken();
        return (id == null) ? null : tokenFactory.createToken(id);
    } catch (IOException ex) {
        Logger.getLogger(GSFPHPLexer.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}
/**
 * Produces the next configuration-properties token from the underlying
 * scanner, or {@code null} when the scanner is exhausted. An
 * {@link IOException} from the scanner is logged at SEVERE and reported to
 * the caller as {@code null}.
 */
@Override
public Token<CfgPropsTokenId> nextToken() {
    try {
        final CfgPropsTokenId id = scanner.nextTokenId();
        return (id == null) ? null : tokenFactory.createToken(id);
    } catch (IOException ex) {
        Logger.getLogger(CfgPropsLexer.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}
/**
 * Produces the next PHPDoc comment token from the underlying scanner, or
 * {@code null} when the scanner is exhausted. An {@link IOException} from
 * the scanner is logged at SEVERE and reported to the caller as {@code null}.
 */
@Override
public Token<PHPDocCommentTokenId> nextToken() {
    try {
        final PHPDocCommentTokenId id = scanner.nextToken();
        return (id == null) ? null : tokenFactory.createToken(id);
    } catch (IOException ex) {
        Logger.getLogger(GSFPHPLexer.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}
/**
 * Creates an EL token of the given id. When the LOG flag is enabled, traces
 * the token text and id at INFO level, and additionally flags zero-length
 * tokens (which indicate a lexer bug). Note the messages are built with
 * string concatenation deliberately; they contain apostrophes, which would
 * be treated as quoting characters by {@code MessageFormat}-style log params.
 */
private Token<ELTokenId> token(ELTokenId tokenId) { if(LOG) { if(input.readLength() == 0) { LOGGER.log(Level.INFO, "[" + this.getClass().getSimpleName() + "] Found zero length token: "); //NOI18N
 } LOGGER.log(Level.INFO, "[" + this.getClass().getSimpleName() + "] token ('" + input.readText().toString() + "'; id=" + tokenId + ")\n"); //NOI18N
 } return tokenFactory.createToken(tokenId); }
private Token<CppStringTokenId> token(CppStringTokenId id, String fixedText, PartType part) { assert id != null : "id must be not null"; Token<CppStringTokenId> token; if (fixedText != null && !escapedLF) { // create flyweight token token = tokenFactory.getFlyweightToken(id, fixedText); } else { if (part != PartType.COMPLETE) { token = tokenFactory.createToken(id, input.readLength(), part); } else { token = tokenFactory.createToken(id); } } escapedLF = false; assert token != null : "token must be created as result for " + id; return token; }
/** * Creates token */ private Token<FortranTokenId> token(FortranTokenId id, String fixedText, PartType part) { assert id != null : "id must be not null"; Token<FortranTokenId> token; if (fixedText != null) { // create flyweight token token = tokenFactory.getFlyweightToken(id, fixedText); } else { if (part != PartType.COMPLETE) { token = tokenFactory.createToken(id, input.readLength(), part); } else { token = tokenFactory.createToken(id); } } assert token != null : "token must be created as result for " + id; return token; }
@Override public Token<CPTokenId> nextToken() { //just read whole input while(input.read() != LexerInput.EOF) {}; //and create one big token return input.readLength() > 0 ? tokenFactory.createToken(CPTokenId.CSS) : null; }
/**
 * Creates a Ruby token of the given id and length, preferring a flyweight
 * token whenever the id has fixed text.
 */
private Token<RubyTokenId> token(RubyTokenId id, int length) {
    final String fixed = id.fixedText();
    if (fixed == null) {
        return tokenFactory.createToken(id, length);
    }
    return tokenFactory.getFlyweightToken(id, fixed);
}
private Token<CppTokenId> token(CppTokenId id, String fixedText, PartType part) { assert id != null : "id must be not null"; Token<CppTokenId> token; if (fixedText != null && !isTokenSplittedByEscapedLine()) { // create flyweight token token = tokenFactory.getFlyweightToken(id, fixedText); } else { if (part != PartType.COMPLETE) { token = tokenFactory.createToken(id, input.readLength(), part); } else { token = tokenFactory.createToken(id); } } tokenSplittedByEscapedLine = 0; escapedEatenChars = 0; assert token != null : "token must be created as result for " + id; postTokenCreate(id); return token; }
/**
 * Creates a Dockerfile token of the given id. A flyweight token is used only
 * when the id has fixed text whose length matches exactly what was read from
 * the input; otherwise a regular token is created.
 */
@NonNull
private Token<DockerfileTokenId> token(DockerfileTokenId id) {
    final String fixed = id.fixedText();
    if (fixed != null && fixed.length() == input.readLength()) {
        return tokenFactory.getFlyweightToken(id, fixed);
    }
    return tokenFactory.createToken(id);
}
/**
 * Creates a Groovy token for the given internal token type and length,
 * tracing the creation at FINEST level. A flyweight token is used whenever
 * the mapped id has fixed text.
 */
private Token<GroovyTokenId> createToken(int tokenIntId, int tokenLength) {
    final GroovyTokenId id = getTokenId(tokenIntId);
    LOG.log(Level.FINEST, "Creating token: {0}, length: {1}", new Object[]{id.name(), tokenLength});
    final String fixed = id.fixedText();
    if (fixed == null) {
        return tokenFactory.createToken(id, tokenLength);
    }
    return tokenFactory.getFlyweightToken(id, fixed);
}
/**
 * Consumes a maximal run of whitespace and returns it as a single
 * WHITESPACE token.
 * CR/LF characters record the follow-up lexer state into stateHolder[0]
 * (CONT_LINE when currently escaping a line break, NEW_LINE otherwise) and
 * keep consuming; other whitespace (tab, vertical tab, form feed, file/group/
 * record/unit separators 0x1c-0x1f, space) is simply consumed. Any other
 * character - including EOF, which falls through to default - is pushed back
 * via backup(1) before the token is created.
 * NOTE(review): backup(1) is also executed for EOF; presumably the lexer's
 * backup tolerates pushing back EOF - confirm against nextChar/backup.
 */
private Token<DockerfileTokenId> finishWhitespace( int currentState, int[] stateHolder) { while (true) { int c = nextChar(); switch (c) { case '\r': case '\n': stateHolder[0] = currentState == STATE_ESCAPE ? STATE_CONT_LINE: STATE_NEW_LINE; break; case '\t': case 0x0b: case '\f': case 0x1c: case 0x1d: case 0x1e: case 0x1f: case ' ': break; case EOF: default: backup(1); return tokenFactory.createToken(DockerfileTokenId.WHITESPACE); } } }
/**
 * Creates a token for the given lexer-generated token type starting at the
 * given input offset, splitting it around any preprocessor embeddings that
 * were collected by the DelegatingInputBridge.
 * Fast paths return a plain token when the input is not bridged, when no
 * embeddings were collected, or when the token type has a token import
 * (embeddings are not allowed inside imported tokens). Otherwise the token
 * is partitioned into Vojta segments (gaps between embeddings are tagged
 * CONTINUOUS_TOKEN_START for the first piece and CONTINUOUS_TOKEN after)
 * and handed to createToken(Marenka).
 * NOTE(review): imports has String keys but containsKey is called with the
 * int token type, which autoboxes to Integer and can never match a String
 * key - so this lookup always returns false. The intended key is likely the
 * token type's name; confirm against Language.getTokenImports usage.
 * NOTE(review): raw List/Iterator are used for the embeddings; presumably
 * List<Vojta> - confirm against DelegatingInputBridge.getEmbeddings.
 */
private Token<STokenId> createToken (int type, int start) { STokenId tokenId = tokenIDToType.get (type); assert tokenId != null : "Unknown token type \"" + type + "\""; if (!(input instanceof DelegatingInputBridge)) { return tokenFactory.createToken (tokenId); } List embeddings = ((DelegatingInputBridge) input).getEmbeddings (); if (embeddings.isEmpty ()) return tokenFactory.createToken (tokenId); Map<String,Feature> imports = language.getTokenImports (); if (imports.containsKey (type)) // no preprocessor imports in token import. return tokenFactory.createToken (tokenId); Marenka marenka = new Marenka ((Integer) state); Object property = CONTINUOUS_TOKEN_START; Iterator it = embeddings.iterator (); while(it.hasNext ()) { Vojta v = (Vojta) it.next (); if (start < v.startOffset) { marenka.add (new Vojta (type, start, v.startOffset, property)); property = CONTINUOUS_TOKEN; } marenka.add (v); start = v.endOffset; } if (start < input.getIndex ()) marenka.add (new Vojta (type, start, input.getIndex (), property)); return createToken (marenka); }
/**
 * Produces the next ECMAScript token by pulling one token from the ANTLR
 * lexer and mapping its type through ECMAScriptLanguageHierarchy, or
 * {@code null} at end of input.
 * NOTE(review): if getToken has no mapping for a type it presumably returns
 * null, which createToken would reject - confirm the mapping is total.
 */
@Override
public Token<ECMAScriptTokenId> nextToken() {
    final org.antlr.v4.runtime.Token token = lexer.nextToken();
    // EOF is a static constant; reference it via the class rather than the
    // lexer instance (lexer.EOF compiled to the same constant but reads as
    // an instance field)
    if (token.getType() != org.antlr.v4.runtime.Recognizer.EOF) {
        ECMAScriptTokenId tokenId = ECMAScriptLanguageHierarchy.getToken(token.getType());
        return info.tokenFactory().createToken(tokenId);
    }
    return null;
}