/** Flatten the multimap into a list of (key, value) pairs, one per mapping,
 *  in key insertion order. */
public List<Pair<K, V>> getPairs() {
    List<Pair<K, V>> pairs = new ArrayList<Pair<K, V>>();
    for (K key : keySet()) {
        for (V value : get(key)) {
            pairs.add(new Pair<K, V>(key, value));
        }
    }
    return pairs;
}
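A minimal usage sketch (assumption: the method above lives in ANTLR's MultiMap<K, V>, a LinkedHashMap<K, List<V>> whose map(key, value) helper appends a value to the key's list; the demo class and values are illustrative):

import org.antlr.v4.runtime.misc.MultiMap;
import org.antlr.v4.runtime.misc.Pair;

public class MultiMapDemo {
    public static void main(String[] args) {
        MultiMap<String, Integer> m = new MultiMap<String, Integer>();
        m.map("a", 1); // "a" -> [1, 2]
        m.map("a", 2);
        m.map("b", 3); // "b" -> [3]
        // getPairs() flattens to one Pair per mapping, preserving insertion order.
        for (Pair<String, Integer> p : m.getPairs()) {
            System.out.println(p.a + " -> " + p.b); // a -> 1, a -> 2, b -> 3
        }
    }
}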
/** Return the set of all token names referenced in any alternative of this rule. */
public Set<String> getTokenRefs() {
    Set<String> refs = new HashSet<String>();
    for (int i = 1; i <= numberOfAlts; i++) { // alts are 1-indexed; alt[0] is unused
        refs.addAll(alt[i].tokenRefs.keySet());
    }
    return refs;
}
/** Return the names of all element labels defined in any alternative of this
 *  rule, or null if there are none. */
public Set<String> getElementLabelNames() {
    Set<String> refs = new HashSet<String>();
    for (int i = 1; i <= numberOfAlts; i++) {
        refs.addAll(alt[i].labelDefs.keySet());
    }
    if ( refs.isEmpty() ) return null;
    return refs;
}
public Lexer(OutputModelFactory factory, LexerFile file) {
    super(factory);
    this.file = file; // who contains us?
    Grammar g = factory.getGrammar();
    // Copy the channel name->value map and expose the lexer's mode names
    // so the code-generation templates can emit them.
    channels = new LinkedHashMap<String, Integer>(g.channelNameToValueMap);
    modes = ((LexerGrammar)g).modes.keySet();
}
/** Alternate version of getPairs() that uses the Tuple2 pair type and the
 *  Tuple.create factory in place of Pair. */
public List<Tuple2<K, V>> getPairs() {
    List<Tuple2<K, V>> pairs = new ArrayList<Tuple2<K, V>>();
    for (K key : keySet()) {
        for (V value : get(key)) {
            pairs.add(Tuple.create(key, value));
        }
    }
    return pairs;
}
/** Report lexer modes whose names collide with reserved common constants
 *  or with a declared token name. */
public void checkForModeConflicts(Grammar g) {
    if (g.isLexer()) {
        LexerGrammar lexerGrammar = (LexerGrammar)g;
        for (String modeName : lexerGrammar.modes.keySet()) {
            if (!modeName.equals("DEFAULT_MODE") && reservedNames.contains(modeName)) {
                // Use the mode's first rule to locate the error in the source.
                Rule rule = lexerGrammar.modes.get(modeName).iterator().next();
                g.tool.errMgr.grammarError(ErrorType.MODE_CONFLICTS_WITH_COMMON_CONSTANTS,
                                           g.fileName, rule.ast.parent.getToken(), modeName);
            }

            if (g.getTokenType(modeName) != Token.INVALID_TYPE) {
                Rule rule = lexerGrammar.modes.get(modeName).iterator().next();
                g.tool.errMgr.grammarError(ErrorType.MODE_CONFLICTS_WITH_TOKEN,
                                           g.fileName, rule.ast.parent.getToken(), modeName);
            }
        }
    }
}
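A hedged sketch of lexer grammars that would trip each check above (grammar names and rule bodies are illustrative; SKIP is assumed to be among reservedNames, as one of the lexer's common constants):

// Mode name collides with a reserved common constant:
String modeVsConstant =
    "lexer grammar L1;\n" +
    "A : 'a' ;\n" +
    "mode SKIP;\n" +   // triggers MODE_CONFLICTS_WITH_COMMON_CONSTANTS
    "B : 'b' ;\n";

// Mode name collides with a token declared elsewhere in the grammar:
String modeVsToken =
    "lexer grammar L2;\n" +
    "FOO : 'foo' ;\n" +
    "mode FOO;\n" +    // triggers MODE_CONFLICTS_WITH_TOKEN
    "BAR : 'bar' ;\n";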
/** Create a LexerInterpreter that lexes input by walking this grammar's ATN
 *  instead of running generated code. Only valid for lexer or combined grammars. */
public LexerInterpreter createLexerInterpreter(CharStream input) {
    if (this.isParser()) {
        throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
    }

    if (this.isCombined()) {
        return implicitLexer.createLexerInterpreter(input);
    }

    // Round-trip the ATN through serialization so the interpreter gets
    // its own independent copy.
    char[] serializedAtn = ATNSerializer.getSerializedAsChars(atn);
    ATN deserialized = new ATNDeserializer().deserialize(serializedAtn);

    // Channels 0 and 1 are the predefined default and hidden channels;
    // user-defined channels follow.
    List<String> allChannels = new ArrayList<String>();
    allChannels.add("DEFAULT_TOKEN_CHANNEL");
    allChannels.add("HIDDEN");
    allChannels.addAll(channelValueToNameList);

    return new LexerInterpreter(fileName, getVocabulary(),
                                Arrays.asList(getRuleNames()),
                                allChannels,
                                ((LexerGrammar)this).modes.keySet(),
                                deserialized, input);
}
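A usage sketch of the interpreter path (assumptions: the tool-side LexerGrammar(String) constructor and a runtime recent enough for CharStreams.fromString; the grammar text and class name are illustrative):

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.LexerInterpreter;
import org.antlr.v4.tool.LexerGrammar;

public class LexerInterpreterDemo {
    public static void main(String[] args) throws Exception {
        // Build the grammar in memory; no code generation or compilation step.
        LexerGrammar g = new LexerGrammar(
            "lexer grammar T;\n" +
            "ID : [a-z]+ ;\n" +
            "WS : [ \\t\\r\\n]+ -> channel(HIDDEN) ;\n");
        CharStream input = CharStreams.fromString("abc def");
        // Tokenize by interpreting the deserialized ATN directly.
        LexerInterpreter lexer = g.createLexerInterpreter(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.fill();
        System.out.println(tokens.getTokens());
    }
}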
/** Variant that omits the channel-name list, calling the older
 *  LexerInterpreter constructor without channel names. */
public LexerInterpreter createLexerInterpreter(CharStream input) {
    if (this.isParser()) {
        throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
    }

    if (this.isCombined()) {
        return implicitLexer.createLexerInterpreter(input);
    }

    char[] serializedAtn = ATNSerializer.getSerializedAsChars(atn);
    ATN deserialized = new ATNDeserializer().deserialize(serializedAtn);
    return new LexerInterpreter(fileName, getVocabulary(),
                                Arrays.asList(getRuleNames()),
                                ((LexerGrammar)this).modes.keySet(),
                                deserialized, input);
}