/**
 * Creates an interpreter for grammar {@code g} driven by the given ATN,
 * reading tokens from {@code input}. Also precomputes the outermost decision
 * states and allocates the state-to-alternatives cache.
 */
public GrammarParserInterpreter(Grammar g, ATN atn, TokenStream input) {
	// NOTE: the ATN must have been run through the serializer so that the
	// state flags the interpreter relies on are populated.
	super(g.fileName, g.getVocabulary(), Arrays.asList(g.getRuleNames()), atn, input);
	this.g = g;
	decisionStatesThatSetOuterAltNumInContext = findOuterMostDecisionStates();
	stateToAltsMap = new int[g.atn.states.size()][];
}
/**
 * Builds a grammar-aware parser interpreter over {@code atn} for {@code g},
 * consuming {@code input}; caches the outermost decision states and sets up
 * the per-state alternatives map.
 */
public GrammarParserInterpreter(Grammar g, ATN atn, TokenStream input) {
	// The ATN is expected to have round-tripped through the serializer,
	// which sets state flags the interpreter needs.
	super(g.fileName, g.getVocabulary(), Arrays.asList(g.getRuleNames()), atn, input);
	this.g = g;
	decisionStatesThatSetOuterAltNumInContext = findOuterMostDecisionStates();
	stateToAltsMap = new int[g.atn.states.size()][];
}
/**
 * Constructs the interpreter for {@code g} using {@code atn} and token
 * source {@code input}; initializes decision-state and alt-map bookkeeping.
 */
public GrammarParserInterpreter(Grammar g, ATN atn, TokenStream input) {
	// ATN must have been passed through the serializer beforehand so the
	// required state flags are set.
	super(g.fileName, g.getVocabulary(), Arrays.asList(g.getRuleNames()), atn, input);
	this.g = g;
	decisionStatesThatSetOuterAltNumInContext = findOuterMostDecisionStates();
	stateToAltsMap = new int[g.atn.states.size()][];
}
/**
 * Interpreter constructor: wires grammar {@code g}, its {@code atn}, and the
 * token stream into the superclass, then primes the decision-state set and
 * the state-to-alternatives cache.
 */
public GrammarParserInterpreter(Grammar g, ATN atn, TokenStream input) {
	// Serializer round-trip is a precondition: it sets state flags used here.
	super(g.fileName, g.getVocabulary(), Arrays.asList(g.getRuleNames()), atn, input);
	this.g = g;
	decisionStatesThatSetOuterAltNumInContext = findOuterMostDecisionStates();
	stateToAltsMap = new int[g.atn.states.size()][];
}
/**
 * Returns a {@link ParserInterpreter} for this (parser or combined) grammar.
 *
 * @param tokenStream source of tokens for the interpreter
 * @throws IllegalStateException if this grammar is a lexer grammar
 */
public ParserInterpreter createParserInterpreter(TokenStream tokenStream) {
	if (this.isLexer()) {
		throw new IllegalStateException("A parser interpreter can only be created for a parser or combined grammar.");
	}
	// Round-trip the ATN through the serializer; deserialization sets the
	// state flags the interpreter depends on.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn, Arrays.asList(getRuleNames()));
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	return new ParserInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), interpreterATN, tokenStream);
}
/**
 * Returns a {@link LexerInterpreter} for this (lexer or combined) grammar.
 * Combined grammars delegate to their implicitly generated lexer.
 *
 * @param input character stream the interpreter will tokenize
 * @throws IllegalStateException if this grammar is a pure parser grammar
 */
public LexerInterpreter createLexerInterpreter(CharStream input) {
	if (this.isParser()) {
		throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
	}
	if (this.isCombined()) {
		// The lexer half of a combined grammar lives in implicitLexer.
		return implicitLexer.createLexerInterpreter(input);
	}
	// Serializer round-trip sets the ATN state flags the interpreter needs.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn, Arrays.asList(getRuleNames()));
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	// The two predefined channels always come first, then user channels in
	// declaration order.
	List<String> channelNames = new ArrayList<String>();
	channelNames.add("DEFAULT_TOKEN_CHANNEL");
	channelNames.add("HIDDEN");
	channelNames.addAll(channelValueToNameList);
	return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), channelNames, ((LexerGrammar)this).modes.keySet(), interpreterATN, input);
}
/**
 * Returns a {@link GrammarParserInterpreter} (a parser interpreter that also
 * knows its source {@link Grammar}) for this parser or combined grammar.
 *
 * @param tokenStream source of tokens for the interpreter
 * @throws IllegalStateException if this grammar is a lexer grammar
 * @since 4.5.1
 */
public GrammarParserInterpreter createGrammarParserInterpreter(TokenStream tokenStream) {
	if (this.isLexer()) {
		throw new IllegalStateException("A parser interpreter can only be created for a parser or combined grammar.");
	}
	// Deserializing a fresh serialization sets the state flags required by
	// the interpreter.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn, Arrays.asList(getRuleNames()));
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	return new GrammarParserInterpreter(this, interpreterATN, tokenStream);
}
names = g.getRuleNames(); for (String name : names) { content.append(name).append("\n"); IntegerList serializedATN = ATNSerializer.getSerialized(g.atn, Arrays.asList(g.getRuleNames())); content.append("atn:\n"); content.append(serializedATN.toString());
names = g.getRuleNames(); for (String name : names) { content.append(name + "\n");
/**
 * Creates a {@link ParserInterpreter} for this (parser or combined) grammar.
 *
 * @param tokenStream token source for the interpreter
 * @throws IllegalStateException if this grammar is a lexer grammar
 */
public ParserInterpreter createParserInterpreter(TokenStream tokenStream) {
	if (this.isLexer()) {
		throw new IllegalStateException("A parser interpreter can only be created for a parser or combined grammar.");
	}
	// Serialize then deserialize: the round trip populates ATN state flags
	// the interpreter relies on.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn);
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	return new ParserInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), interpreterATN, tokenStream);
}
}
names = g.getRuleNames(); for (String name : names) { content.append(name + "\n");
/**
 * Creates a {@link LexerInterpreter} for this (lexer or combined) grammar;
 * combined grammars delegate to their implicit lexer.
 *
 * @param input character stream to tokenize
 * @throws IllegalStateException if this grammar is a pure parser grammar
 */
public LexerInterpreter createLexerInterpreter(CharStream input) {
	if (this.isParser()) {
		throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
	}
	if (this.isCombined()) {
		// Tokenization rules of a combined grammar live in implicitLexer.
		return implicitLexer.createLexerInterpreter(input);
	}
	// Round-trip through the serializer to set interpreter state flags.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn);
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	// Predefined channels first, then user-declared ones in order.
	List<String> channelNames = new ArrayList<String>();
	channelNames.add("DEFAULT_TOKEN_CHANNEL");
	channelNames.add("HIDDEN");
	channelNames.addAll(channelValueToNameList);
	return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), channelNames, ((LexerGrammar)this).modes.keySet(), interpreterATN, input);
}
/**
 * Builds a {@link LexerInterpreter} for a lexer or combined grammar,
 * including the full channel-name list (predefined channels followed by
 * user-declared ones).
 *
 * @param input character stream to tokenize
 * @throws IllegalStateException if invoked on a pure parser grammar
 */
public LexerInterpreter createLexerInterpreter(CharStream input) {
	if (this.isParser()) {
		throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
	}
	if (this.isCombined()) {
		// Delegate to the implicitly generated lexer of a combined grammar.
		return implicitLexer.createLexerInterpreter(input);
	}
	// The serializer round trip sets the ATN state flags the interpreter uses.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn);
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	List<String> channelNames = new ArrayList<String>();
	channelNames.add("DEFAULT_TOKEN_CHANNEL");
	channelNames.add("HIDDEN");
	channelNames.addAll(channelValueToNameList);
	return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), channelNames, ((LexerGrammar)this).modes.keySet(), interpreterATN, input);
}
/**
 * Builds a {@link ParserInterpreter} over a freshly round-tripped copy of
 * this grammar's ATN.
 *
 * @param tokenStream token source for the interpreter
 * @throws IllegalStateException if this grammar is a lexer grammar
 */
public ParserInterpreter createParserInterpreter(TokenStream tokenStream) {
	if (this.isLexer()) {
		throw new IllegalStateException("A parser interpreter can only be created for a parser or combined grammar.");
	}
	// Deserializing a fresh serialization sets required ATN state flags.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn);
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	return new ParserInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), interpreterATN, tokenStream);
}
}
/**
 * Factory for a {@link ParserInterpreter} bound to this grammar's rules,
 * vocabulary, and ATN.
 *
 * @param tokenStream token source for the interpreter
 * @throws IllegalStateException if this grammar is a lexer grammar
 */
public ParserInterpreter createParserInterpreter(TokenStream tokenStream) {
	if (this.isLexer()) {
		throw new IllegalStateException("A parser interpreter can only be created for a parser or combined grammar.");
	}
	// Serializer round trip populates interpreter-required state flags.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn);
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	return new ParserInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), interpreterATN, tokenStream);
}
}
/**
 * Produces a {@link ParserInterpreter} for this parser or combined grammar.
 *
 * @param tokenStream token source for the interpreter
 * @throws IllegalStateException if this grammar is a lexer grammar
 */
public ParserInterpreter createParserInterpreter(TokenStream tokenStream) {
	if (this.isLexer()) {
		throw new IllegalStateException("A parser interpreter can only be created for a parser or combined grammar.");
	}
	// Round-trip the ATN through (de)serialization to set its state flags.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn);
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	return new ParserInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), interpreterATN, tokenStream);
}
}
@Override
public void run() {
	// Remember the latest parse tree so later UI updates can reuse it.
	lastTree = result.tree;
	if (result.parser instanceof PreviewParser) {
		// PreviewParser carries alt-label info; render node text through the
		// alt-label-aware provider on both viewers.
		AltLabelTextProvider provider = new AltLabelTextProvider(result.parser, preview.g);
		treeViewer.setTreeTextProvider(provider);
		treeViewer.setTree(result.tree);
		hierarchyViewer.setTreeTextProvider(provider);
		hierarchyViewer.setTree(result.tree);
	} else {
		// Plain parser: fall back to rule-name-based labeling.
		treeViewer.setRuleNames(Arrays.asList(preview.g.getRuleNames()));
		treeViewer.setTree(result.tree);
		hierarchyViewer.setRuleNames(Arrays.asList(preview.g.getRuleNames()));
		hierarchyViewer.setTree(result.tree);
	}
}
});
/**
 * Creates a {@link LexerInterpreter} for this (lexer or combined) grammar;
 * combined grammars delegate to their implicitly generated lexer.
 *
 * @param input character stream to tokenize
 * @throws IllegalStateException if this grammar is a pure parser grammar
 */
public LexerInterpreter createLexerInterpreter(CharStream input) {
	if (this.isParser()) {
		throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
	}
	if (this.isCombined()) {
		// Tokenization half of a combined grammar lives in implicitLexer.
		return implicitLexer.createLexerInterpreter(input);
	}
	// Serializer round trip sets ATN state flags the interpreter needs.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn);
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), ((LexerGrammar)this).modes.keySet(), interpreterATN, input);
}
/**
 * Builds a {@link LexerInterpreter} over a round-tripped copy of this
 * grammar's ATN, using this lexer's modes.
 *
 * @param input character stream to tokenize
 * @throws IllegalStateException if invoked on a pure parser grammar
 */
public LexerInterpreter createLexerInterpreter(CharStream input) {
	if (this.isParser()) {
		throw new IllegalStateException("A lexer interpreter can only be created for a lexer or combined grammar.");
	}
	if (this.isCombined()) {
		// Delegate to the combined grammar's implicit lexer.
		return implicitLexer.createLexerInterpreter(input);
	}
	// Deserializing a fresh serialization populates required state flags.
	char[] atnChars = ATNSerializer.getSerializedAsChars(atn);
	ATN interpreterATN = new ATNDeserializer().deserialize(atnChars);
	return new LexerInterpreter(fileName, getVocabulary(), Arrays.asList(getRuleNames()), ((LexerGrammar)this).modes.keySet(), interpreterATN, input);
}
/**
 * Populates the recognizer output model from the factory's grammar:
 * names, access level, token map, rules, serialized ATN, optional
 * superclass action, and the three token-name tables.
 */
public Recognizer(OutputModelFactory factory) {
	super(factory);
	Grammar grammar = factory.getGrammar();
	grammarFileName = new File(grammar.fileName).getName();
	grammarName = grammar.name;
	name = grammar.getRecognizerName();
	accessLevel = grammar.getOptionString("accessLevel");
	// Keep declaration order; only real token types (> 0) are emitted.
	tokens = new LinkedHashMap<String,Integer>();
	for (Map.Entry<String, Integer> tokenEntry : grammar.tokenNameToTypeMap.entrySet()) {
		Integer tokenType = tokenEntry.getValue();
		if (tokenType > 0) {
			tokens.put(tokenEntry.getKey(), tokenType);
		}
	}
	ruleNames = grammar.rules.keySet();
	rules = grammar.rules.values();
	atn = new SerializedATN(factory, grammar.atn, Arrays.asList(grammar.getRuleNames()));
	// superClass is an action chunk only when the grammar option is present.
	superClass = grammar.getOptionString("superClass") != null
			? new ActionText(null, grammar.getOptionString("superClass"))
			: null;
	tokenNames = translateTokenStringsToTarget(grammar.getTokenDisplayNames(), factory);
	literalNames = translateTokenStringsToTarget(grammar.getTokenLiteralNames(), factory);
	symbolicNames = translateTokenStringsToTarget(grammar.getTokenSymbolicNames(), factory);
	abstractRecognizer = grammar.isAbstract();
}