// Fragment (ANTLR grammar-AST surgery): synthesize a TOKENS_SPEC node, mark it as
// owned by the root grammar, and insert it as child 1 of the grammar root so the
// tree has shape ^(GRAMMAR ID TOKENS ...).
// NOTE(review): this fragment was collapsed onto one physical line, so the trailing
// "//" comment swallowed the log call and the `if` that hoists an imported action's
// subtree — as written, that text is dead (comment). Restore the original line
// breaks in the upstream source to re-enable it; kept verbatim as a comment below.
tokensRoot = (GrammarAST)adaptor.create(ANTLRParser.TOKENS_SPEC, "TOKENS");
tokensRoot.g = rootGrammar;
root.insertChild(1, tokensRoot);
// ^(GRAMMAR ID TOKENS...) rootGrammar.tool.log("grammar", action.g.name+" "+scopeName+":"+name+"="+action.getText()); if ( action.g != rootGrammar ) { root.insertChild(1, action.getParent());
// Fragment: rename a rule's AST to `variantName` (by replacing the token of the
// rule-name child) and splice the variant in as a sibling immediately after the
// original rule, then recompute parent/child indexes from the insertion point.
// The same splice is performed twice — once for `ast` and once for `unfactoredAst`
// (presumably the factored and unfactored variants of the rule — TODO confirm
// against the enclosing method, which is not visible in this fragment).
((GrammarAST)ast.getChild(0)).token = adaptor.createToken(ast.getChild(0).getType(), variantName);
GrammarAST ruleParent = (GrammarAST)rule.ast.getParent();
ruleParent.insertChild(rule.ast.getChildIndex() + 1, ast);
ruleParent.freshenParentAndChildIndexes(rule.ast.getChildIndex());
((GrammarAST)unfactoredAst.getChild(0)).token = adaptor.createToken(unfactoredAst.getChild(0).getType(), variantName);
// FIX: `ruleParent` was redeclared here with its type ("GrammarAST ruleParent = ...").
// Two declarations of the same local in one scope are a Java compile error
// (JLS 6.4/14.4); reuse the existing local instead — same value either way.
ruleParent = (GrammarAST)rule.ast.getParent();
ruleParent.insertChild(rule.ast.getChildIndex() + 1, unfactoredAst);
ruleParent.freshenParentAndChildIndexes(rule.ast.getChildIndex());
// Fragment (verbatim duplicate of an earlier snippet in SOURCE): synthesize a
// TOKENS_SPEC node owned by the root grammar and insert it as child 1 of the
// grammar root: ^(GRAMMAR ID TOKENS ...).
// NOTE(review): line-collapsing trapped the statements after the "//" comment —
// the log call and the imported-action hoist are dead text as written. Restore the
// original line breaks upstream to re-enable them; kept verbatim as comment below.
tokensRoot = (GrammarAST)adaptor.create(ANTLRParser.TOKENS_SPEC, "TOKENS");
tokensRoot.g = rootGrammar;
root.insertChild(1, tokensRoot);
// ^(GRAMMAR ID TOKENS...) rootGrammar.tool.log("grammar", action.g.name+" "+scopeName+":"+name+"="+action.getText()); if ( action.g != rootGrammar ) { root.insertChild(1, action.getParent());
// Fragment: duplicate an imported grammar's channels subtree, mark the copy as
// owned by the root grammar, and insert it as child 1 of the grammar root.
// NOTE(review): this fragment was collapsed onto one line, so everything after the
// "//" comment (an else-branch that merges channels child-by-child, plus a second
// tokensRoot-creation snippet) is dead comment text as written, with unbalanced
// braces — restore the original line breaks upstream; kept verbatim below.
channelsRoot = imp_channelRoot.dupTree();
channelsRoot.g = rootGrammar;
root.insertChild(1, channelsRoot);
// ^(GRAMMAR ID TOKENS...) } else { for (int c = 0; c < imp_channelRoot.getChildCount(); ++c) { tokensRoot = (GrammarAST)adaptor.create(ANTLRParser.TOKENS_SPEC, "TOKENS"); tokensRoot.g = rootGrammar; root.insertChild(1, tokensRoot); // ^(GRAMMAR ID TOKENS...) rootGrammar.tool.log("grammar", action.g.name+" "+scopeName+":"+name+"="+action.getText()); if ( action.g != rootGrammar ) { root.insertChild(1, action.getParent());
// Fragment: duplicate an imported grammar's channels subtree, mark the copy as
// owned by the root grammar, and insert it as child 1 of the grammar root.
// NOTE(review): everything after the first "//" below is trapped comment text
// (line-collapsing artifact). Additionally, inside that trapped text this variant
// assigns `tokensRoot = adaptor.create(...)` WITHOUT the (GrammarAST) cast that
// the sibling fragments use — if re-enabled, that is a compile error because
// TreeAdaptor.create returns Object; add the cast when restoring it upstream.
channelsRoot = imp_channelRoot.dupTree();
channelsRoot.g = rootGrammar;
root.insertChild(1, channelsRoot);
// ^(GRAMMAR ID TOKENS...) } else { for (int c = 0; c < imp_channelRoot.getChildCount(); ++c) { tokensRoot = adaptor.create(ANTLRParser.TOKENS_SPEC, "TOKENS"); tokensRoot.g = rootGrammar; root.insertChild(1, tokensRoot); // ^(GRAMMAR ID TOKENS...) rootGrammar.tool.log("grammar", action.g.name+" "+scopeName+":"+name+"="+action.getText()); if ( action.g != rootGrammar ) { root.insertChild(1, action.getParent());
// Fragment (verbatim duplicate of an earlier snippet in SOURCE): duplicate an
// imported grammar's channels subtree, mark the copy as owned by the root grammar,
// and insert it as child 1 of the grammar root.
// NOTE(review): everything after the first "//" below is dead comment text — a
// line-collapsing artifact with unbalanced braces; restore line breaks upstream.
channelsRoot = imp_channelRoot.dupTree();
channelsRoot.g = rootGrammar;
root.insertChild(1, channelsRoot);
// ^(GRAMMAR ID TOKENS...) } else { for (int c = 0; c < imp_channelRoot.getChildCount(); ++c) { tokensRoot = (GrammarAST)adaptor.create(ANTLRParser.TOKENS_SPEC, "TOKENS"); tokensRoot.g = rootGrammar; root.insertChild(1, tokensRoot); // ^(GRAMMAR ID TOKENS...) rootGrammar.tool.log("grammar", action.g.name+" "+scopeName+":"+name+"="+action.getText()); if ( action.g != rootGrammar ) { root.insertChild(1, action.getParent());
// Fragment: finish building a synthetic lexer rule for a string literal — attach
// the generated token-name terminal (idToken) and the literal-matching block (blk)
// as the rule's children, then insert the completed rule into the lexer-rules
// subtree at insertIndex.
// NOTE(review): SOURCE repeats this three-statement fragment five times verbatim
// (clone-dataset style); all five copies are kept as-is below. If these were ever
// executed sequentially against the same litRule/lexerRulesRoot, the repeated
// addChild/insertChild calls would corrupt the tree — presumably each copy comes
// from an independent context; confirm before merging.
litRule.addChild(new TerminalAST(idToken));
litRule.addChild(blk);
lexerRulesRoot.insertChild(insertIndex, litRule);
// duplicate copy 2
litRule.addChild(new TerminalAST(idToken));
litRule.addChild(blk);
lexerRulesRoot.insertChild(insertIndex, litRule);
// duplicate copy 3
litRule.addChild(new TerminalAST(idToken));
litRule.addChild(blk);
lexerRulesRoot.insertChild(insertIndex, litRule);
// duplicate copy 4
litRule.addChild(new TerminalAST(idToken));
litRule.addChild(blk);
lexerRulesRoot.insertChild(insertIndex, litRule);
// duplicate copy 5
litRule.addChild(new TerminalAST(idToken));
litRule.addChild(blk);
lexerRulesRoot.insertChild(insertIndex, litRule);