/**
 * Binds the configured table/key/value field names into the SQL template and
 * registers every non-reserved, non-null parameter as an "extras" field.
 *
 * @param sql    template to populate (mutated in place)
 * @param params query parameters; extraFields are merged into this map
 */
protected void setupTemplates(ST sql, Map<String, Object> params) {
    sql.add("key", this.keyField);
    sql.add("value", this.valueField);
    sql.add("table", this.tableName);
    params.putAll(extraFields);
    for (Map.Entry<String, Object> entry : params.entrySet()) {
        String name = entry.getKey();
        // NOTE(review): "inset" looks like it may be a typo for "insert" — confirm against template usage.
        boolean reserved = "inset".equals(name)
                || "prefix".equals(name)
                || "maxLimit".equals(name);
        if (entry.getValue() != null && !reserved) {
            sql.addAggr("extras.{field}", name);
        }
    }
}
/** If someone does PLUS='+' in the parser, must make sure we get * "PLUS : '+' ;" in lexer not "T73 : '+';" */ public void defineLexerRuleForAliasedStringLiteral(String tokenID, String literal, int tokenType) { if ( getGrammarIsRoot() ) { // don't build lexers for delegates //System.out.println("defineLexerRuleForAliasedStringLiteral: "+literal+" "+tokenType); lexerGrammarST.addAggr("literals.{ruleName,type,literal}", tokenID, Utils.integer(tokenType), literal); } // track this lexer rule's name composite.lexerRules.add(tokenID); }
/** If someone does PLUS='+' in the parser, must make sure we get * "PLUS : '+' ;" in lexer not "T73 : '+';" */ public void defineLexerRuleForAliasedStringLiteral(String tokenID, String literal, int tokenType) { if ( getGrammarIsRoot() ) { // don't build lexers for delegates //System.out.println("defineLexerRuleForAliasedStringLiteral: "+literal+" "+tokenType); lexerGrammarST.addAggr("literals.{ruleName,type,literal}", tokenID, Utils.integer(tokenType), literal); } // track this lexer rule's name composite.lexerRules.add(tokenID); }
/** Set attributes tokens and literals attributes in the incoming * code template. This is not the token vocab interchange file, but * rather a list of token type ID needed by the recognizer. */ protected void genTokenTypeConstants(ST code) { // make constants for the token types for (String tokenID : grammar.getTokenIDs()) { int tokenType = grammar.getTokenType(tokenID); if ( tokenType==Label.EOF || tokenType>=Label.MIN_TOKEN_TYPE ) { // don't do FAUX labels 'cept EOF code.addAggr("tokens.{name,type}", tokenID, Utils.integer(tokenType)); } } }
/** Set attributes tokens and literals attributes in the incoming * code template. This is not the token vocab interchange file, but * rather a list of token type ID needed by the recognizer. */ protected void genTokenTypeConstants(ST code) { // make constants for the token types for (String tokenID : grammar.getTokenIDs()) { int tokenType = grammar.getTokenType(tokenID); if ( tokenType==Label.EOF || tokenType>=Label.MIN_TOKEN_TYPE ) { // don't do FAUX labels 'cept EOF code.addAggr("tokens.{name,type}", tokenID, Utils.integer(tokenType)); } } }
/** If the grammar is a combined grammar, return the text of the implicit * lexer grammar. */ public String getLexerGrammar() { if ( lexerGrammarST.getAttribute("literals")==null && lexerGrammarST.getAttribute("rules")==null ) { // if no rules, return nothing return null; } lexerGrammarST.add("name", name); // if there are any actions set for lexer, pass them in if ( getActions().get("lexer")!=null ) { lexerGrammarST.add("actionNames", getActions().get("lexer").keySet()); lexerGrammarST.add("actions", getActions().get("lexer").values()); } // make sure generated grammar has the same options if ( options!=null ) { for (String optionName : options.keySet()) { if ( !doNotCopyOptionsToLexer.contains(optionName) ) { Object value = options.get(optionName); lexerGrammarST.addAggr("options.{name,value}", optionName, value); } } } return lexerGrammarST.render(); }
/** If the grammar is a combined grammar, return the text of the implicit * lexer grammar. */ public String getLexerGrammar() { if ( lexerGrammarST.getAttribute("literals")==null && lexerGrammarST.getAttribute("rules")==null ) { // if no rules, return nothing return null; } lexerGrammarST.add("name", name); // if there are any actions set for lexer, pass them in if ( getActions().get("lexer")!=null ) { lexerGrammarST.add("actionNames", getActions().get("lexer").keySet()); lexerGrammarST.add("actions", getActions().get("lexer").values()); } // make sure generated grammar has the same options if ( options!=null ) { for (String optionName : options.keySet()) { if ( !doNotCopyOptionsToLexer.contains(optionName) ) { Object value = options.get(optionName); lexerGrammarST.addAggr("options.{name,value}", optionName, value); } } } return lexerGrammarST.render(); }
public void defineLexerRuleForStringLiteral(String literal, int tokenType) { //System.out.println("defineLexerRuleForStringLiteral: "+literal+" "+tokenType); // compute new token name like T237 and define it as having tokenType String tokenID = computeTokenNameFromLiteral(tokenType,literal); defineToken(tokenID, tokenType); // tell implicit lexer to define a rule to match the literal if ( getGrammarIsRoot() ) { // don't build lexers for delegates lexerGrammarST.addAggr("literals.{ruleName,type,literal}", tokenID, Utils.integer(tokenType), literal); } }
public void defineLexerRuleForStringLiteral(String literal, int tokenType) { //System.out.println("defineLexerRuleForStringLiteral: "+literal+" "+tokenType); // compute new token name like T237 and define it as having tokenType String tokenID = computeTokenNameFromLiteral(tokenType,literal); defineToken(tokenID, tokenType); // tell implicit lexer to define a rule to match the literal if ( getGrammarIsRoot() ) { // don't build lexers for delegates lexerGrammarST.addAggr("literals.{ruleName,type,literal}", tokenID, Utils.integer(tokenType), literal); } }
/** Generate a token vocab file with all the token names/types. For example: * ID=7 * FOR=8 * 'for'=8 * * This is independent of the target language; used by antlr internally */ protected ST genTokenVocabOutput() { ST vocabFileST = new ST(vocabFilePattern); vocabFileST.add("literals",(Object)null); // "define" literals arg vocabFileST.add("tokens",(Object)null); vocabFileST.impl.name = "vocab-file"; // make constants for the token names for (String tokenID : grammar.getTokenIDs()) { int tokenType = grammar.getTokenType(tokenID); if ( tokenType>=Label.MIN_TOKEN_TYPE ) { vocabFileST.addAggr("tokens.{name,type}", tokenID, Utils.integer(tokenType)); } } // now dump the strings for (String literal : grammar.getStringLiterals()) { int tokenType = grammar.getTokenType(literal); if ( tokenType>=Label.MIN_TOKEN_TYPE ) { vocabFileST.addAggr("tokens.{name,type}", literal, Utils.integer(tokenType)); } } return vocabFileST; }
/** Generate a token vocab file with all the token names/types. For example: * ID=7 * FOR=8 * 'for'=8 * * This is independent of the target language; used by antlr internally */ protected ST genTokenVocabOutput() { ST vocabFileST = new ST(vocabFilePattern); vocabFileST.add("literals",(Object)null); // "define" literals arg vocabFileST.add("tokens",(Object)null); vocabFileST.impl.name = "vocab-file"; // make constants for the token names for (String tokenID : grammar.getTokenIDs()) { int tokenType = grammar.getTokenType(tokenID); if ( tokenType>=Label.MIN_TOKEN_TYPE ) { vocabFileST.addAggr("tokens.{name,type}", tokenID, Utils.integer(tokenType)); } } // now dump the strings for (String literal : grammar.getStringLiterals()) { int tokenType = grammar.getTokenType(literal); if ( tokenType>=Label.MIN_TOKEN_TYPE ) { vocabFileST.addAggr("tokens.{name,type}", literal, Utils.integer(tokenType)); } } return vocabFileST; }
// NOTE(review): truncated snippet — the addAggr call continues beyond this view; remaining arguments not visible here.
testFileTemplate.addAggr("file.{Options,name,tests}", index.rawGetDictionary("Options"), testName,
// NOTE(review): ANTLR-generated tree-walker rule matching ^('catch' ARG_ACTION ACTION); on success it
// translates the ACTION into code chunks and records them with the catch arg on the rule template's
// "exceptions" aggregate. Generated code — do not hand-edit; regenerate from CodeGenTreeWalker.g instead.
public final void exceptionHandler(ST ruleST) throws RecognitionException { GrammarAST ACTION2=null; GrammarAST ARG_ACTION3=null; try { // org/antlr/grammar/v3/CodeGenTreeWalker.g:661:2: ( ^( 'catch' ARG_ACTION ACTION ) ) // org/antlr/grammar/v3/CodeGenTreeWalker.g:661:4: ^( 'catch' ARG_ACTION ACTION ) { match(input,CATCH,FOLLOW_CATCH_in_exceptionHandler766); if (state.failed) return; match(input, Token.DOWN, null); if (state.failed) return; ARG_ACTION3=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_exceptionHandler768); if (state.failed) return; ACTION2=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_exceptionHandler770); if (state.failed) return; match(input, Token.UP, null); if (state.failed) return; if ( state.backtracking==0 ) { List<? extends Object> chunks = generator.translateAction(currentRuleName,ACTION2); ruleST.addAggr("exceptions.{decl,action}",(ARG_ACTION3!=null?ARG_ACTION3.getText():null),chunks); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "exceptionHandler"
// NOTE(review): ANTLR-generated tree-walker rule matching ^('catch' ARG_ACTION ACTION); on success it
// translates the ACTION into code chunks and records them with the catch arg on the rule template's
// "exceptions" aggregate. Generated code — do not hand-edit; regenerate from CodeGenTreeWalker.g instead.
public final void exceptionHandler(ST ruleST) throws RecognitionException { GrammarAST ACTION2=null; GrammarAST ARG_ACTION3=null; try { // org/antlr/grammar/v3/CodeGenTreeWalker.g:661:2: ( ^( 'catch' ARG_ACTION ACTION ) ) // org/antlr/grammar/v3/CodeGenTreeWalker.g:661:4: ^( 'catch' ARG_ACTION ACTION ) { match(input,CATCH,FOLLOW_CATCH_in_exceptionHandler766); if (state.failed) return; match(input, Token.DOWN, null); if (state.failed) return; ARG_ACTION3=(GrammarAST)match(input,ARG_ACTION,FOLLOW_ARG_ACTION_in_exceptionHandler768); if (state.failed) return; ACTION2=(GrammarAST)match(input,ACTION,FOLLOW_ACTION_in_exceptionHandler770); if (state.failed) return; match(input, Token.UP, null); if (state.failed) return; if ( state.backtracking==0 ) { List<? extends Object> chunks = generator.translateAction(currentRuleName,ACTION2); ruleST.addAggr("exceptions.{decl,action}",(ARG_ACTION3!=null?ARG_ACTION3.getText():null),chunks); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "exceptionHandler"
// NOTE(review): truncated ANTLR-generated snippet — records rewrite root and children elements (with
// line/pos) on the result template; both addAggr calls are cut off mid-argument-list in this view.
if (state.failed) return retval; if ( state.backtracking==0 ) { retval.code.addAggr("root.{el,line,pos}", (r!=null?((CodeGenTreeWalker.rewrite_atom_return)r).code:null), (r!=null?((GrammarAST)r.start):null).getLine(), if (state.failed) return retval; if ( state.backtracking==0 ) { retval.code.addAggr("children.{el,line,pos}", (el!=null?((CodeGenTreeWalker.rewrite_element_return)el).code:null), (el!=null?((GrammarAST)el.start):null).getLine(),
// NOTE(review): truncated ANTLR-generated snippet — records rewrite root and children elements (with
// line/pos) on the result template; both addAggr calls are cut off mid-argument-list in this view.
if (state.failed) return retval; if ( state.backtracking==0 ) { retval.code.addAggr("root.{el,line,pos}", (r!=null?((CodeGenTreeWalker.rewrite_atom_return)r).code:null), (r!=null?((GrammarAST)r.start):null).getLine(), if (state.failed) return retval; if ( state.backtracking==0 ) { retval.code.addAggr("children.{el,line,pos}", (el!=null?((CodeGenTreeWalker.rewrite_element_return)el).code:null), (el!=null?((GrammarAST)el.start):null).getLine(),
// NOTE(review): truncated ANTLR-generated snippet — appends a matched element's template plus
// line/pos to the "elements" aggregate; the call is cut off mid-argument-list in this view.
if ((e!=null?((CodeGenTreeWalker.element_return)e).code:null) != null) retval.code.addAggr("elements.{el,line,pos}", (e!=null?((CodeGenTreeWalker.element_return)e).code:null), (e!=null?((GrammarAST)e.start):null).getLine(),
// NOTE(review): truncated snippet — the tail of a preceding call followed by an addAggr recording
// an alternative number and its state path; surrounding context is not visible here.
tracePathAlt, labels); st.addAggr("paths.{alt, states}", displayAltI, path);
// NOTE(review): truncated snippet — the tail of a preceding call followed by an addAggr recording
// an alternative number and its state path; surrounding context is not visible here.
tracePathAlt, labels); st.addAggr("paths.{alt, states}", displayAltI, path);
// NOTE(review): truncated snippet — builds a set-match expression, instantiates an "alt" template,
// and starts adding it to the "elements" aggregate; the addAggr call is cut off in this view.
generator.genSetExpr(templates,s.getSetValue(),1,false)); ST altcode=templates.getInstanceOf("alt"); altcode.addAggr("elements.{el,line,pos}", setcode, s.getLine(),