/**
 * Creates a SQL lexer reading characters from {@code input}.
 *
 * @param input the character stream to tokenize
 */
public SqlBaseLexer(CharStream input) {
    super(input);
    // Generated lexers share the static ATN/DFA caches across every instance.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
private static void modifyInterpreter(final NaturalLanguageStrategyLexer l) { final int originalSize = l.getInterpreter().decisionToDFA.length; final DFA[] emptyDFA = new DFA[originalSize]; // give our own array so the static one isn't used final LexerATNSimulator newInterpreter = new LexerATNSimulator(l, l.getATN(), emptyDFA, new PredictionContextCache()); newInterpreter.clearDFA(); // initialize our array so that the lexer functions properly l.setInterpreter(newInterpreter); // replace the interpreter to bypass all static caches }
// NOTE(review): appears to be a fragment of ANTLR's Lexer.nextToken() loop —
// the enclosing method signature and the end of the do-while are outside this
// view, so the code is left untouched. It resets per-token state (channel,
// start index, start line/column, cached text) and then asks the interpreter
// to match the next token type in the current mode.
_channel = Token.DEFAULT_CHANNEL; _tokenStartCharIndex = _input.index(); _tokenStartCharPositionInLine = getInterpreter().getCharPositionInLine(); _tokenStartLine = getInterpreter().getLine(); _text = null; do { ttype = getInterpreter().match(_input, _mode);
/**
 * {@inheritDoc}
 *
 * @return the current input line, as tracked by the lexer's interpreter
 */
@Override
public int getLine() {
    return getInterpreter().getLine();
}
/**
 * {@inheritDoc}
 *
 * @return the current column within the line, as tracked by the interpreter
 */
@Override
public int getCharPositionInLine() {
    return getInterpreter().getCharPositionInLine();
}
private static void modifyInterpreter(final NaturalLanguageStrategyLexer l) { final int originalSize = l.getInterpreter().decisionToDFA.length; final DFA[] emptyDFA = new DFA[originalSize]; // give our own array so the static one isn't used final LexerATNSimulator newInterpreter = new LexerATNSimulator(l, l.getATN(), emptyDFA, new PredictionContextCache()); newInterpreter.clearDFA(); // initialize our array so that the lexer functions properly l.setInterpreter(newInterpreter); // replace the interpreter to bypass all static caches }
// NOTE(review): looks like a mid-method slice of ANTLR's Lexer.nextToken() —
// neither the method header nor the loop's closing brace is visible here, so
// the statements are preserved byte-for-byte. They clear per-token state and
// enter the match loop that lets the interpreter produce the next token type.
_channel = Token.DEFAULT_CHANNEL; _tokenStartCharIndex = _input.index(); _tokenStartCharPositionInLine = getInterpreter().getCharPositionInLine(); _tokenStartLine = getInterpreter().getLine(); _text = null; do { ttype = getInterpreter().match(_input, _mode);
/**
 * Creates a type-calculation lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public TypeCalculationLexer(CharStream input) {
    super(input);
    // Wire up the simulator against the grammar's shared static caches.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates an InfluxDB line-protocol lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public InfluxLineProtocolLexer(CharStream input) {
    super(input);
    // Simulator shares the static ATN/DFA caches with all other instances.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates an expression lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public ExprLexer(CharStream input) {
    super(input);
    // Standard generated wiring: simulator backed by static grammar caches.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates a SQL Server statement lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public SQLServerStatementLexer(CharStream input) {
    super(input);
    // Simulator uses the static ATN/DFA caches shared by every instance.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates a MySQL statement lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public MySQLStatementLexer(CharStream input) {
    super(input);
    // Generated wiring against the grammar's shared static caches.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates an Oracle statement lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public OracleStatementLexer(CharStream input) {
    super(input);
    // Simulator shares static ATN/DFA state across all lexer instances.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates a PostgreSQL statement lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public PostgreSQLStatementLexer(CharStream input) {
    super(input);
    // Standard generated wiring: simulator backed by static grammar caches.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates a Smali lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public SmaliLexer(CharStream input) {
    super(input);
    // Simulator wired against the static ATN/DFA caches of the grammar.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates a rule-language lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public RuleLangLexer(CharStream input) {
    super(input);
    // Generated wiring: shared static ATN/DFA caches back the simulator.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates a Javadoc lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public JavadocLexer(CharStream input) {
    super(input);
    // Simulator uses the static ATN/DFA caches shared by every instance.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Constructs a lexer for the XGBoost model grammar.
 *
 * @param input the character stream to tokenize
 */
public XGBoostModelLexer(CharStream input) {
    super(input);
    // Simulator wired against the grammar's shared static ATN/DFA caches.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates an HPL/SQL lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public HplsqlLexer(CharStream input) {
    super(input);
    // Generated wiring: simulator backed by the static grammar caches.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}
/**
 * Creates a Kotlin-grammar lexer over {@code input}.
 *
 * @param input the character stream to tokenize
 */
public Kotlin(CharStream input) {
    super(input);
    // Simulator shares the static ATN/DFA caches across all instances.
    _interp = new LexerATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
}