/** Exposes the token source of the wrapped input stream. */
@Override
public TokenSource getTokenSource() {
    return this.input.getTokenSource();
}
/** Exposes the token source of the wrapped input stream. */
@Override
public TokenSource getTokenSource() {
    return this.input.getTokenSource();
}
/** Exposes the token source of the wrapped input stream. */
@Override
public TokenSource getTokenSource() {
    return this.input.getTokenSource();
}
/** Exposes the token source of the wrapped input stream. */
@Override
public TokenSource getTokenSource() {
    return this.input.getTokenSource();
}
/** Exposes the token source of the wrapped input stream. */
@Override
public TokenSource getTokenSource() {
    return this.input.getTokenSource();
}
/**
 * Exposes the token source of the wrapped input stream.
 *
 * <p>Added {@code @Override} for consistency with the sibling implementations of this
 * method elsewhere in the codebase; it lets the compiler verify the supertype signature.
 */
@Override
public TokenSource getTokenSource() {
    return input.getTokenSource();
}
/**
 * Builds a statement parser over the tokens lexed from the given raw text.
 * (Note: "JBehahiour" is the project's own spelling of its lexer class names.)
 *
 * @param data the raw statement text to tokenize and parse
 * @throws JBehaviourParsingError if the text cannot be tokenized
 */
public JBehaviourStatementParser(String data) throws JBehaviourParsingError {
    super(JBehahiourStoryLexer.getTokensFromString(data));
    this.lexer = (JBehahiourStoryLexer) this.input.getTokenSource();
}
/**
 * Builds a call parser over the tokens lexed from the given raw text.
 * (Note: "JBehahiour" is the project's own spelling of its lexer class names.)
 *
 * @param data the raw call text to tokenize and parse
 * @throws JBehaviourParsingError if the text cannot be tokenized
 */
public JBehaviourCallParser(String data) throws JBehaviourParsingError {
    super(JBehahiourCallLexer.getTokensFromString(data));
    this.lexer = (JBehahiourCallLexer) this.input.getTokenSource();
}
/**
 * Builds a story parser over the tokens lexed from the given file.
 * (Note: "JBehahiour" is the project's own spelling of its lexer class names.)
 *
 * @param filename the story file to tokenize and parse (resolved to its absolute path)
 * @throws JBehaviourParsingError if the file cannot be read or tokenized
 */
public JBehaviourParser(File filename) throws JBehaviourParsingError {
    super(JBehahiourStoryLexer.getTokens(filename.getAbsolutePath()));
    this.lexer = (JBehahiourStoryLexer) this.input.getTokenSource();
}
/**
 * Determines whether a token stream contains only numeric tokens.
 *
 * @param stream the stream to inspect; its source is expected to be a NattyTokenSource
 * @return true if the text of every token parses as a base-10 {@code int}
 */
private boolean isAllNumeric(TokenStream stream) {
    NattyTokenSource source = (NattyTokenSource) stream.getTokenSource();
    for (Token token : source.getTokens()) {
        if (!parsesAsInt(token.getText())) {
            return false;
        }
    }
    return true;
}

/** Returns true when {@link Integer#parseInt(String)} accepts the given text. */
private boolean parsesAsInt(String text) {
    try {
        Integer.parseInt(text);
        return true;
    } catch (NumberFormatException notAnInt) {
        return false;
    }
}
/**
 * Determines whether a token stream contains only numeric tokens.
 *
 * @param stream the stream to inspect; its source is expected to be a NattyTokenSource
 * @return true if the text of every token parses as a base-10 {@code int}
 */
private boolean isAllNumeric(TokenStream stream) {
    List<Token> allTokens = ((NattyTokenSource) stream.getTokenSource()).getTokens();
    for (Token candidate : allTokens) {
        try {
            // A single non-numeric token disqualifies the whole stream.
            Integer.parseInt(candidate.getText());
        } catch (NumberFormatException notNumeric) {
            return false;
        }
    }
    return true;
}
// NOTE(review): fragment — enclosing method and loop body continue past this view.
// Drains tokens from the stream's source until EOF, appending each token's symbolic
// name (from DateParser.tokenNames) plus a trailing space to tokenString.
// 'currentToken' is declared outside this fragment — presumably a field or earlier local.
int currentTokenType; StringBuilder tokenString = new StringBuilder(); while((currentToken = stream.getTokenSource().nextToken()).getType() != DateLexer.EOF) { currentTokenType = currentToken.getType(); tokenString.append(DateParser.tokenNames[currentTokenType]).append(" ");
// NOTE(review): fragment — enclosing method and loop body continue past this view.
// Drains tokens from the stream's source until EOF, appending each token's symbolic
// name (from DateParser.tokenNames) plus a trailing space to tokenString.
// 'currentToken' is declared outside this fragment — presumably a field or earlier local.
int currentTokenType; StringBuilder tokenString = new StringBuilder(); while((currentToken = stream.getTokenSource().nextToken()).getType() != DateLexer.EOF) { currentTokenType = currentToken.getType(); tokenString.append(DateParser.tokenNames[currentTokenType]).append(" ");
// NOTE(review): fragment — both loop bodies continue past this view.
// Tries each candidate token stream in turn: attempts a parse via singleParse, then
// (in the truncated inner loop) keeps retrying while no dated group was produced and
// tokens remain — presumably shrinking 'tokens' each iteration; verify against the full method.
for(TokenStream stream:streams) { lastStream = stream; List<Token> tokens = ((NattyTokenSource) stream.getTokenSource()).getTokens(); DateGroup group = singleParse(stream, value, referenceDate); while((group == null || group.getDates().size() == 0) && tokens.size() > 0) {
// NOTE(review): fragment — both loop bodies continue past this view.
// Tries each candidate token stream in turn: attempts a parse via singleParse, then
// (in the truncated inner loop) keeps retrying while no dated group was produced and
// tokens remain — presumably shrinking 'tokens' each iteration; verify against the full method.
for(TokenStream stream:streams) { lastStream = stream; List<Token> tokens = ((NattyTokenSource) stream.getTokenSource()).getTokens(); DateGroup group = singleParse(stream, value, referenceDate); while((group == null || group.getDates().size() == 0) && tokens.size() > 0) {
// NOTE(review): fragment from inside an unseen method.
// Guard clause: when the stream's source holds no tokens, short-circuit and return
// the current 'group' (declared outside this fragment) unchanged.
List<Token> tokens = ((NattyTokenSource) stream.getTokenSource()).getTokens(); if(tokens.isEmpty()) return group;
// NOTE(review): fragment from inside an unseen method.
// Guard clause: when the stream's source holds no tokens, short-circuit and return
// the current 'group' (declared outside this fragment) unchanged.
List<Token> tokens = ((NattyTokenSource) stream.getTokenSource()).getTokens(); if(tokens.isEmpty()) return group;
// NOTE(review): fragment — enclosing method/constructor not visible.
// Shares the 'group' reference between this object and the underlying GroupLexer:
// the chained assignment stores it both in lexer.group and in this.group.
GroupLexer lexer = (GroupLexer)input.getTokenSource(); this.group = lexer.group = group;
// NOTE(review): fragment — enclosing method/constructor not visible.
// Shares the 'group' reference between this object and the underlying GroupLexer:
// the chained assignment stores it both in lexer.group and in this.group.
GroupLexer lexer = (GroupLexer)input.getTokenSource(); this.group = lexer.group = group;
// NOTE(review): fragment — enclosing method/constructor not visible.
// Shares the 'group' reference between this object and the underlying GroupLexer:
// the chained assignment stores it both in lexer.group and in this.group.
GroupLexer lexer = (GroupLexer)input.getTokenSource(); this.group = lexer.group = group;