/**
 * Reads the next token from the tokeniser.
 * <p>
 * Convenience overload that delegates to the three-argument
 * {@code nextToken} with {@code ignoreEOF} disabled.
 *
 * @param tokeniser the tokeniser to read the next token from
 * @param in the underlying reader (used for error reporting)
 * @return int value of the ttype field of the tokeniser
 * @throws IOException when unable to read from the stream
 * @throws ParserException when the delegate rejects the token (presumably on EOF,
 *         since {@code ignoreEOF} is {@code false} — confirm against the delegate)
 */
private int nextToken(StreamTokenizer tokeniser, Reader in) throws IOException, ParserException {
    return nextToken(tokeniser, in, false);
}
/**
 * Reads the next token from the tokeniser without suppressing EOF handling.
 * <p>
 * Delegates to the three-argument {@code nextToken} overload.
 *
 * @param tokeniser the tokeniser to read the next token from
 * @param in the underlying reader (used for error reporting)
 * @return int value of the ttype field of the tokeniser
 * @throws IOException when unable to read from the stream
 * @throws ParserException when the delegate rejects the token
 */
private int nextToken(StreamTokenizer tokeniser, Reader in) throws IOException, ParserException {
    // EOF is not ignored for this overload.
    final boolean ignoreEof = false;
    return nextToken(tokeniser, in, ignoreEof);
}
/**
 * Reads the next token from the tokeniser.
 * <p>
 * Equivalent to {@code nextToken(tokeniser, in, false)}.
 *
 * @param tokeniser the tokeniser to read the next token from
 * @param in the underlying reader (used for error reporting)
 * @return int value of the ttype field of the tokeniser
 * @throws IOException when unable to read from the stream
 * @throws ParserException when the delegate rejects the token
 */
private int nextToken(StreamTokenizer tokeniser, Reader in) throws IOException, ParserException {
    // Delegate to the three-argument overload with EOF suppression disabled.
    int ttype = nextToken(tokeniser, in, false);
    return ttype;
}
/** * Absorbs extraneous newlines. * * @param tokeniser * @throws IOException */ private void absorbWhitespace(final StreamTokenizer tokeniser, Reader in) throws IOException, ParserException { // HACK: absorb extraneous whitespace between components (KOrganizer).. while (nextToken(tokeniser, in) == StreamTokenizer.TT_EOL) { if (log.isTraceEnabled()) { log.trace("Absorbing extra whitespace.."); } } if (log.isTraceEnabled()) { log.trace("Aborting: absorbing extra whitespace complete"); } }
/** * Absorbs extraneous newlines. * * @param tokeniser * @param in * @return int value of the ttype field of the tokeniser * @throws IOException */ private int absorbWhitespace(final StreamTokenizer tokeniser, Reader in, boolean ignoreEOF) throws IOException, ParserException { // HACK: absorb extraneous whitespace between components (KOrganizer).. int ntok; while ((ntok = nextToken(tokeniser, in, ignoreEOF)) == StreamTokenizer.TT_EOL) { if (log.isTraceEnabled()) { log.trace("Absorbing extra whitespace.."); } } if (log.isTraceEnabled()) { log.trace("Aborting: absorbing extra whitespace complete"); } return ntok; }
/** * Absorbs extraneous newlines. * * @param tokeniser * @param in * @return int value of the ttype field of the tokeniser * @throws IOException */ private int absorbWhitespace(final StreamTokenizer tokeniser, Reader in) throws IOException, ParserException { // HACK: absorb extraneous whitespace between components (KOrganizer).. int ntok; while ((ntok = nextToken(tokeniser, in, true)) == StreamTokenizer.TT_EOL) { if (log.isTraceEnabled()) { log.trace("Absorbing extra whitespace.."); } } if (log.isTraceEnabled()) { log.trace("Aborting: absorbing extra whitespace complete"); } return ntok; }
/** * Absorbs extraneous newlines. * * @param tokeniser * @param in * @return int value of the ttype field of the tokeniser * @throws IOException */ private int absorbWhitespace(final StreamTokenizer tokeniser, Reader in, boolean ignoreEOF) throws IOException, ParserException { // HACK: absorb extraneous whitespace between components (KOrganizer).. int ntok; while ((ntok = nextToken(tokeniser, in, ignoreEOF)) == StreamTokenizer.TT_EOL) { if (log.isTraceEnabled()) { log.trace("Absorbing extra whitespace.."); } } if (log.isTraceEnabled()) { log.trace("Aborting: absorbing extra whitespace complete"); } return ntok; }
/**
 * Asserts that the next token in the stream matches the specified token.
 *
 * @param tokeniser stream tokeniser to perform assertion on
 * @param in the underlying reader (used for error line-number reporting)
 * @param token expected token
 * @throws IOException when unable to read from stream
 * @throws ParserException when next token in the stream does not match the expected token
 */
private void assertToken(final StreamTokenizer tokeniser, Reader in, final int token) throws IOException, ParserException {
    final int actual = nextToken(tokeniser, in);
    if (actual != token) {
        throw new ParserException(MessageFormat.format(UNEXPECTED_TOKEN_MESSAGE, token, tokeniser.ttype), getLineNumber(tokeniser, in));
    }
    if (log.isDebugEnabled()) {
        log.debug("[" + token + "]");
    }
}
/**
 * Asserts that the next token in the stream matches the specified token.
 *
 * @param tokeniser stream tokeniser to perform assertion on
 * @param in the underlying reader (used for error line-number reporting)
 * @param token expected token
 * @return int value of the ttype field of the tokeniser
 * @throws IOException when unable to read from stream
 * @throws ParserException when next token in the stream does not match the expected token
 */
private int assertToken(final StreamTokenizer tokeniser, Reader in, final int token) throws IOException, ParserException {
    final int ntok = nextToken(tokeniser, in);
    if (ntok != token) {
        final String message = MessageFormat.format(UNEXPECTED_TOKEN_MESSAGE, token, tokeniser.ttype);
        throw new ParserException(message, getLineNumber(tokeniser, in));
    }
    if (log.isDebugEnabled()) {
        log.debug("[" + token + "]");
    }
    return ntok;
}
/**
 * Asserts that the next token in the stream matches the specified token.
 *
 * @param tokeniser stream tokeniser to perform assertion on
 * @param in the underlying reader (used for error line-number reporting)
 * @param token expected token
 * @return int value of the ttype field of the tokeniser
 * @throws IOException when unable to read from stream
 * @throws ParserException when next token in the stream does not match the expected token
 */
private int assertToken(final StreamTokenizer tokeniser, Reader in, final int token) throws IOException, ParserException {
    final int actual = nextToken(tokeniser, in);
    if (actual == token) {
        if (log.isDebugEnabled()) {
            log.debug("[" + token + "]");
        }
        return actual;
    }
    throw new ParserException(MessageFormat.format(UNEXPECTED_TOKEN_MESSAGE, token, tokeniser.ttype), getLineNumber(tokeniser, in));
}
/**
 * Asserts that the next token in the stream matches the specified token.
 *
 * @param tokeniser stream tokeniser to perform assertion on
 * @param in the underlying reader (used for error line-number reporting)
 * @param token expected token
 * @return int value of the ttype field of the tokeniser
 * @throws IOException when unable to read from stream
 * @throws ParserException when next token in the stream does not match the expected token
 */
private int assertToken(final StreamTokenizer tokeniser, Reader in, final int token) throws IOException, ParserException {
    final int read = nextToken(tokeniser, in);
    if (read == token) {
        if (log.isDebugEnabled()) {
            log.debug("[" + token + "]");
        }
        return read;
    }
    // Mismatch: report the expected token, the actual ttype and the line number.
    throw new ParserException(MessageFormat.format(UNEXPECTED_TOKEN_MESSAGE, token, tokeniser.ttype), getLineNumber(tokeniser, in));
}