/**
 * Asserts that the next token in the stream matches the specified token,
 * using a case-sensitive comparison.
 *
 * @param tokeniser the stream tokeniser to read the next token from
 * @param in the underlying reader, used for error reporting
 * @param token the expected token value
 * @throws IOException when unable to read from the stream
 * @throws ParserException when the next token does not match the expected token
 */
private void assertToken(final StreamTokenizer tokeniser, Reader in, final String token)
        throws IOException, ParserException {
    // delegate to the case-aware overload with case-sensitivity enabled
    final boolean ignoreCase = false;
    assertToken(tokeniser, in, token, ignoreCase);
}
/**
 * Asserts that the next token in the stream matches the specified token,
 * using a case-sensitive comparison.
 *
 * @param tokeniser the stream tokeniser to read the next token from
 * @param in the underlying reader, used for error reporting
 * @param token the expected token value
 * @throws IOException when unable to read from the stream
 * @throws ParserException when the next token does not match the expected token
 */
private void assertToken(final StreamTokenizer tokeniser, Reader in, final String token)
        throws IOException, ParserException {
    // delegate to the full overload with both optional flags disabled
    // (first flag is case-insensitivity; second presumably EOF tolerance — see parseCalendarList usage)
    final boolean ignoreCase = false;
    assertToken(tokeniser, in, token, ignoreCase, false);
}
/**
 * Asserts that the next token in the stream matches the specified token,
 * using a case-sensitive comparison.
 *
 * @param tokeniser the stream tokeniser to read the next token from
 * @param in the underlying reader, used for error reporting
 * @param token the expected token value
 * @return the value of the tokeniser's {@code ttype} field after the match
 * @throws IOException when unable to read from the stream
 * @throws ParserException when the next token does not match the expected token
 */
private int assertToken(final StreamTokenizer tokeniser, Reader in, final String token)
        throws IOException, ParserException {
    // delegate to the full overload with both optional flags disabled
    // (first flag is case-insensitivity; second presumably EOF tolerance — see parseCalendarList usage)
    final boolean ignoreCase = false;
    return assertToken(tokeniser, in, token, ignoreCase, false);
}
/**
 * Asserts that the next token in the stream matches the specified token,
 * using a case-sensitive comparison.
 *
 * @param tokeniser the stream tokeniser to read the next token from
 * @param in the underlying reader, used for error reporting
 * @param token the expected token value
 * @return the value of the tokeniser's {@code ttype} field after the match
 * @throws IOException when unable to read from the stream
 * @throws ParserException when the next token does not match the expected token
 */
private int assertToken(final StreamTokenizer tokeniser, Reader in, final String token)
        throws IOException, ParserException {
    // delegate with case-sensitivity enabled and the second optional flag disabled
    return assertToken(tokeniser, in, token, /* ignoreCase */ false, false);
}
/**
 * Asserts that the next token in the stream matches the specified token,
 * using a case-sensitive comparison.
 *
 * @param tokeniser the stream tokeniser to read the next token from
 * @param in the underlying reader, used for error reporting
 * @param token the expected token value
 * @return the value of the tokeniser's {@code ttype} field after the match
 * @throws IOException when unable to read from the stream
 * @throws ParserException when the next token does not match the expected token
 */
private int assertToken(final StreamTokenizer tokeniser, Reader in, final String token)
        throws IOException, ParserException {
    // delegate with case-sensitivity enabled and the second optional flag disabled
    return assertToken(tokeniser, in, token, /* ignoreCase */ false, false);
}
/** * Skip newlines and linefeed at the beginning. * * @param tokeniser * @param in * @param token * @throws ParserException * @throws IOException */ private void skipNewLines(StreamTokenizer tokeniser, Reader in, String token) throws ParserException, IOException { for (int i = 0;; i++) { try { assertToken(tokeniser, in, StreamTokenizer.TT_WORD); break; } catch (ParserException exc) { //Skip a maximum of 10 newlines, linefeeds etc at the beginning if (i == IGNORE_BEGINNING_NON_WORD_COUNT) { throw exc; } } } }
/** * Skip newlines and linefeed at the beginning. * * @param tokeniser * @param in * @param token * @return int value of the ttype field of the tokeniser * @throws ParserException * @throws IOException */ private int skipNewLines(StreamTokenizer tokeniser, Reader in, String token) throws ParserException, IOException { for (int i = 0;; i++) { try { return assertToken(tokeniser, in, StreamTokenizer.TT_WORD); } catch (ParserException exc) { //Skip a maximum of 10 newlines, linefeeds etc at the beginning if (i == IGNORE_BEGINNING_NON_WORD_COUNT) { throw exc; } } } }
/** * Skip newlines and linefeed at the beginning. * * @param tokeniser * @param in * @param token * @return int value of the ttype field of the tokeniser * @throws ParserException * @throws IOException */ private int skipNewLines(StreamTokenizer tokeniser, Reader in, String token) throws ParserException, IOException { for (int i = 0;; i++) { try { return assertToken(tokeniser, in, StreamTokenizer.TT_WORD); } catch (ParserException exc) { //Skip a maximum of 10 newlines, linefeeds etc at the beginning if (i == IGNORE_BEGINNING_NON_WORD_COUNT) { throw exc; } } } }
/** * Skip newlines and linefeed at the beginning. * * @param tokeniser * @param in * @param token * @return int value of the ttype field of the tokeniser * @throws ParserException * @throws IOException */ private int skipNewLines(StreamTokenizer tokeniser, Reader in, String token) throws ParserException, IOException { for (int i = 0;; i++) { try { return assertToken(tokeniser, in, StreamTokenizer.TT_WORD); } catch (ParserException exc) { //Skip a maximum of 10 newlines, linefeeds etc at the beginning if (i == IGNORE_BEGINNING_NON_WORD_COUNT) { throw exc; } } } }
/**
 * Parses an iCalendar VCALENDAR from the specified stream tokeniser.
 * The caller is expected to have already consumed the BEGIN token
 * (see parseCalendarList, which asserts BEGIN before calling this method).
 *
 * @param tokeniser the stream tokeniser to read tokens from
 * @param in the underlying reader, used for line-number error reporting
 * @param handler the content handler notified of the calendar's start, contents and end
 * @throws IOException when unable to read from the stream
 * @throws ParseException where a parsed value cannot be interpreted
 * @throws URISyntaxException where a parsed value is an invalid URI
 * @throws ParserException when the stream does not contain a well-formed VCALENDAR
 */
private void parseCalendar(final StreamTokenizer tokeniser, Reader in, final ContentHandler handler)
        throws IOException, ParseException, URISyntaxException, ParserException {
    // expect the remainder of "BEGIN:VCALENDAR" followed by end-of-line;
    // the VCALENDAR name is matched case-insensitively (ignoreCase = true)
    assertToken(tokeniser, in, ':');
    assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);
    assertToken(tokeniser, in, StreamTokenizer.TT_EOL);
    handler.startCalendar();
    // parse calendar properties..
    propertyListParser.parse(tokeniser, in, handler);
    // parse components..
    componentListParser.parse(tokeniser, in, handler);
    // END:VCALENDAR — the END word itself is consumed by the component list parser
    // assertToken(tokeniser,Calendar.END);
    assertToken(tokeniser, in, ':');
    assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);
    handler.endCalendar();
}
/**
 * Parses an iCalendar VCALENDAR from the specified stream tokeniser.
 * The caller is expected to have already consumed the BEGIN token
 * (see parseCalendarList, which asserts BEGIN before calling this method).
 *
 * @param tokeniser the stream tokeniser to read tokens from
 * @param in the underlying reader, used for line-number error reporting
 * @param handler the content handler notified of the calendar's start, contents and end
 * @throws IOException when unable to read from the stream
 * @throws ParseException where a parsed value cannot be interpreted
 * @throws URISyntaxException where a parsed value is an invalid URI
 * @throws ParserException when the stream does not contain a well-formed VCALENDAR
 */
private void parseCalendar(final StreamTokenizer tokeniser, Reader in, final ContentHandler handler)
        throws IOException, ParseException, URISyntaxException, ParserException {
    // expect the remainder of "BEGIN:VCALENDAR" followed by end-of-line;
    // the VCALENDAR name is matched case-insensitively (ignoreCase = true)
    assertToken(tokeniser, in, ':');
    assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);
    assertToken(tokeniser, in, StreamTokenizer.TT_EOL);
    handler.startCalendar();
    // parse calendar properties..
    propertyListParser.parse(tokeniser, in, handler);
    // parse components..
    componentListParser.parse(tokeniser, in, handler);
    // END:VCALENDAR — the END word itself is consumed by the component list parser
    // assertToken(tokeniser,Calendar.END);
    assertToken(tokeniser, in, ':');
    assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);
    handler.endCalendar();
}
/**
 * Parses an iCalendar VCALENDAR from the specified stream tokeniser.
 * The caller is expected to have already consumed the BEGIN token
 * (see parseCalendarList, which asserts BEGIN before calling this method).
 *
 * @param tokeniser the stream tokeniser to read tokens from
 * @param in the underlying reader, used for line-number error reporting
 * @param handler the content handler notified of the calendar's start, contents and end
 * @throws IOException when unable to read from the stream
 * @throws ParseException where a parsed value cannot be interpreted
 * @throws URISyntaxException where a parsed value is an invalid URI
 * @throws ParserException when the stream does not contain a well-formed VCALENDAR
 */
private void parseCalendar(final StreamTokenizer tokeniser, Reader in, final ContentHandler handler)
        throws IOException, ParseException, URISyntaxException, ParserException {
    // expect the remainder of "BEGIN:VCALENDAR" followed by end-of-line;
    // the VCALENDAR name is matched case-insensitively (ignoreCase = true)
    assertToken(tokeniser, in, ':');
    assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);
    assertToken(tokeniser, in, StreamTokenizer.TT_EOL);
    handler.startCalendar();
    // parse calendar properties..
    propertyListParser.parse(tokeniser, in, handler);
    // parse components..
    componentListParser.parse(tokeniser, in, handler);
    // END:VCALENDAR — the END word itself is consumed by the component list parser
    // assertToken(tokeniser,Calendar.END);
    assertToken(tokeniser, in, ':');
    assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);
    handler.endCalendar();
}
/** * Parses multiple VCALENDARs from the specified stream tokeniser. * * @param tokeniser * @param handler * @throws IOException * @throws ParseException * @throws URISyntaxException * @throws ParserException */ private void parseCalendarList(final StreamTokenizer tokeniser, Reader in, final ContentHandler handler) throws IOException, ParseException, URISyntaxException, ParserException { // BEGIN:VCALENDAR int ntok = assertToken(tokeniser, in, Calendar.BEGIN, false, true); while (ntok != StreamTokenizer.TT_EOF) { parseCalendar(tokeniser, in, handler); ntok = absorbWhitespace(tokeniser, in, true); } }
/** * Parses multiple VCALENDARs from the specified stream tokeniser. * * @param tokeniser * @param handler * @throws IOException * @throws ParseException * @throws URISyntaxException * @throws ParserException */ private void parseCalendarList(final StreamTokenizer tokeniser, Reader in, final ContentHandler handler) throws IOException, ParseException, URISyntaxException, ParserException { // BEGIN:VCALENDAR int ntok = assertToken(tokeniser, in, Calendar.BEGIN, false, true); while (ntok != StreamTokenizer.TT_EOF) { parseCalendar(tokeniser, in, handler); ntok = absorbWhitespace(tokeniser, in, true); } }
/** * Parses multiple VCALENDARs from the specified stream tokeniser. * * @param tokeniser * @param handler * @throws IOException * @throws ParseException * @throws URISyntaxException * @throws ParserException */ private void parseCalendarList(final StreamTokenizer tokeniser, Reader in, final ContentHandler handler) throws IOException, ParseException, URISyntaxException, ParserException { // BEGIN:VCALENDAR int ntok = assertToken(tokeniser, in, Calendar.BEGIN, false, true); while (ntok != StreamTokenizer.TT_EOF) { parseCalendar(tokeniser, in, handler); ntok = absorbWhitespace(tokeniser, in); } }
/** * Asserts that the next token in the stream matches the specified token. * @param tokeniser stream tokeniser to perform assertion on * @param token expected token * @throws IOException when unable to read from stream * @throws ParserException when next token in the stream does not match the expected token */ private void assertToken(final StreamTokenizer tokeniser, Reader in, final String token, final boolean ignoreCase) throws IOException, ParserException { // ensure next token is a word token.. assertToken(tokeniser, in, StreamTokenizer.TT_WORD); if (ignoreCase) { if (!token.equalsIgnoreCase(tokeniser.sval)) { throw new ParserException(MessageFormat.format(UNEXPECTED_TOKEN_MESSAGE, new Object[] { token, tokeniser.sval, }), getLineNumber(tokeniser, in)); } } else if (!token.equals(tokeniser.sval)) { throw new ParserException(MessageFormat.format(UNEXPECTED_TOKEN_MESSAGE, new Object[] { token, tokeniser.sval, }), getLineNumber(tokeniser, in)); } if (log.isDebugEnabled()) { log.debug("[" + token + "]"); } }
sval = getSvalIgnoringBom(tokeniser, in, token); } else { assertToken(tokeniser, in, StreamTokenizer.TT_WORD); sval = tokeniser.sval;
sval = getSvalIgnoringBom(tokeniser, in, token); } else { ntok = assertToken(tokeniser, in, StreamTokenizer.TT_WORD); sval = tokeniser.sval;
sval = getSvalIgnoringBom(tokeniser, in, token); } else { ntok = assertToken(tokeniser, in, StreamTokenizer.TT_WORD); sval = tokeniser.sval;
sval = getSvalIgnoringBom(tokeniser, in, token); } else { ntok = assertToken(tokeniser, in, StreamTokenizer.TT_WORD); sval = tokeniser.sval;