/**
 * Creates a tokenizer over the supplied input text.
 *
 * @param s the text to tokenize
 * @return a fresh tokenizer wrapping {@code s}
 */
protected static ManchesterOWLSyntaxTokenizer getTokenizer(String s) {
    ManchesterOWLSyntaxTokenizer tokenizer = new ManchesterOWLSyntaxTokenizer(s);
    return tokenizer;
}
protected char handleChar(char last) { char lastChar = last; char ch = readChar(); if (ch == ESCAPE_CHAR) { lastChar = ch; ch = readChar(); } if (ch == '\"' && lastChar != '\\') { readString('\"', true); } else if (ch == '\'' && lastChar != '\\') { readString('\'', true); } else if (ch == '<') { // Potentially the start of an IRI readIRI(); } else if (skip.contains(Character.valueOf(ch))) { consumeToken(); } else if (commentDelimiters.contains(Character.valueOf(ch))) { consumeToken(); readComment(); } else if (delims.contains(Character.valueOf(ch))) { consumeToken(); sb.append(ch); if (ch != '@') { consumeToken(); } } else { sb.append(ch); } return ch; }
/**
 * Tokenizes the entire buffer from a fresh state.
 *
 * @return the tokens read, always terminated by an EOF token
 */
public List<Token> tokenize() {
    reset();
    char previous = ' ';
    // Cache the length once; the buffer does not change while tokenizing.
    for (int length = buffer.length(); pos < length; ) {
        previous = handleChar(previous);
    }
    consumeToken();
    tokens.add(new Token(EOFTOKEN, pos, col, row));
    return new ArrayList<>(tokens);
}
/** Consumes characters up to and including the next newline (or end of input). */
private void readComment() {
    for (char current = '#'; current != '\n' && pos < buffer.length(); ) {
        current = readChar();
    }
    consumeToken();
}
/**
 * Reads a quoted string up to the closing terminator, handling escapes.
 *
 * @param terminator       the quote character that ends the string
 * @param appendTerminator whether the quotes themselves become part of the token
 */
private void readString(char terminator, boolean appendTerminator) {
    if (appendTerminator) {
        sb.append(terminator);
    }
    while (pos < buffer.length()) {
        char current = readChar();
        if (current == ESCAPE_CHAR) {
            // NOTE(review): pos has already advanced past the escape char here;
            // handleEscapeChar bounds-checks against pos + 1 — confirm intended.
            handleEscapeChar(current, pos + 1);
        } else if (current == terminator) {
            if (appendTerminator) {
                sb.append(current);
            }
            break;
        } else {
            sb.append(current);
        }
    }
    consumeToken();
}
/**
 * Parses the sections of a frame, dispatching each recognized section
 * keyword to its registered parser until no further section matches.
 *
 * @param eof            whether an unrecognized, non-EOF token is an error
 * @param axioms         accumulator for axioms produced by the section parsers
 * @param frameSubject   the subject of the frame being parsed
 * @param sectionParsers section parsers keyed by their section keyword
 * @param <F>            the frame subject type
 */
private <F> void parseFrameSections(boolean eof, Set<OntologyAxiomPair> axioms, F frameSubject,
    Map<ManchesterOWLSyntax, AnnAxiom<F, ?>> sectionParsers) {
    while (true) {
        String sect = peekToken();
        AnnAxiom<F, ?> parser = sectionParsers.get(parse(sect));
        if (parser != null) {
            consumeToken();
            Set<OWLOntology> onts = getOntologies();
            // Empty sections are legal; only parse items when content follows.
            if (!isEmptyFrameSection(sectionParsers)) {
                axioms.addAll(parseAnnotatedListItems(frameSubject, parser, onts));
            }
        } else if (eof && !eof(sect)) {
            // Unexpected token: report the keywords that would have been valid.
            // Copy constructor replaces the create-then-addAll idiom.
            List<ManchesterOWLSyntax> expected = new ArrayList<>(sectionParsers.keySet());
            if (frameSubject instanceof OWLAnnotationSubject || frameSubject instanceof OWLEntity) {
                expected.add(ANNOTATIONS);
            }
            throw new ExceptionBuilder().withKeyword(expected).build();
        } else {
            break;
        }
    }
}
/**
 * Handles the character following an escape character.
 *
 * @param ch the escape character that was just read
 * @param j  the bound used to decide whether an escaped character follows
 *           (caller passes pos + 1 — NOTE(review): confirm this bound is intended
 *           rather than pos)
 */
protected void handleEscapeChar(char ch, int j) {
    if (j >= buffer.length()) {
        // Trailing backslash with nothing after it: emit it literally.
        sb.append('\\');
        return;
    }
    char escaped = readChar();
    switch (escaped) {
        case '"':
        case '\'':
        case '\\':
            // Recognized escape: keep only the escaped character.
            sb.append(escaped);
            break;
        default:
            // Unknown escape: keep the backslash and the next character verbatim.
            sb.append(ch);
            sb.append(escaped);
            break;
    }
}
/**
 * Reads a quoted string up to the closing terminator, handling escapes.
 *
 * @param terminator       the quote character that ends the string
 * @param appendTerminator whether the quotes themselves become part of the token
 */
private void readString(char terminator, boolean appendTerminator) {
    if (appendTerminator) {
        sb.append(terminator);
    }
    while (pos < buffer.length()) {
        char current = readChar();
        if (current == ESCAPE_CHAR) {
            // NOTE(review): pos has already advanced past the escape char here;
            // handleEscapeChar bounds-checks against pos + 1 — confirm intended.
            handleEscapeChar(current, pos + 1);
        } else if (current == terminator) {
            if (appendTerminator) {
                sb.append(current);
            }
            break;
        } else {
            sb.append(current);
        }
    }
    consumeToken();
}
/** Consumes characters up to and including the next newline (or end of input). */
private void readComment() {
    for (char current = '#'; current != '\n' && pos < buffer.length(); ) {
        current = readChar();
    }
    consumeToken();
}
/**
 * Parses the sections of a frame, dispatching each recognized section
 * keyword to its registered parser until no further section matches.
 *
 * @param eof            whether an unrecognized, non-EOF token is an error
 * @param axioms         accumulator for axioms produced by the section parsers
 * @param frameSubject   the subject of the frame being parsed
 * @param sectionParsers section parsers keyed by their section keyword
 * @param <F>            the frame subject type
 */
private <F> void parseFrameSections(boolean eof, Set<OntologyAxiomPair> axioms, F frameSubject,
    Map<ManchesterOWLSyntax, AnnAxiom<F, ?>> sectionParsers) {
    while (true) {
        String sect = peekToken();
        AnnAxiom<F, ?> parser = sectionParsers.get(parse(sect));
        if (parser != null) {
            consumeToken();
            Set<OWLOntology> onts = getOntologies();
            // Empty sections are legal; only parse items when content follows.
            if (!isEmptyFrameSection(sectionParsers)) {
                axioms.addAll(parseAnnotatedListItems(frameSubject, parser, onts));
            }
        } else if (eof && !eof(sect)) {
            // Unexpected token: report the keywords that would have been valid.
            // Copy constructor replaces the create-then-addAll idiom.
            List<ManchesterOWLSyntax> expected = new ArrayList<>(sectionParsers.keySet());
            if (frameSubject instanceof OWLAnnotationSubject || frameSubject instanceof OWLEntity) {
                expected.add(ANNOTATIONS);
            }
            throw new ExceptionBuilder().withKeyword(expected).build();
        } else {
            break;
        }
    }
}
/**
 * Handles the character following an escape character.
 *
 * @param ch the escape character that was just read
 * @param j  the bound used to decide whether an escaped character follows
 *           (caller passes pos + 1 — NOTE(review): confirm this bound is intended
 *           rather than pos)
 */
protected void handleEscapeChar(char ch, int j) {
    if (j >= buffer.length()) {
        // Trailing backslash with nothing after it: emit it literally.
        sb.append('\\');
        return;
    }
    char escaped = readChar();
    switch (escaped) {
        case '"':
        case '\'':
        case '\\':
            // Recognized escape: keep only the escaped character.
            sb.append(escaped);
            break;
        default:
            // Unknown escape: keep the backslash and the next character verbatim.
            sb.append(ch);
            sb.append(escaped);
            break;
    }
}
protected char handleChar(char last) { char lastChar = last; char ch = readChar(); if (ch == ESCAPE_CHAR) { lastChar = ch; ch = readChar(); } if (ch == '\"' && lastChar != '\\') { readString('\"', true); } else if (ch == '\'' && lastChar != '\\') { readString('\'', true); } else if (ch == '<') { // Potentially the start of an IRI readIRI(); } else if (skip.contains(Character.valueOf(ch))) { consumeToken(); } else if (commentDelimiters.contains(Character.valueOf(ch))) { consumeToken(); readComment(); } else if (delims.contains(Character.valueOf(ch))) { consumeToken(); sb.append(ch); if (ch != '@') { consumeToken(); } } else { sb.append(ch); } return ch; }
/**
 * Tokenizes the entire buffer from a fresh state.
 *
 * @return the tokens read, always terminated by an EOF token
 */
public List<Token> tokenize() {
    reset();
    char previous = ' ';
    // Cache the length once; the buffer does not change while tokenizing.
    for (int length = buffer.length(); pos < length; ) {
        previous = handleChar(previous);
    }
    consumeToken();
    tokens.add(new Token(EOFTOKEN, pos, col, row));
    return new ArrayList<>(tokens);
}
/**
 * Reads a quoted string up to the closing terminator, handling escapes.
 *
 * @param terminator       the quote character that ends the string
 * @param appendTerminator whether the quotes themselves become part of the token
 */
private void readString(char terminator, boolean appendTerminator) {
    if (appendTerminator) {
        sb.append(terminator);
    }
    while (pos < buffer.length()) {
        char current = readChar();
        if (current == ESCAPE_CHAR) {
            // NOTE(review): pos has already advanced past the escape char here;
            // handleEscapeChar bounds-checks against pos + 1 — confirm intended.
            handleEscapeChar(current, pos + 1);
        } else if (current == terminator) {
            if (appendTerminator) {
                sb.append(current);
            }
            break;
        } else {
            sb.append(current);
        }
    }
    consumeToken();
}
/** Consumes characters up to and including the next newline (or end of input). */
private void readComment() {
    for (char current = '#'; current != '\n' && pos < buffer.length(); ) {
        current = readChar();
    }
    consumeToken();
}
/**
 * Parses the sections of a frame, dispatching each recognized section
 * keyword to its registered parser until no further section matches.
 *
 * @param eof            whether an unrecognized, non-EOF token is an error
 * @param axioms         accumulator for axioms produced by the section parsers
 * @param frameSubject   the subject of the frame being parsed
 * @param sectionParsers section parsers keyed by their section keyword
 * @param <F>            the frame subject type
 */
private <F> void parseFrameSections(boolean eof, Set<OntologyAxiomPair> axioms, F frameSubject,
    Map<ManchesterOWLSyntax, AnnAxiom<F, ?>> sectionParsers) {
    while (true) {
        String sect = peekToken();
        AnnAxiom<F, ?> parser = sectionParsers.get(parse(sect));
        if (parser != null) {
            consumeToken();
            Set<OWLOntology> onts = getOntologies();
            // Empty sections are legal; only parse items when content follows.
            if (!isEmptyFrameSection(sectionParsers)) {
                axioms.addAll(parseAnnotatedListItems(frameSubject, parser, onts));
            }
        } else if (eof && !eof(sect)) {
            // Unexpected token: report the keywords that would have been valid.
            // Copy constructor replaces the create-then-addAll idiom.
            List<ManchesterOWLSyntax> expected = new ArrayList<>(sectionParsers.keySet());
            if (frameSubject instanceof OWLAnnotationSubject || frameSubject instanceof OWLEntity) {
                expected.add(ANNOTATIONS);
            }
            throw new ExceptionBuilder().withKeyword(expected).build();
        } else {
            break;
        }
    }
}
/**
 * Creates a tokenizer over the supplied input text.
 *
 * @param s the text to tokenize
 * @return a fresh tokenizer wrapping {@code s}
 */
protected static ManchesterOWLSyntaxTokenizer getTokenizer(String s) {
    ManchesterOWLSyntaxTokenizer tokenizer = new ManchesterOWLSyntaxTokenizer(s);
    return tokenizer;
}
/**
 * Handles the character following an escape character.
 *
 * @param ch the escape character that was just read
 * @param j  the bound used to decide whether an escaped character follows
 *           (caller passes pos + 1 — NOTE(review): confirm this bound is intended
 *           rather than pos)
 */
protected void handleEscapeChar(char ch, int j) {
    if (j >= buffer.length()) {
        // Trailing backslash with nothing after it: emit it literally.
        sb.append('\\');
        return;
    }
    char escaped = readChar();
    switch (escaped) {
        case '"':
        case '\'':
        case '\\':
            // Recognized escape: keep only the escaped character.
            sb.append(escaped);
            break;
        default:
            // Unknown escape: keep the backslash and the next character verbatim.
            sb.append(ch);
            sb.append(escaped);
            break;
    }
}
protected char handleChar(char last) { char lastChar = last; char ch = readChar(); if (ch == ESCAPE_CHAR) { lastChar = ch; ch = readChar(); } if (ch == '\"' && lastChar != '\\') { readString('\"', true); } else if (ch == '\'' && lastChar != '\\') { readString('\'', true); } else if (ch == '<') { // Potentially the start of an IRI readIRI(); } else if (skip.contains(Character.valueOf(ch))) { consumeToken(); } else if (commentDelimiters.contains(Character.valueOf(ch))) { consumeToken(); readComment(); } else if (delims.contains(Character.valueOf(ch))) { consumeToken(); sb.append(ch); if (ch != '@') { consumeToken(); } } else { sb.append(ch); } return ch; }
/**
 * Tokenizes the entire buffer from a fresh state.
 *
 * @return the tokens read, always terminated by an EOF token
 */
public List<Token> tokenize() {
    reset();
    char previous = ' ';
    // Cache the length once; the buffer does not change while tokenizing.
    for (int length = buffer.length(); pos < length; ) {
        previous = handleChar(previous);
    }
    consumeToken();
    tokens.add(new Token(EOFTOKEN, pos, col, row));
    return new ArrayList<>(tokens);
}