/**
 * Parses a SWRL data property atom of the form {@code prop(iArg, dArg)}.
 *
 * @return the parsed {@link SWRLAtom}
 * @throws ParserException if the predicate token is not a known data property name
 */
private SWRLAtom parseDataPropertyAtom() {
    String propertyName = consumeToken();
    if (!isDataPropertyName(propertyName)) {
        throw new ExceptionBuilder().withData().build();
    }
    consumeToken(OPEN.keyword());
    SWRLIArgument firstArgument = parseIObject();
    consumeToken(COMMA.keyword());
    SWRLDArgument secondArgument = parseDObject();
    consumeToken(CLOSE.keyword());
    return df.getSWRLDataPropertyAtom(getOWLDataProperty(propertyName), firstArgument,
        secondArgument);
}
/**
 * Registers the section handlers used when parsing an {@code Individual:} frame. Each
 * {@code AnnAxiom} pairs a parser for the section's value with a factory that turns
 * (subject, parsed value, annotations) into an axiom. Explicit type arguments are kept
 * where lambda arguments alone would not let the compiler infer them.
 */
private void initialiseIndividualFrameSections() {
    //@formatter:off
    // Annotations: section — values are annotations on the individual itself.
    initialiseSection(new AnnAxiom<OWLIndividual, OWLAnnotation>(x -> parseAnnotation(), ANNOTATIONS, (s, o, anns) -> create(df, s, o, anns)), individualFrameSections);
    // Types: section — each parsed class expression yields a class assertion.
    initialiseSection(new AnnAxiom<OWLIndividual, OWLClassExpression>(x -> parseUnion(), TYPES, (s, o, anns) -> df.getOWLClassAssertionAxiom(o, s, anns)), individualFrameSections);
    // Facts: section — parseFact already builds the axiom; only annotations are attached here.
    initialiseSection(new AnnAxiom<>(this::parseFact, FACTS, (s, o, anns) -> o.getAnnotatedAxiom(anns)), individualFrameSections);
    // SameAs: / DifferentFrom: sections — pairwise identity axioms with another individual.
    initialiseSection(new AnnAxiom<OWLIndividual, OWLIndividual>(x -> parseIndividual(), SAME_AS, (s, o, anns) -> df.getOWLSameIndividualAxiom(s, o, anns)), individualFrameSections);
    initialiseSection(new AnnAxiom<OWLIndividual, OWLIndividual>(x -> parseIndividual(), DIFFERENT_FROM, (s, o, anns) -> df.getOWLDifferentIndividualsAxiom(s, o, anns)), individualFrameSections);
    // Extensions
    // DifferentIndividuals: — non-standard extension accepting a whole list at once.
    initialiseSection(new AnnAxiom<OWLIndividual, Set<OWLIndividual>>(x -> parseIndividualList(), DIFFERENT_INDIVIDUALS, (s, o, anns) -> create(df, s, o, anns)), individualFrameSections);
    //@formatter:on
}
/**
 * Resolves a name to an object property, but only if it has been registered as one.
 *
 * @param name the short name to resolve
 * @return the object property, or {@code null} when the name is not a known object property
 */
@Override
@Nullable
public OWLObjectProperty getOWLObjectProperty(String name) {
    if (!objectPropertyNames.contains(name)) {
        return null;
    }
    return df.getOWLObjectProperty(getIRI(name));
}
/**
 * Consumes the next token and resolves it as a datatype.
 *
 * @return the datatype named by the consumed token
 */
private OWLDatatype parseDatatype() {
    return getOWLDatatype(consumeToken());
}
/**
 * Parses an optional {@code Annotations:} section at the current position.
 *
 * @return the parsed annotations, or an empty set when no such section is present
 */
private Set<OWLAnnotation> parseAnnotations() {
    if (ANNOTATIONS.matches(peekToken())) {
        consumeToken();
        return parseAnnotationList();
    }
    return Collections.emptySet();
}
/**
 * Parses a SWRL data range atom of the form {@code range(?var)}.
 *
 * @return the parsed {@link SWRLAtom}
 */
private SWRLAtom parseDataRangeAtom() {
    OWLDataRange dataRange = parseDataIntersectionOf(false);
    consumeToken(OPEN.keyword());
    SWRLVariable variable = parseDVariable();
    consumeToken(CLOSE.keyword());
    return df.getSWRLDataRangeAtom(dataRange, variable);
}
// NOTE(review): this block appears to have been corrupted in extraction — braces are
// unbalanced and statements are missing (e.g. the facet-restriction loop jumps straight
// into `return parseDataComplementOf(false)` and several closing braces/branches are
// absent). Recover the original method body from version control before editing.
// Intended purpose (as far as the visible code shows): parse a primary data range —
// a datatype optionally followed by a bracketed facet-restriction list, a datatype
// complement, a literal enumeration `{...}`, or a parenthesised data intersection.
private OWLDataRange parseDataRangePrimary(boolean lookaheadCheck) { String tok = peekToken(); if (isDatatypeName(tok)) { consumeToken(); OWLDatatype datatype = getOWLDatatype(tok); String next = peekToken(); if (OPENBRACKET.matches(next)) { consumeToken(); String sep = COMMA.keyword(); Set<OWLFacetRestriction> facetRestrictions = new HashSet<>(); while (COMMA.matches(sep)) { OWLFacet fv = parseFacet(); if (fv == null) { throw new ExceptionBuilder().withKeyword(OWLFacet.getFacets()).build(); OWLLiteral con = parseLiteral(datatype); facetRestrictions.add(df.getOWLFacetRestriction(fv, con)); sep = consumeToken(); return parseDataComplementOf(false); } else if (OPENBRACE.matches(tok)) { return parseDataOneOf(); } else if (OPEN.matches(tok)) { consumeToken(); OWLDataRange rng = parseDataIntersectionOf(false); consumeToken(CLOSE.keyword()); return rng; } else {
// NOTE(review): fragment — the enclosing method signature is not visible and the braces
// are unbalanced (an unreachable `consumeToken()` follows a `return`, and the closing
// branches are missing). Recover the full method from version control before editing.
// Visible intent: dispatch on the next token to parse a primary class expression —
// negation (`not`), a named class, an object/data property restriction, an individual
// enumeration `{...}`, or a parenthesised nested class expression — otherwise raise a
// parse error listing the acceptable keywords.
String tok = peekToken(); if (NOT.matches(tok)) { consumeToken(); OWLClassExpression complemented = parseNestedClassExpression(false); return df.getOWLObjectComplementOf(complemented); } else if (isClassName(tok)) { consumeToken(); return getOWLClass(tok); } else if (isObjectPropertyName(tok) || INVERSE.matches(tok)) { return parseObjectRestriction(); } else if (isDataPropertyName(tok)) { return parseDataRestriction(); } else if (OPENBRACE.matches(tok)) { return parseObjectOneOf(); } else if (OPEN.matches(tok)) { return parseNestedClassExpression(false); consumeToken(); throw new ExceptionBuilder().withClass().withObject().withData() .withKeyword(OPEN, OPENBRACE, NOT, INVERSE).build();
// NOTE(review): this block appears truncated — braces are unbalanced and the
// EquivalentTo: handling is cut off mid-loop (the inner loop body adds annotation
// axioms where a datatype-definition axiom per (ontology, dataRange) pair would be
// expected, and the `else { break` tail is dangling). Recover the original method
// from version control before editing.
// Visible intent: parse a `Datatype:` frame — declare the datatype, then repeatedly
// consume `EquivalentTo:` sections, collecting axioms per target ontology, until no
// further recognised section keyword follows.
@Override public Set<OntologyAxiomPair> parseDatatypeFrame() { String tok = consumeToken(); if (!DATATYPE.matches(tok)) { throw new ExceptionBuilder().withKeyword(DATATYPE).build(); String subj = consumeToken(); OWLDatatype datatype = getOWLDatatype(subj); Set<OntologyAxiomPair> axioms = new HashSet<>(); axioms.add(new OntologyAxiomPair(defaultOntology, df.getOWLDeclarationAxiom(datatype))); while (true) { String sect = peekToken(); if (EQUIVALENT_TO.matches(sect)) { potentialKeywords.clear(); consumeToken(); Set<OWLOntology> onts = getOntologies(); Set<OWLDataRange> drs = parseDataRangeList(); for (OWLOntology ont : onts) { for (OWLDataRange dr : drs) { axioms.addAll(parseAnnotations(datatype.getIRI())); } else { break;
/**
 * Parses a {@code ValuePartition:} frame. The named property becomes functional with the
 * partition class as its range, and the bracketed value list is parsed into subclass and
 * disjointness axioms by {@link #parseValuePartitionValues(Set, OWLClass)}.
 *
 * @return the axioms produced by the frame, paired with their target ontologies
 * @throws ParserException if the frame keyword or partition class name is missing
 */
@Override
public Set<OntologyAxiomPair> parseValuePartitionFrame() {
    String keyword = consumeToken();
    if (!VALUE_PARTITION.matches(keyword)) {
        throw new ExceptionBuilder().withKeyword(VALUE_PARTITION).build();
    }
    OWLObjectPropertyExpression property = parseObjectPropertyExpression(false);
    String className = consumeToken();
    if (eof(className)) {
        throw new ExceptionBuilder().withObject().build();
    }
    OWLClass partitionClass = getOWLClass(className);
    Set<OWLOntology> ontologies = getOntologies();
    Set<OntologyAxiomPair> result =
        new HashSet<>(parseValuePartitionValues(ontologies, partitionClass));
    for (OWLOntology ontology : ontologies) {
        result.add(
            new OntologyAxiomPair(ontology, df.getOWLFunctionalObjectPropertyAxiom(property)));
        result.add(new OntologyAxiomPair(ontology,
            df.getOWLObjectPropertyRangeAxiom(property, partitionClass)));
    }
    return result;
}
// NOTE(review): this block appears truncated — braces are unbalanced, the variable `f`
// in `asFloat(f)` is never defined in the visible text, and the method ends on a
// dangling `catch` clause; the numeric-literal branches between the quoted-string case
// and the float case seem to have been dropped. Recover the original method from
// version control before editing.
// Visible intent: parse a literal token — a quoted string optionally followed by
// `^^datatype` or an `@lang` tag, otherwise (presumably) a numeric literal with a
// typed fallback such as xsd:float.
@Override public OWLLiteral parseLiteral(@Nullable OWLDatatype datatype) { String tok = consumeToken(); if (tok.startsWith("\"")) { String lit = unquoteLiteral(tok); if ("^".equals(peekToken())) { consumeToken(); if (!"^".equals(peekToken())) { throw new ExceptionBuilder().withKeyword("^").build(); consumeToken(); return df.getOWLLiteral(lit, parseDatatype()); } else if (peekToken().startsWith("@")) { String lang = consumeToken().substring(1); return df.getOWLLiteral(lit, lang); } else { return df.getOWLLiteral(asFloat(f), OWL2Datatype.XSD_FLOAT); } catch (@SuppressWarnings("unused") NumberFormatException e) {
/**
 * Consumes the next token and resolves it as a data property.
 *
 * @return the data property named by the consumed token
 * @throws ParserException if the token is not a known data property name
 */
protected OWLDataProperty parseDataProperty() {
    String token = consumeToken();
    if (isDataPropertyName(token)) {
        return getOWLDataProperty(token);
    }
    throw new ExceptionBuilder().withData().build();
}
/**
 * Parses an {@code AnnotationProperty:} frame: declares the property in every target
 * ontology, then delegates the frame's sections to the registered handlers.
 *
 * @return the axioms produced by the frame, paired with their target ontologies
 * @throws ParserException if the frame keyword is missing
 */
@Override
public Set<OntologyAxiomPair> parseAnnotationPropertyFrame() {
    String keyword = consumeToken();
    if (!ANNOTATION_PROPERTY.matches(keyword)) {
        throw new ExceptionBuilder().withKeyword(ANNOTATION_PROPERTY).build();
    }
    OWLAnnotationProperty property = getOWLAnnotationProperty(consumeToken());
    Set<OntologyAxiomPair> axioms = new HashSet<>();
    for (OWLOntology ontology : getOntologies()) {
        axioms.add(new OntologyAxiomPair(ontology, df.getOWLDeclarationAxiom(property)));
    }
    parseFrameSections(false, axioms, property, annotationPropertyFrameSections);
    return axioms;
}
/**
 * Parses one bracketed value-partition list, e.g. {@code [A, B [B1, B2], C]}. Each named
 * class becomes a subclass of {@code superclass}; a nested bracket recurses with that
 * class as the new superclass; all classes at this level are asserted pairwise disjoint.
 *
 * @param onts the ontologies every produced axiom is paired with
 * @param superclass the class each listed value is made a subclass of
 * @return the subclass and disjointness axiom pairs for this level and all nested levels
 */
private Set<OntologyAxiomPair> parseValuePartitionValues(Set<OWLOntology> onts,
        OWLClass superclass) {
    Set<OntologyAxiomPair> pairs = new HashSet<>();
    Set<OWLClass> siblings = new HashSet<>();
    consumeToken(OPENBRACKET.keyword());
    boolean moreValues = true;
    while (moreValues) {
        OWLClass sibling = getOWLClass(consumeToken());
        siblings.add(sibling);
        OWLSubClassOfAxiom subClassAxiom = df.getOWLSubClassOfAxiom(sibling, superclass);
        for (OWLOntology ontology : onts) {
            pairs.add(new OntologyAxiomPair(ontology, subClassAxiom));
        }
        // A nested bracket partitions this value further.
        if (peekToken().equals(OPENBRACKET.keyword())) {
            pairs.addAll(parseValuePartitionValues(onts, sibling));
        }
        moreValues = COMMA.matches(peekToken());
        if (moreValues) {
            consumeToken();
        }
    }
    consumeToken(CLOSEBRACKET.keyword());
    OWLAxiom disjointAxiom = df.getOWLDisjointClassesAxiom(siblings);
    for (OWLOntology ontology : onts) {
        pairs.add(new OntologyAxiomPair(ontology, disjointAxiom));
    }
    return pairs;
}
/**
 * Parses a SWRL individual argument: either a named individual or a {@code ?variable}.
 *
 * @return the parsed argument
 * @throws ParserException if the next token is neither an individual name nor {@code ?}
 */
private SWRLIArgument parseIObject() {
    String next = peekToken();
    if (isIndividualName(next)) {
        return parseIIndividualObject();
    }
    if ("?".equals(next)) {
        return parseIVariable();
    }
    // Consume the offending token so the error position is past it.
    consumeToken();
    throw new ExceptionBuilder().withInd().withKeyword("?$var$").build();
}
/**
 * Creates a fresh Manchester syntax parser configured for the given expression.
 *
 * @param expression the Manchester syntax text to parse
 * @return a parser primed with the expression and this instance's entity checker
 * @throws OWLRuntimeException if this instance has already been disposed
 */
private ManchesterOWLSyntaxParser createParser(String expression) {
    if (disposed.get()) {
        // Fixed message grammar: "an disposed" -> "a disposed".
        throw new OWLRuntimeException("Illegal State: Trying to use a disposed instance.");
    }
    ManchesterOWLSyntaxParser parser =
        new ManchesterOWLSyntaxParserImpl(new OntologyConfigurator(), dataFactory);
    parser.setStringToParse(expression);
    parser.setOWLEntityChecker(entityChecker);
    LOG.info("parsing: {}", expression);
    return parser;
}
/**
 * Parses a {@code Class:} frame: declares the class in the default ontology, then
 * delegates the frame's sections to the registered handlers.
 *
 * @param eof whether frame sections are parsed in end-of-file mode
 * @return the axioms produced by the frame, paired with their target ontologies
 * @throws ParserException if the frame keyword is missing
 */
private Set<OntologyAxiomPair> parseClassFrame(boolean eof) {
    if (!CLASS.matches(consumeToken())) {
        throw new ExceptionBuilder().withKeyword(CLASS).build();
    }
    OWLClass frameSubject = getOWLClass(consumeToken());
    Set<OntologyAxiomPair> axioms = new HashSet<>();
    axioms.add(new OntologyAxiomPair(defaultOntology, df.getOWLDeclarationAxiom(frameSubject)));
    parseFrameSections(eof, axioms, frameSubject, classFrameSections);
    return axioms;
}
/**
 * Consumes the next token and resolves it as an annotation property.
 *
 * @return the annotation property named by the consumed token
 * @throws ParserException if the token is not a known annotation property name
 */
protected OWLAnnotationProperty parseAnnotationProperty() {
    String token = consumeToken();
    if (isAnnotationPropertyName(token)) {
        return getOWLAnnotationProperty(token);
    }
    throw new ExceptionBuilder().withAnn().build();
}
/**
 * Parses a {@code DataProperty:} frame: declares the property in the default ontology,
 * then delegates the frame's sections to the registered handlers.
 *
 * @return the axioms produced by the frame, paired with their target ontologies
 * @throws ParserException if the frame keyword is missing
 */
@Override
public Set<OntologyAxiomPair> parseDataPropertyFrame() {
    if (!DATA_PROPERTY.matches(consumeToken())) {
        throw new ExceptionBuilder().withKeyword(DATA_PROPERTY).build();
    }
    OWLDataProperty frameSubject = getOWLDataProperty(consumeToken());
    Set<OntologyAxiomPair> axioms = new HashSet<>();
    axioms.add(new OntologyAxiomPair(defaultOntology, df.getOWLDeclarationAxiom(frameSubject)));
    parseFrameSections(false, axioms, frameSubject, dataPropertyFrameSections);
    return axioms;
}
/**
 * Parses a SWRL class atom of the form {@code ClassExpression(iArg)}.
 *
 * @return the parsed {@link SWRLAtom}
 */
private SWRLAtom parseClassAtom() {
    OWLClassExpression classExpression = parseUnion();
    consumeToken(OPEN.keyword());
    SWRLIArgument argument = parseIObject();
    consumeToken(CLOSE.keyword());
    return df.getSWRLClassAtom(classExpression, argument);
}