/**
 * Records the annotated-source link (o annotates s) and checks whether the
 * subject now forms an annotated declaration; always defers actual handling.
 *
 * @return {@code false} — this triple is never consumed during streaming
 */
@Override
public boolean canHandleStreaming(IRI s, IRI p, IRI o) {
    consumer.addAnnotatedSource(o, s);
    consumer.checkForAndProcessAnnotatedDeclaration(s);
    return false;
}
/**
 * Registers the subject of this triple with the consumer as an
 * annotation IRI.
 */
@Override
public void handleTriple(IRI s, IRI p, IRI o) {
    consumer.addAnnotationIRI(s);
}
}
/**
 * Registers {@code iri} with the consumer as an annotation property.
 *
 * @param iri             the annotation property IRI
 * @param explicitlyTyped whether the IRI carried an explicit type in the
 *                        input (per the parameter name — confirm with
 *                        {@code OWLRDFConsumer.addAnnotationProperty})
 */
protected void addAp(IRI iri, boolean explicitlyTyped) {
    consumer.addAnnotationProperty(iri, explicitlyTyped);
}
IRI remappedNode = consumer.remapIRI(mainNode); Set<OWLAnnotation> annotations = new HashSet<>(); Set<IRI> predicates = consumer.getPredicatesBySubject(remappedNode); for (IRI i : predicates) { if (consumer.isAnnotationProperty(i)) { OWLAnnotationProperty p = consumer.getDataFactory().getOWLAnnotationProperty(i); OWLLiteral literal = consumer.getLiteralObject(remappedNode, i, true); while (literal != null) { annotations.add(consumer.getDataFactory().getOWLAnnotation(p, literal)); literal = consumer.getLiteralObject(remappedNode, i, true); .getResourceObject(remappedNode, SWRLVocabulary.HEAD.getIRI(), true); if (ruleHeadIRI != null) { consequent = listTranslator.translateToSet(ruleHeadIRI); .getResourceObject(remappedNode, SWRLVocabulary.BODY.getIRI(), true); if (ruleBodyIRI != null) { antecedent = listTranslator.translateToSet(ruleBodyIRI); consumer.addAxiom(getRule(remappedNode, annotations, consequent, antecedent));
@Override
public void endModel() {
    // Parsing is finished: switch out of streaming mode so the batch
    // passes below may resolve everything that was deferred.
    parsedAllTriples = true;
    // We are now left with triples that could not be consumed during
    // streaming parsing
    iriMap.clear();
    tripleLogger.logNumberOfTriples();
    // SWRL rules can only be translated once every triple is available.
    translatorAccessor.consumeSWRLRules(swrlRules);
    // Sweep up triples no handler claimed; they are surfaced to callers
    // via the loader metadata attached to the format below.
    Set<RDFTriple> remainingTriples = handlerAccessor.mopUp();
    if (ontologyFormat != null) {
        RDFParserMetaData metaData = new RDFParserMetaData(RDFOntologyHeaderStatus.PARSED_ONE_HEADER,
            tripleLogger.count(), remainingTriples, guessedDeclarations);
        getOntologyFormat().setOntologyLoaderMetaData(metaData);
    }
    // Do we need to change the ontology IRI?
    dumpRemainingTriples();
    cleanup();
    addAnnotationAxioms();
    removeAxiomsScheduledForRemoval();
    // NOTE(review): the sequencing here looks deliberate — the ontology IRI
    // is chosen only after annotation axioms are added and scheduled
    // removals applied; confirm before reordering.
    chooseAndSetOntologyIRI();
    TripleLogger.logOntologyID(ontology.getOntologyID());
}
} else if (axiomTypes.get(o) == null) { OWLIndividual ind = consumer.translateIndividual(s); OWLClassExpression ce = consumer.translatorAccessor.translateClassExpression(o); consumer.addAxiom(consumer.getDataFactory().getOWLClassAssertionAxiom(ce, ind, consumer.getPendingAnnotations()));
/**
 * Parses the document source into {@code ontology} as RDF/XML.
 * Namespace prefixes observed by the SAX parser are mirrored onto the
 * returned format object so they survive the parse.
 *
 * @return the {@link RDFXMLDocumentFormat} populated with the prefixes
 *         seen during parsing
 * @throws OWLRDFXMLParserException wrapping any parser, SAX, input-source
 *         or I/O failure
 */
@Override
public OWLDocumentFormat parse(OWLOntologyDocumentSource documentSource, OWLOntology ontology,
    OWLOntologyLoaderConfiguration configuration) {
    try {
        final RDFXMLDocumentFormat rdfFormat = new RDFXMLDocumentFormat();
        // Intercept xmlns declarations as they stream past and record
        // each prefix on the format being returned.
        RDFParser rdfParser = new RDFParser() {
            @Override
            public void startPrefixMapping(@Nullable String prefix, @Nullable String uri)
                throws SAXException {
                super.startPrefixMapping(prefix, uri);
                if (prefix != null && uri != null) {
                    rdfFormat.setPrefix(prefix, uri);
                }
            }
        };
        OWLRDFConsumer rdfConsumer = new OWLRDFConsumer(ontology, configuration);
        rdfConsumer.setIRIProvider(rdfParser);
        rdfConsumer.setOntologyFormat(rdfFormat);
        InputSource inputSource = getInputSource(documentSource, configuration);
        rdfParser.parse(inputSource, rdfConsumer);
        return rdfFormat;
    } catch (RDFParserException | SAXException | OWLOntologyInputSourceException | IOException e) {
        throw new OWLRDFXMLParserException(e);
    }
}
}
consumer.addOWLNamedIndividual(s, false); if (nonBuiltInTypes.canHandleStreaming(s, p, o)) { nonBuiltInTypes.handleTriple(s, p, o); consumer.addAxiom(s); consumer.addTriple(s, p, o);
protected void handleImportingRDFGraphRatherThanOntology(OWLImportsDeclaration id, OWLOntologyManager man, @Nullable OWLOntology io) { if (io != null) { OWLDocumentFormat importedOntologyFormat = io.getFormat(); if (importedOntologyFormat instanceof AbstractRDFPrefixDocumentFormat && io.isAnonymous() && consumer.getConfiguration() .getMissingOntologyHeaderStrategy() == INCLUDE_GRAPH) { // We should have just included the triples rather // than imported them. So, // we remove the imports statement, add the axioms // from the imported ontology to // out importing ontology and remove the imported // ontology. // WHO EVER THOUGHT THAT THIS WAS A GOOD IDEA? man.applyChange(new RemoveImport(consumer.getOntology(), id)); io.importsDeclarations().forEach(d -> addImport(man, d)); io.annotations().forEach(ann -> addOntAnn(man, ann)); io.axioms().forEach(this::add); man.removeOntology(io); } } } }
private void logError(RDFResourceParseError error) { getOntologyFormat().addError(error); }
@Override
public void handleTriple(IRI s, IRI p, IRI o) {
    consume(s, p, o);
    // An AllDisjoint* construct must carry an owl:members list.
    IRI listNode = verifyNotNull(getRO(s, OWL_MEMBERS));
    // Probe the first list element to decide between object-property and
    // data-property disjointness.
    if (isOpLax(consumer.getFirstResource(listNode, false))) {
        Set<OWLObjectPropertyExpression> props = ops(listNode);
        // NOTE(review): anns(s) is called with its result discarded and the
        // no-arg anns() is then passed to the axiom factory — presumably
        // anns(s) stages the pending annotations that anns() returns;
        // confirm against the helper definitions before changing this.
        anns(s);
        consumer.addAxiom(df.getOWLDisjointObjectPropertiesAxiom(props, anns()));
    } else {
        Set<OWLDataPropertyExpression> props = dps(listNode);
        // Same staged-annotation pattern as the object-property branch.
        anns(s);
        consumer.addAxiom(df.getOWLDisjointDataPropertiesAxiom(props, anns()));
    }
}
/**
 * Applies an {@link AddImport} change attaching declaration {@code i} to
 * the ontology held by the consumer.
 *
 * @param man the manager through which the change is applied
 * @param i   the imports declaration to add
 */
protected void addImport(OWLOntologyManager man, OWLImportsDeclaration i) {
    man.applyChange(new AddImport(consumer.getOntology(), i));
}
private OWLRDFConsumer consumeTriples(RepositoryConnection connection, org.openrdf.model.URI axiomId) throws OWLOntologyCreationException, RepositoryException, IOException, RDFHandlerException, SAXException { OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLOntology ontology = manager.createOntology(); OWLRDFConsumer consumer = new OWLRDFConsumer(ontology, anonymousNodeChecker, new OWLOntologyLoaderConfiguration()); consumer.setOntologyFormat(new TrackingOntologyFormat()); RepositoryResult<Statement> triples = connection.getStatements(null, null, null, false, axiomId); try {
/**
 * Registers {@code iri} with the consumer as a class expression.
 *
 * @param iri             the class expression IRI
 * @param explicitlyTyped whether the IRI carried an explicit type in the
 *                        input (per the parameter name — confirm with
 *                        {@code OWLRDFConsumer.addClassExpression})
 */
protected void addCe(IRI iri, boolean explicitlyTyped) {
    consumer.addClassExpression(iri, explicitlyTyped);
}
@Override
public boolean canHandleStreaming(IRI s, IRI p, IRI o) {
    // We can't handle this in a streaming fashion, because we can't
    // be sure that the s, p, o triples have been
    // parsed.
    // Record the subject IRI with the consumer now; actual handling is
    // deferred to the post-parse pass.
    consumer.addAxiom(s);
    return false;
}
OWLRDFConsumer consumer = consumeTriples(connection, axiomId); String nodeName = generateName(classExpressionNode); OWLClassExpression ce = consumer.translateClassExpression(IRI.create(nodeName)); consumer.endModel(); if (!((TrackingOntologyFormat) consumer.getOntologyFormat()).getFailed()) { return ce;
consumer.endModel(); OWLOntology ontology = consumer.getOntology(); OWLAxiom result = null; if (ontology.getAxiomCount() == 1) {
IRI remappedNode = consumer.remapIRI(mainNode); Set<OWLAnnotation> annotations = new HashSet<>(); Set<IRI> predicates = consumer.getPredicatesBySubject(remappedNode); for (IRI i : predicates) { if (consumer.isAnnotationProperty(i)) { OWLAnnotationProperty p = consumer.getDataFactory().getOWLAnnotationProperty(i); OWLLiteral literal = consumer.getLiteralObject(remappedNode, i, true); while (literal != null) { annotations.add(consumer.getDataFactory().getOWLAnnotation(p, literal)); literal = consumer.getLiteralObject(remappedNode, i, true); .getResourceObject(remappedNode, SWRLVocabulary.HEAD.getIRI(), true); if (ruleHeadIRI != null) { consequent = listTranslator.translateToSet(ruleHeadIRI); .getResourceObject(remappedNode, SWRLVocabulary.BODY.getIRI(), true); if (ruleBodyIRI != null) { antecedent = listTranslator.translateToSet(ruleBodyIRI); consumer.addAxiom(getRule(remappedNode, annotations, consequent, antecedent));
@Override
public void endModel() {
    // Parsing is finished: switch out of streaming mode so the batch
    // passes below may resolve everything that was deferred.
    parsedAllTriples = true;
    // We are now left with triples that could not be consumed during
    // streaming parsing
    iriMap.clear();
    tripleLogger.logNumberOfTriples();
    // SWRL rules can only be translated once every triple is available.
    translatorAccessor.consumeSWRLRules(swrlRules);
    // Sweep up triples no handler claimed; they are surfaced to callers
    // via the loader metadata attached to the format below.
    Set<RDFTriple> remainingTriples = handlerAccessor.mopUp();
    if (ontologyFormat != null) {
        RDFParserMetaData metaData = new RDFParserMetaData(RDFOntologyHeaderStatus.PARSED_ONE_HEADER,
            tripleLogger.count(), remainingTriples, guessedDeclarations);
        getOntologyFormat().setOntologyLoaderMetaData(metaData);
    }
    // Do we need to change the ontology IRI?
    dumpRemainingTriples();
    cleanup();
    addAnnotationAxioms();
    removeAxiomsScheduledForRemoval();
    // NOTE(review): the sequencing here looks deliberate — the ontology IRI
    // is chosen only after annotation axioms are added and scheduled
    // removals applied; confirm before reordering.
    chooseAndSetOntologyIRI();
    TripleLogger.logOntologyID(ontology.getOntologyID());
}
} else if (axiomTypes.get(o) == null) { OWLIndividual ind = consumer.translateIndividual(s); OWLClassExpression ce = consumer.translatorAccessor.translateClassExpression(o); consumer.addAxiom(consumer.getDataFactory().getOWLClassAssertionAxiom(ce, ind, consumer.getPendingAnnotations()));