/**
 * Adds RDF statements read from the given stream to the underlying model.
 * The data is not committed until a subsequent call to {@link #build()}.
 *
 * @param rdfInput stream carrying the RDF data, serialized as {@code format}
 * @param base     base URI used to resolve relative URIs into absolute ones
 * @param format   serialization of the data: one of "RDF/XML", "N-TRIPLE",
 *                 "TURTLE" (or "TTL") and "N3"
 */
@Override
public void addRDF(InputStream rdfInput, String base, String format) {
    model.read(rdfInput, base, format);
}
/**
 * Tells whether the given URI identifies a class known to the ontology model.
 *
 * @param uri the URI to test
 * @return {@code true} if the model contains an ontology class with this URI
 */
public boolean isClass(String uri) {
    // Idiom fix: return the null-check result directly instead of the
    // `if (x) return true; return false;` pattern.
    return ontModel.getOntClass(uri) != null;
}
/**
 * Removes from the ontology model every statement in which the given
 * resource appears, whether as subject or as object.
 *
 * @param res the resource whose statements are removed
 */
public void deleteStatementsByResource(Resource res) {
    // Statements where the resource is the subject.
    StmtIterator asSubject = ontModel.listStatements(res, null, (RDFNode) null);
    ontModel.remove(asSubject);
    // Statements where the resource is the object.
    StmtIterator asObject = ontModel.listStatements(null, null, res);
    ontModel.remove(asObject);
}
// NOTE(review): fragment — declares the OOPS! pitfall vocabulary (one class,
// several datatype and object properties), then queries pitfall individuals.
// Issues spotted, to be confirmed against the full file:
//  * `hasFrequencyDTP` is built from the "hasNumberAffectedElements" URI —
//    variable name and property URI disagree; verify which is intended.
//  * `Individual indi = model.getIndividual(uri);` is declared FIVE times in
//    the same scope — duplicate local declarations will not compile; these
//    lines presumably came from different methods collapsed together.
//  * `temp` and `uri` are defined outside this fragment.
//  * `resources` and most of the property handles are never used here.
OntClass pitfallClass = model.createClass( oops + "pitfall"); DatatypeProperty hasCodeDTP = model.createDatatypeProperty( oops + "hasCode"); DatatypeProperty hasNameDTP = model.createDatatypeProperty( oops + "hasName"); DatatypeProperty hasDescriptionDTP = model.createDatatypeProperty( oops + "hasDescription"); DatatypeProperty hasImportanceLevelDTP = model.createDatatypeProperty( oops + "hasImportanceLevel"); DatatypeProperty hasFrequencyDTP = model.createDatatypeProperty( oops + "hasNumberAffectedElements"); ObjectProperty hasAffectedElement = model.createObjectProperty(oops + "hasAffectedElement"); ObjectProperty mightNotBeInverseOf = model.createObjectProperty(oops + "mightNotBeInverseOf"); ObjectProperty hasEquivalentClass = model.createObjectProperty(oops + "hasEquivalentClass"); ObjectProperty hasWrongEquivalentClass = model.createObjectProperty(oops + "hasWrongEquivalentClass"); ObjectProperty noSuggestion = model.createObjectProperty(oops + "noSuggestion"); ObjectProperty haveSameLabel = model.createObjectProperty(oops + "haveSameLabel"); ExtendedIterator<Individual> p = model.listIndividuals(pitfallClass); List<Individual> plist = p.toList(); System.out.println("Pitfall number: " + plist.size() ); ResIterator resources= model.listSubjectsWithProperty(hasCodeDTP, temp); Individual indi = model.getIndividual(uri); Individual indi = model.getIndividual(uri); Individual indi = model.getIndividual(uri); Individual indi = model.getIndividual(uri); Individual indi = model.getIndividual(uri);
// NOTE(review): fragment — builds a small ontology for a worksheet (one class
// per worksheet, one datatype property per leaf column) and writes it out.
// Issues spotted, to be confirmed against the full file:
//  * The `for` loop over `sortedLeafHNodes` is never closed in this view, so
//    it is unclear whether the writer code runs once or per column.
//  * The "relatedTo" ObjectProperty is created with the same URI on every
//    iteration — Jena will return the same property, so this is redundant
//    work if it is inside the loop.
//  * The writer uses the charset name "UTF8" and is not closed in a
//    try/finally, so it leaks if write() throws.
//  * `worksheet`, `sortedLeafHNodes`, `path` and `outUTF8` come from outside
//    this fragment.
OntModel autoOntology = ModelFactory.createOntologyModel( OntModelSpec.OWL_MEM ); String ns = Namespaces.KARMA; autoOntology.setNsPrefix("karma", ns); OntClass topClass = autoOntology.createClass( ns + worksheet.getTitle().replaceAll(" ", "_")); // replace blank spaces with undrscore for (HNode hNode : sortedLeafHNodes){ DatatypeProperty dp = autoOntology.createDatatypeProperty(ns+hNode.getColumnName().trim().replaceAll(" ", "_")); dp.addDomain(topClass); dp.addRange(XSD.xstring); ObjectProperty op = autoOntology.createObjectProperty(ns + "relatedTo"); op.addDomain(topClass); outUTF8 = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(path), "UTF8")); autoOntology.write(outUTF8, null); outUTF8.flush(); outUTF8.close();
/**
 * Finds the resource whose CMS reference property equals the given
 * {@code reference} and deletes every statement in which that resource
 * appears as subject or object.
 * <p>
 * If several resources carry the same reference, only the first match is
 * processed (same behavior as before). If none match, a warning is logged
 * and nothing is deleted.
 *
 * @param reference CMS reference value identifying the resource
 */
public void deleteStatementsByReference(String reference) {
    // Only the first matching statement is needed, so iterate lazily
    // instead of materializing the whole result list.
    StmtIterator refStatements = ontModel.listStatements(null,
            CMSAdapterVocabulary.CMSAD_RESOURCE_REF_PROP, reference);
    if (!refStatements.hasNext()) {
        log.warn("There is no resource having CMSAdapterVocabulary.CMSAD_RESOURCE_REF_PROP {}", reference);
        return;
    }
    Resource subject = refStatements.nextStatement().getSubject();
    refStatements.close();
    deleteStatementsByResource(subject);
}
// NOTE(review): fragment — parses an in-memory byte array into the `input`
// model (empty string as base URI, so relative URIs would not resolve —
// presumably the data contains only absolute URIs; verify), then starts an
// anonymous Iterator over the statements. The anonymous class body and the
// enclosing method are outside this view.
input.read(new ByteArrayInputStream(bytes), ""); final StmtIterator iterator = input.listStatements(); return new Iterator<T>(){
// NOTE(review): fragment — builds a metadata ontology: a super concept class,
// one class per ConceptClass (with its specialisation hierarchy), and one
// object property per RelationType (with its super-property hierarchy).
// Issues spotted, to be confirmed against the full file:
//  * `rdfsClass` and `objectProperty` are each declared twice in what appears
//    to be the same scope — duplicate declarations will not compile; these
//    lines were presumably collapsed from separate loops/methods.
//  * Braces are unbalanced in this view (the `else` blocks and loops are
//    never closed), so control flow cannot be fully verified here.
//  * `rdfsSuperRelationType` is typed as `Property` although it is created
//    via createObjectProperty; a narrower type would be clearer.
//  * `cc` and `omd` come from outside this fragment.
OntClass rdfsSuperConceptClass = model.createClass(urlPrefixMetaData + "ConceptClass"); rdfsSuperConceptClass.addLabel("Concept Class", "en"); rdfsSuperConceptClass.addComment("The super for classes describing Concept types", "en"); OntClass rdfsClass = model.createClass(urlPrefixMetaData + cc.getId()); rdfsClass.addLabel(cc.getFullname(), "en"); rdfsClass.addComment(cc.getDescription(), "en"); OntClass rdfsClass = model.getOntClass(urlPrefixMetaData + cc.getId()); ConceptClass superClass = cc.getSpecialisationOf(); if (superClass != null) { rdfsClass.addSuperClass(model.getOntClass(urlPrefixMetaData + superClass.getId())); } else { rdfsClass.addSuperClass(rdfsSuperConceptClass); Property rdfsSuperRelationType = model.createObjectProperty(urlPrefixMetaData + "RelationType"); for (RelationType rt : omd.getRelationTypes()) { ObjectProperty objectProperty = model.createObjectProperty(urlPrefixMetaData + rt.getId()); objectProperty.addComment(rt.getDescription(), "en"); objectProperty.addLabel(rt.getFullname(), "en"); ObjectProperty objectProperty = model.getObjectProperty(urlPrefixMetaData + rt.getId()); RelationType superRt = rt.getSpecialisationOf(); if (superRt != null) { objectProperty.addSuperProperty(model.getObjectProperty(urlPrefixMetaData + superRt.getId())); } else { objectProperty.addSuperProperty(rdfsSuperRelationType);
/**
 * Lists all individuals in the model that are typed by the given class.
 *
 * @param clazz the class whose individuals are requested
 * @return the individuals of {@code clazz}, as a list
 */
public List<Individual> getIndividualsOfClass(Resource clazz) {
    ExtendedIterator<Individual> individuals = model.listIndividuals(clazz);
    return individuals.toList();
}
// NOTE(review): fragment — exercises nested-workflow provenance: registers a
// nested run, then inspects NestedWorkflowRun instances and the parent
// workflow relation. Issues spotted:
//  * `assertTrue("There is at least one NestedWorkflowRun", iterator
//    ((nestedWorkflowRunIndividual).getURI()));` is not valid Java — it looks
//    like a mangled call (perhaps `iterator.hasNext()` plus a separate URI
//    check); the original intent must be recovered from version control.
//  * The final assertTrue is cut off at the end of this view.
//  * `workflowRun`, `nestedWorkflowRun` and `nestedWorkflowRunIndividual`
//    come from outside this fragment.
provenanceOntology.addNestedWorkflow(workflowRun, nestedWorkflowRun); OntClass ontClass = provenanceOntology.model .getOntClass(ProvenanceVocab.NESTED_WORKFLOW_RUN.getURI()); Iterator iterator = ontClass.listInstances(); assertTrue("There is at least one NestedWorkflowRun", iterator ((nestedWorkflowRunIndividual).getURI())); ObjectProperty parentWorkflow = provenanceOntology.model .getObjectProperty(ProvenanceVocab.NESTED_RUN.getURI()); Individual workflowRunIndividual = provenanceOntology.model .createIndividual(workflowRun, ProvenanceVocab.WORKFLOW_RUN); NodeIterator iter = provenanceOntology.model.listObjectsOfProperty( workflowRunIndividual, parentWorkflow); assertTrue("There is at least one value in range of parentWorkflow",
// Creates (or retrieves, if it already exists) the OntClass for this
// wrapper's URI. The access is synchronized on the shared model —
// presumably because it is mutated from multiple threads; confirm with the
// enclosing class. NOTE(review): `uri` and `model` belong to the enclosing
// (anonymous) class, whose declaration starts outside this view; the
// trailing `};` closes that class.
public OntClass resource() { synchronized (model) { return model.createClass(uri); } } };
/**
 * Creates an OWL individual for every BioPAX element of the given model and
 * records the element-to-individual mapping for later lookups.
 *
 * @param ontModel ontology model receiving the individuals
 * @param model    BioPAX model whose objects are converted
 */
private void createIndividuals(OntModel ontModel, Model model) {
    for (BioPAXElement element : model.getObjects()) {
        // Derive the unqualified interface name, e.g. "Protein" from
        // "org.biopax...Protein", to locate the matching OWL class.
        String interfaceName = element.getModelInterface().getName();
        String localName = interfaceName.substring(interfaceName.lastIndexOf('.') + 1);
        OntClass owlClass = ontModel.getOntClass(this.getLevel().getNameSpace() + localName);
        if (log.isTraceEnabled()) {
            log.trace("ontClass = " + owlClass);
        }
        Individual created = ontModel.createIndividual(element.getRDFId(), owlClass);
        if (log.isTraceEnabled()) {
            log.trace("individual = " + created);
        }
        objectToIndividualMap.put(element, created);
    }
}
// NOTE(review): fragment — collects classes by two routes: Jena's named-class
// iterator (`itrC`, unused in this view) and subjects explicitly typed as
// rdfs:Class via rdf:type. Issues:
//  * `r` is not defined in this fragment — presumably the loop variable of an
//    iteration over `itr` that was collapsed away; confirm.
//  * `classes` and `ontHandler` come from outside this fragment.
ExtendedIterator<OntClass> itrC = ontHandler.getOntModel().listNamedClasses(); Property rdfType = this.ontHandler.getOntModel().createProperty(Uris.RDF_TYPE_URI); Resource classNode = this.ontHandler.getOntModel().createResource(Uris.RDFS_CLASS_URI); ResIterator itr = ontHandler.getOntModel().listSubjectsWithProperty(rdfType, classNode); OntClass c = this.ontHandler.getOntModel().getOntClass(r.getURI()); if(c != null) classes.put(c.getURI(), ontHandler.getResourceLabel(c));
/**
 * Reads the RDF document at the given URI into a new {@code RDFModel}.
 *
 * @param uri         location of the RDF document to load
 * @param loadImports whether owl:imports should be followed while loading
 * @return a model wrapper containing every statement of the loaded document
 */
public RDFModel read(String uri, boolean loadImports) {
    RDFModel result = new RDFModel();
    // In-memory OWL model; import processing is controlled by the flag.
    OntModel ontModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
    ontModel.getDocumentManager().setProcessImports(loadImports);
    FileManager.get().readModel(ontModel, uri);
    // Copy every statement of the loaded model into the result wrapper.
    for (StmtIterator it = ontModel.listStatements(); it.hasNext(); ) {
        result.addStatement(it.nextStatement());
    }
    return result;
}
}
/** * Read a stream in RDF and return the corresponding set of services * as Java Objects * * @param in The input stream of MSM services * @param baseUri Base URI to use for parsing * @param syntax used within the stream * @return The collection of Services parsed from the stream */ @Override public List<Service> parse(InputStream in, String baseUri, Syntax syntax) { OntModel model = null; List<Service> result = new ArrayList<Service>(); try { // create an empty model model = ModelFactory.createOntologyModel(OntModelSpec.RDFS_MEM); // Parse the stream into a model model.read(in, baseUri, syntax.getName()); result = parseService(model); } finally { if (model != null) model.close(); } return result; }
/**
 * Adds the ISO-THES concept-group class (a subclass of skos:Collection) and
 * its sub-group / super-group object properties to the given model.
 *
 * @param ontModel model to enrich; mutated in place
 * @return the same model, for chaining
 */
public OntModel buildGroupOntologyModel(OntModel ontModel) {
    OntClass groupClass = ontModel.createClass(ISOTHES.CONCEPT_GROUP.getURI());
    groupClass.addLabel(ontModel.createLiteral(ISOTHES.CONCEPT_GROUP.getLocalName()));
    groupClass.addSuperClass(SKOS.COLLECTION);

    // Both properties relate groups to groups.
    ObjectProperty subGroup = ontModel.createObjectProperty(ISOTHES.SUB_GROUP.getURI());
    subGroup.addLabel(ontModel.createLiteral(ISOTHES.SUB_GROUP.getLocalName()));
    subGroup.addRange(groupClass);
    subGroup.addDomain(groupClass);

    ObjectProperty superGroup = ontModel.createObjectProperty(ISOTHES.SUPER_GROUP.getURI());
    superGroup.addLabel(ontModel.createLiteral(ISOTHES.SUPER_GROUP.getLocalName()));
    // Consistency fix: pass the OntClass directly (it already is a
    // Resource), matching how subGroup's range/domain are declared — the
    // previous asResource() calls were redundant.
    superGroup.addRange(groupClass);
    superGroup.addDomain(groupClass);

    return ontModel;
}
/** * This shows no problems * * @param fName * @throws NdlException */ public void openModel(String fName) throws NdlException { String ndlRequest = loadFile(fName); ByteArrayInputStream modelStream = new ByteArrayInputStream(ndlRequest.getBytes()); requestModel = NdlModel.getRequestModelFromStream(modelStream, OntModelSpec.OWL_MEM_RDFS_INF, true); // need some imports for inference to work for (String model : inferenceModels) requestModel.read(NdlCommons.ORCA_NS + model); ValidityReport rep = requestModel.validate(); getValidityOutput(rep); }
// NOTE(review): fragment — detaches an imported sub-model: collects the URIs
// the import transitively pulled in, removes the sub-model and the
// loaded-import record, then re-scans the remaining model to keep URIs that
// are still imported elsewhere, and finally rebinds the reasoner. Issues:
//  * Braces are unbalanced in this view — removeSubModel/removeLoadedImport
//    appear to sit inside the first for loop, which would repeat them per
//    statement; confirm against the full file.
//  * Both for loops declare `StmtIterator i`; whether this is a re-declaration
//    error or separate scopes cannot be determined from this fragment.
//  * `imports` and `importModel`/`importURI` come from outside this fragment.
if (model.hasLoadedImport( importURI )) { for (StmtIterator i = importModel.listStatements( null, model.getProfile().IMPORTS(), (RDFNode) null ); i.hasNext(); ) { imports.add( i.nextStatement().getResource().getURI() ); model.removeSubModel( importModel, false ); model.removeLoadedImport( importURI ); for (StmtIterator i = model.listStatements( null, model.getProfile().IMPORTS(), (RDFNode) null ); i.hasNext(); ) { imports.remove( i.nextStatement().getResource().getURI() ); model.rebind();
/**
 * Declares an OWL class for the given GINCO group type, labels it with the
 * type's local name, and attaches it below the ISO-THES concept-group class.
 *
 * @param ontmodel  model receiving the new class
 * @param groupType identifier of the group type in the GINCO vocabulary
 * @return the class created for the group type
 */
public OntClass addGroupTypeToOntModel(OntModel ontmodel, String groupType) {
    OntClass typeClass = ontmodel.createClass(
            GINCO.getResource(groupType).getURI());
    typeClass.addLabel(ontmodel.createLiteral(
            GINCO.getResource(groupType).getLocalName()));
    typeClass.addSuperClass(
            ontmodel.getResource(ISOTHES.CONCEPT_GROUP.getURI()));
    return typeClass;
}
public void testReasoning() throws Exception { JenaProvenanceOntology ontology = new JenaProvenanceOntology(new File( INSTANCES_DIR, "test.owl").toURI()); OntModel ontModel = ontology.getOntModel(); ontModel.write(System.out); ExtendedIterator instances = ontModel.getOntClass( ProvenanceVocab.DATA_OBJECT.getURI()).listInstances(); int i = 0; while (instances.hasNext()) { Individual nextIndividual = (Individual) instances.next(); System.out.println(nextIndividual.getURI()); i++; } instances.close(); assertEquals("expected runs", 2, i); // Model plain = ModelFactory.createModelForGraph( ontModel.getGraph() // ); // plain.write( System.out, "RDF/XML" ); }