Refine search
/**
 * Adds an owl:imports statement to the target graph for every attached scope.
 * Each scope's physical IRI is derived from the supplied prefix by cutting it
 * off before the last "/&lt;shortName&gt;/" segment and appending
 * "/ontology/&lt;scopeID&gt;".
 *
 * @param target the graph that receives the owl:imports statements
 * @param prefix the base IRI from which the scope IRIs are derived
 */
private void attachScopeImportsClerezza(Graph target, org.semanticweb.owlapi.model.IRI prefix) {
    IRI subject = new IRI(prefix + _id);
    // Strip everything from the "/<shortName>/" segment onwards, then point at the ontology path.
    String base = prefix.toString();
    String ontologyPrefix = base.substring(0, base.lastIndexOf("/" + shortName + "/")) + "/ontology/";
    for (String scopeId : attachedScopes) {
        target.add(new TripleImpl(subject, OWL.imports, new IRI(ontologyPrefix + scopeId)));
    }
}
/**
 * Set the set of permissions required for read access to a triple-collection.
 * If the set is non-empty the default TCPermission is no longer required.
 *
 * @param GraphUri the name of the graph the permissions apply to
 * @param permissionDescriptions the textual descriptions of the required permissions
 */
public void setRequiredReadPermissionStrings(IRI GraphUri, Collection<String> permissionDescriptions) {
    // invalidate any cached permission set for this graph first
    readPermissionCache.remove(GraphUri);
    final Graph permissionMGraph = getOrCreatePermisionGraph();
    final Lock writeLock = permissionMGraph.getLock().writeLock();
    writeLock.lock();
    try {
        removeExistingRequiredReadPermissions(GraphUri, permissionMGraph);
        final BlankNodeOrIRI permissionList =
                createList(permissionDescriptions.iterator(), permissionMGraph);
        permissionMGraph.add(
                new TripleImpl(GraphUri, readPermissionListProperty, permissionList));
    } finally {
        writeLock.unlock();
    }
}
Graph systemGraph = getSystemGraph(); Literal javaPermEntry = new PlainLiteralImpl(permissionString); Lock readLock = systemGraph.getLock().readLock(); readLock.lock(); try { Iterator<Triple> javaPermTriples = systemGraph.filter(null, PERMISSION.javaPermissionEntry, javaPermEntry); if (javaPermTriples.hasNext()) { return javaPermTriples.next().getSubject(); readLock.unlock(); Lock writeLock = systemGraph.getLock().writeLock(); writeLock.lock(); try { BlankNode result = new BlankNode(); systemGraph.add(new TripleImpl(result, PERMISSION.javaPermissionEntry, javaPermEntry)); return result;
Graph metadata = contentItem.getMetadata(); addedTriples.add(new TripleImpl(ta, ENHANCER_SELECTION_PREFIX, new PlainLiteralImpl(text.substring(Math.max(0,start-prefixSuffixSize), start), lang))); addedTriples.add(new TripleImpl(ta, ENHANCER_SELECTION_SUFFIX, new PlainLiteralImpl(text.substring(end,Math.min(text.length(), end+prefixSuffixSize)),lang))); addedTriples.add(new TripleImpl(ta, ENHANCER_SELECTION_HEAD, new PlainLiteralImpl(text.substring(start, start+prefixSuffixSize), lang))); addedTriples.add(new TripleImpl(ta, ENHANCER_SELECTION_TAIL, new PlainLiteralImpl(text.substring(end-prefixSuffixSize,end),lang))); } else { //add missing fise:selected String selection = text.substring(start, end); addedTriples.add(new TripleImpl(ta, ENHANCER_SELECTED_TEXT, new PlainLiteralImpl(selection,lang))); if(!metadata.filter(ta, ENHANCER_SELECTION_CONTEXT, null).hasNext()){ addedTriples.add(new TripleImpl(ta, ENHANCER_SELECTION_CONTEXT, new PlainLiteralImpl(EnhancementEngineHelper.getSelectionContext(text, selection, start),lang))); contentItem.getLock().readLock().unlock(); contentItem.getLock().writeLock().lock(); try { metadata.addAll(addedTriples);
Lock l = mGraph.getLock().writeLock(); l.lock(); try { for (Iterator<Triple> it = mGraph.iterator(); it.hasNext();) { final Triple triple = it.next(); final BlankNodeOrIRI subject = triple.getSubject(); BlankNodeOrIRI subjectReplacement = current2ReplacementMap.get(subject); final RDFTerm object = triple.getObject(); @SuppressWarnings(value = "element-type-mismatch") RDFTerm objectReplacement = current2ReplacementMap.get(object); objectReplacement = object; newTriples.add(new TripleImpl(subjectReplacement, triple.getPredicate(), objectReplacement)); mGraph.add(triple); mGraph.addAll(owlSameAsGraph); } finally { l.unlock();
ci.getLock().readLock().lock(); try{ Graph em = ExecutionMetadataHelper.getExecutionMetadata(ci); Iterator<Triple> engineExecutions = em.filter(null, ExecutionPlan.ENGINE, new PlainLiteralImpl(engine.getName())); BlankNodeOrIRI engineExecution = engineExecutions.next().getSubject(); if(em.contains(new TripleImpl(executionPlanNode, ExecutionPlan.HAS_EXECUTION_NODE, engineExecution))){ extractEnhancementProperties(engineExProps,em, engineExecution, "Engine Execution"); log.debug(" - no ExecutionMetadata are present ..."); } finally { ci.getLock().readLock().unlock();
private void deletePermissionEntriesOfARole(BlankNodeOrIRI role, String id, List<String> permissionEntries) { AccessController.checkPermission(new SecurityPermission("getPolicy")); if (role == null) { logger.debug("Cannot delete permissions: {} does not exist", id); return; } if (permissionEntries.isEmpty()) { return; } Graph systemGraph = getSystemGraph(); Lock writeLock = systemGraph.getLock().writeLock(); writeLock.lock(); try { for (String permissionEntry : permissionEntries) { BlankNodeOrIRI permission = getPermissionOfAJavaPermEntry(permissionEntry); systemGraph.remove(new TripleImpl(role, PERMISSION.hasPermission, permission)); } } finally { writeLock.unlock(); } //refresh the policy so it will recheck the permissions Policy.getPolicy().refresh(); }
IRI id = new IRI(representation.getId()); final Lock writeLock = writeLockGraph(); try { Iterator<Triple> current = graph.filter(id, null, null); boolean contains = current.hasNext(); while(current.hasNext()){ //delete current Iterator<Triple> it = toAdd.getRdfGraph().filter(toAdd.getNode(), null, null); if(!it.hasNext()){ graph.add(new TripleImpl(toAdd.getNode(), MANAGED_REPRESENTATION, TRUE_LITERAL)); } else { while(it.hasNext()){ writeLock.unlock();
/** * Internally used to create Representations for URIs * @param uri the uri * @param check if <code>false</code> than there is no check if the URI * refers to a RDFTerm in the graph that is of type {@link #REPRESENTATION} * @return the Representation */ protected final Representation getRepresentation(IRI uri, boolean check) { final Lock readLock = readLockGraph(); try { if(!check || isRepresentation(uri)){ Graph nodeGraph = createRepresentationGraph(uri, graph); //Remove the triple internally used to represent an empty Representation // ... this will only remove the triple if the Representation is empty // but a check would take longer than the this call nodeGraph.remove(new TripleImpl(uri,MANAGED_REPRESENTATION,TRUE_LITERAL)); return ((RdfValueFactory)getValueFactory()).createRdfRepresentation(uri, nodeGraph); } else { return null; //not found } } finally { if(readLock != null){ readLock.unlock(); } } } /**
/**
 * Adds triples that point from the bundle resource to its documentations.
 *
 * @param bundle the bundle whose location becomes the subject resource
 * @param docGraph the documentation graph to augment
 */
private void addAdditionalTriples(Bundle bundle, Graph docGraph) {
    IRI bundleUri = new IRI(bundle.getLocation());
    // type the bundle resource itself
    docGraph.add(new TripleImpl(bundleUri, RDF.type, OSGI.Bundle));
    // link the bundle to every top-level titled content, i.e. one not held by another content
    Set<Triple> docLinks = new HashSet<Triple>();
    Iterator<Triple> titledContents = docGraph.filter(null, RDF.type, DISCOBITS.TitledContent);
    while (titledContents.hasNext()) {
        BlankNodeOrIRI titledContent = titledContents.next().getSubject();
        if (!docGraph.filter(null, DISCOBITS.holds, titledContent).hasNext()) {
            docLinks.add(new TripleImpl(bundleUri, DOCUMENTATION.documentation, titledContent));
        }
    }
    docGraph.addAll(docLinks);
}
@Test
public void testUseTypedLiterals() {
    Graph graph = getEmptyGraph();
    Assert.assertEquals(0, graph.size());
    // store one triple whose object is an XML typed literal
    Literal xmlValue = new TypedLiteralImpl("<elem>value</elem>", xmlLiteralType);
    graph.add(new TripleImpl(uriRef1, uriRef2, xmlValue));
    // reading it back via filter must yield an equal literal
    Iterator<Triple> matches = graph.filter(uriRef1, uriRef2, null);
    Assert.assertTrue(matches.hasNext());
    Assert.assertEquals(xmlValue, matches.next().getObject());
}
List<Triple> newImports = new LinkedList<Triple>(); synchronized (o) { it = o.filter(null, OWL.imports, null); String s = ((IRI) (t.getObject())).getUnicodeString(); IRI target = new IRI((managed ? universalPrefix + "/" + tid + "/" : URIUtils.upOne(universalPrefix) + "/") + s); o.remove(t); newImports.add(new TripleImpl(t.getSubject(), OWL.imports, target)); o.add(t);
/**
 * Returns a copy of the triple in which blank-node subject/object are
 * substituted by their mapped counterparts; IRIs and literals pass through
 * unchanged.
 *
 * @param triple the triple to translate
 * @param map the blank-node substitution table
 * @return a new triple with mapped blank nodes
 */
private static Triple map(Triple triple, Map<BlankNode, BlankNode> map) {
    BlankNodeOrIRI subject = triple.getSubject();
    if (subject instanceof BlankNode) {
        subject = map.get((BlankNode) subject);
    }
    RDFTerm object = triple.getObject();
    if (object instanceof BlankNode) {
        object = map.get((BlankNode) object);
    }
    return new TripleImpl(subject, triple.getPredicate(), object);
}
void mapLocator(org.semanticweb.owlapi.model.IRI locator, IRI graphName) { if (graphName == null) throw new IllegalArgumentException("A null graph name is not allowed."); // Null locator is a legal argument, will remove all locator mappings from the supplied graph Set<Triple> remove = new HashSet<Triple>(); for (Iterator<Triple> nodes = graph.filter(graphName, null, null); nodes.hasNext();) { Triple t = nodes.next(); // isOntology |= RDF.type.equals(t.getPredicate()) && OWL.Ontology.equals(t.getObject()); if (RETRIEVED_FROM_URIREF.equals(t.getPredicate())) remove.add(t); } graph.removeAll(remove); if (locator != null) { Literal litloc = LiteralFactory.getInstance().createTypedLiteral( new IRI(locator.toString())); graph.add(new TripleImpl(graphName, RETRIEVED_FROM_URIREF, litloc)); } }
Iterator<Triple> textAnnotationsIterator = enhancements.filter(null, ENHANCER_SELECTED_TEXT, anchorLiteral); Map<Integer, Collection<BlankNodeOrIRI>> existingTextAnnotationsMap = new HashMap<Integer, Collection<BlankNodeOrIRI>>(); while (textAnnotationsIterator.hasNext()) { BlankNodeOrIRI subject = textAnnotationsIterator.next().getSubject(); if (enhancements.contains(new TripleImpl(subject, RDF_TYPE, ENHANCER_TEXTANNOTATION))) { Integer start = EnhancementEngineHelper.get(enhancements, subject, ENHANCER_START, Integer.class, literalFactory); if (start != null) {
/**
 * Creates and registers a new engine execution node in the given graph:
 * typed as both Execution and EngineExecution, linked to its parent chain
 * execution and its execution-plan node, with initial status "scheduled".
 *
 * @param graph the execution-metadata graph to write to
 * @param chainExecution the chain execution this engine execution is part of
 * @param executionNode the execution-plan node this execution realises
 * @return the newly created execution node
 */
public static BlankNodeOrIRI createEngineExecution(Graph graph, BlankNodeOrIRI chainExecution, BlankNodeOrIRI executionNode) {
    BlankNodeOrIRI execution = new BlankNode();
    graph.add(new TripleImpl(execution, RDF_TYPE, EXECUTION));
    graph.add(new TripleImpl(execution, RDF_TYPE, ENGINE_EXECUTION));
    graph.add(new TripleImpl(execution, EXECUTION_PART, chainExecution));
    graph.add(new TripleImpl(execution, EXECUTION_NODE, executionNode));
    graph.add(new TripleImpl(execution, STATUS, STATUS_SCHEDULED));
    return execution;
}
/**
Graph add = new SimpleGraph(); for (Triple t: model) { BlankNodeOrIRI subj = t.getSubject(); RDFTerm obj = t.getObject(); IRI pred = t.getPredicate(); boolean match = false; if (subj instanceof BlankNode) { remove.add(t); add.add(new TripleImpl(subj,pred,obj)); model.removeAll(remove); model.addAll(add);
else if (collector instanceof OntologySpace) colltype = OntologySpace.shortName + "/"; else if (collector instanceof Session) colltype = Session.shortName + "/"; IRI c = new IRI(_NS_STANBOL_INTERNAL + colltype + collector.getID()); Set<OWLOntologyID> aliases = listAliases(removedOntology); aliases.add(removedOntology); for (Iterator<Triple> it = meta.filter(c, null, u); it.hasNext();) { IRI property = it.next().getPredicate(); if (collector instanceof OntologySpace || collector instanceof Session) { if (property.equals(MANAGES_URIREF)) badState = false; for (Iterator<Triple> it = meta.filter(u, null, c); it.hasNext();) { IRI property = it.next().getPredicate(); if (collector instanceof OntologySpace || collector instanceof Session) { if (property.equals(IS_MANAGED_BY_URIREF)) badState = false; meta.remove(new TripleImpl(c, MANAGES_URIREF, u)); meta.remove(new TripleImpl(u, IS_MANAGED_BY_URIREF, c));
/**
 * Builds a triple with a fresh anonymous blank-node subject, a fixed
 * predicate and a fixed IRI object, for use as test data.
 *
 * @return the freshly created test triple
 */
private Triple createTestTriple() {
    // anonymous subclass yields a distinct BlankNode instance on every call
    BlankNodeOrIRI subject = new BlankNode() {};
    IRI predicate = new IRI("http://test.com/");
    IRI object = new IRI("http://test.com/myObject");
    return new TripleImpl(subject, predicate, object);
}
@Override public boolean removeRecipe(IRI recipeID) throws RecipeEliminationException { // remove the recipe from the TcManager try { tcManager.deleteGraph(recipeID); } catch (NoSuchEntityException e) { throw new RecipeEliminationException(e); } Graph recipeIndexGraph = tcManager.getGraph(new IRI(recipeIndexLocation)); Triple triple = new TripleImpl(recipeID, RDF.type, Symbols.Recipe); recipeIndexGraph.remove(triple); // System.out.println("Recipes: " +recipes.size()); // remove the recipe ID from in-memory list recipes.remove(recipeID); return true; }