/**
 * Positive round-trip test: statements written to RDF/XML by the converter
 * must parse back into a model equal to the original.
 */
@Test
public void testWriteStmtToRDFPos() {
    RDFParser parser = new RDFXMLParser();
    String baseURI = "";
    Model origStmts = new LinkedHashModel();
    Model processedStmts = new LinkedHashModel();
    StatementCollector collector = new StatementCollector(processedStmts);
    parser.setRDFHandler(collector);
    origStmts.add(sexOffend, predicate, object);
    try {
        converter.writeStmtToRDF(origStmts, rdfFile);
        // try-with-resources: the original leaked the FileReader.
        try (FileReader reader = new FileReader(rdfFile)) {
            parser.parse(reader, baseURI);
        }
        // BUG FIX: the original only executed assert(true) when the models
        // were equal and fell through silently when they were not, so the
        // test could never fail on a round-trip mismatch. Fail explicitly.
        if (!origStmts.equals(processedStmts)) {
            fail("Round-tripped statements do not match the original model");
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        fail();
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException { // Read in any hidden serialization magic s.defaultReadObject(); // Read in size int size = s.readInt(); values = new HashMap<Value, ModelNode>(size * 2); statements = new LinkedHashSet<ModelStatement>(size); // Read in all elements for (int i = 0; i < size; i++) { Statement st = (Statement) s.readObject(); add(st); } }
/**
 * Copy constructor: creates a model containing all namespaces and all
 * statements of the supplied model.
 *
 * @param model the model whose namespaces and statements are copied
 */
public LinkedHashModel(Model model) {
    // Delegate to the namespace-taking constructor, then copy statements.
    this(model.getNamespaces());
    addAll(model);
}
/**
 * Adds one statement per supplied context, or a single statement in the
 * null context when no contexts are given.
 *
 * @param subj     subject, must not be null
 * @param pred     predicate, must not be null
 * @param obj      object, must not be null
 * @param contexts zero or more contexts; empty means the null context
 * @return true if the model changed as a result of this call
 * @throws UnsupportedOperationException if subj, pred or obj is null
 */
public boolean add(Resource subj, URI pred, Value obj, Resource... contexts) {
    if (subj == null || pred == null || obj == null) {
        throw new UnsupportedOperationException("Incomplete statement");
    }

    // Normalize: a missing context list becomes the singleton null context.
    Value[] targets = notNull(contexts);
    if (targets.length == 0) {
        targets = NULL_CTX;
    }

    boolean modified = false;
    for (Value context : targets) {
        // Intern each term as a model node, then link them in a statement.
        ModelStatement stmt = new ModelStatement(
                asNode(subj),
                asNode(pred),
                asNode(obj),
                asNode((Resource) context));
        modified |= addModelStatement(stmt);
    }
    return modified;
}
@Override
public Set makeEmptySet() {
    // A filter over an empty model matches nothing, so its objects()
    // projection is an empty set view.
    Model emptyFilter = new LinkedHashModel().filter(createURI("test"), RDF.VALUE, null);
    return emptyFilter.objects();
}
@Override
public Set makeEmptySet() {
    // A filter over an empty model matches nothing, so its contexts()
    // projection is an empty set view.
    Model emptyFilter =
            new LinkedHashModel().filter(createURI("test"), RDF.VALUE, createLiteral("value"));
    return emptyFilter.contexts();
}
/**
 * A trailing "# " comment (space before and after the hash) at the end of
 * a triple line must be ignored: exactly one statement is parsed.
 */
@Test
public void testEndOfLineEmptyCommentWithSpaceBoth() throws Exception {
    RDFParser ntriplesParser = createRDFParser();
    Model parsed = new LinkedHashModel();
    ntriplesParser.setRDFHandler(new StatementCollector(parsed));

    String input = "<urn:test:subject> <urn:test:predicate> <urn:test:object> . # \n";
    ntriplesParser.parse(new StringReader(input), "http://example/");

    assertEquals(1, parsed.size());
    assertEquals("urn:test:object", parsed.objectString().get());
}
/**
 * Invalid N-Triples input must not abort parsing when the invalid-lines
 * check is registered as a non-fatal error: the parser reports the error,
 * skips the line, and the resulting model stays empty.
 */
@Test public void testExceptionHandlingWithoutStopAtFirstError() throws Exception { String data = "invalid nt"; RDFParser ntriplesParser = createRDFParser(); /* Register the invalid-lines check as NON-fatal, then enable it. NOTE(review): enabling FAIL_ON_NTRIPLES_INVALID_LINES while also listing it as non-fatal looks contradictory but appears intentional — errors are reported instead of thrown; confirm against ParserConfig semantics. */ ntriplesParser.getParserConfig().addNonFatalError(NTriplesParserSettings.FAIL_ON_NTRIPLES_INVALID_LINES); ntriplesParser.getParserConfig().set(NTriplesParserSettings.FAIL_ON_NTRIPLES_INVALID_LINES, Boolean.TRUE); Model model = new LinkedHashModel(); ntriplesParser.setRDFHandler(new StatementCollector(model)); /* Must not throw despite the invalid line. */ ntriplesParser.parse(new StringReader(data), NTRIPLES_TEST_URL); /* Nothing parseable, so every projection of the model is empty. */ assertEquals(0, model.size()); assertEquals(0, model.subjects().size()); assertEquals(0, model.predicates().size()); assertEquals(0, model.objects().size()); }
.getParser(); rdfParser.setValueFactory(new ValueFactoryImpl()); rdfParser.setVerifyData(true); rdfParser.setStopAtFirstError(true); final Graph g = new LinkedHashModel();
private void testPerformanceInternal(boolean storeParsedStatements) throws Exception Model model = new LinkedHashModel(); model.add(potentialSubjects.get(prng.nextInt(potentialSubjects.size())), potentialPredicates.get(prng.nextInt(potentialPredicates.size())), obj); System.out.println("Test statements size: " + model.size() + " (" + rdfWriterFactory.getRDFFormat() + ")"); assertFalse("Did not generate any test statements", model.isEmpty()); try { RDFParser rdfParser = rdfParserFactory.getParser(); setupParserConfig(rdfParser.getParserConfig()); rdfParser.setValueFactory(vf); Model parsedModel = new LinkedHashModel(); if (storeParsedStatements) { rdfParser.setRDFHandler(new StatementCollector(parsedModel));
setupParserConfig(rdfParser.getParserConfig()); if (preserveBNodeIds) { rdfParser.getParserConfig().set(BasicParserSettings.PRESERVE_BNODE_IDS, true); rdfParser.setValueFactory(vf); Model model = new LinkedHashModel(); rdfParser.setRDFHandler(new StatementCollector(model)); rdfParser.parse(in, "foo:bar"); assertEquals("Unexpected number of statements, found " + model.size(), 20, model.size()); if (rdfParser.getRDFFormat().supportsNamespaces()) { assertTrue("Expected at least one namespace, found" + model.getNamespaces().size(), model.getNamespaces().size() >= 1); assertEquals(exNs, model.getNamespace("ex").get().getName());
throws IOException, RDFParseException, UnsupportedRDFormatException Model result = new LinkedHashModel(); RDFParser parser = createParser(dataFormat, valueFactory); parser.setParserConfig(settings); parser.setParseErrorListener(errors); parser.setRDFHandler(new ContextStatementCollector(result, valueFactory, contexts));
private Collection<Annotation> parseInput(final Representation entity) throws ResourceException final Model input = new LinkedHashModel(); final RDFFormat inputFormat = Rio.getParserFormatForMIMEType(entity.getMediaType().getName()); parser.setRDFHandler(new StatementCollector(input)); try parser.parse(entity.getStream(), ""); for(final Resource nextSubject : input.filter(null, RDF.TYPE, OasConstants.TAGGING_TAGGING).subjects())
/**
 * With default (strict) settings, invalid N-Triples input must raise an
 * RDFParseException, and the reported line number must be 1.
 */
@Test
public void testExceptionHandlingWithDefaultSettings() throws Exception {
    String data = "invalid nt";
    RDFParser ntriplesParser = createRDFParser();
    ntriplesParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
    Model model = new LinkedHashModel();
    ntriplesParser.setRDFHandler(new StatementCollector(model));
    try {
        ntriplesParser.parse(new StringReader(data), NTRIPLES_TEST_URL);
        fail("expected RDFParseException due to invalid data");
    } catch (RDFParseException expected) {
        // FIX: JUnit convention is assertEquals(expected, actual); the
        // original had the arguments reversed, which garbles the failure
        // message (and is inconsistent with the sibling tests).
        assertEquals(1, expected.getLineNumber());
    }
}
/**
 * Constructor to start parsing R2RML mappings from file.
 *
 * @param file - the File object containing Turtle-serialized mappings
 * @throws Exception if the file cannot be opened or parsed
 */
public R2RMLManager(File file) throws Exception {
    myModel = new LinkedHashModel();
    RDFParser parser = Rio.createParser(RDFFormat.TURTLE);
    // FIX: "file://" + file produced an invalid URL for Windows paths and
    // paths containing spaces; toURI() performs proper escaping.
    URL documentUrl = file.toURI().toURL();
    StatementCollector collector = new StatementCollector(myModel);
    parser.setRDFHandler(collector);
    // FIX: the original never closed the stream (resource leak); use
    // try-with-resources so it is closed even when parse() throws.
    try (InputStream in = new FileInputStream(file)) {
        parser.parse(in, documentUrl.toString());
    }
    r2rmlParser = new R2RMLParser();
}
renderObject(out, mentionID, model); out.append("</td><td>"); out.append(model.filter(mentionID, NIF.ANCHOR_OF, null).objectString()); out.append("</td><td>"); renderObject(out, model.filter(mentionID, RDF.TYPE, null).objects(), model); out.append("</td><td>"); final Model mentionModel = new LinkedHashModel(); for (final Statement statement : model.filter(mentionID, null, null)) { final URI pred = statement.getPredicate();
/**
 * Builds a single-statement test model: a fresh blank node described by a
 * dc:description literal with the given lexical value and datatype.
 *
 * @param datatypeValue the literal's lexical form
 * @param datatypeURI   the literal's datatype IRI
 * @return a model containing exactly that one statement
 */
private final Model getTestModel(String datatypeValue, IRI datatypeURI) {
    Model testModel = new LinkedHashModel();
    testModel.add(vf.createStatement(
            vf.createBNode(),
            DC.DESCRIPTION,
            vf.createLiteral(datatypeValue, datatypeURI)));
    return testModel;
}
// always use a ValueFactory, avoid instantiating URIImpl directly. ValueFactory vf = ValueFactoryImpl().getInstance(); URI c = vf.createURI(C.getFullIRI()); URI prop = vf.createURI(property.getFullIRI()) // create a new Model for the resulting triple collection Model result = new LinkedHashModel(); // filter on the supplied property Model propMatches = triples.filter(null, prop, null); for(Resource subject: propMatches.subjects()) { // check if the selected subject is of the supplied type if (triples.contains(subject, RDF.TYPE, c)) { // add the type triple to the result result.add(subject, RDF.TYPE, c); // add the property triple(s) to the result result.addAll(propMatches.filter(subject, null, null)); } } return result;
/**
 * Creates a merger that applies the given added statements together with
 * the extra bindings; the set of removed statements starts out empty.
 *
 * @param added      statements to be merged in
 * @param additional extra variable bindings to apply
 */
public DeltaMerger(Model added, BindingSet additional) {
    this.additional = additional;
    this.added = added;
    // No removals yet; callers populate this set later.
    this.removed = new LinkedHashModel();
}
super.parse(graph, subj); try { Model model = new LinkedHashModel(graph); for (Value obj : model.filter(subj, DATA_DIR, null).objects()) { if (obj instanceof URI) { try { pkgPrefix = model.filter(subj, PACKAGE_PREFIX, null).objectString(); memberPrefix = model.filter(subj, MEMBER_PREFIX, null) .objectString();