/**
 * Resets parser state before a new document is processed: clears the parsed
 * query, the subject-to-node model, and the statement buffer.
 */
private void init() {
    query = null;
    model = new HashMap<Resource, Object>();
    statements = new StatementCollector();
}
/**
 * Round-trip test: parses 400 N-Quads statements from a classpath resource,
 * serialises them through the writer under test, and verifies the serialised
 * output contains one line per statement.
 */
@Test
public void testReadWrite() throws RDFHandlerException, IOException, RDFParseException {
    final StatementCollector collected = new StatementCollector();
    parser.setRDFHandler(collected);
    parser.parse(this.getClass().getResourceAsStream("/testcases/nquads/test2.nq"),
            "http://test.base.uri");
    Assert.assertEquals(400, collected.getStatements().size());

    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    writer = rdfWriterFactory.getWriter(out);
    writer.startRDF();
    for (Statement statement : collected.getStatements()) {
        writer.handleStatement(statement);
    }
    writer.endRDF();
    Assert.assertEquals("Unexpected number of lines.", 400, out.toString().split("\n").length);
}
// Builds the query model from rdf:type statements: SeRQO query types create the
// top-level parsed query, RDF.SEQ creates a list node, anything else a typed node.
// All non-type statements are buffered in the collector for later processing.
public void handleStatement(Statement st) throws RDFHandlerException {
    Resource subj = st.getSubject();
    URI pred = st.getPredicate();
    Value obj = st.getObject();
    if (pred.equals(RDF.TYPE)) {
        Object o;
        if (obj.equals(SeRQO.TUPLEQUERY)) {
            // side effect: also records the top-level query object in the field
            o = query = new ParsedTupleQuery();
        } else if (obj.equals(SeRQO.GRAPHQUERY)) {
            o = query = new ParsedGraphQuery(statements.getNamespaces());
        } else if (obj.equals(RDF.SEQ)) {
            // raw ArrayList kept as-is; the element type is not visible from here
            o = new ArrayList();
        } else {
            o = createNode((URI) obj);
        }
        model.put(subj, o);
    } else {
        // deferred: these are consumed later (see endRDF/digest)
        statements.handleStatement(st);
    }
}
// Finishes collection and digests all buffered statements. digest() may surface
// failures wrapped in UndeclaredThrowableException (the java.lang.reflect wrapper
// used by dynamic proxies); the cause is unwrapped here so callers get a
// meaningful RDFHandlerException instead of the opaque wrapper.
public void endRDF() throws RDFHandlerException {
    statements.endRDF();
    try {
        digest(statements.getStatements());
    } catch (UndeclaredThrowableException e) {
        try {
            // rethrow the real cause so it can be classified below
            throw e.getCause();
        } catch (Exception e1) {
            throw new RDFHandlerException(e1);
        } catch (Throwable e1) {
            // Errors and other non-Exception Throwables: propagate the original wrapper
            throw e;
        }
    }
}
// NOTE(review): the collector is created empty and never fed any statements before
// storeOntology is invoked — presumably an empty statement/namespace set is the
// intent here (the inferred statements are passed separately via
// nonEmptyInferredStatements); confirm against the surrounding test setup.
final StatementCollector statementCollector = new StatementCollector();
SesameUtils.storeOntology(this.testOntology, statementCollector.getStatements(),
        statementCollector.getNamespaces(), nonEmptyInferredStatements, this.testRepository,
        SesameUtils.ONTOLOGY_MANAGER_GRAPH);
log.debug("RDFHandler statements = {}", rdfHandler.getStatements().size());
statementsIterator = rdfHandler.getStatements().iterator();
namespaces = rdfHandler.getNamespaces();
// FIX: the original line was the bare expression "ArrayList<Statement>(...)",
// which does not compile — the assignment was lost. Restore it so the sort
// below operates on a mutable copy of the collected statements.
statements = new ArrayList<Statement>(rdfHandler.getStatements());
Collections.sort(statements, new OWLAPICompatibleComparator());
/** Forwards every parsed statement to the wrapped collector unchanged. */
@Override
public void handleStatement(Statement st) throws RDFHandlerException {
    statementCollector.handleStatement(st);
}
// NOTE(review): identical to the sibling call sites — the collector is empty when
// storeOntology runs; presumably intentional (inferred statements arrive via
// nonEmptyInferredStatements), but verify this is not a missing-parse bug.
final StatementCollector statementCollector = new StatementCollector();
SesameUtils.storeOntology(this.testOntology, statementCollector.getStatements(),
        statementCollector.getNamespaces(), nonEmptyInferredStatements, this.testRepository,
        SesameUtils.ONTOLOGY_MANAGER_GRAPH);
// Size-limited filtering handler: aborts parsing with a RuntimeException once the
// statement count exceeds _maxSize (the RDFHandler contract offers no cooperative
// cancel, hence the unchecked throw), and forwards only statements accepted by
// _filter to the wrapped handler.
public void handleStatement(Statement stmt) {
    // Stop parsing in a brutal way if we exceeded the limit
    // (post-increment: the pre-increment value is compared, so slightly more than
    // _maxSize statements may be handled before the abort fires)
    if (_stmtCount++ > _maxSize) {
        throw new RuntimeException("Document longer than " + _maxSize
                + " statements, interrupting parsing.");
    }
    if (_filter.allowStatement(stmt.getSubject(), stmt.getPredicate(), stmt.getObject())) {
        super.handleStatement(stmt);
    }
}
/** * If the OutputStream is null, then it is acceptable to return an in memory StatementCollector. * * This method will only be called from storeOntology if setRioHandler is not previously called * with a non-null argument. * * @param outputStream * @return */ protected RDFHandler getRDFHandlerForOutputStream(final RDFFormat format, final OutputStream outputStream) { // by default return a StatementCollector if they did not specify a format if(format == null) { return new StatementCollector(); } else { return Rio.createWriter(format, outputStream); } }
// NOTE(review): this fragment configures datatype handling on a parser, then
// creates a StatementCollector that is never registered as an RDF handler in the
// visible code — getStatements() will be empty when g.addAll runs unless the
// collector is attached and a parse is executed elsewhere; verify upstream.
.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
final StatementCollector rdfHandler = new StatementCollector();
g.addAll(rdfHandler.getStatements());
// NOTE(review): collector is empty at the time of the storeOntology call —
// presumably deliberate (inferred statements are passed separately), but confirm
// no parse step is missing before this line.
final StatementCollector statementCollector = new StatementCollector();
SesameUtils.storeOntology(this.testOntology, statementCollector.getStatements(),
        statementCollector.getNamespaces(), nonEmptyInferredStatements, this.testRepository,
        SesameUtils.ONTOLOGY_MANAGER_GRAPH);
/** Re-emits each incoming statement with the configured context URI attached. */
@Override
public void handleStatement(Statement st) {
    super.handleStatement(
            new ContextStatementImpl(st.getSubject(), st.getPredicate(), st.getObject(), uri));
}
/** * If the Writer is null, then it is acceptable to return an in memory StatementCollector. * * This method will only be called from storeOntology if setRioHandler is not called with a * non-null argument. * * @param writer * @return */ protected RDFHandler getRDFHandlerForWriter(final RDFFormat format, final Writer writer) { // by default return a StatementCollector if they did not specify a format if(format == null) { return new StatementCollector(); } else { return Rio.createWriter(format, writer); } }
/**
 * Loads the default role definitions from the initialization file, resets the
 * graph with the parsed statements, and records the node id of the built-in
 * sysadmin user as the current user id.
 */
private void initializeDefaultRoles() {
    StatementCollector sc = new StatementCollector();
    try {
        RDFParser parser = Rio.createParser(RDFFormat.forFileName(initFileName));
        InputStream initializationStream = RepositoryProperties.getLocationAsStream(initFileName);
        parser.setRDFHandler(sc);
        try {
            parser.parse(initializationStream, "");
        } finally {
            // FIX: the stream was previously never closed (resource leak)
            initializationStream.close();
        }
    } catch (UnsupportedRDFormatException mse) {
        throw new RuntimeException(mse);
    } catch (RDFHandlerException mse) {
        throw new RuntimeException(mse);
    } catch (RDFParseException mse) {
        throw new RuntimeException(mse);
    } catch (BocaException mse) {
        throw new BocaRuntimeException(mse);
    } catch (IOException mse) {
        throw new RuntimeException(mse);
    }
    resetGraph(sc.getStatements());

    // FIX: the original guarded this string literal with "defaultUser != null",
    // which is always true — the dead check is removed.
    URI node = Constants.valueFactory
            .createURI("http://boca.adtech.internet.ibm.com/users/sysadmin");
    if (node != null) { // NOTE(review): createURI likely never returns null — confirm
        Long id = getNodeLayout().fetchId(node);
        setUserId(id);
    }
}
// NOTE(review): as at the sibling call sites, the collector is empty here; this
// variant also passes emptyInferredStatements, so the call stores an entirely
// empty ontology — presumably the point of this test case; confirm.
final StatementCollector statementCollector = new StatementCollector();
SesameUtils.storeOntology(this.testOntology, statementCollector.getStatements(),
        statementCollector.getNamespaces(), emptyInferredStatements, this.testRepository,
        SesameUtils.ONTOLOGY_MANAGER_GRAPH);
/** Wraps each statement into the target context graph before delegating upward. */
@Override
public void handleStatement(Statement st) {
    final Resource subject = st.getSubject();
    final URI predicate = st.getPredicate();
    final Value object = st.getObject();
    super.handleStatement(new ContextStatementImpl(subject, predicate, object, uri));
}
/** Creates a callback that accumulates triples into a fresh in-memory LinkedHashModel. */
public SesameTripleCallback() {
    this(new StatementCollector(new LinkedHashModel()));
}
// NOTE(review): the parser is configured but parse(...) is never invoked in the
// visible code, so stCollector.getStatements() is empty here and the
// assertEquals(1, ...) below cannot pass — an rdfParser.parse(input, baseURI)
// call appears to be missing; confirm against the original test source.
RDFParser rdfParser = rdfParserFactory.getParser();
rdfParser.setValueFactory(vf);
StatementCollector stCollector = new StatementCollector();
rdfParser.setRDFHandler(stCollector);
Collection<Statement> statements = stCollector.getStatements();
assertEquals("Unexpected number of statements", 1, statements.size());
/**
 * Constructor to start parsing R2RML mappings from file.
 *
 * @param file the R2RML mapping file (Turtle syntax)
 * @throws Exception if the file cannot be read or the Turtle content is invalid
 */
public R2RMLManager(File file) throws Exception {
    myModel = new LinkedHashModel();
    RDFParser parser = Rio.createParser(RDFFormat.TURTLE);
    StatementCollector collector = new StatementCollector(myModel);
    parser.setRDFHandler(collector);
    // FIX: file.toURI().toURL() produces a correctly escaped file: URL; the
    // original "file://" + file broke on spaces and on Windows path separators.
    URL documentUrl = file.toURI().toURL();
    InputStream in = new FileInputStream(file);
    try {
        parser.parse(in, documentUrl.toString());
    } finally {
        // FIX: the original leaked the input stream on every construction
        in.close();
    }
    r2rmlParser = new R2RMLParser();
}