/**
 * Creates a streaming RDF output that serializes triples from the given
 * graphs over the supplied repository connection, using the writer factory
 * registered for the requested format.
 *
 * @param conn the repository connection to read from
 * @param format the RDF serialization format to produce
 * @param contexts the graphs (contexts) to stream from
 */
public RDFStreamingOutput(RepositoryConnection conn, RDFFormat format, Resource... contexts) {
    super(conn);
    // Resolve the writer factory for the requested format up front.
    this.factory = RDFWriterRegistry.getInstance().get(format);
    this.contexts = contexts;
}
/**
 * Tries to match a MIME type against the list of RDF formats that can be
 * written.
 *
 * @param mimeType
 *        a MIME type, e.g. "application/rdf+xml"
 * @return an {@link RDFFormat} if a match was found, or
 *         {@link Optional#empty()} otherwise
 * @see #getWriterFormatForMIMEType(String, RDFFormat)
 */
public static Optional<RDFFormat> getWriterFormatForMIMEType(String mimeType) {
    final RDFWriterRegistry registry = RDFWriterRegistry.getInstance();
    // Only formats with a registered writer factory are candidates.
    return RDFFormat.matchMIMEType(mimeType, registry.getKeys());
}
@Override
protected Map<String, Object> referenceData(HttpServletRequest request) {
    // Collect the name of every registered writable RDF format; a TreeMap
    // keeps the entries alphabetically sorted for display in the view.
    Map<String, String> formatNames = new TreeMap<String, String>();
    for (RDFWriterFactory writerFactory : RDFWriterRegistry.getInstance().getAll()) {
        String name = writerFactory.getRDFFormat().getName();
        formatNames.put(name, name);
    }

    Map<String, Object> result = new HashMap<String, Object>();
    result.put("resultFormats", formatNames);
    return result;
}
}
/**
 * Filters the given formats down to those for which an RDF writer factory
 * is registered.
 *
 * @param rdfFormats the candidate formats
 * @return the subset of {@code rdfFormats} that can actually be written,
 *         in the original order
 */
public static List<RDFFormat> filterAvailableWriters(List<RDFFormat> rdfFormats) {
    final RDFWriterRegistry registry = RDFWriterRegistry.getInstance();
    final List<RDFFormat> writable = new ArrayList<>();
    for (final RDFFormat candidate : rdfFormats) {
        if (registry.has(candidate)) {
            writable.add(candidate);
        }
    }
    return writable;
}
@Override
protected void doQuery(final BigdataSailRepositoryConnection cxn,
        final OutputStream os) throws Exception {
    // Build the query for this request; graph-producing queries
    // (CONSTRUCT/DESCRIBE) are expected here, hence the cast.
    final BigdataSailGraphQuery query = (BigdataSailGraphQuery) setupQuery(cxn);
    // Note: getQueryTask() verifies that format will be non-null.
    final RDFFormat format = RDFWriterRegistry.getInstance()
            .getFileFormatForMIMEType(mimeType);
    // Look up the writer factory for the negotiated format and stream the
    // serialized results straight to the response output stream.
    final RDFWriter w = RDFWriterRegistry.getInstance().get(format)
            .getWriter(os);
    query.evaluate(w);
}
/**
 * Fetches all statements in the requested graph and prepares them for
 * export as RDF in a content-negotiated format.
 *
 * @return a model and view for exporting the statements
 * @throws ClientHTTPException if the request is malformed or no acceptable
 *         writer format is available
 */
private ModelAndView getExportStatementsResult(Repository repository,
        HttpServletRequest request, HttpServletResponse response)
        throws ClientHTTPException {
    ProtocolUtil.logRequestParameters(request);

    ValueFactory vf = repository.getValueFactory();
    IRI graph = getGraphName(request, vf);

    // Negotiate the response serialization against the Accept header.
    RDFWriterFactory rdfWriterFactory = ProtocolUtil.getAcceptableService(
            request, response, RDFWriterRegistry.getInstance());

    Map<String, Object> model = new HashMap<String, Object>();
    model.put(ExportStatementsView.FACTORY_KEY, rdfWriterFactory);
    model.put(ExportStatementsView.CONTEXTS_KEY, new Resource[] { graph });
    model.put(ExportStatementsView.USE_INFERENCING_KEY, true);
    // HEAD requests get response headers only, no body.
    model.put(ExportStatementsView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod()));
    return new ModelAndView(ExportStatementsView.getInstance(), model);
}
log.info("RDFParserFactory: before: " + f); for (RDFWriterFactory f : RDFWriterRegistry.getInstance().getAll()) { log.info("RDFWriterFactory: before: " + f); final RDFWriterRegistry r = RDFWriterRegistry.getInstance(); r.add((RDFWriterFactory) getInstanceForClass(TURTLE_WRITER_FACTORY)); r.add((RDFWriterFactory) getInstanceForClass(JSON_CONSTRUCT_WRITER_FACTORY)); log.info("RDFParserFactory: after: " + f); for (RDFWriterFactory f : RDFWriterRegistry.getInstance().getAll()) { log.info("RDFWriterFactory: after: " + f);
/**
 * Registers the Rio parser and writer factories by hand. This is needed
 * because Rio cannot find the factories automatically when the jar is
 * deployed as a plugin inside the Neo4j server.
 */
private synchronized void initRio() {
    // Idempotent: only register once per instance.
    if (rioInitialized) {
        return;
    }

    RDFParserRegistry parsers = RDFParserRegistry.getInstance();
    parsers.add(new TurtleParserFactory());
    parsers.add(new RDFXMLParserFactory());
    parsers.add(new NTriplesParserFactory());
    parsers.add(new RDFJSONParserFactory());

    RDFWriterRegistry writers = RDFWriterRegistry.getInstance();
    writers.add(new TurtleWriterFactory());
    writers.add(new RDFXMLWriterFactory());
    writers.add(new NTriplesWriterFactory());
    writers.add(new RDFJSONWriterFactory());

    rioInitialized = true;
}
}
@Override
protected void doQuery(final BigdataSailRepositoryConnection cxn,
        final OutputStream os) throws Exception {
    // Build the query for this request; graph-producing queries
    // (CONSTRUCT/DESCRIBE) are expected here, hence the cast.
    final BigdataSailGraphQuery query = (BigdataSailGraphQuery) setupQuery(cxn);
    // Note: getQueryTask() verifies that format will be non-null.
    final RDFFormat format = RDFWriterRegistry.getInstance()
            .getFileFormatForMIMEType(mimeType);
    // Look up the writer factory for the negotiated format and stream the
    // serialized results straight to the response output stream.
    final RDFWriter w = RDFWriterRegistry.getInstance().get(format)
            .getWriter(os);
    query.evaluate(w);
}
/**
 * Get all statements and export them as RDF.
 *
 * @return a model and view for exporting the statements.
 */
private ModelAndView getExportStatementsResult(RepositoryConnection conn, UUID txnId,
        HttpServletRequest request, HttpServletResponse response)
        throws ClientHTTPException {
    ProtocolUtil.logRequestParameters(request);

    // Optional statement-pattern filters taken from the request parameters.
    ValueFactory vf = conn.getValueFactory();
    Resource subj = ProtocolUtil.parseResourceParam(request, SUBJECT_PARAM_NAME, vf);
    IRI pred = ProtocolUtil.parseURIParam(request, PREDICATE_PARAM_NAME, vf);
    Value obj = ProtocolUtil.parseValueParam(request, OBJECT_PARAM_NAME, vf);
    Resource[] contexts = ProtocolUtil.parseContextParam(request, CONTEXT_PARAM_NAME, vf);
    // Inferred statements are included unless the client disables them.
    boolean useInferencing = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true);

    // Content negotiation: pick a writer factory acceptable to the client.
    RDFWriterFactory rdfWriterFactory = ProtocolUtil.getAcceptableService(request, response,
            RDFWriterRegistry.getInstance());

    Map<String, Object> model = new HashMap<String, Object>();
    model.put(ExportStatementsView.SUBJECT_KEY, subj);
    model.put(ExportStatementsView.PREDICATE_KEY, pred);
    model.put(ExportStatementsView.OBJECT_KEY, obj);
    model.put(ExportStatementsView.CONTEXTS_KEY, contexts);
    model.put(ExportStatementsView.USE_INFERENCING_KEY, Boolean.valueOf(useInferencing));
    model.put(ExportStatementsView.FACTORY_KEY, rdfWriterFactory);
    // HEAD requests return headers only, no response body.
    model.put(ExportStatementsView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod()));
    // The export runs within the caller's transaction.
    model.put(ExportStatementsView.CONNECTION_KEY, conn);
    model.put(ExportStatementsView.TRANSACTION_ID_KEY, txnId);
    return new ModelAndView(ExportStatementsView.getInstance(), model);
}
log.info("RDFParserFactory: before: " + f); for (RDFWriterFactory f : RDFWriterRegistry.getInstance().getAll()) { log.info("RDFWriterFactory: before: " + f); final RDFWriterRegistry r = RDFWriterRegistry.getInstance(); r.add((RDFWriterFactory) getInstanceForClass(TURTLE_WRITER_FACTORY)); r.add((RDFWriterFactory) getInstanceForClass(JSON_CONSTRUCT_WRITER_FACTORY)); log.info("RDFParserFactory: after: " + f); for (RDFWriterFactory f : RDFWriterRegistry.getInstance().getAll()) { log.info("RDFWriterFactory: after: " + f);
/**
 * Looks up the {@link RDFWriterFactory} that produces RDF data according to
 * a given MIME type.
 *
 * @param mimetype the MIME type
 * @return the writer factory registered for the matching RDF format
 */
private RDFWriterFactory getRDFWriterFactory(String mimetype) {
    // Map the MIME type to a format first, then resolve its writer factory.
    return RDFWriterRegistry.getInstance().get(getRDFFormat(mimetype));
}
/**
 * Tries to match the extension of a file name against the list of RDF
 * formats that can be written.
 *
 * @param fileName
 *        a file name
 * @return an {@link RDFFormat} if a match was found, or
 *         {@link Optional#empty()} otherwise
 * @see #getWriterFormatForFileName(String, RDFFormat)
 */
public static Optional<RDFFormat> getWriterFormatForFileName(String fileName) {
    final RDFWriterRegistry registry = RDFWriterRegistry.getInstance();
    // Only formats with a registered writer factory are candidates.
    return RDFFormat.matchFileName(fileName, registry.getKeys());
}
for (String mt:mimeTypes.nextElement().split(",")) { mt = mt.trim(); RDFFormat fmt = RDFWriterRegistry.getInstance() .getFileFormatForMIMEType(mt); if (conn.getTripleStore().isQuads() && (mt.equals(RDFFormat.NQUADS.getDefaultMIMEType()) || mt.equals(RDFFormat.TURTLE.getDefaultMIMEType())) || !conn.getTripleStore().isQuads() && fmt != null) { mimeType = mt; mimeType = RDFFormat.NTRIPLES.getDefaultMIMEType(); format = RDFWriterRegistry.getInstance() .getFileFormatForMIMEType(mimeType); final RDFWriter w = RDFWriterRegistry.getInstance().get(format) .getWriter(os);
/**
 * Exports the statements matching the optional subject/predicate/object/
 * context filters from the repository as RDF, in a content-negotiated
 * format.
 *
 * @return a model and view for exporting the statements
 * @throws ClientHTTPException if a request parameter is invalid or no
 *         acceptable writer format is available
 */
private ModelAndView getExportStatementsResult(Repository repository,
        HttpServletRequest request, HttpServletResponse response)
        throws ClientHTTPException {
    ProtocolUtil.logRequestParameters(request);

    ValueFactory vf = repository.getValueFactory();
    Resource subj = ProtocolUtil.parseResourceParam(request, SUBJECT_PARAM_NAME, vf);
    IRI pred = ProtocolUtil.parseURIParam(request, PREDICATE_PARAM_NAME, vf);
    Value obj = ProtocolUtil.parseValueParam(request, OBJECT_PARAM_NAME, vf);
    Resource[] contexts = ProtocolUtil.parseContextParam(request, CONTEXT_PARAM_NAME, vf);
    // Inferred statements are included unless the client disables them.
    boolean useInferencing = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true);

    // Negotiate the response serialization against the Accept header.
    RDFWriterFactory rdfWriterFactory = ProtocolUtil.getAcceptableService(
            request, response, RDFWriterRegistry.getInstance());

    Map<String, Object> model = new HashMap<String, Object>();
    model.put(ExportStatementsView.FACTORY_KEY, rdfWriterFactory);
    model.put(ExportStatementsView.SUBJECT_KEY, subj);
    model.put(ExportStatementsView.PREDICATE_KEY, pred);
    model.put(ExportStatementsView.OBJECT_KEY, obj);
    model.put(ExportStatementsView.CONTEXTS_KEY, contexts);
    model.put(ExportStatementsView.USE_INFERENCING_KEY, Boolean.valueOf(useInferencing));
    // HEAD requests get response headers only, no body.
    model.put(ExportStatementsView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod()));
    return new ModelAndView(ExportStatementsView.getInstance(), model);
}
@Override
protected Map<String, Object> referenceData(HttpServletRequest request) {
    @SuppressWarnings("unchecked")
    Map<String, Object> result = (Map<String, Object>) super.referenceData(request);

    // Collect the name of every registered writable RDF format; a TreeMap
    // keeps the entries alphabetically sorted for display in the view.
    Map<String, String> formatNames = new TreeMap<String, String>();
    for (RDFWriterFactory writerFactory : RDFWriterRegistry.getInstance().getAll()) {
        String name = writerFactory.getRDFFormat().getName();
        formatNames.put(name, name);
    }

    result.put("resultFormats", formatNames);
    return result;
}
}
/**
 * Convenience method for creating an {@link RDFWriter}. Uses the registry
 * returned by {@link RDFWriterRegistry#getInstance()} to locate a factory
 * for the specified format and delegates writer creation to that factory.
 *
 * @throws UnsupportedRDFormatException
 *         if no writer is available for the specified RDF format
 */
public static RDFWriter createWriter(RDFFormat format, Writer writer)
        throws UnsupportedRDFormatException {
    return RDFWriterRegistry.getInstance()
            .get(format)
            .orElseThrow(Rio.unsupportedFormat(format))
            .getWriter(writer);
}
/** * returns a list of all mimetypes which can be produced by implemented serializers * @return */ @Override public List<String> getProducedTypes() { Set<String> producedTypes = new LinkedHashSet<>(); for(RDFFormat format : RDFWriterRegistry.getInstance().getKeys()) { // Ignore binary formats if(format.hasCharset()) { producedTypes.addAll(format.getMIMETypes()); } } return new ArrayList<>(producedTypes); }
for (String mt:mimeTypes.nextElement().split(",")) { mt = mt.trim(); RDFFormat fmt = RDFWriterRegistry.getInstance() .getFileFormatForMIMEType(mt); if (conn.getTripleStore().isQuads() && (mt.equals(RDFFormat.NQUADS.getDefaultMIMEType()) || mt.equals(RDFFormat.TURTLE.getDefaultMIMEType())) || !conn.getTripleStore().isQuads() && fmt != null) { mimeType = mt; mimeType = RDFFormat.NTRIPLES.getDefaultMIMEType(); format = RDFWriterRegistry.getInstance() .getFileFormatForMIMEType(mimeType); final RDFWriter w = RDFWriterRegistry.getInstance().get(format) .getWriter(os);
registry = RDFWriterRegistry.getInstance(); view = GraphQueryResultView.getInstance();