Refine search
// Serialize the captured graph to the output stream in the previously chosen syntax.
// NOTE(review): the trailing "} ;" closes an enclosing anonymous class that is not
// visible in this fragment - do not remove it.
@Override public void writeTo(OutputStream out) { RDFDataMgr.write(out, graph, syntax) ; } } ;
/** Read quads or triples into a Dataset from the given location.
 * The syntax is determined from the location (no explicit language hint is taken
 * by this overload - see the 5-argument form for that).
 * @see #read(Dataset, String, String, Lang, Context)
 * @param dataset Destination
 * @param uri URI to read from (includes file: and a plain file name).
 */
public static void read(Dataset dataset, String uri) {
    // Delegate to the DatasetGraph-level reader.
    read(dataset.asDatasetGraph(), uri) ;
}
/** Write the DatasetGraph to the output stream in the default serialization for the language.
 * @param out OutputStream
 * @param dataset DatasetGraph to write
 * @param lang Language for the serialization; must map to a registered format,
 *             otherwise an exception is thrown.
 */
public static void write(OutputStream out, DatasetGraph dataset, Lang lang) {
    // Resolve the language to its default format, then delegate.
    write(out, dataset, langToFormatOrException(lang));
}
/** Read RDF data. * Use {@code RDFParser.source(uri).parse(sink)} * @param sink Destination for the RDF read. * @param uri URI to read from (includes file: and a plain file name). */ //@deprecated Use {@code RDFParser.source(uri).parse(sink)} //@Deprecated public static void parse(StreamRDF sink, String uri) { parse(sink, uri, defaultLang(uri)) ; }
/** Read RDF data.
 * @param sink Destination for the RDF read.
 * @param in Reader
 * @param lang Syntax for the stream.
 * @deprecated To be removed. An {@code InputStream} or {@code StringReader} is preferable.
 *     Use {@code RDFParser.create().source(in).lang(hintLang).parse()}
 */
@Deprecated
public static void parse(StreamRDF sink, Reader in, Lang lang) {
    // Delegates with the default base URI and no explicit Context.
    parse(sink, in, defaultBase(), lang, (Context)null) ;
}
@Test public void blankNodes2() { // Duplicate. String s = "_:a <http://example/p> 'foo' . _:a <http://example/p> 'foo' ." ; StringReader r = new StringReader(s) ; Model m = ModelFactory.createDefaultModel() ; RDFDataMgr.read(m, r, null, RDFLanguages.TURTLE) ; assertEquals(1, m.size()) ; }
/** The same N-Triples document parsed via RDFDataMgr and via Model.read. */
@Test
public void read_StringReader_31() {
    String x = "<s> <p> <p> ." ;
    {
        StringReader reader = new StringReader(x) ;
        Model model = ModelFactory.createDefaultModel() ;
        RDFDataMgr.read(model, reader, null, RDFLanguages.NTRIPLES) ;
    }
    StringReader reader = new StringReader("<s> <p> <p> .") ;
    Model model = ModelFactory.createDefaultModel() ;
    model.read(reader, null, "N-TRIPLES") ;
}
/** A Turtle {@code @prefix} directive must be copied into the model's prefix map. */
@Test
public void updatePrefixMapping1() {
    StringReader input = new StringReader("@prefix x: <http://example/x>.") ;
    Model model = ModelFactory.createDefaultModel() ;
    RDFDataMgr.read(model, input, null, RDFLanguages.TURTLE) ;
    assertEquals(1, model.getNsPrefixMap().size()) ;
    assertEquals("http://example/x", model.getNsPrefixURI("x")) ;
}
/** Load TriG inside a write transaction, then verify graph sizes inside a read transaction. */
@Test
public void trans_01() {
    Model named = dataset.getNamedModel(ns + "g") ;
    Txn.executeWrite(dataset,
        () -> RDFDataMgr.read(dataset, new StringReader(data1), null, Lang.TRIG)) ;
    Txn.executeRead(dataset, () -> {
        long defaultCount = Iter.count(dataset.getDefaultModel().listStatements()) ;
        assertEquals(2, defaultCount) ;
        long namedCount = Iter.count(named.listStatements()) ;
        assertEquals(3, namedCount) ;
    }) ;
}
/** Parse the given N-Triples text into a fresh default model. */
private Model parseToModelNTriples(String string) {
    Model model = ModelFactory.createDefaultModel();
    RDFDataMgr.read(model, new StringReader(string), null, RDFLanguages.NTRIPLES);
    return model;
}
/** Load Turtle into the default model transactionally, then check its size. */
@Test
public void trans_02() {
    Model model = dataset.getDefaultModel() ;
    Txn.executeWrite(dataset,
        () -> RDFDataMgr.read(model, new StringReader(data2), null, Lang.TURTLE)) ;
    Txn.executeRead(dataset, () -> assertEquals(4, model.size())) ;
}
/** An {@code @base} directive without a terminating dot must still parse. */
@Test
public void optionalDotInBase() {
    StringReader reader = new StringReader("@base <http://example/> <x> <p> <o> .") ;
    Model model = ModelFactory.createDefaultModel() ;
    RDFDataMgr.read(model, reader, null, RDFLanguages.TURTLE) ;
    assertEquals(1, model.size()) ;
    Property p = model.createProperty("http://example/p") ;
    Resource r = model.createResource("http://example/x") ;
    assertTrue(model.contains(r, p)) ;
}
/** Read in N-Triples data, which is not empty,
 * then write-read-compare using the format given.
 *
 * @param testdata non-empty N-Triples source text
 * @param lang serialization format used for the round trip
 *             (an {@link RDFFormat}, despite the parameter name)
 */
static void blankNodeLang(String testdata, RDFFormat lang) {
    StringReader r = new StringReader(testdata) ;
    Model m = ModelFactory.createDefaultModel() ;
    RDFDataMgr.read(m, r, null, RDFLanguages.NTRIPLES) ;
    Assert.assertTrue(m.size() > 0);
    // Serialize in the format under test, then parse it back.
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    RDFDataMgr.write(output, m, lang);
    ByteArrayInputStream input = new ByteArrayInputStream(output.toByteArray());
    Model m2 = ModelFactory.createDefaultModel();
    RDFDataMgr.read(m2, input, lang.getLang());
    Assert.assertTrue(m2.size() > 0);
    // Isomorphism, not equality: blank node labels may legitimately change.
    Assert.assertTrue(m.isIsomorphicWith(m2));
}
/** Round-trip a model containing blank-node cycles through Turtle. */
@Test
public void bnode_cycles() {
    Model original = RDFDataMgr.loadModel("testing/DAWG-Final/construct/data-ident.ttl");
    Assert.assertTrue(original.size() > 0);
    ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    RDFDataMgr.write(serialized, original, Lang.TURTLE);
    Model reparsed = ModelFactory.createDefaultModel();
    RDFDataMgr.read(reparsed, new ByteArrayInputStream(serialized.toByteArray()), Lang.TURTLE);
    Assert.assertTrue(reparsed.size() > 0);
    // Blank node labels may differ after the round trip, so compare by isomorphism.
    Assert.assertTrue(original.isIsomorphicWith(reparsed));
}
/** Assemble the fragment's metadata, data triples and controls into one model
 *  and serialize it to the response stream in the configured content type. */
@Override
public void writeFragment(ServletOutputStream outputStream, IDataSource datasource,
                          ILinkedDataFragment fragment, ILinkedDataFragmentRequest ldfRequest)
        throws Exception {
    Model output = ModelFactory.createDefaultModel();
    output.setNsPrefixes(getPrefixes());
    output.add(fragment.getMetadata());
    output.add(fragment.getTriples());
    output.add(fragment.getControls());
    RDFDataMgr.write(outputStream, output, contentType);
}
/** Load the three SPARQL-QC benchmark suites, merge their models, and print the result as Turtle. */
public static void main(String[] args) throws IOException {
    Model model = ModelFactory.createDefaultModel();
    List<Resource> suites = new ArrayList<>();
    for (String resource : new String[] {
            "sparqlqc/1.4/benchmark/cqnoproj.rdf",
            "sparqlqc/1.4/benchmark/ucqproj.rdf",
            "sparqlqc/1.4/benchmark/ucqrdfs.rdf" }) {
        suites.addAll(SparqlQcReader.loadTestSuites(resource));
    }
    for (Resource suite : suites) {
        model.add(suite.getModel());
    }
    RDFDataMgr.write(System.out, model, RDFFormat.TURTLE_PRETTY);
}
}
// NOTE(review): this fragment appears truncated - the method body's opening "{",
// the declarations of "out" (presumably a ByteArrayOutputStream) and "ds2"
// (presumably a Dataset), and the closing braces are not visible here.
// Confirm against the full source before editing.
static void rtRJRds(String filename)
    Dataset ds1 = RDFDataMgr.loadDataset(filename) ;
    // Round trip: write ds1 as JSON-LD, read it back into ds2.
    RDFDataMgr.write(out, ds1, JSONLD) ;
    ByteArrayInputStream r = new ByteArrayInputStream(out.toByteArray()) ;
    RDFDataMgr.read(ds2, r, null, JSONLD) ;
    // Prefixes must survive for the default model and every named graph.
    checkNamespaces(ds2.getDefaultModel(), ds1.getDefaultModel().getNsPrefixMap());
    Iterator<String> graphNames = ds2.listNames();
    while (graphNames.hasNext()) {
        String gn = graphNames.next();
        checkNamespaces(ds2.getNamedModel(gn), ds1.getNamedModel(gn).getNsPrefixMap());
/**
 * Recursively walk {@code fileOrFolder}, loading every "daq-metadata.trig"
 * dataset found into the shared dataset {@code d} and recording the mapping
 * between the dataset's folder (relative to {@code metadataBaseDir}) and its
 * quality-graph names.
 *
 * @param fileOrFolder file or directory to process; hidden entries are skipped
 */
private void loadFile(File fileOrFolder){
    if (fileOrFolder.isHidden()) return ;
    if (fileOrFolder.getPath().contains("daq-metadata.trig")){
        Dataset _ds = RDFDataMgr.loadDataset(fileOrFolder.getPath());
        // Key datasets by their parent folder, relative to the metadata root.
        String datasetPLD = fileOrFolder.getParent().replace(metadataBaseDir, "");
        Iterator<String> iter = _ds.listNames();
        while (iter.hasNext()){
            String name = iter.next();
            d.addNamedModel(name, _ds.getNamedModel(name));
            dsToQG.put(datasetPLD, name);
            qualityGraphToDS.put(name, datasetPLD);
        }
        d.getDefaultModel().add(_ds.getDefaultModel());
    }
    if (fileOrFolder.isDirectory()){
        // File.listFiles() returns null on I/O error - guard against an NPE.
        File[] listOfFiles = fileOrFolder.listFiles();
        if (listOfFiles != null) {
            for (File file : listOfFiles){
                loadFile(file);
            }
        }
    }
}
/** Read a file whose ".ttl" extension does not match its RDF/XML content,
 *  forcing the language explicitly both via RDFDataMgr and via Model.read.
 *  Fix: both TypedInputStreams were leaked; they are now closed with
 *  try-with-resources. */
@Test
public void read_30() {
    try (TypedInputStream in = RDFDataMgr.open(filename("D-not-TTL.ttl"))) {
        Model m0 = ModelFactory.createDefaultModel() ;
        RDFDataMgr.read(m0, in, RDFLanguages.RDFXML) ;
    }
    try (TypedInputStream in1 = RDFDataMgr.open(filename("D-not-TTL.ttl"))) {
        Model m1 = ModelFactory.createDefaultModel() ;
        m1.read(in1, null, "RDF/XML") ;
    }
}