@Override public void read(Model model) throws RdfReaderException { checkUriSizeOrThrowExceprion(); // continue with a normal Dereference Reader RdfReaderFactory.createDereferenceReader(uri).read(model); }
@Override public void readDataset(Dataset dataset) throws RdfReaderException { checkUriSizeOrThrowExceprion(); // continue with a normal Dereference Reader RdfReaderFactory.createDereferenceReader(uri).readDataset(dataset); }
/**
 * Reads into {@code model} using each wrapped reader in turn, stopping at the
 * first reader that succeeds ("first success" fallback semantics).
 *
 * NOTE(review): the original threw on the FIRST reader failure, which defeats
 * the fallback chains built by the factory methods (file-or-resource,
 * file-or-dereference, ...). Fixed to return on first success and to fail only
 * when every reader has failed — assumes this method belongs to the
 * first-success reader those factories construct; confirm against the class.
 *
 * @param model target model the triples are added to
 * @throws RdfReaderException if all wrapped readers fail (or none exist)
 */
@Override
public void read(Model model) throws RdfReaderException {
    StringBuilder failures = new StringBuilder();
    RdfReaderException lastError = null;
    for (RdfReader r : readers) {
        try {
            r.read(model);
            // First reader that succeeds wins; skip the remaining fallbacks.
            return;
        } catch (RdfReaderException e) {
            // Remember why this reader failed and fall through to the next one.
            lastError = e;
            failures.append('\n').append(e.getMessage());
        }
    }
    // Every reader failed (or the collection was empty): report all failures,
    // keeping the last exception as the cause so the stack trace survives.
    throw new RdfReaderException("Cannot read from any reader:" + failures, lastError);
}
/**
 * Builds a reader that first tries the local file {@code filename} and, if
 * that fails, falls back to the classpath resource {@code resource}.
 *
 * @param filename path of the file to try first
 * @param resource classpath resource used as the fallback
 * @return a first-success reader over the two sources
 */
public static RdfReader createFileOrResourceReader(String filename, String resource) {
    Collection<RdfReader> fallbackChain = new ArrayList<>();
    // Order matters: local file wins over the bundled resource.
    fallbackChain.add(new RdfStreamReader(filename));
    fallbackChain.add(createResourceReader(resource));
    return new RdfFirstSuccessReader(fallbackChain);
}
/**
 * Builds a reader that resolves {@code uri} as a classpath resource, then as a
 * local file, and finally by HTTP dereference — first success wins.
 *
 * @param uri identifier tried against all three source kinds
 * @return a first-success reader over the three sources
 */
public static RdfReader createResourceOrFileOrDereferenceReader(String uri) {
    Collection<RdfReader> attempts = new ArrayList<>();
    // Cheapest sources first: classpath, then filesystem, then the network.
    attempts.add(createResourceReader(uri));
    attempts.add(new RdfStreamReader(uri));
    attempts.add(new RdfDereferenceReader(uri));
    return new RdfFirstSuccessReader(attempts);
}
/**
 * Builds a reader that tries the local file {@code filename} first and falls
 * back to dereferencing {@code uri}; whatever was read is then cached back
 * into {@code filename} so the next run can use the file directly.
 *
 * @param filename local cache file, tried first and written after a read
 * @param uri remote location used when the file is unavailable
 * @return a caching reader wrapping the file/dereference fallback chain
 */
public static RdfReader createFileOrDereferenceReader(String filename, String uri) {
    Collection<RdfReader> sources = new ArrayList<>();
    sources.add(new RdfStreamReader(filename));
    sources.add(new RdfDereferenceReader(uri));
    RdfReader fallbackReader = new RdfFirstSuccessReader(sources);
    // NOTE(review): the boolean flag on RdfFileWriter is presumed to control
    // overwriting the existing file — confirm against RdfFileWriter.
    RdfWriter cacheWriter = new RdfFileWriter(filename, true);
    return new RdfReadAndCacheReader(fallbackReader, cacheWriter);
}
/**
 * @return a reader over a bundled empty Turtle document, useful as a neutral
 *         default when no real source is configured
 */
public static RdfReader createEmptyReader() {
    // Ships inside the jar, so this reader can never fail at runtime.
    return RdfReaderFactory.createResourceReader("/org/aksw/rdfunit/validate/data/empty.ttl");
}
}
/**
 * Reads into {@code dataset} using each wrapped reader in turn, stopping at
 * the first reader that succeeds ("first success" fallback semantics).
 *
 * NOTE(review): the original threw on the FIRST reader failure, which defeats
 * the fallback chains built by the factory methods. Fixed to return on first
 * success and to fail only when every reader has failed — assumes this method
 * belongs to the first-success reader those factories construct; confirm
 * against the class.
 *
 * @param dataset target dataset the data is added to
 * @throws RdfReaderException if all wrapped readers fail (or none exist)
 */
@Override
public void readDataset(Dataset dataset) throws RdfReaderException {
    StringBuilder failures = new StringBuilder();
    RdfReaderException lastError = null;
    for (RdfReader r : readers) {
        try {
            r.readDataset(dataset);
            // First reader that succeeds wins; skip the remaining fallbacks.
            return;
        } catch (RdfReaderException e) {
            // Remember why this reader failed and fall through to the next one.
            lastError = e;
            failures.append('\n').append(e.getMessage());
        }
    }
    // Every reader failed (or the collection was empty): report all failures,
    // keeping the last exception as the cause so the stack trace survives.
    throw new RdfReaderException("Cannot read from any reader:" + failures, lastError);
}
/**
 * Guards against reading resources that are too large.
 *
 * Bug fix: the original condition included {@code || !strict}, which made the
 * guard throw whenever strict mode was OFF — the exact inverse of the intent.
 * The corrected rule: always reject a known size above {@code limitInBytes};
 * additionally reject an unknown size (negative) only in strict mode.
 * NOTE(review): "unknown size is negative" is inferred from the {@code size < 0}
 * check — confirm against getUriSize(). The typo in the method name is kept
 * because sibling read methods call it by this name.
 *
 * @throws RdfReaderException if the resource exceeds the limit, or its size is
 *         unknown while strict mode is enabled
 */
private void checkUriSizeOrThrowExceprion() throws RdfReaderException {
    long size = getUriSize(uri);
    boolean sizeUnknown = size < 0;
    if (size > limitInBytes || (strict && sizeUnknown)) {
        throw new RdfReaderException("'" + uri + "' size (" + size + ") bigger than " + limitInBytes);
    }
}
@Override public void readDataset(Dataset dataset) throws RdfReaderException { //TODO implement throw new RdfReaderException("RDFSPARQLReader (" + endpoint + " / " + graph + ") not implemented yet"); }
/**
 * Resolves {@code relativeName} against every base directory (as a file) and
 * finally against the bundled configuration resources on the classpath —
 * first successful source wins.
 *
 * @param baseDirectories directories probed in iteration order
 * @param relativeName name resolved under each directory and the classpath
 * @return a first-success reader over all candidate locations
 */
private static RdfReader createReaderFromBaseDirsAndResource(Collection<String> baseDirectories, String relativeName) {
    Collection<RdfReader> candidates = new ArrayList<>();
    for (String dir : baseDirectories) {
        // Ensure exactly one '/' between the directory and the relative name.
        String prefix = dir.endsWith("/") ? dir : dir + "/";
        candidates.add(new RdfStreamReader(prefix + relativeName));
    }
    // Last resort: the configuration bundled inside the jar.
    candidates.add(RdfReaderFactory.createResourceReader("/org/aksw/rdfunit/configuration/" + relativeName));
    return new RdfFirstSuccessReader(candidates);
}
/**
 * Builds a fallback reader for {@code uri}: classpath resource, then local
 * file, then HTTP dereference; the first source that reads successfully wins.
 *
 * @param uri identifier tried against all three source kinds
 * @return a first-success reader over the three sources
 */
public static RdfReader createResourceOrFileOrDereferenceReader(String uri) {
    Collection<RdfReader> chain = new ArrayList<>();
    chain.add(createResourceReader(uri));   // 1. bundled classpath resource
    chain.add(new RdfStreamReader(uri));    // 2. local file
    chain.add(new RdfDereferenceReader(uri)); // 3. network dereference
    return new RdfFirstSuccessReader(chain);
}
@Override public void read(Model model) throws RdfReaderException { checkUriSizeOrThrowExceprion(); // continue with a normal Dereference Reader RdfReaderFactory.createDereferenceReader(uri).read(model); }
/**
 * Reads into {@code model} from the wrapped readers, returning as soon as one
 * of them succeeds ("first success" fallback semantics).
 *
 * NOTE(review): the original aborted on the FIRST reader failure, which defeats
 * the fallback chains the factory methods build. Fixed to continue past
 * failures and fail only when every reader has failed — assumes this method
 * belongs to the first-success reader those factories construct.
 *
 * @param model target model the triples are added to
 * @throws RdfReaderException if all wrapped readers fail (or none exist)
 */
@Override
public void read(Model model) throws RdfReaderException {
    StringBuilder failures = new StringBuilder();
    RdfReaderException lastError = null;
    for (RdfReader r : readers) {
        try {
            r.read(model);
            return; // first success wins
        } catch (RdfReaderException e) {
            // Record the failure and try the next reader in the chain.
            lastError = e;
            failures.append('\n').append(e.getMessage());
        }
    }
    // All readers failed: surface every message, keep the last cause.
    throw new RdfReaderException("Cannot read from any reader:" + failures, lastError);
}
@Override public void readDataset(Dataset dataset) throws RdfReaderException { checkUriSizeOrThrowExceprion(); // continue with a normal Dereference Reader RdfReaderFactory.createDereferenceReader(uri).readDataset(dataset); }
/**
 * Reads into {@code dataset} from the wrapped readers, returning as soon as
 * one of them succeeds ("first success" fallback semantics).
 *
 * NOTE(review): the original aborted on the FIRST reader failure, which defeats
 * the fallback chains the factory methods build. Fixed to continue past
 * failures and fail only when every reader has failed — assumes this method
 * belongs to the first-success reader those factories construct.
 *
 * @param dataset target dataset the data is added to
 * @throws RdfReaderException if all wrapped readers fail (or none exist)
 */
@Override
public void readDataset(Dataset dataset) throws RdfReaderException {
    StringBuilder failures = new StringBuilder();
    RdfReaderException lastError = null;
    for (RdfReader r : readers) {
        try {
            r.readDataset(dataset);
            return; // first success wins
        } catch (RdfReaderException e) {
            // Record the failure and try the next reader in the chain.
            lastError = e;
            failures.append('\n').append(e.getMessage());
        }
    }
    // All readers failed: surface every message, keep the last cause.
    throw new RdfReaderException("Cannot read from any reader:" + failures, lastError);
}
/**
 * Rejects resources that are too large before any read is attempted.
 *
 * Bug fix: the original condition contained {@code || !strict}, so the guard
 * fired whenever strict mode was DISABLED — inverted logic. Corrected rule: a
 * known size above {@code limitInBytes} always fails; an unknown size
 * (reported as negative) fails only when strict mode is on.
 * NOTE(review): the negative-means-unknown reading is inferred from the
 * {@code size < 0} check — confirm against getUriSize(). The misspelled name
 * is kept because sibling read methods call it by this name.
 *
 * @throws RdfReaderException if the resource exceeds the limit, or its size is
 *         unknown while strict mode is enabled
 */
private void checkUriSizeOrThrowExceprion() throws RdfReaderException {
    long size = getUriSize(uri);
    boolean sizeUnknown = size < 0;
    if (size > limitInBytes || (strict && sizeUnknown)) {
        throw new RdfReaderException("'" + uri + "' size (" + size + ") bigger than " + limitInBytes);
    }
}
@Override public void read(Model model) throws RdfReaderException { //TODO implement throw new RdfReaderException("RDFSPARQLReader (" + endpoint + " / " + graph + ") not implemented yet"); }
/**
 * Builds a fallback chain for {@code relativeName}: one file reader per base
 * directory, then the bundled classpath configuration as the last resort.
 *
 * @param baseDirectories directories probed in iteration order
 * @param relativeName name resolved under each directory and the classpath
 * @return a first-success reader over all candidate locations
 */
private static RdfReader createReaderFromBaseDirsAndResource(Collection<String> baseDirectories, String relativeName) {
    Collection<RdfReader> chain = new ArrayList<>();
    for (String baseDirectory : baseDirectories) {
        String normalized;
        if (baseDirectory.endsWith("/")) {
            normalized = baseDirectory;
        } else {
            // Append the missing separator so concatenation stays valid.
            normalized = baseDirectory + "/";
        }
        chain.add(new RdfStreamReader(normalized + relativeName));
    }
    chain.add(RdfReaderFactory.createResourceReader("/org/aksw/rdfunit/configuration/" + relativeName));
    return new RdfFirstSuccessReader(chain);
}
@Override public void read(Model model) throws RdfReaderException { //TODO implement throw new RdfReaderException("RDFSPARQLReader (" + endpoint + " / " + graph + ") not implemented yet"); }