/** * Guess at the {@link RDFFormat}. * * @param filename * Some filename. * * @return The {@link RDFFormat} -or- <code>null</code> iff * {@link #fallback} is <code>null</code> and the no format * was recognized for the <i>filename</i> */ public RDFFormat getRDFFormat(String filename) { final RDFFormat rdfFormat = // fallback == null // ? RDFFormat.forFileName(filename) // : RDFFormat.forFileName(filename, fallback)// ; return rdfFormat; }
/** * Guess at the {@link RDFFormat}. * * @param filename * Some filename. * * @return The {@link RDFFormat} -or- <code>null</code> iff * {@link #fallback} is <code>null</code> and the no format * was recognized for the <i>filename</i> */ public RDFFormat getRDFFormat(String filename) { final RDFFormat rdfFormat = // fallback == null // ? RDFFormat.forFileName(filename) // : RDFFormat.forFileName(filename, fallback)// ; return rdfFormat; }
/**
 * Detect the {@link RDFFormat} associated with a file-extension string.
 *
 * @param string   a file extension (e.g. {@code "ttl"}), may be {@code null}
 * @param fallback the format returned when {@code string} is {@code null} or
 *                 does not map to a known format
 * @return the detected format, or {@code fallback}
 */
private static RDFFormat detectRDFFormat(@Nullable final String string, final RDFFormat fallback) {
    if (string == null) {
        return fallback;
    }
    // Synthesize a dummy filename so the extension-based lookup can be reused.
    return RDFFormat.forFileName("dummy." + string.trim(), fallback);
}
/**
 * Resolve the {@link RDFFormat} for a filename.
 * <p>
 * Files ending in {@code .owl} are treated as RDF/XML, since OWL ontologies
 * are conventionally serialized that way.
 *
 * @param filename the name of the file to inspect
 * @return the resolved format (never {@code null})
 * @throws IllegalArgumentException if no format could be determined
 */
private RDFFormat formatForFileName(String filename) {
    RDFFormat format = RDFFormat.forFileName(filename);
    if (format != null)
        return format;
    if (filename.endsWith(".owl"))
        return RDFFormat.RDFXML;
    // Fixed typo in the error message: "Unknow" -> "Unknown".
    throw new IllegalArgumentException("Unknown RDF format for " + filename);
}
/** * * Utility method to get the {@link RDFFormat} for filename. * * It checks for compressed endings and is provided as a utility. * * @param fileName * @return */ public static RDFFormat rdfFormatForFile(final String fileName) { /* * Try to get the RDFFormat from the URL's file path. */ RDFFormat fmt = RDFFormat.forFileName(fileName); if (fmt == null && fileName.endsWith(".zip")) { fmt = RDFFormat.forFileName(fileName.substring(0, fileName.length() - 4)); } if (fmt == null && fileName.endsWith(".gz")) { fmt = RDFFormat.forFileName(fileName.substring(0, fileName.length() - 3)); } if (fmt == null) { // Default format. fmt = RDFFormat.RDFXML; } return fmt; }
/** * Return the best guess at the {@link RDFFormat} for a resource. * <p> * Note: This handles the .gz and .zip extensions. * * @param n * The name of the resource. * @param rdfFormat * The fallback format (optional). * * @return The best guess format. */ private RDFFormat guessRDFFormat(final String n, final RDFFormat rdfFormat) { RDFFormat fmt = RDFFormat.forFileName(n); if (fmt == null && n.endsWith(".zip")) { fmt = RDFFormat.forFileName(n.substring(0, n.length() - 4)); } if (fmt == null && n.endsWith(".gz")) { fmt = RDFFormat.forFileName(n.substring(0, n.length() - 3)); } if (fmt == null) // fallback fmt = rdfFormat; return fmt; }
/**
 * Load the contents of an RDF file into the given graph.
 * <p>
 * The format is guessed from the file name; {@code .owl} files are treated
 * as RDF/XML.
 *
 * @param graph the target graph
 * @param file  the RDF file to read
 * @throws Exception if the file cannot be read or parsed
 */
public static void loadGraph(INamedGraph graph, File file) throws Exception {
    RDFFormat format = RDFFormat.forFileName(file.getName());
    if (format == null) {
        if (file.getName().endsWith(".owl")) {
            // OWL ontologies are conventionally RDF/XML.
            format = RDFFormat.RDFXML;
        }
    }
    // try-with-resources: the original leaked the stream when the callee
    // threw. Closing here is safe even if the callee also closes it.
    try (FileInputStream in = new FileInputStream(file)) {
        loadGraph(graph, in, format, "");
    }
}
/**
 * Determine the {@link RDFFormat} for a URL, preferring the connection's
 * Content-Type header and falling back to the URL's file extension.
 *
 * @param url           the resource URL (used for extension-based fallback)
 * @param conn          the open connection (used for its Content-Type)
 * @param defaultFormat the format returned when nothing is recognized
 * @return the resolved format, or {@code defaultFormat}
 */
private RDFFormat findRdfFormat(URL url, URLConnection conn, RDFFormat defaultFormat) {
    String contentType = conn.getContentType();
    if (contentType != null) {
        // Content-Type headers routinely carry parameters such as
        // "; charset=UTF-8" which would defeat the exact-match MIME lookup.
        final int semi = contentType.indexOf(';');
        if (semi >= 0) {
            contentType = contentType.substring(0, semi).trim();
        }
    }
    RDFFormat format = RDFFormat.forMIMEType(contentType);
    if (format == null) {
        format = RDFFormat.forFileName(url.getFile(), defaultFormat);
    }
    return format;
} }
/**
 * Load a test dataset from the classpath into the connection.
 *
 * @param dataFile classpath-relative resource name of the dataset
 * @param contexts optional target contexts for the loaded statements
 * @throws IOException if the resource is missing or cannot be read
 */
protected void loadTestData(String dataFile, Resource... contexts) throws RDFParseException, RepositoryException, IOException {
    logger.debug("loading dataset {}", dataFile);
    InputStream dataset = ComplexSPARQLQueryTest.class.getResourceAsStream(dataFile);
    if (dataset == null) {
        // Fail fast with a clear message; the original threw an NPE here.
        throw new IOException("Could not find resource: " + dataFile);
    }
    try {
        conn.add(dataset, "", RDFFormat.forFileName(dataFile), contexts);
    } finally {
        dataset.close();
    }
    logger.debug("dataset loaded.");
} }
// True iff the resource name maps to a known RDF serialization, either
// directly or after stripping a ".zip" (4 chars) / ".gz" (3 chars)
// compression suffix and retrying the extension lookup.
boolean isRDF = RDFFormat.forFileName(name) != null || (name.endsWith(".zip") && RDFFormat.forFileName(name .substring(0, name.length() - 4)) != null) || (name.endsWith(".gz") && RDFFormat.forFileName(name .substring(0, name.length() - 3)) != null);
/**
 * Import an RDF dataset from a file, guessing the serialization format from
 * the file's absolute path.
 *
 * @param file        the dataset file to import
 * @param dataset     the target dataset name
 * @param cleanBefore whether to clear the dataset before importing
 * @return the result of the stream-based overload
 * @throws FileNotFoundException if {@code file} does not exist
 */
@Override public boolean importDataset(File file, String dataset, boolean cleanBefore) throws FileNotFoundException {
    final RDFFormat format = RDFFormat.forFileName(file.getAbsolutePath());
    final FileInputStream in = new FileInputStream(file);
    // Delegate to the stream-based overload; it is responsible for the stream.
    return importDataset(in, format, dataset, cleanBefore);
}
// True iff the resource name maps to a known RDF serialization, either
// directly or after stripping a ".zip" (4 chars) / ".gz" (3 chars)
// compression suffix and retrying the extension lookup.
boolean isRDF = RDFFormat.forFileName(name) != null || (name.endsWith(".zip") && RDFFormat.forFileName(name .substring(0, name.length() - 4)) != null) || (name.endsWith(".gz") && RDFFormat.forFileName(name .substring(0, name.length() - 3)) != null);
/** * Note: Overridden to turn off autocommit and commit after the data are * loaded. */ @Override protected void loadDataset(String datasetFile) throws RDFParseException, RepositoryException, IOException { logger.debug("loading dataset..."); InputStream dataset = SPARQLUpdateTest.class.getResourceAsStream(datasetFile); try { // con.setAutoCommit(false); con.add(dataset, "", RDFFormat.forFileName(datasetFile));//RDFFormat.TRIG); con.commit(); } finally { dataset.close(); } logger.debug("dataset loaded."); }
/** * Note: Overridden to turn off autocommit and commit after the data are * loaded. */ protected void loadDataset(String datasetFile) throws RDFParseException, RepositoryException, IOException { logger.debug("loading dataset..."); InputStream dataset = SPARQLUpdateTest.class.getResourceAsStream(datasetFile); try { // con.setAutoCommit(false); con.add(dataset, "", RDFFormat.forFileName(datasetFile));//RDFFormat.TRIG); con.commit(); } finally { dataset.close(); } logger.debug("dataset loaded."); }
/**
 * Resolve an {@link RDFFormat} for a path, returning the fallback when the
 * extension is unrecognized or no parser is registered for the format.
 *
 * @param path     the file path to inspect
 * @param fallback the format to return when no usable format is found
 * @return a format with a registered parser, or {@code fallback}
 */
private RDFFormat forFileName(String path, RDFFormat fallback) {
    final RDFFormat detected = RDFFormat.forFileName(path);
    if (detected == null) {
        return fallback;
    }
    // A format without a registered parser is as useless as no format at all.
    return RDFParserRegistry.getInstance().has(detected) ? detected : fallback;
}
/**
 * Overridden to turn off auto commit and to commit after the data set is
 * loaded.
 *
 * @param dataFile classpath-relative resource name of the dataset
 * @param contexts optional target contexts for the loaded statements
 * @throws IOException if the resource is missing or cannot be read
 */
@Override
protected void loadTestData(String dataFile, Resource... contexts) throws RDFParseException, RepositoryException, IOException {
    logger.debug("loading dataset " + dataFile);
    InputStream dataset = ComplexSPARQLQueryTest.class.getResourceAsStream(dataFile);
    if (dataset == null) {
        // Fail fast with a clear message; the original threw an NPE here.
        throw new IOException("Could not find resource: " + dataFile);
    }
    try {
        conn.setAutoCommit(false);
        conn.add(dataset, "", RDFFormat.forFileName(dataFile), contexts);
        conn.commit();
    } finally {
        dataset.close();
    }
    logger.debug("dataset loaded.");
}
/**
 * Load a test dataset from the classpath into the connection.
 *
 * @param dataFile classpath-relative resource name of the dataset
 * @param contexts optional target contexts for the loaded statements
 * @throws IOException if the resource is missing or cannot be read
 */
protected void loadTestData(String dataFile, Resource... contexts) throws RDFParseException, RepositoryException, IOException {
    logger.debug("loading dataset {}", dataFile);
    InputStream dataset = ComplexSPARQLQueryTest.class.getResourceAsStream(dataFile);
    if (dataset == null) {
        // Fail fast with a clear message; the original threw an NPE here.
        throw new IOException("Could not find resource: " + dataFile);
    }
    try {
        conn.add(dataset, "", RDFFormat.forFileName(dataFile), contexts);
    } finally {
        dataset.close();
    }
    logger.debug("dataset loaded.");
} }
/** * Note: Overridden to turn off autocommit and commit after the data are * loaded. */ protected void loadDataset(String datasetFile) throws RDFParseException, RepositoryException, IOException { logger.debug("loading dataset..."); InputStream dataset = SPARQLUpdateTest.class.getResourceAsStream(datasetFile); try { // con.setAutoCommit(false); con.add(dataset, "", RDFFormat.forFileName(datasetFile));//RDFFormat.TRIG); con.commit(); } finally { dataset.close(); } logger.debug("dataset loaded."); }
/** * Note: Overridden to turn off autocommit and commit after the data are * loaded. */ @Override protected void loadDataset(String datasetFile) throws RDFParseException, RepositoryException, IOException { logger.debug("loading dataset..."); InputStream dataset = SPARQLUpdateTest.class.getResourceAsStream(datasetFile); try { // con.setAutoCommit(false); con.add(dataset, "", RDFFormat.forFileName(datasetFile));//RDFFormat.TRIG); con.commit(); } finally { dataset.close(); } logger.debug("dataset loaded."); }
/**
 * Overridden to turn off auto commit and to commit after the data set is
 * loaded.
 *
 * @param dataFile classpath-relative resource name of the dataset
 * @param contexts optional target contexts for the loaded statements
 * @throws IOException if the resource is missing or cannot be read
 */
@Override
protected void loadTestData(String dataFile, Resource... contexts) throws RDFParseException, RepositoryException, IOException {
    logger.debug("loading dataset " + dataFile);
    InputStream dataset = ComplexSPARQLQueryTest.class.getResourceAsStream(dataFile);
    if (dataset == null) {
        // Fail fast with a clear message; the original threw an NPE here.
        throw new IOException("Could not find resource: " + dataFile);
    }
    try {
        conn.setAutoCommit(false);
        conn.add(dataset, "", RDFFormat.forFileName(dataFile), contexts);
        conn.commit();
    } finally {
        dataset.close();
    }
    logger.debug("dataset loaded.");
}