public void loadFromModifiableHDT(TempHDT modHdt, ProgressListener listener) { modHdt.reorganizeDictionary(listener); modHdt.reorganizeTriples(listener); // Get parts TempTriples modifiableTriples = (TempTriples) modHdt.getTriples(); TempDictionary modifiableDictionary = (TempDictionary) modHdt.getDictionary(); // Convert triples to final format if(triples.getClass().equals(modifiableTriples.getClass())) { triples = modifiableTriples; } else { //StopWatch tripleConvTime = new StopWatch(); triples.load(modifiableTriples, listener); //System.out.println("Triples conversion time: "+tripleConvTime.stopAndShow()); } // Convert dictionary to final format if(dictionary.getClass().equals(modifiableDictionary.getClass())) { dictionary = (DictionaryPrivate)modifiableDictionary; } else { //StopWatch dictConvTime = new StopWatch(); dictionary.load(modifiableDictionary, listener); //System.out.println("Dictionary conversion time: "+dictConvTime.stopAndShow()); } this.baseUri = modHdt.getBaseURI(); isClosed=false; }
// Creates the temporary (in-memory) HDT backing this importer and opens the
// dictionary for insertions. The one-pass mode fills dictionary and triples
// together while the RDF input is parsed.
private void init() {
    // Create TempHDT
    modHDT = new TempHDTImpl(spec, baseUri, ModeOfLoading.ONE_PASS);
    dictionary = modHDT.getDictionary();
    triples = modHDT.getTriples();

    // Open the dictionary so terms can be appended while RDF is loaded.
    // NOTE(review): triple generation happens elsewhere — only startProcessing()
    // is called here; endProcessing() is presumably the caller's responsibility.
    dictionary.startProcessing();
}
// Reorganize both the dictionary and the triples into their final order.
modHDT.reorganizeDictionary(listener);
modHDT.reorganizeTriples(listener);
// Record the original (input) size in the temporary header's statistics node.
modHDT.getHeader().insert( "_:statistics", HDTVocabulary.ORIGINAL_SIZE, size);
// Build the final header and carry the original-size statistic over into it.
hdt.populateHeaderStructure(modHDT.getBaseURI());
long originalSize = HeaderUtil.getPropertyLong(modHDT.getHeader(), "_:statistics", HDTVocabulary.ORIGINAL_SIZE);
hdt.getHeader().insert("_:statistics", HDTVocabulary.ORIGINAL_SIZE, originalSize);
// NOTE(review): the matching try for this catch is outside this view; the
// statistic lookup is best-effort and the temporary HDT is released here.
} catch (NotFoundException e) {
modHDT.close();
@Override public HDT doGenerateHDT(Iterator<TripleString> triples, String baseURI, HDTOptions spec, ProgressListener listener) throws IOException { //choose the importer TempHDTImporterOnePass loader = new TempHDTImporterOnePass(); // Create TempHDT TempHDT modHdt = loader.loadFromTriples(spec, triples, baseURI, listener); // Convert to HDT HDTImpl hdt = new HDTImpl(spec); hdt.loadFromModifiableHDT(modHdt, listener); hdt.populateHeaderStructure(modHdt.getBaseURI()); // Add file size to Header try { long originalSize = HeaderUtil.getPropertyLong(modHdt.getHeader(), "_:statistics", HDTVocabulary.ORIGINAL_SIZE); hdt.getHeader().insert("_:statistics", HDTVocabulary.ORIGINAL_SIZE, originalSize); } catch (NotFoundException e) { } modHdt.close(); return hdt; }
@Override public HDT doGenerateHDT(String rdfFileName, String baseURI, RDFNotation rdfNotation, HDTOptions spec, ProgressListener listener) throws IOException, ParserException { //choose the importer String loaderType = spec.get("loader.type"); TempHDTImporter loader; if ("two-pass".equals(loaderType)) { loader = new TempHDTImporterTwoPass(); } else { loader = new TempHDTImporterOnePass(); } // Create TempHDT TempHDT modHdt = loader.loadFromRDF(spec, rdfFileName, baseURI, rdfNotation, listener); // Convert to HDT HDTImpl hdt = new HDTImpl(spec); hdt.loadFromModifiableHDT(modHdt, listener); hdt.populateHeaderStructure(modHdt.getBaseURI()); // Add file size to Header try { long originalSize = HeaderUtil.getPropertyLong(modHdt.getHeader(), "_:statistics", HDTVocabulary.ORIGINAL_SIZE); hdt.getHeader().insert("_:statistics", HDTVocabulary.ORIGINAL_SIZE, originalSize); } catch (NotFoundException e) { } modHdt.close(); return hdt; }
@Override public TempHDT loadFromRDF(HDTOptions specs, String filename, String baseUri, RDFNotation notation, ProgressListener listener) throws IOException, ParserException { RDFParserCallback parser = RDFParserFactory.getParserCallback(notation); // Create Modifiable Instance TempHDT modHDT = new TempHDTImpl(specs, baseUri, ModeOfLoading.ONE_PASS); TempDictionary dictionary = modHDT.getDictionary(); TempTriples triples = modHDT.getTriples(); TripleAppender appender = new TripleAppender(dictionary, triples, listener); // Load RDF in the dictionary and generate triples dictionary.startProcessing(); parser.doParse(filename, baseUri, notation, appender); dictionary.endProcessing(); // Reorganize both the dictionary and the triples modHDT.reorganizeDictionary(listener); modHDT.reorganizeTriples(listener); modHDT.getHeader().insert( "_:statistics", HDTVocabulary.ORIGINAL_SIZE, appender.size); return modHDT; }
@Override public TempHDT loadFromRDF(HDTOptions specs, String filename, String baseUri, RDFNotation notation, ProgressListener listener) throws IOException, ParserException { RDFParserCallback parser = RDFParserFactory.getParserCallback(notation); // Create Modifiable Instance and parser TempHDT modHDT = new TempHDTImpl(specs, baseUri, ModeOfLoading.TWO_PASS); TempDictionary dictionary = modHDT.getDictionary(); TempTriples triples = modHDT.getTriples(); // Load RDF in the dictionary dictionary.startProcessing(); parser.doParse(filename, baseUri, notation, new DictionaryAppender(dictionary, listener)); dictionary.endProcessing(); // Reorganize IDs before loading triples modHDT.reorganizeDictionary(listener); // Load triples (second pass) parser.doParse(filename, baseUri, notation, new TripleAppender2(dictionary, triples, listener)); //reorganize HDT modHDT.reorganizeTriples(listener); return modHDT; } }
// NOTE(review): fragment — the enclosing method signature is outside this view.
TempDictionary dictionary = modHDT.getDictionary();
TempTriples triples = modHDT.getTriples();
// Reorganize both the dictionary and the triples into their final order.
modHDT.reorganizeDictionary(listener);
modHDT.reorganizeTriples(listener);
// Record the original input size in the header statistics node.
// ("size" is declared outside this view — presumably the bytes/triples read.)
modHDT.getHeader().insert( "_:statistics", HDTVocabulary.ORIGINAL_SIZE, size);