/**
 * Creates the single-pass indexer to use: a block-aware variant when
 * block indexing is enabled, the basic variant otherwise.
 * @param pa path of the index.
 * @param pr prefix of the index.
 * @return the Indexer instance to run.
 */
@Override
protected Indexer loadIndexer(String pa, String pr) {
	return blocks
		? new BlockSinglePassIndexer(pa, pr)
		: new BasicSinglePassIndexer(pa, pr);
}
/**
 * Hook method that creates the appropriate in-memory postings structure:
 * a field-aware block implementation when field information is recorded,
 * a plain block implementation otherwise.
 */
protected void createMemoryPostings() {
	mp = useFieldInformation
		? new BlockFieldMemoryPostings()
		: new BlockMemoryPostings();
}
/**
 * Returns the PostingInRun implementation that should be used for reading
 * the block postings written earlier: field-aware when tag information
 * was saved, plain block postings otherwise.
 */
protected PostingInRun getPostingReader() {
	return saveTagInformation
		? new BlockFieldPostingInRun(fieldCount)
		: new BlockPostingInRun();
}
/**
 * Builds the direct structure for the given index by inverting the
 * inverted index via an Inverted2DirectIndexBuilder.
 * @param index the on-disk index to add the direct structure to.
 * @throws Exception if the direct index cannot be created.
 */
@Override
protected void addDirectStructure(IndexOnDisk index) throws Exception {
	final Inverted2DirectIndexBuilder builder = new Inverted2DirectIndexBuilder(index);
	builder.createDirectIndex();
}
}
/** {@inheritDoc} */ @Override protected void createRunMerger(String[][] files) throws Exception{ //modified to use getPostingInRunClass() merger = new RunsMerger(new FileRunIteratorFactory(files, getPostingInRunClass(), 0)); }
/**
 * Hook method that creates the RunsMerger used for field-aware runs,
 * reading postings with FieldPostingInRun.
 * @param files the run filenames to merge.
 * @throws Exception if an error occurs while creating the merger.
 */
protected void createFieldRunMerger(String[][] files) throws Exception{ merger = new RunsMerger(new FileRunIteratorFactory(files, FieldPostingInRun.class, super.numFields)); }
/**
 * Initialises the merge: records the output bit stream, then opens one
 * RunIterator per run, advances each to its first posting, and places
 * them all in the priority queue.
 * @param size number of runs to merge.
 * @param invertedFile destination bit stream for the merged postings.
 * @throws Exception if a run cannot be opened or read.
 */
protected void init(int size, BitOut invertedFile) throws Exception {
	bos = invertedFile;
	queue = new PriorityQueue<RunIterator>(size, new PostingComparator());
	for (int runNo = 0; runNo < size; runNo++) {
		final RunIterator runIterator = runsSource.createRunIterator(runNo);
		runIterator.next(); // prime the iterator with its first posting before queuing
		queue.add(runIterator);
	}
}
/**
 * Triggers the writing of the in-memory postings to disk, using a default
 * RunWriter constructed from the two given file names.
 * @param file two-element array naming the output files handed to the RunWriter.
 * @throws IOException if an I/O error occurs.
 */
public void finish(String[] file) throws IOException {
	final RunWriter writer = new RunWriter(file[0], file[1]);
	finish(writer);
}
/**
 * Hook method that creates the right type of in-memory postings structure:
 * field-aware when field information is recorded, plain otherwise.
 */
protected void createMemoryPostings() {
	mp = useFieldInformation
		? new FieldsMemoryPostings()
		: new MemoryPostings();
}
/**
 * Returns the PostingInRun implementation that should be used for reading
 * the postings written earlier: field-aware when tag information was
 * saved, the simple reader otherwise.
 */
protected PostingInRun getPostingReader() {
	return saveTagInformation
		? new FieldPostingInRun(fieldCount)
		: new SimplePostingInRun();
}
/**
 * Force the indexer to flush everything and free memory. If a flush
 * delegate has been set, flushing is handed to the delegate; otherwise
 * the superclass implementation performs the flush.
 * @see org.terrier.structures.indexing.singlepass.BasicSinglePassIndexer#forceFlush()
 * @throws IOException if an I/O error occurs while flushing.
 */
@Override
protected void forceFlush() throws IOException {
	if (flushDelegate != null) {
		flushDelegate.forceFlush();
	} else {
		super.forceFlush();
	}
}
}
/**
 * Adds the statistics of another lexicon entry to this one, including
 * the per-field term frequencies accumulated in fieldTFs.
 * @param _le the lexicon entry to merge in; must be a FieldLexiconEntry.
 */
@Override
public void addToLexiconEntry(LexiconEntry _le) {
	super.addToLexiconEntry(_le);
	final int[] fieldFrequencies = ((FieldLexiconEntry) _le).getFieldFrequencies();
	addTo(fieldFrequencies, fieldTFs);
}
/**
 * Returns an iterator over the postings of this run.
 * @param runShift offset applied when iterating the run's postings.
 * @throws IOException if an I/O error occurs.
 */
@Override
public IterablePosting getPostingIterator(final int runShift) throws IOException {
	final IterablePosting postings = new fPIRPostingIterator(runShift);
	return postings;
}
@Override
/**
 * Creates the direct index by delegating to createInvertedIndex(collections)
 * — in this single-pass indexer the inverted-index build drives structure
 * creation, so no separate direct pass is performed here.
 * @param collections the collections to index.
 */
@Override public void createDirectIndex(Collection[] collections) { createInvertedIndex(collections); } @Override
/**
 * Returns an iterator over the block postings of this run.
 * @param runShift offset applied when iterating the run's postings.
 * @throws IOException if an I/O error occurs.
 */
@Override
public IterablePosting getPostingIterator(int runShift) throws IOException {
	final IterablePosting postings = new BlockPIRPostingIterator(runShift);
	return postings;
}
}
/**
 * Returns an iterator over the postings of this run.
 * @param runShift offset applied when iterating the run's postings.
 * @throws IOException if an I/O error occurs.
 */
@Override
public IterablePosting getPostingIterator(final int runShift) throws IOException {
	final IterablePosting postings = new PIRPostingIterator(runShift);
	return postings;
}
/**
 * Returns an iterator over the block + field postings of this run.
 * @param runShift offset applied when iterating the run's postings.
 * @throws IOException if an I/O error occurs.
 */
@Override
public IterablePosting getPostingIterator(final int runShift) throws IOException {
	final IterablePosting postings = new bfPIRPostingIterator(runShift);
	return postings;
}
}
/**
 * Hook method that creates a RunsMerger instance, selecting the
 * field-aware or simple posting reader according to useFieldInformation.
 * @param files the run filenames to merge.
 * @throws Exception if an error occurs while creating the merger.
 */
protected void createRunMerger(String[][] files) throws Exception {
	final Class<? extends PostingInRun> postingClass;
	if (useFieldInformation) {
		postingClass = FieldPostingInRun.class;
	} else {
		postingClass = SimplePostingInRun.class;
	}
	merger = new RunsMerger(new FileRunIteratorFactory(files, postingClass, 0));
}
/**
 * Hook method that creates the RunsMerger for field-aware block runs,
 * reading postings with BlockFieldPostingInRun.
 * @param files the run filenames to merge.
 * @throws IOException if an I/O error occurs.
 */
protected void createFieldRunMerger(String[][] files) throws IOException {
	final FileRunIteratorFactory factory =
		new FileRunIteratorFactory(files, BlockFieldPostingInRun.class, super.numFields);
	merger = new RunsMerger(factory);
}
/**
 * Hook method that creates the RunsMerger for block runs, reading
 * postings with BlockPostingInRun (no field information).
 * @param files the run filenames to merge.
 * @throws Exception if an error occurs while creating the merger.
 */
protected void createRunMerger(String[][] files) throws Exception {
	final FileRunIteratorFactory factory =
		new FileRunIteratorFactory(files, BlockPostingInRun.class, 0);
	merger = new RunsMerger(factory);
}