/** Lazily clones the parent's shared terms stream the first time this enum needs it. */
void initIndexInput() {
  if (this.in != null) {
    return; // already initialized
  }
  this.in = fr.parent.termsIn.clone();
}
/** Returns an independently-positioned copy; the delegate input is deep-cloned. */
@Override
public FileIndexInput clone() {
  final FileIndexInput copy = (FileIndexInput) super.clone();
  copy.in = this.in.clone();
  return copy;
}
/**
 * Opens an independent reader over this file's finished content.
 *
 * @return a clone of the content input, positioned independently of other readers
 * @throws AccessDeniedException if the file is still open for writing (content not yet published)
 */
public IndexInput openInput() throws IOException {
  // Read the field once so the null-check and the clone observe the same value.
  final IndexInput snapshot = this.content;
  if (snapshot == null) {
    throw new AccessDeniedException("Can't open a file still open for writing: " + fileName);
  }
  return snapshot.clone();
}
/** Clones this slice: the base input is deep-cloned, the slice window is copied as-is. */
@Override
public SlicedIndexInput clone() {
  final SlicedIndexInput copy = (SlicedIndexInput) super.clone();
  copy.base = this.base.clone();
  copy.fileOffset = this.fileOffset;
  copy.length = this.length;
  return copy;
}
/**
 * Clones this input without copying the read buffer: the clone starts with an empty
 * buffer anchored at the current file pointer and refills lazily on first read.
 */
@Override
public BufferedIndexInput clone() {
  final BufferedIndexInput copy = (BufferedIndexInput) super.clone();
  copy.buffer = null;
  copy.bufferLength = 0;
  copy.bufferPosition = 0;
  copy.bufferStart = getFilePointer();
  return copy;
}
/** {@inheritDoc} */
@Override
public IndexInput clone() {
  // Fail fast BEFORE allocating the copy: the original checked `closed` only after
  // super.clone(), constructing a clone that was then thrown away on the error path.
  if (closed) {
    throw new AlreadyClosedException(toString());
  }
  GridLuceneInputStream clone = (GridLuceneInputStream) super.clone();
  // Mark as a clone so close() on it does not release the shared underlying data.
  clone.isClone = true;
  return clone;
}
if (docIn == null) { docIn = startDocIn.clone();
SlicedIndexInput(String sliceDescription, IndexInput base, long offset, long length) { super((sliceDescription == null) ? base.toString() : (base.toString() + " [slice=" + sliceDescription + "]"), BufferedIndexInput.BUFFER_SIZE); if (offset < 0 || length < 0 || offset + length > base.length()) { throw new IllegalArgumentException("slice() " + sliceDescription + " out of bounds: " + base); } this.base = base.clone(); this.fileOffset = offset; this.length = length; }
/**
 * (Re)positions this enum at the start of the postings list described by {@code termState}.
 * Copies the term's file pointers and counts into this enum, seeks the doc stream, and
 * resets all iteration state so the next advance starts from the first document.
 *
 * @param termState per-term metadata (doc freq, file pointers, skip offset, singleton doc)
 * @param flags     feature flags; only {@link PostingsEnum#FREQS} is consulted here
 * @return this enum, reset and ready to iterate
 */
public PostingsEnum reset(IntBlockTermState termState, int flags) throws IOException {
  docFreq = termState.docFreq;
  // Without freqs in the index, each posting implicitly has freq 1, so total == docFreq.
  totalTermFreq = indexHasFreq ? termState.totalTermFreq : docFreq;
  docTermStartFP = termState.docStartFP;
  skipOffset = termState.skipOffset;
  singletonDocID = termState.singletonDocID;
  // A single-doc term is encoded inline (singletonDocID); no stream access is needed.
  if (docFreq > 1) {
    if (docIn == null) {
      // lazy init
      docIn = startDocIn.clone();
    }
    docIn.seek(docTermStartFP);
  }
  doc = -1;
  this.needsFreq = PostingsEnum.featureRequested(flags, PostingsEnum.FREQS);
  // If freqs are absent or unwanted, pre-fill the buffer with 1 so readers see a constant.
  if (indexHasFreq == false || needsFreq == false) {
    Arrays.fill(freqBuffer, 1);
  }
  accum = 0;
  docUpto = 0;
  nextSkipDoc = BLOCK_SIZE - 1; // we won't skip if target is found in first block
  // Force a block refill on the first advance.
  docBufferUpto = BLOCK_SIZE;
  skipped = false;
  return this;
}
/**
 * Creates a doc/freq postings enum for {@code fieldInfo}'s field.
 * The doc stream is cloned lazily (on first reset); the positions stream is cloned eagerly.
 */
public BlockPostingsEnum(FieldInfo fieldInfo) throws IOException {
  this.startDocIn = Lucene50PostingsReader.this.docIn;
  this.docIn = null; // lazily cloned from startDocIn when first needed
  this.posIn = Lucene50PostingsReader.this.posIn.clone();
  encoded = new byte[MAX_ENCODED_SIZE];
  indexHasPayloads = fieldInfo.hasPayloads();
  indexHasOffsets =
      fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
          >= 0;
}
/**
 * Creates a reader that walks {@code bkd}'s leaf blocks sequentially for merging,
 * remapping doc IDs through {@code docMap}.
 */
public MergeReader(BKDReader bkd, MergeState.DocMap docMap) throws IOException {
  this.bkd = bkd;
  this.docMap = docMap;
  this.packedValues = new byte[bkd.maxPointsInLeafNode * bkd.packedBytesLength];
  // No visitor / no index tree: this state is only used for linear leaf traversal.
  state =
      new BKDReader.IntersectState(
          bkd.in.clone(),
          bkd.numDataDims,
          bkd.packedBytesLength,
          bkd.packedIndexBytesLength,
          bkd.maxPointsInLeafNode,
          null,
          null);
  // Position the cloned input at the first leaf block.
  state.in.seek(bkd.getMinLeafBlockFP());
}
/**
 * Creates a postings enum exposing docs, freqs, positions, and (when indexed)
 * offsets and payloads for {@code fieldInfo}'s field. Buffers for features the
 * field does not index are left null.
 */
public EverythingEnum(FieldInfo fieldInfo) throws IOException {
  this.startDocIn = Lucene50PostingsReader.this.docIn;
  this.docIn = null; // lazily cloned from startDocIn when first needed
  this.posIn = Lucene50PostingsReader.this.posIn.clone();
  this.payIn = Lucene50PostingsReader.this.payIn.clone();
  encoded = new byte[MAX_ENCODED_SIZE];

  indexHasOffsets =
      fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
          >= 0;
  offsetStartDeltaBuffer = indexHasOffsets ? new int[MAX_DATA_SIZE] : null;
  offsetLengthBuffer = indexHasOffsets ? new int[MAX_DATA_SIZE] : null;
  if (indexHasOffsets == false) {
    // Sentinel values: offsets are reported as -1 when the field has none.
    startOffset = -1;
    endOffset = -1;
  }

  indexHasPayloads = fieldInfo.hasPayloads();
  payloadLengthBuffer = indexHasPayloads ? new int[MAX_DATA_SIZE] : null;
  payloadBytes = indexHasPayloads ? new byte[128] : null;
  payload = indexHasPayloads ? new BytesRef() : null;
}
/**
 * Cloning constructor: copies {@code reader}'s configuration, deep-clones its streams
 * and decompressor so the new instance can be used from another thread, and starts open.
 */
private CompressingTermVectorsReader(CompressingTermVectorsReader reader) {
  // Immutable configuration is shared.
  this.fieldInfos = reader.fieldInfos;
  this.packedIntsVersion = reader.packedIntsVersion;
  this.compressionMode = reader.compressionMode;
  this.chunkSize = reader.chunkSize;
  this.numDocs = reader.numDocs;
  this.version = reader.version;
  this.numChunks = reader.numChunks;
  this.numDirtyChunks = reader.numDirtyChunks;
  this.maxPointer = reader.maxPointer;
  // Stateful pieces get independent copies.
  this.vectorsStream = reader.vectorsStream.clone();
  this.indexReader = reader.indexReader.clone();
  this.decompressor = reader.decompressor.clone();
  // The block reader must wrap THIS instance's cloned stream, not the source's.
  this.reader =
      new BlockPackedReaderIterator(vectorsStream, packedIntsVersion, PACKED_BLOCK_SIZE, 0);
  this.closed = false;
}
/** Create a new {@link IntersectState} for {@code visitor}, choosing the packed or legacy index tree. */
public IntersectState getIntersectState(IntersectVisitor visitor) {
  final IndexTree index = (packedIndex != null) ? new PackedIndexTree() : new LegacyIndexTree();
  return new IntersectState(
      in.clone(),
      numDataDims,
      packedBytesLength,
      packedIndexBytesLength,
      maxPointsInLeafNode,
      visitor,
      index);
}
/**
 * Cloning constructor: copies {@code reader}'s configuration, deep-clones its streams
 * and decompressor, and starts open. {@code merging} selects merge-optimized reads.
 */
private CompressingStoredFieldsReader(CompressingStoredFieldsReader reader, boolean merging) {
  // Immutable configuration is shared with the source reader.
  this.version = reader.version;
  this.fieldInfos = reader.fieldInfos;
  this.maxPointer = reader.maxPointer;
  this.chunkSize = reader.chunkSize;
  this.packedIntsVersion = reader.packedIntsVersion;
  this.compressionMode = reader.compressionMode;
  this.numDocs = reader.numDocs;
  this.numChunks = reader.numChunks;
  this.numDirtyChunks = reader.numDirtyChunks;
  // Stateful pieces get independent copies.
  this.fieldsStream = reader.fieldsStream.clone();
  this.indexReader = reader.indexReader.clone();
  this.decompressor = reader.decompressor.clone();
  this.merging = merging;
  this.state = new BlockState();
  this.closed = false;
}
if (docIn == null) { docIn = startDocIn.clone();
final IndexInput clone = indexIn.clone();
} else { skipStream[i] = skipStream[0].clone(); if (inputIsBuffered && length < BufferedIndexInput.BUFFER_SIZE) { ((BufferedIndexInput) skipStream[i]).setBufferSize(Math.max(BufferedIndexInput.MIN_BUFFER_SIZE, (int) length));
/**
 * Clones the provided input, reads all bytes from the file, and calls {@link #checkFooter}
 * <p>
 * Note that this method may be slow, as it must process the entire file.
 * If you just need to extract the checksum value, call {@link #retrieveChecksum}.
 */
public static long checksumEntireFile(IndexInput input) throws IOException {
  // Work on a clone so the caller's position is untouched, rewound to the start.
  final IndexInput raw = input.clone();
  raw.seek(0);
  final ChecksumIndexInput in = new BufferedChecksumIndexInput(raw);
  assert in.getFilePointer() == 0;
  final long footer = footerLength();
  if (in.length() < footer) {
    throw new CorruptIndexException("misplaced codec footer (file truncated?): length=" + in.length() + " but footerLength==" + footerLength(), input);
  }
  // Checksum everything up to the footer, then verify the footer itself.
  in.seek(in.length() - footer);
  return checkFooter(in);
}
this.commonSuffix = commonSuffix; in = fr.parent.termsIn.clone(); stack = new IntersectTermsEnumFrame[5]; for(int idx=0;idx<stack.length;idx++) {