} else { sorted = new HeapPointWriter((int) pointCount, (int) pointCount, packedBytesLength, longOrds, singleValuePerDoc); sorted.copyFrom(heapPointWriter); sorted.close(); return sorted; } else {
} else { heapPointWriter.append(packedValue, pointCount, docID);
@Override public BytesRef apply(int i) {
  // Map the i'th point of the slice to an absolute heap slot, then reuse the
  // shared scratch ref to expose its packed value without copying.
  final int slot = Math.toIntExact(source.start + i);
  heapSource.getPackedValueSlice(slot, scratch);
  return scratch;
} };
/**
 * Creates a writer sized for {@code count} points: heap-backed when the count
 * fits under {@code maxPointsSortInHeap}, otherwise an offline (on-disk) writer.
 *
 * @param count how many points the writer must hold
 * @param desc  human-readable label used to name the offline temp file
 */
PointWriter getPointWriter(long count, String desc) throws IOException {
  if (count > maxPointsSortInHeap) {
    // Too many points to sort in memory; spill to a temp file instead.
    return new OfflinePointWriter(tempDir, tempFileNamePrefix, packedBytesLength, longOrds, desc, count, singleValuePerDoc);
  }
  final int size = Math.toIntExact(count);
  return new HeapPointWriter(size, size, packedBytesLength, longOrds, singleValuePerDoc);
}
/** If the current segment has too many points then we spill over to temp files / offline sort. */ private void spillToOffline() throws IOException { // For each .add we just append to this input file, then in .finish we sort this input and resursively build the tree: offlinePointWriter = new OfflinePointWriter(tempDir, tempFileNamePrefix, packedBytesLength, longOrds, "spill", 0, singleValuePerDoc); tempInput = offlinePointWriter.out; PointReader reader = heapPointWriter.getReader(0, pointCount); for(int i=0;i<pointCount;i++) { boolean hasNext = reader.next(); assert hasNext; offlinePointWriter.append(reader.packedValue(), i, heapPointWriter.docIDs[i]); } heapPointWriter = null; }
/** Sliced reference to points in an OfflineSorter.ByteSequencesWriter file. */
private static final class PathSlice {
  final PointWriter writer; // backing writer holding the points
  final long start;         // index of the first point in this slice
  final long count;         // number of points in this slice

  public PathSlice(PointWriter writer, long start, long count) {
    this.writer = writer;
    this.start = start;
    this.count = count;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("PathSlice(start=");
    sb.append(start).append(" count=").append(count).append(" writer=").append(writer).append(')');
    return sb.toString();
  }
}
/** Buffers one point (packed value + ord + docID) into this heap writer's parallel arrays,
 *  growing them as needed; capacity is capped at maxSize. */
@Override public void append(byte[] packedValue, long ord, int docID) {
  assert closed == false;
  assert packedValue.length == packedBytesLength;
  // Grow all parallel arrays together when full, keeping them the same length.
  if (docIDs.length == nextWrite) {
    int nextSize = Math.min(maxSize, ArrayUtil.oversize(nextWrite+1, Integer.BYTES));
    assert nextSize > nextWrite: "nextSize=" + nextSize + " vs nextWrite=" + nextWrite;
    docIDs = ArrayUtil.growExact(docIDs, nextSize);
    if (singleValuePerDoc == false) {
      // Ords are only tracked when a doc may carry multiple values; they live in
      // either a long[] or an int[], chosen up front (ordsLong != null means long ords).
      if (ordsLong != null) {
        ordsLong = ArrayUtil.growExact(ordsLong, nextSize);
      } else {
        ords = ArrayUtil.growExact(ords, nextSize);
      }
    }
  }
  writePackedValue(nextWrite, packedValue);
  if (singleValuePerDoc == false) {
    if (ordsLong != null) {
      ordsLong[nextWrite] = ord;
    } else {
      // int ords were selected up front, so the ord must still fit in 32 bits:
      assert ord <= Integer.MAX_VALUE;
      ords[nextWrite] = (int) ord;
    }
  }
  docIDs[nextWrite] = docID;
  nextWrite++;
}
heapPointWriter = new HeapPointWriter(16, maxPointsSortInHeap, packedBytesLength, longOrds, singleValuePerDoc);
/** If the current segment has too many points then we spill over to temp files / offline sort. */ private void spillToOffline() throws IOException { // For each .add we just append to this input file, then in .finish we sort this input and resursively build the tree: offlinePointWriter = new OfflinePointWriter(tempDir, tempFileNamePrefix, packedBytesLength, longOrds, "spill", 0, singleValuePerDoc); tempInput = offlinePointWriter.out; PointReader reader = heapPointWriter.getReader(0, pointCount); for(int i=0;i<pointCount;i++) { boolean hasNext = reader.next(); assert hasNext; offlinePointWriter.append(reader.packedValue(), i, heapPointWriter.docIDs[i]); } heapPointWriter = null; }
/** Sliced reference to points in an OfflineSorter.ByteSequencesWriter file. */
private static final class PathSlice {
  final PointWriter writer; // backing writer holding the points
  final long start;         // index of the first point in this slice
  final long count;         // number of points in this slice

  public PathSlice(PointWriter writer, long start, long count) {
    this.writer = writer;
    this.start = start;
    this.count = count;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("PathSlice(start=");
    sb.append(start).append(" count=").append(count).append(" writer=").append(writer).append(')');
    return sb.toString();
  }
}
/** Buffers one point (packed value + ord + docID) into this heap writer's parallel arrays,
 *  growing them as needed; capacity is capped at maxSize. */
@Override public void append(byte[] packedValue, long ord, int docID) {
  assert closed == false;
  assert packedValue.length == packedBytesLength;
  // Grow all parallel arrays together when full, keeping them the same length.
  if (docIDs.length == nextWrite) {
    int nextSize = Math.min(maxSize, ArrayUtil.oversize(nextWrite+1, Integer.BYTES));
    assert nextSize > nextWrite: "nextSize=" + nextSize + " vs nextWrite=" + nextWrite;
    docIDs = ArrayUtil.growExact(docIDs, nextSize);
    if (singleValuePerDoc == false) {
      // Ords are only tracked when a doc may carry multiple values; they live in
      // either a long[] or an int[], chosen up front (ordsLong != null means long ords).
      if (ordsLong != null) {
        ordsLong = ArrayUtil.growExact(ordsLong, nextSize);
      } else {
        ords = ArrayUtil.growExact(ords, nextSize);
      }
    }
  }
  writePackedValue(nextWrite, packedValue);
  if (singleValuePerDoc == false) {
    if (ordsLong != null) {
      ordsLong[nextWrite] = ord;
    } else {
      // int ords were selected up front, so the ord must still fit in 32 bits:
      assert ord <= Integer.MAX_VALUE;
      ords[nextWrite] = (int) ord;
    }
  }
  docIDs[nextWrite] = docID;
  nextWrite++;
}
} else { sorted = new HeapPointWriter((int) pointCount, (int) pointCount, packedBytesLength, longOrds, singleValuePerDoc); sorted.copyFrom(heapPointWriter); sorted.close(); return sorted; } else {
/** Pull a partition back into heap once the point count is low enough while recursing.
 *  Returns a new PathSlice (starting at 0) backed by a HeapPointWriter holding exactly
 *  this partition's points. */
private PathSlice switchToHeap(PathSlice source, List<Closeable> toCloseHeroically) throws IOException {
  int count = Math.toIntExact(source.count);
  // Not inside the try because we don't want to close it here:
  // the shared reader is registered in toCloseHeroically and closed by the caller.
  PointReader reader = source.writer.getSharedReader(source.start, source.count, toCloseHeroically);
  try (PointWriter writer = new HeapPointWriter(count, count, packedBytesLength, longOrds, singleValuePerDoc)) {
    // Copy every point of the slice into the heap writer, preserving order.
    for(int i=0;i<count;i++) {
      boolean hasNext = reader.next();
      assert hasNext;
      writer.append(reader.packedValue(), reader.ord(), reader.docID());
    }
    return new PathSlice(writer, 0, count);
  } catch (Throwable t) {
    // NOTE(review): a failure here can indicate corruption of the source temp file;
    // verifyChecksum appears to rethrow t with checksum context attached — confirm.
    throw verifyChecksum(t, source.writer);
  }
}
@Override public BytesRef apply(int i) {
  // Map the i'th point of the slice to an absolute heap slot, then reuse the
  // shared scratch ref to expose its packed value without copying.
  final int slot = Math.toIntExact(source.start + i);
  heapSource.getPackedValueSlice(slot, scratch);
  return scratch;
} };
/** If the current segment has too many points then we spill over to temp files / offline sort. */ private void spillToOffline() throws IOException { // For each .add we just append to this input file, then in .finish we sort this input and resursively build the tree: offlinePointWriter = new OfflinePointWriter(tempDir, tempFileNamePrefix, packedBytesLength, longOrds, "spill", 0, singleValuePerDoc); tempInput = offlinePointWriter.out; PointReader reader = heapPointWriter.getReader(0, pointCount); for(int i=0;i<pointCount;i++) { boolean hasNext = reader.next(); assert hasNext; offlinePointWriter.append(reader.packedValue(), i, heapPointWriter.docIDs[i]); } heapPointWriter = null; }
/** Sliced reference to points in an OfflineSorter.ByteSequencesWriter file. */
private static final class PathSlice {
  final PointWriter writer; // backing writer holding the points
  final long start;         // index of the first point in this slice
  final long count;         // number of points in this slice

  public PathSlice(PointWriter writer, long start, long count) {
    this.writer = writer;
    this.start = start;
    this.count = count;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("PathSlice(start=");
    sb.append(start).append(" count=").append(count).append(" writer=").append(writer).append(')');
    return sb.toString();
  }
}
} else { heapPointWriter.append(packedValue, pointCount, docID);
} else { sorted = new HeapPointWriter((int) pointCount, (int) pointCount, packedBytesLength, longOrds, singleValuePerDoc); sorted.copyFrom(heapPointWriter); sorted.close(); return sorted; } else {
/**
 * Creates a writer sized for {@code count} points: heap-backed when the count
 * fits under {@code maxPointsSortInHeap}, otherwise an offline (on-disk) writer.
 *
 * @param count how many points the writer must hold
 * @param desc  human-readable label used to name the offline temp file
 */
PointWriter getPointWriter(long count, String desc) throws IOException {
  if (count > maxPointsSortInHeap) {
    // Too many points to sort in memory; spill to a temp file instead.
    return new OfflinePointWriter(tempDir, tempFileNamePrefix, packedBytesLength, longOrds, desc, count, singleValuePerDoc);
  }
  final int size = Math.toIntExact(count);
  return new HeapPointWriter(size, size, packedBytesLength, longOrds, singleValuePerDoc);
}
@Override public BytesRef apply(int i) {
  // Map the i'th point of the slice to an absolute heap slot, then reuse the
  // shared scratch ref to expose its packed value without copying.
  final int slot = Math.toIntExact(source.start + i);
  heapSource.getPackedValueSlice(slot, scratch);
  return scratch;
} };