/**
 * Records a START_READ trace event for the given range into the in-memory
 * event log. Silently does nothing when tracing is disabled ({@code log == null})
 * or the log buffer is already full.
 */
public void logStartRead(DiskRangeList current) {
  if (log == null) return; // tracing disabled
  int pos = this.offset;
  if (pos >= log.length) return; // buffer full; drop the event
  // Identity hash of the backing buffer (0 when the range carries no data)
  // lets separate events be correlated to the same ByteBuffer instance.
  int bufferId = current.hasData() ? System.identityHashCode(current.getData()) : 0;
  log[pos] = makeIntPair(START_READ, bufferId);
  this.offset = pos + 1;
}
// Fragment (enclosing method and the `if` guarding this `throw` are outside this
// excerpt). A compressed block spills past the current chunk: if the next chunk
// is not compressed data, fail; otherwise continue assembling from `next`.
// NOTE(review): `next`, `compressed`, `extraChunkCount`, `remaining` are declared
// in the surrounding (not visible) scope — verify against the full method.
throw new IOException("Trying to extend compressed block into uncompressed block " + next); compressed = next.getData(); ++extraChunkCount; if (compressed.remaining() >= remaining) {
// Fragment — byte-identical to the snippet above; presumably extracted from a
// second occurrence of the same compressed-chunk continuation logic elsewhere
// in the file (candidate for a shared helper — confirm against the full source).
// The `if` guarding this unconditional `throw` lies outside this excerpt.
throw new IOException("Trying to extend compressed block into uncompressed block " + next); compressed = next.getData(); ++extraChunkCount; if (compressed.remaining() >= remaining) {
// Fragment (loop body continues past this excerpt — braces are unbalanced here).
// Walks the DiskRangeList; for each BufferChunk node, records its backing
// ByteBuffer in `toRelease` with value `true` — presumably marking it for later
// release; verify the flag's meaning against the map's consumer.
while (drl != null) { if (drl instanceof BufferChunk) { toRelease.put(drl.getData(), true);
// Fragment. Copies one disk range's bytes into a destination array at the offset
// relative to `readStartPos` (duplicate() is used so the source buffer's own
// position/limit are not disturbed), then returns the cached buffer to the cache.
// NOTE(review): this excerpt declares `ByteBuffer data` twice in what reads as one
// scope, which would not compile — the two halves (`drl` vs `candidate`) were
// almost certainly spliced together from two different methods during extraction;
// do not treat this as a single contiguous body without checking the full file.
long from = drl.getOffset(), to = drl.getEnd(); int offsetFromReadStart = (int)(from - readStartPos), candidateSize = (int)(to - from); ByteBuffer data = drl.getData().duplicate(); data.get(array, arrayOffset + offsetFromReadStart, candidateSize); cache.releaseBuffer(((CacheChunk)drl).getBuffer()); ByteBuffer data = candidate.getData().duplicate(); data.get(array, arrayOffset + offsetFromReadStart, candidateSize); cache.releaseBuffer(((CacheChunk)candidate).getBuffer());
// Fragment — byte-identical to the earlier toRelease walk; presumably a second
// occurrence of the same traversal elsewhere in the file (possible candidate for
// a shared helper — confirm against the full source). Loop body continues past
// this excerpt; braces are unbalanced here.
while (drl != null) { if (drl instanceof BufferChunk) { toRelease.put(drl.getData(), true);
// Fragment of a switch over stream kinds (enclosing switch statement is outside
// this excerpt). Both cases slice the stream's bytes out of the larger read range:
// duplicate() preserves the shared buffer's state, position is the stream's start
// relative to the range, and limit caps the slice at the stream's length.
// ROW_INDEX is taken when the column is included (or no projection is given);
// BLOOM_FILTER_UTF8 only when the column participates in SARG evaluation.
case ROW_INDEX: if (included == null || included[column]) { ByteBuffer bb = range.getData().duplicate(); bb.position((int) (offset - range.getOffset())); bb.limit((int) (bb.position() + stream.getLength())); case BLOOM_FILTER_UTF8: if (sargColumns != null && sargColumns[column]) { ByteBuffer bb = range.getData().duplicate(); bb.position((int) (offset - range.getOffset())); bb.limit((int) (bb.position() + stream.getLength()));