/**
 * Creates a bloom filter backed by pages of the given buffer cache.
 *
 * @param bufferCache cache whose pages will hold the filter's bit vector
 * @param file        on-disk file backing the filter
 * @param keyFields   indexes of the tuple fields that form the filter key
 * @throws HyracksDataException declared for subclass/caller compatibility
 */
public BloomFilter(IBufferCache bufferCache, FileReference file, int[] keyFields) throws HyracksDataException {
    this.bufferCache = bufferCache;
    this.file = file;
    this.keyFields = keyFields;
    // One page supplies pageSize * 8 bits for the filter.
    this.numBitsPerPage = bufferCache.getPageSize() * Byte.SIZE;
    // Integer division: assumes numBitsPerPage is a multiple of NUM_BITS_PER_BLOCK — TODO confirm.
    this.numBlocksPerPage = this.numBitsPerPage / NUM_BITS_PER_BLOCK;
}
/** @return the page size, in bytes, of the underlying buffer cache. */
@Override
public int getPageSize() {
    return bufferCache.getPageSize();
}
/**
 * Maps a compressed page id to the index of the Look-Aside-File (LAF) page that
 * holds its entry. Entries are ENTRY_LENGTH bytes each and packed densely.
 *
 * @param compressedPageId logical id of the compressed page
 * @return 0-based LAF page index containing this page's entry
 */
private int getLAFEntryPageId(int compressedPageId) {
    // Promote to long before multiplying: compressedPageId * ENTRY_LENGTH can
    // overflow int for very large files; the final quotient fits in an int.
    return (int) ((long) compressedPageId * ENTRY_LENGTH / bufferCache.getPageSize());
}
// NOTE(review): fragment of a larger method — the enclosing loop/context is not visible here.
// Whole elements that fit between startOff and the end of the first page.
int cumulElements = (bufferCache.getPageSize() - startOff) / elementSize;
// Each later page contributes a fixed element count; integer division discards any
// trailing partial slot — presumably elements never straddle page boundaries — TODO confirm.
elementIndexes[i] = elementIndexes[i - 1] + (bufferCache.getPageSize() / elementSize);
/**
 * Writes the (offset, size) LAF entry for the given compressed page into the
 * current buffered page.
 *
 * @param compressedPageId logical id of the compressed page being recorded
 * @param offset           byte offset of the compressed page within the file
 * @param size             compressed size of the page in bytes
 */
public void writePageInfo(int compressedPageId, long offset, long size) {
    // Entry position within the current LAF page (entries are ENTRY_LENGTH bytes apart).
    final int entryOffset = compressedPageId * ENTRY_LENGTH % bufferCache.getPageSize();
    //Put page offset
    cPage.getBuffer().putLong(entryOffset, offset);
    //Put page size
    cPage.getBuffer().putLong(entryOffset + SIZE_ENTRY_OFFSET, size);
    //Keep the max entry offset to set EOF (if needed)
    maxEntryOffset = Math.max(maxEntryOffset, entryOffset);
    numOfEntries++;
}
/**
 * Creates an R-tree over the given file.
 *
 * @param bufferCache          cache providing the tree's pages
 * @param freePageManager      allocator/recycler of tree pages
 * @param interiorFrameFactory factory for interior-node frames
 * @param leafFrameFactory     factory for leaf-node frames
 * @param cmpFactories         comparators for the key fields
 * @param fieldCount           number of fields per tuple
 * @param file                 backing file reference
 * @param isPointMBR           true if MBRs are stored as points (degenerate boxes)
 */
public RTree(IBufferCache bufferCache, IPageManager freePageManager, ITreeIndexFrameFactory interiorFrameFactory,
        ITreeIndexFrameFactory leafFrameFactory, IBinaryComparatorFactory[] cmpFactories, int fieldCount,
        FileReference file, boolean isPointMBR) {
    super(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmpFactories, fieldCount, file);
    globalNsn = new AtomicLong();
    ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
    ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
    // A tuple must fit in both frame kinds, so the effective cap is the smaller of the two.
    maxTupleSize = Math.min(leafFrame.getMaxTupleSize(bufferCache.getPageSize()),
            interiorFrame.getMaxTupleSize(bufferCache.getPageSize()));
    this.isPointMBR = isPointMBR;
}
/**
 * Creates a B-tree over the given file.
 *
 * @param bufferCache          cache providing the tree's pages
 * @param freePageManager      allocator/recycler of tree pages
 * @param interiorFrameFactory factory for interior-node frames
 * @param leafFrameFactory     factory for leaf-node frames
 * @param cmpFactories         comparators for the key fields
 * @param fieldCount           number of fields per tuple
 * @param file                 backing file reference
 */
public BTree(IBufferCache bufferCache, IPageManager freePageManager, ITreeIndexFrameFactory interiorFrameFactory,
        ITreeIndexFrameFactory leafFrameFactory, IBinaryComparatorFactory[] cmpFactories, int fieldCount,
        FileReference file) {
    super(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmpFactories, fieldCount, file);
    // Fair lock: writers performing structure modifications are not starved by readers.
    this.treeLatch = new ReentrantReadWriteLock(true);
    this.smoCounter = new AtomicInteger();
    ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
    ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
    // A tuple must fit in both frame kinds, so the effective cap is the smaller of the two.
    maxTupleSize = Math.min(leafFrame.getMaxTupleSize(bufferCache.getPageSize()),
            interiorFrame.getMaxTupleSize(bufferCache.getPageSize()));
}
/** * Tries to allocate enough buffers to read the inverted list at once. If memory budget is not enough, this method * stops allocating buffers. */ private void allocateBuffers() throws HyracksDataException { do { ByteBuffer tmpBuffer = bufferManagerForSearch.acquireFrame(bufferCache.getPageSize()); if (tmpBuffer == null) { // Budget exhausted break; } Arrays.fill(tmpBuffer.array(), (byte) 0); buffers.add(tmpBuffer); } while (buffers.size() < numPages); // At least there should be one frame to load a page from disk. if (buffers.isEmpty()) { throw HyracksDataException.create(ErrorCode.NOT_ENOUGH_BUDGET_FOR_TEXTSEARCH, FixedSizeElementInvertedListCursor.class.getName()); } }
/**
 * Reports the memory allocated to this component's virtual buffer cache in bytes:
 * page budget multiplied by page size.
 */
@Override
public long getMemoryAllocationSize() {
    final IBufferCache vbc = btree.getBufferCache();
    final long pageBudget = vbc.getPageBudget();
    return pageBudget * vbc.getPageSize();
}
/**
 * Advances the cursor to the next element in the (multi-buffer) inverted list and
 * points {@code tuple} at it.
 */
@Override
public void doNext() throws HyracksDataException {
    // If another whole element would not fit after the current one, move to the next
    // page — presumably elements never straddle a page boundary — TODO confirm.
    if (currentOffsetForScan + 2 * elementSize > bufferCache.getPageSize()) {
        currentPageIxForScan++;
        currentOffsetForScan = 0;
    } else {
        currentOffsetForScan += elementSize;
    }
    // Needs to read the next block?
    // NOTE(review): assumes loadPages() repositions currentPageIxForScan/buffer state
    // so that offset 0 is the next element — confirm against loadPages().
    if (currentElementIxForScan > bufferEndElementIx && endPageId > bufferEndPageId) {
        loadPages();
        currentOffsetForScan = 0;
    }
    currentElementIxForScan++;
    tuple.reset(buffers.get(currentPageIxForScan).array(), currentOffsetForScan);
}
/**
 * Pins and returns the LAF page containing the entry for the given compressed page.
 * The caller is responsible for unpinning the returned page.
 *
 * @param compressedPageId logical id of the compressed page
 * @return the pinned LAF page
 * @throws HyracksDataException if the pin fails
 */
private ICachedPage pinAndGetPage(int compressedPageId) throws HyracksDataException {
    // Promote to long before multiplying: compressedPageId * ENTRY_LENGTH can
    // overflow int for very large files; the final quotient fits in an int.
    final int pageId = (int) ((long) compressedPageId * ENTRY_LENGTH / bufferCache.getPageSize());
    return bufferCache.pin(getDiskPageId(pageId), false);
}
/**
 * Creates a writer for the Look-Aside File (LAF) that records per-page
 * (offset, size) entries for a compressed file.
 *
 * @param compressedFileManager owner that supplies the file id and receives entries
 * @param bufferCache           cache providing write buffers and the FIFO write queue
 */
public LAFWriter(CompressedFileManager compressedFileManager, IBufferCache bufferCache) {
    this.compressedFileManager = compressedFileManager;
    this.bufferCache = bufferCache;
    queue = bufferCache.createFIFOQueue();
    cachedFrames = new HashMap<>();
    recycledFrames = new ArrayDeque<>();
    this.fileId = compressedFileManager.getFileId();
    callBack = new PageWriteFailureCallback();
    // Whole entries per page; any remainder bytes at the end of a page are unused.
    maxNumOfEntries = bufferCache.getPageSize() / ENTRY_LENGTH;
    lastOffset = 0;
    totalNumOfPages = 0;
    // -1 sentinels: no page has been allocated/written yet.
    maxPageId = -1;
    currentPageId = -1;
}
/**
 * Advances the cursor to the next element in the (single-page) inverted list and
 * points {@code tuple} at it.
 */
@Override
public void doNext() throws HyracksDataException {
    // If another whole element would not fit after the current one on this page,
    // fetch the next page and restart at offset 0 — presumably elements never
    // straddle a page boundary — TODO confirm.
    if (currentOffsetForScan + 2 * elementSize > bufferCache.getPageSize()) {
        // Read the next page.
        currentOffsetForScan = 0;
        loadPages();
    } else {
        currentOffsetForScan += elementSize;
    }
    currentElementIxForScan++;
    tuple.reset(page.getBuffer().array(), currentOffsetForScan);
}
@Override public void ensureCapacity(IBufferCache bufferCache, ITupleReference tuple, IExtraPageBlockHelper extraPageBlockHelper) throws HyracksDataException { // we call ensureCapacity() for large tuples- ensure large flag is set setLargeFlag(true); int gapBytes = getBytesRequiredToWriteTuple(tuple) - getFreeContiguousSpace(); if (gapBytes > 0) { int deltaPages = (int) Math.ceil((double) gapBytes / bufferCache.getPageSize()); growCapacity(extraPageBlockHelper, bufferCache, deltaPages); } }
/**
 * Returns the size of this in-memory component in bytes: the virtual buffer
 * cache's page budget multiplied by its page size.
 */
@Override
public long getSize() {
    final IBufferCache vbc = getIndex().getBufferCache();
    return (long) vbc.getPageSize() * vbc.getPageBudget();
}
/**
 * Grows this frame's backing block by {@code deltaPages} pages, relocating the
 * slot directory (which sits at the end of the buffer) and crediting the new
 * space to the free-space counter.
 *
 * @param extraPageBlockHelper helper supplying the extra page block
 * @param bufferCache          cache performing the resize
 * @param deltaPages           number of additional pages to append
 * @throws HyracksDataException if the resize fails
 */
private void growCapacity(IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache, int deltaPages)
        throws HyracksDataException {
    int framePagesOld = page.getFrameSizeMultiplier();
    int newMultiplier = framePagesOld + deltaPages;
    // we need to get the old slot offsets before we grow
    int oldSlotEnd = slotManager.getSlotEndOff();
    int oldSlotStart = slotManager.getSlotStartOff() + slotManager.getSlotSize();
    bufferCache.resizePage(getPage(), newMultiplier, extraPageBlockHelper);
    // The resize may replace the underlying buffer, so re-fetch it before touching it.
    buf = getPage().getBuffer();
    // fixup the slots: move the slot directory from its old position to the new end
    // of the enlarged buffer (slotManager offsets now reflect the new size).
    System.arraycopy(buf.array(), oldSlotEnd, buf.array(), slotManager.getSlotEndOff(),
            oldSlotStart - oldSlotEnd);
    // fixup total free space counter
    buf.putInt(TOTAL_FREE_SPACE_OFFSET,
            buf.getInt(TOTAL_FREE_SPACE_OFFSET) + (bufferCache.getPageSize() * deltaPages));
}
/**
 * Looks up the compressed page's (offset, size) entry in the LAF and stores it on
 * the given cached-page object.
 *
 * @param compressedPageId page whose entry is looked up
 * @param compressedPage   target that receives the offset and size
 * @throws HyracksDataException if the LAF page cannot be pinned
 */
private void setCompressedPageInfo(int compressedPageId, ICachedPageInternal compressedPage)
        throws HyracksDataException {
    ensureState(READABLE);
    if (totalNumOfPages == 0) {
        /*
         * It seems it is legal to pin empty file.
         * Return the page information as it is not compressed.
         */
        compressedPage.setCompressedPageOffset(0);
        compressedPage.setCompressedPageSize(bufferCache.getPageSize());
        return;
    }
    final ICachedPage page = pinAndGetPage(compressedPageId);
    try {
        // No need for read latches as pages are immutable.
        final ByteBuffer buf = page.getBuffer();
        // Entry position within the pinned LAF page (entries are ENTRY_LENGTH bytes apart).
        final int entryOffset = compressedPageId * ENTRY_LENGTH % bufferCache.getPageSize();
        compressedPage.setCompressedPageOffset(buf.getLong(entryOffset));
        compressedPage.setCompressedPageSize((int) buf.getLong(entryOffset + SIZE_ENTRY_OFFSET));
    } finally {
        // Always unpin, even if reading the entry throws.
        bufferCache.unpin(page);
    }
}
/**
 * Computes {@code totalNumOfPages} by counting LAF entries: every page before the
 * last is full, and the last page is scanned for the EOF marker to find how many
 * entries it actually holds.
 *
 * @throws HyracksDataException if the last LAF page cannot be pinned
 */
private void init() throws HyracksDataException {
    final int numOfPages = bufferCache.getNumPagesOfFile(fileId);
    //Maximum number of entries in a page
    final int numOfEntriesPerPage = bufferCache.getPageSize() / ENTRY_LENGTH;
    //get the last page which may contain less entries than maxNumOfEntries
    final long dpid = getDiskPageId(numOfPages - 1);
    final ICachedPage page = bufferCache.pin(dpid, false);
    try {
        final ByteBuffer buf = page.getBuffer();
        //Start at 1 since it is impossible to have EOF at the first entry of a page
        int i = 1;
        //Seek EOF and count number of entries
        // (each entry starts at i * ENTRY_LENGTH; the first long of an entry is the offset,
        // and an offset equal to EOF marks the end of valid entries)
        while (i < numOfEntriesPerPage && buf.getLong(i * ENTRY_LENGTH) != EOF) {
            i++;
        }
        // Full pages before the last one, plus the i valid entries found in the last page.
        totalNumOfPages = (numOfPages - 1) * numOfEntriesPerPage + i;
    } finally {
        bufferCache.unpin(page);
    }
}
// NOTE(review): fragment of the BTree interior-node insert path — the enclosing method
// (and the close of this if/throw) is not visible here.
// Serialized size of the key prefix of the tuple being inserted.
int tupleBytes = tupleWriter.bytesRequired(tuple, 0, cmp.getKeyFieldCount());
// Space needed = tuple bytes + slot entry + 4 bytes (presumably a child page pointer — TODO confirm).
int spaceNeeded = tupleBytes + slotSize + 4;
// Reject tuples that can never fit in an interior frame of the configured page size.
if (tupleBytes > interiorFrame.getMaxTupleSize(BTree.this.bufferCache.getPageSize())) { throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleBytes, interiorFrame.getMaxTupleSize(BTree.this.bufferCache.getPageSize());
// NOTE(review): fragment of a BTree node-copy/split path — the enclosing method
// (and the close of this if) is not visible here.
boolean largePage = false;
// If the source node uses a multi-page ("large") frame, grow the destination frame
// to match before copying; the multiplier is the source capacity in whole pages
// (presumably capacity is an exact multiple of the page size — TODO confirm).
if (leftNode.getBuffer().capacity() > newLeftNode.getBuffer().capacity()) {
    bufferCache.resizePage(newLeftNode, leftNode.getBuffer().capacity() / bufferCache.getPageSize(), ctx);
    largePage = true;