/**
 * Confiscates the page identified by {@code dpid}, delegating directly to the
 * wrapped buffer cache.
 *
 * @param dpid disk page id of the page to confiscate
 * @return the confiscated page
 * @throws HyracksDataException if the underlying buffer cache fails
 */
@Override
public ICachedPage confiscatePage(long dpid) throws HyracksDataException {
    return bufferCache.confiscatePage(dpid);
}
private void confiscatePage(int pageId) throws HyracksDataException { //Writing new page. Confiscate the page from the buffer cache. final ICachedPage newPage = bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, pageId)); cachedFrames.put(pageId, getLAFFrame(newPage)); maxPageId = Math.max(maxPageId, pageId); }
/**
 * Hands the finished current page to the writer queue and confiscates the next
 * sequential page of the file as the new current page.
 *
 * @throws HyracksDataException if queueing or confiscation fails
 */
protected void pinNextPage() throws HyracksDataException {
    // Flush the completed page before advancing.
    queue.put(currentPage, this);
    ++currentPageId;
    final long nextDpid = BufferedFileHandle.getDiskPageId(fileId, currentPageId);
    currentPage = bufferCache.confiscatePage(nextDpid);
}
/**
 * Lazily confiscates the metadata page (first call only) and writes the
 * bloom-filter parameters into its buffer at their fixed offsets.
 *
 * @throws HyracksDataException if the metadata page cannot be confiscated
 */
private void allocateAndInitMetaDataPage() throws HyracksDataException {
    if (metaDataPage == null) {
        final long dpid = BufferedFileHandle.getDiskPageId(fileId, METADATA_PAGE_ID);
        metaDataPage = bufferCache.confiscatePage(dpid);
    }
    // Persist all filter parameters; offsets are fixed positions in the page.
    final ICachedPage page = metaDataPage;
    page.getBuffer().putInt(NUM_PAGES_OFFSET, numPages);
    page.getBuffer().putInt(NUM_HASHES_USED_OFFSET, numHashes);
    page.getBuffer().putLong(NUM_ELEMENTS_OFFSET, actualNumElements);
    page.getBuffer().putLong(NUM_BITS_OFFSET, numBits);
    page.getBuffer().putInt(VERSION_OFFSET, BLOCKED_BLOOM_FILTER_VERSION);
}
/**
 * Pushes a new interior level onto the node-frontier stack, backed by a
 * confiscated page that has no disk identity yet (assigned later).
 *
 * @throws HyracksDataException if page confiscation fails
 */
protected void addLevel() throws HyracksDataException {
    final NodeFrontier frontier = new NodeFrontier(tupleWriter.createTupleReference());
    // No real page id yet; INVALID_DPID marks the page as unassigned.
    frontier.page = bufferCache.confiscatePage(IBufferCache.INVALID_DPID);
    frontier.lastTuple.setFieldCount(cmp.getKeyFieldCount());
    frontier.pageId = -1;
    interiorFrame.setPage(frontier.page);
    // The level byte is the current depth of the frontier stack.
    interiorFrame.initBuffer((byte) nodeFrontiers.size());
    nodeFrontiers.add(frontier);
}
while (currentPageId <= numPages) { ICachedPage page = bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, currentPageId)); initPage(page.getBuffer().array()); pages[currentPageId - 1] = page;
@Override public void open(int fileId) throws HyracksDataException { this.fileId = fileId; // get the number of pages of the file int pages = bufferCache.getNumPagesOfFile(fileId); //if there are no pages in the file yet, we're just initializing if (pages == 0) { if (confiscatedPage != null) { throw new HyracksDataException("Metadata Page Manager is already initialized"); } ITreeIndexMetadataFrame metaFrame = createMetadataFrame(); ICachedPage metaNode = bufferCache.confiscatePage(BufferCache.INVALID_DPID); try { metaFrame.setPage(metaNode); metaFrame.init(); metaFrame.setMaxPage(-1); } finally { confiscatedPage = metaNode; } } }
.confiscatePage(BufferedFileHandle.getDiskPageId(getFileId(), leafFrontier.pageId)); leafFrame.setPage(leafFrontier.page); leafFrame.initBuffer((byte) 0);
lowerFrame.setPage(frontier.page); frontier.page = bufferCache.confiscatePage(BufferCache.INVALID_DPID); interiorFrame.setPage(frontier.page); interiorFrame.initBuffer((byte) level);
public AbstractOnDiskInvertedIndexBulkLoader(float btreeFillFactor, boolean verifyInput, long numElementsHint, boolean checkIfEmptyIndex, int startPageId) throws HyracksDataException { this.verifyInput = verifyInput; this.invListCmp = MultiComparator.create(invListCmpFactories); if (verifyInput) { allCmp = MultiComparator.create(btree.getComparatorFactories(), invListCmpFactories); } else { allCmp = null; } this.btreeTupleBuilder = new ArrayTupleBuilder(btree.getFieldCount()); this.btreeTupleReference = new ArrayTupleReference(); this.lastTupleBuilder = new ArrayTupleBuilder(numTokenFields + numInvListKeys); this.lastTuple = new ArrayTupleReference(); this.btreeBulkloader = btree.createBulkLoader(btreeFillFactor, verifyInput, numElementsHint, checkIfEmptyIndex); currentPageId = startPageId; currentPage = bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, currentPageId)); invListBuilder.setTargetBuffer(currentPage.getBuffer().array(), 0); queue = bufferCache.createFIFOQueue(); }
public AbstractTreeIndexBulkLoader(float fillFactor) throws HyracksDataException { leafFrame = leafFrameFactory.createFrame(); interiorFrame = interiorFrameFactory.createFrame(); metaFrame = freePageManager.createMetadataFrame(); queue = bufferCache.createFIFOQueue(); if (!isEmptyTree(leafFrame)) { throw HyracksDataException.create(ErrorCode.CANNOT_BULK_LOAD_NON_EMPTY_TREE); } this.cmp = MultiComparator.create(cmpFactories); leafFrame.setMultiComparator(cmp); interiorFrame.setMultiComparator(cmp); tupleWriter = leafFrame.getTupleWriter(); NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference()); leafFrontier.pageId = freePageManager.takePage(metaFrame); leafFrontier.page = bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId)); interiorFrame.setPage(leafFrontier.page); interiorFrame.initBuffer((byte) 0); interiorMaxBytes = (int) (interiorFrame.getBuffer().capacity() * fillFactor); leafFrame.setPage(leafFrontier.page); leafFrame.initBuffer((byte) 0); leafMaxBytes = (int) (leafFrame.getBuffer().capacity() * fillFactor); slotSize = leafFrame.getSlotSize(); nodeFrontiers.add(leafFrontier); pagesToWrite = new ArrayList<>(); compressedPageWriter = bufferCache.getCompressedPageWriter(fileId); }
freePageManager.takeBlock(metaFrame, multiplier - 1)); } else { leafFrontier.page = bufferCache.confiscatePage(dpid); } else { final long dpid = BufferedFileHandle.getDiskPageId(getFileId(), leafFrontier.pageId); leafFrontier.page = bufferCache.confiscatePage(dpid); leafFrame.setPage(leafFrontier.page); leafFrame.initBuffer((byte) 0);
frontier.page = bufferCache.confiscatePage(BufferCache.INVALID_DPID); interiorFrame.setPage(frontier.page); interiorFrame.initBuffer((byte) level);