/**
 * Hands a fully-consumed {@link ColumnVectorBatch} back to the pool so it can be reused
 * by a subsequent read instead of being reallocated.
 *
 * @param data the batch the consumer has finished processing
 */
@Override
public void returnData(ColumnVectorBatch data) {
  cvbPool.offer(data);
}
@Override public void returnData(OrcEncodedColumnBatch ecb) { for (int colIx = 0; colIx < ecb.getTotalColCount(); ++colIx) { if (!ecb.hasData(colIx)) continue; ColumnStreamData[] datas = ecb.getColumnData(colIx); for (ColumnStreamData data : datas) { if (data == null || data.decRef() != 0) continue; if (LlapIoImpl.LOCKING_LOGGER.isTraceEnabled()) { for (MemoryBuffer buf : data.getCacheBuffers()) { LlapIoImpl.LOCKING_LOGGER.trace("Unlocking {} at the end of processing", buf); } } bufferManager.decRefBuffers(data.getCacheBuffers()); if (useObjectPools) { CSD_POOL.offer(data); } } } // We can offer ECB even with some streams not discarded; reset() will clear the arrays. if (useObjectPools) { ECB_POOL.offer(ecb); } }
@Override public void returnData(OrcEncodedColumnBatch ecb) { for (int colIx = 0; colIx < ecb.getTotalColCount(); ++colIx) { if (!ecb.hasData(colIx)) continue; // TODO: reuse columnvector-s on hasBatch - save the array by column? take apart each list. ColumnStreamData[] datas = ecb.getColumnData(colIx); for (ColumnStreamData data : datas) { if (data == null || data.decRef() != 0) continue; if (LlapIoImpl.LOCKING_LOGGER.isTraceEnabled()) { for (MemoryBuffer buf : data.getCacheBuffers()) { LlapIoImpl.LOCKING_LOGGER.trace("Unlocking {} at the end of processing", buf); } } bufferManager.decRefBuffers(data.getCacheBuffers()); if (useObjectPools) { CSD_POOL.offer(data); } } } // We can offer ECB even with some streams not discarded; reset() will clear the arrays. if (useObjectPools) { ECB_POOL.offer(ecb); } }
/**
 * Recycles a {@link ColumnVectorBatch} that the consumer no longer needs by
 * offering it to the batch pool for later reuse.
 *
 * @param data the finished batch to recycle
 */
@Override
public void returnData(ColumnVectorBatch data) {
  cvbPool.offer(data);
}
@Override public void returnData(OrcEncodedColumnBatch ecb) { for (int colIx = 0; colIx < ecb.getTotalColCount(); ++colIx) { if (!ecb.hasData(colIx)) continue; ColumnStreamData[] datas = ecb.getColumnData(colIx); for (ColumnStreamData data : datas) { if (data == null || data.decRef() != 0) continue; if (LlapIoImpl.LOCKING_LOGGER.isTraceEnabled()) { for (MemoryBuffer buf : data.getCacheBuffers()) { LlapIoImpl.LOCKING_LOGGER.trace("Unlocking {} at the end of processing", buf); } } bufferManager.decRefBuffers(data.getCacheBuffers()); if (useObjectPools) { CSD_POOL.offer(data); } } } // We can offer ECB even with some streams not discarded; reset() will clear the arrays. if (useObjectPools) { ECB_POOL.offer(ecb); } }
@Override public void returnData(OrcEncodedColumnBatch ecb) { for (int colIx = 0; colIx < ecb.getTotalColCount(); ++colIx) { if (!ecb.hasData(colIx)) continue; // TODO: reuse columnvector-s on hasBatch - save the array by column? take apart each list. ColumnStreamData[] datas = ecb.getColumnData(colIx); for (ColumnStreamData data : datas) { if (data == null || data.decRef() != 0) continue; if (LlapIoImpl.LOCKING_LOGGER.isTraceEnabled()) { for (MemoryBuffer buf : data.getCacheBuffers()) { LlapIoImpl.LOCKING_LOGGER.trace("Unlocking {} at the end of processing", buf); } } bufferManager.decRefBuffers(data.getCacheBuffers()); if (useObjectPools) { CSD_POOL.offer(data); } } } // We can offer ECB even with some streams not discarded; reset() will clear the arrays. if (useObjectPools) { ECB_POOL.offer(ecb); } }