private void fillMeasureData(BlockletScannedResult scannedResult, List<Object[]> listBasedResult) { long startTime = System.currentTimeMillis(); // if list is not empty after filling the dimension data then only fill the measure data if (!listBasedResult.isEmpty()) { fillMeasureDataBatch(listBasedResult, 1, scannedResult); } QueryStatistic measureFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.MEASURE_FILLING_TIME); measureFillingTime.addCountStatistic(QueryStatisticsConstants.MEASURE_FILLING_TIME, measureFillingTime.getCount() + (System.currentTimeMillis() - startTime)); }
/**
 * Completes the dimension-filled rows with measure column values and records the
 * time spent under the MEASURE_FILLING_TIME statistic.
 *
 * @param scannedResult scanned blocklet result to read measure values from
 * @param listBasedResult rows (one Object[] per record) to be completed with measures
 */
private void fillMeasureData(BlockletScannedResult scannedResult, List<Object[]> listBasedResult) {
  long startTime = System.currentTimeMillis();
  // if list is not empty after filling the dimension data then only fill the measure data
  if (!listBasedResult.isEmpty()) {
    // offset 1: measures presumably start at index 1 of each row, index 0 holding
    // the key wrapper — TODO confirm against fillMeasureDataBatch
    fillMeasureDataBatch(listBasedResult, 1, scannedResult);
  }
  QueryStatistic measureFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
      .get(QueryStatisticsConstants.MEASURE_FILLING_TIME);
  // cumulative statistic: add this call's elapsed time to the running total
  measureFillingTime.addCountStatistic(QueryStatisticsConstants.MEASURE_FILLING_TIME,
      measureFillingTime.getCount() + (System.currentTimeMillis() - startTime));
}
/**
 * Completes the dimension-filled rows with measure column values and records the
 * time spent under the MEASURE_FILLING_TIME statistic.
 *
 * @param scannedResult scanned blocklet result to read measure values from
 * @param listBasedResult rows (one Object[] per record) to be completed with measures
 */
private void fillMeasureData(BlockletScannedResult scannedResult, List<Object[]> listBasedResult) {
  long startTime = System.currentTimeMillis();
  // if list is not empty after filling the dimension data then only fill the measure data
  if (!listBasedResult.isEmpty()) {
    // offset 1: measures presumably start at index 1 of each row, index 0 holding
    // the key wrapper — TODO confirm against fillMeasureDataBatch
    fillMeasureDataBatch(listBasedResult, 1, scannedResult);
  }
  QueryStatistic measureFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
      .get(QueryStatisticsConstants.MEASURE_FILLING_TIME);
  // cumulative statistic: add this call's elapsed time to the running total
  measureFillingTime.addCountStatistic(QueryStatisticsConstants.MEASURE_FILLING_TIME,
      measureFillingTime.getCount() + (System.currentTimeMillis() - startTime));
}
// closes the enclosing class (declaration not visible in this chunk)
}
@Override public void readBlocklet(RawBlockletColumnChunks rawBlockletColumnChunks) throws IOException { long startTime = System.currentTimeMillis(); this.filterExecuter.readColumnChunks(rawBlockletColumnChunks); // adding statistics for carbon read time QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.READ_BLOCKlET_TIME); readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME, readTime.getCount() + (System.currentTimeMillis() - startTime)); }
/** * This method will log query result count and querytime * @param recordCount * @param recorder */ public void logStatistics(int recordCount, QueryStatisticsRecorder recorder) { // result size if (null != recorder) { QueryStatistic queryStatistic = new QueryStatistic(); queryStatistic.addCountStatistic(QueryStatisticsConstants.RESULT_SIZE, recordCount); recorder.recordStatistics(queryStatistic); } } }
/** * This method will add a record both key and value to list object * it will keep track of how many record is processed, to handle limit scenario */ @Override public List<Object[]> collectResultInRow(BlockletScannedResult scannedResult, int batchSize) { long startTime = System.currentTimeMillis(); List<Object[]> listBasedResult = new ArrayList<>(batchSize); ProjectionMeasure[] queryMeasures = executionInfo.getProjectionMeasures(); // scan the record and add to list scanAndFillData(scannedResult, batchSize, listBasedResult, queryMeasures); QueryStatistic resultPrepTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.RESULT_PREP_TIME); resultPrepTime.addCountStatistic(QueryStatisticsConstants.RESULT_PREP_TIME, resultPrepTime.getCount() + (System.currentTimeMillis() - startTime)); return listBasedResult; }
/**
 * This method will add a record both key and value to list object
 * it will keep track of how many record is processed, to handle limit scenario
 *
 * @param scannedResult scanned blocklet result to read records from
 * @param batchSize maximum number of rows to collect in this call
 * @return collected rows, one Object[] per record
 */
@Override
public List<Object[]> collectResultInRow(BlockletScannedResult scannedResult, int batchSize) {
  long startTime = System.currentTimeMillis();
  // pre-sized to batchSize so the list never grows during the scan
  List<Object[]> listBasedResult = new ArrayList<>(batchSize);
  ProjectionMeasure[] queryMeasures = executionInfo.getProjectionMeasures();
  // scan the record and add to list
  scanAndFillData(scannedResult, batchSize, listBasedResult, queryMeasures);
  QueryStatistic resultPrepTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
      .get(QueryStatisticsConstants.RESULT_PREP_TIME);
  // cumulative statistic: add this call's elapsed time to the running total
  resultPrepTime.addCountStatistic(QueryStatisticsConstants.RESULT_PREP_TIME,
      resultPrepTime.getCount() + (System.currentTimeMillis() - startTime));
  return listBasedResult;
}
/**
 * This case is used only in case of compaction, since it does not use filter flow.
 * Lazily decodes any dimension/measure pages for the current page that have not
 * been materialized yet, after freeing memory held by the previous page, and
 * accumulates the decode time under PAGE_UNCOMPRESS_TIME.
 */
public void fillDataChunks() {
  freeDataChunkMemory();
  // Nothing to decode once the page cursor has moved past the last page.
  if (pageCounter >= pageFilteredRowCount.length) {
    return;
  }
  long decodeStart = System.currentTimeMillis();
  // Decode any dimension page for the current page index that is still missing.
  for (int dimIdx = 0; dimIdx < dimensionColumnPages.length; dimIdx++) {
    boolean notDecoded = dimensionColumnPages[dimIdx][pageCounter] == null;
    if (notDecoded && dimRawColumnChunks[dimIdx] != null) {
      dimensionColumnPages[dimIdx][pageCounter] =
          dimRawColumnChunks[dimIdx].convertToDimColDataChunkWithOutCache(pageCounter, null);
    }
  }
  // Same lazy decode for the measure pages.
  for (int msrIdx = 0; msrIdx < measureColumnPages.length; msrIdx++) {
    boolean notDecoded = measureColumnPages[msrIdx][pageCounter] == null;
    if (notDecoded && msrRawColumnChunks[msrIdx] != null) {
      measureColumnPages[msrIdx][pageCounter] =
          msrRawColumnChunks[msrIdx].convertToColumnPageWithOutCache(pageCounter, null);
    }
  }
  long elapsed = System.currentTimeMillis() - decodeStart;
  QueryStatistic uncompressStat = queryStatisticsModel.getStatisticsTypeAndObjMap()
      .get(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME);
  uncompressStat.addCountStatistic(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME,
      uncompressStat.getCount() + elapsed);
}
private void fillDimensionData(BlockletScannedResult scannedResult, List<Object[]> listBasedResult, ProjectionMeasure[] queryMeasures, int batchSize) { long startTime = System.currentTimeMillis(); List<byte[]> dictionaryKeyArrayBatch = scannedResult.getDictionaryKeyArrayBatch(batchSize); List<byte[][]> noDictionaryKeyArrayBatch = scannedResult.getNoDictionaryKeyArrayBatch(batchSize); List<byte[][]> complexTypeKeyArrayBatch = scannedResult.getComplexTypeKeyArrayBatch(batchSize); // it will same for one blocklet so can be computed only once byte[] implicitColumnByteArray = scannedResult.getBlockletId() .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)); // Note: size check in for loop is for dictionaryKeyArrayBatch as this size can be lesser than // batch size in case of IUD scenarios for (int i = 0; i < dictionaryKeyArrayBatch.size(); i++) { // 1 for ByteArrayWrapper object which will contain dictionary and no dictionary data Object[] row = new Object[1 + queryMeasures.length]; ByteArrayWrapper wrapper = new ByteArrayWrapper(); wrapper.setDictionaryKey(dictionaryKeyArrayBatch.get(i)); wrapper.setNoDictionaryKeys(noDictionaryKeyArrayBatch.get(i)); wrapper.setComplexTypesKeys(complexTypeKeyArrayBatch.get(i)); wrapper.setImplicitColumnByteArray(implicitColumnByteArray); row[0] = wrapper; listBasedResult.add(row); } QueryStatistic keyColumnFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME); keyColumnFillingTime.addCountStatistic(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME, keyColumnFillingTime.getCount() + (System.currentTimeMillis() - startTime)); }
/**
 * This method will add a record both key and value to list object
 * it will keep track of how many record is processed, to handle limit scenario
 * After the scan, key arrays are re-filled to match the latest table schema when
 * dictionary / no-dictionary columns have been added (restructure scenario).
 *
 * @param scannedResult scanned blocklet result to read records from
 * @param batchSize maximum number of rows to collect in this call
 * @return collected rows, one Object[] per record
 */
@Override
public List<Object[]> collectResultInRow(BlockletScannedResult scannedResult, int batchSize) {
  long startTime = System.currentTimeMillis();
  List<Object[]> listBasedResult = new ArrayList<>(batchSize);
  // note: actual query measures here, unlike the plain projection-measure variant
  ProjectionMeasure[] queryMeasures = executionInfo.getActualQueryMeasures();
  // scan the record and add to list
  scanAndFillData(scannedResult, batchSize, listBasedResult, queryMeasures);
  // re-fill dictionary and no dictionary key arrays for the newly added columns
  if (dimensionInfo.isDictionaryColumnAdded()) {
    fillDictionaryKeyArrayBatchWithLatestSchema(listBasedResult);
  }
  if (dimensionInfo.isNoDictionaryColumnAdded()) {
    fillNoDictionaryKeyArrayBatchWithLatestSchema(listBasedResult);
  }
  QueryStatistic resultPrepTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
      .get(QueryStatisticsConstants.RESULT_PREP_TIME);
  // cumulative statistic: add this call's elapsed time to the running total
  resultPrepTime.addCountStatistic(QueryStatisticsConstants.RESULT_PREP_TIME,
      resultPrepTime.getCount() + (System.currentTimeMillis() - startTime));
  return listBasedResult;
}
/**
 * Ensures the raw column chunk is loaded, then decodes the current page directly
 * into the vector. When statistics are enabled, the decode time is accumulated
 * under PAGE_UNCOMPRESS_TIME.
 *
 * @throws RuntimeException wrapping any IOException raised by the lazy blocklet load
 */
public void loadPage() {
  // Trigger the blocklet load only if the raw chunk has not been read yet.
  if (lazyChunkWrapper.getRawColumnChunk() == null) {
    try {
      lazyBlockletLoader.load();
    } catch (IOException e) {
      // Preserve the cause; callers of loadPage do not declare IOException.
      throw new RuntimeException(e);
    }
  }
  long decodeStart = System.currentTimeMillis();
  // Decode and fill the vector; the raw chunk type depends on column kind.
  if (isMeasure) {
    ((MeasureRawColumnChunk) lazyChunkWrapper.getRawColumnChunk())
        .convertToColumnPageAndFillVector(pageNumber, vectorInfo, reusableDataBuffer);
  } else {
    ((DimensionRawColumnChunk) lazyChunkWrapper.getRawColumnChunk())
        .convertToDimColDataChunkAndFillVector(pageNumber, vectorInfo, reusableDataBuffer);
  }
  // Unlike some siblings, statistics recording here is gated on isEnabled().
  if (queryStatisticsModel.isEnabled()) {
    long elapsed = System.currentTimeMillis() - decodeStart;
    QueryStatistic uncompressStat = queryStatisticsModel.getStatisticsTypeAndObjMap()
        .get(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME);
    uncompressStat.addCountStatistic(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME,
        uncompressStat.getCount() + elapsed);
  }
}
// Accumulate this call's elapsed time into the cumulative KEY_COLUMN_FILLING_TIME statistic.
// NOTE(review): fragment — the enclosing method declaring keyColumnFillingTime and
// startTime is not visible in this chunk.
keyColumnFillingTime.addCountStatistic(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME,
    keyColumnFillingTime.getCount() + (System.currentTimeMillis() - startTime));
// Accumulate this call's elapsed time into the cumulative KEY_COLUMN_FILLING_TIME statistic.
// NOTE(review): fragment — the enclosing method declaring keyColumnFillingTime and
// startTime is not visible in this chunk.
keyColumnFillingTime.addCountStatistic(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME,
    keyColumnFillingTime.getCount() + (System.currentTimeMillis() - startTime));
@Override public void readBlocklet(RawBlockletColumnChunks rawBlockletColumnChunks) throws IOException { long startTime = System.currentTimeMillis(); DimensionRawColumnChunk[] dimensionRawColumnChunks = rawBlockletColumnChunks.getDataBlock() .readDimensionChunks(rawBlockletColumnChunks.getFileReader(), blockExecutionInfo.getAllSelectedDimensionColumnIndexRange()); rawBlockletColumnChunks.setDimensionRawColumnChunks(dimensionRawColumnChunks); MeasureRawColumnChunk[] measureRawColumnChunks = rawBlockletColumnChunks.getDataBlock() .readMeasureChunks(rawBlockletColumnChunks.getFileReader(), blockExecutionInfo.getAllSelectedMeasureIndexRange()); rawBlockletColumnChunks.setMeasureRawColumnChunks(measureRawColumnChunks); // adding statistics for carbon read time QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap() .get(QueryStatisticsConstants.READ_BLOCKlET_TIME); readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME, readTime.getCount() + (System.currentTimeMillis() - startTime)); }
// NOTE(review): garbled extraction fragment of a scanner method — it contains orphaned
// pieces (dangling ".get(...)" calls, a mid-fragment "return createEmptyResult();",
// references to undeclared scanTime/validPages/pages/startTime) and is not valid Java
// on its own. Kept byte-identical; recover the original method before editing.
QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
    .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM,
    totalBlockletStatistic.getCount() + 1);
scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
    scanTime.getCount() + (System.currentTimeMillis() - startTime));
    .get(QueryStatisticsConstants.PAGE_SCANNED);
scannedPages
    .addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED, scannedPages.getCount());
return createEmptyResult();
    .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM);
validScannedBlockletStatistic
    .addCountStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM,
        validScannedBlockletStatistic.getCount() + 1);
validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED,
    validPages.getCount() + pages.cardinality());
QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap()
    .get(QueryStatisticsConstants.PAGE_SCANNED);
scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED,
    scannedPages.getCount() + pages.cardinality());
scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
    scanTime.getCount() + (System.currentTimeMillis() - startTime));
// Record the number of blocks selected for scanning, then persist it via the recorder.
// NOTE(review): fragment — queryStatistic, blockExecutionInfoList and queryModel are
// declared outside this view.
queryStatistic.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKS_NUM,
    blockExecutionInfoList.size());
queryModel.getStatisticsRecorder().recordStatistics(queryStatistic);
// NOTE(review): garbled extraction fragment of a filter-scanner method — it contains
// orphaned pieces (dangling ".get(...)" calls, a mid-fragment "return createEmptyResult();",
// references to undeclared scanTime/scannedPages/validPages/bitSetGroup/startTime/
// dimensionReadTime) and is not valid Java on its own. Kept byte-identical; recover
// the original method before editing.
QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
    .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM,
    totalBlockletStatistic.getCount() + 1);
scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
    scanTime.getCount() + (System.currentTimeMillis() - startTime));
scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED,
    scannedPages.getCount() + bitSetGroup.getScannedPages());
return createEmptyResult();
    .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM);
validScannedBlockletStatistic
    .addCountStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM,
        validScannedBlockletStatistic.getCount() + 1);
validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED,
    validPages.getCount() + bitSetGroup.getValidPages());
QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap()
    .get(QueryStatisticsConstants.PAGE_SCANNED);
scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED,
    scannedPages.getCount() + bitSetGroup.getScannedPages());
int[] pageFilteredRowCount = new int[bitSetGroup.getNumberOfPages()];
scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
    scanTime.getCount() + (System.currentTimeMillis() - startTime - dimensionReadTime));
QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
    .get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
// NOTE(review): fragment cut from a scanner method — validPages, totalPagesScanned,
// startTime, scanTime and scannedResult are declared outside this view, and the
// blockletId assignment is truncated mid-expression. Kept byte-identical.
QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
    .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM,
    totalBlockletStatistic.getCount() + 1);
QueryStatistic validScannedBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
    .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM);
validScannedBlockletStatistic
    .addCountStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM,
        validScannedBlockletStatistic.getCount() + 1);
// every page of the block counts as valid in this (non-filter) path
validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED,
    validPages.getCount() + rawBlockletColumnChunks.getDataBlock().numberOfPages());
totalPagesScanned.addCountStatistic(QueryStatisticsConstants.TOTAL_PAGE_SCANNED,
    totalPagesScanned.getCount() + rawBlockletColumnChunks.getDataBlock().numberOfPages());
String blockletId = blockExecutionInfo.getBlockIdString() + CarbonCommonConstants.FILE_SEPARATOR
scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
    scanTime.getCount() + (System.currentTimeMillis() - startTime));
return scannedResult;
// Accumulate an externally-measured read duration into READ_BLOCKlET_TIME, then mark
// the load as done.
// NOTE(review): fragment — readTime and isLoaded are declared outside this view.
QueryStatistic time = queryStatisticsModel.getStatisticsTypeAndObjMap()
    .get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
time.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME,
    time.getCount() + readTime);
isLoaded = true;
// Add this block's page count to the cumulative TOTAL_PAGE_SCANNED statistic.
// NOTE(review): fragment — totalPagesScanned and dataBlock are declared outside this view.
totalPagesScanned.addCountStatistic(QueryStatisticsConstants.TOTAL_PAGE_SCANNED,
    totalPagesScanned.getCount() + dataBlock.numberOfPages());