QueryStatistic.addCountStatistic

How to use the addCountStatistic method in org.apache.carbondata.core.stats.QueryStatistic

Best Java code snippets using org.apache.carbondata.core.stats.QueryStatistic.addCountStatistic (Showing top 20 results out of 315)
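
A minimal sketch of the two patterns the snippets below follow, not taken from the index itself: recording an absolute count on a fresh QueryStatistic and handing it to a QueryStatisticsRecorder, and accumulating elapsed time into an already-registered statistic via getCount(). All class and method names are the ones visible in the snippets; only the wrapper class and its helper method names are illustrative.

import org.apache.carbondata.core.stats.QueryStatistic;
import org.apache.carbondata.core.stats.QueryStatisticsConstants;
import org.apache.carbondata.core.stats.QueryStatisticsRecorder;

public class AddCountStatisticSketch {

  // Record an absolute count once and pass it to the recorder (as logStatistics does below).
  static void recordResultSize(QueryStatisticsRecorder recorder, int recordCount) {
    QueryStatistic queryStatistic = new QueryStatistic();
    queryStatistic.addCountStatistic(QueryStatisticsConstants.RESULT_SIZE, recordCount);
    recorder.recordStatistics(queryStatistic);
  }

  // Accumulate elapsed time: the snippets always add the previous getCount() back in,
  // so the stored count keeps growing across calls instead of being reset.
  static void accumulateElapsed(QueryStatistic statistic, String statisticName, long startTime) {
    statistic.addCountStatistic(statisticName,
        statistic.getCount() + (System.currentTimeMillis() - startTime));
  }
}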

origin: org.apache.carbondata/carbondata-core

private void fillMeasureData(BlockletScannedResult scannedResult,
  List<Object[]> listBasedResult) {
 long startTime = System.currentTimeMillis();
 // if list is not empty after filling the dimension data then only fill the measure data
 if (!listBasedResult.isEmpty()) {
  fillMeasureDataBatch(listBasedResult, 1, scannedResult);
 }
 QueryStatistic measureFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
   .get(QueryStatisticsConstants.MEASURE_FILLING_TIME);
 measureFillingTime.addCountStatistic(QueryStatisticsConstants.MEASURE_FILLING_TIME,
   measureFillingTime.getCount() + (System.currentTimeMillis() - startTime));
}
origin: org.apache.carbondata/carbondata-core

@Override
public void readBlocklet(RawBlockletColumnChunks rawBlockletColumnChunks) throws IOException {
 long startTime = System.currentTimeMillis();
 this.filterExecuter.readColumnChunks(rawBlockletColumnChunks);
 // adding statistics for carbon read time
 QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
   .get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
 readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME,
   readTime.getCount() + (System.currentTimeMillis() - startTime));
}
origin: org.apache.carbondata/carbondata-hadoop

 /**
  * This method will log the query result count and query time
  * @param recordCount
  * @param recorder
  */
 public void logStatistics(int recordCount, QueryStatisticsRecorder recorder) {
  // result size
  if (null != recorder) {
   QueryStatistic queryStatistic = new QueryStatistic();
   queryStatistic.addCountStatistic(QueryStatisticsConstants.RESULT_SIZE, recordCount);
   recorder.recordStatistics(queryStatistic);
  }
 }
}
origin: org.apache.carbondata/carbondata-core

/**
 * This method will add a record (both key and value) to the list object.
 * It will keep track of how many records have been processed, to handle the limit scenario.
 */
@Override
public List<Object[]> collectResultInRow(BlockletScannedResult scannedResult,
  int batchSize) {
 long startTime = System.currentTimeMillis();
 List<Object[]> listBasedResult = new ArrayList<>(batchSize);
 ProjectionMeasure[] queryMeasures = executionInfo.getProjectionMeasures();
 // scan the record and add to list
 scanAndFillData(scannedResult, batchSize, listBasedResult, queryMeasures);
 QueryStatistic resultPrepTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
   .get(QueryStatisticsConstants.RESULT_PREP_TIME);
 resultPrepTime.addCountStatistic(QueryStatisticsConstants.RESULT_PREP_TIME,
   resultPrepTime.getCount() + (System.currentTimeMillis() - startTime));
 return listBasedResult;
}
origin: org.apache.carbondata/carbondata-core

/**
 * This is used only in the compaction case, since compaction does not use the filter flow.
 */
public void fillDataChunks() {
 freeDataChunkMemory();
 if (pageCounter >= pageFilteredRowCount.length) {
  return;
 }
 long startTime = System.currentTimeMillis();
 for (int i = 0; i < dimensionColumnPages.length; i++) {
  if (dimensionColumnPages[i][pageCounter] == null && dimRawColumnChunks[i] != null) {
   dimensionColumnPages[i][pageCounter] = dimRawColumnChunks[i]
     .convertToDimColDataChunkWithOutCache(pageCounter, null);
  }
 }
 for (int i = 0; i < measureColumnPages.length; i++) {
  if (measureColumnPages[i][pageCounter] == null && msrRawColumnChunks[i] != null) {
   measureColumnPages[i][pageCounter] = msrRawColumnChunks[i]
     .convertToColumnPageWithOutCache(pageCounter, null);
  }
 }
 QueryStatistic pageUncompressTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
   .get(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME);
 pageUncompressTime.addCountStatistic(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME,
   pageUncompressTime.getCount() + (System.currentTimeMillis() - startTime));
}
origin: org.apache.carbondata/carbondata-core

private void fillDimensionData(BlockletScannedResult scannedResult,
  List<Object[]> listBasedResult, ProjectionMeasure[] queryMeasures, int batchSize) {
 long startTime = System.currentTimeMillis();
 List<byte[]> dictionaryKeyArrayBatch = scannedResult.getDictionaryKeyArrayBatch(batchSize);
 List<byte[][]> noDictionaryKeyArrayBatch =
   scannedResult.getNoDictionaryKeyArrayBatch(batchSize);
 List<byte[][]> complexTypeKeyArrayBatch = scannedResult.getComplexTypeKeyArrayBatch(batchSize);
 // it will be the same for one blocklet, so it can be computed only once
 byte[] implicitColumnByteArray = scannedResult.getBlockletId()
   .getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
 // Note: the size check in the for loop uses dictionaryKeyArrayBatch, as its size can be less
 // than the batch size in IUD scenarios
 for (int i = 0; i < dictionaryKeyArrayBatch.size(); i++) {
  // 1 for ByteArrayWrapper object which will contain dictionary and no dictionary data
  Object[] row = new Object[1 + queryMeasures.length];
  ByteArrayWrapper wrapper = new ByteArrayWrapper();
  wrapper.setDictionaryKey(dictionaryKeyArrayBatch.get(i));
  wrapper.setNoDictionaryKeys(noDictionaryKeyArrayBatch.get(i));
  wrapper.setComplexTypesKeys(complexTypeKeyArrayBatch.get(i));
  wrapper.setImplicitColumnByteArray(implicitColumnByteArray);
  row[0] = wrapper;
  listBasedResult.add(row);
 }
 QueryStatistic keyColumnFillingTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
   .get(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME);
 keyColumnFillingTime.addCountStatistic(QueryStatisticsConstants.KEY_COLUMN_FILLING_TIME,
   keyColumnFillingTime.getCount() + (System.currentTimeMillis() - startTime));
}
origin: org.apache.carbondata/carbondata-core

/**
 * This method will add a record (both key and value) to the list object.
 * It will keep track of how many records have been processed, to handle the limit scenario.
 */
@Override
public List<Object[]> collectResultInRow(BlockletScannedResult scannedResult, int batchSize) {
 long startTime = System.currentTimeMillis();
 List<Object[]> listBasedResult = new ArrayList<>(batchSize);
 ProjectionMeasure[] queryMeasures = executionInfo.getActualQueryMeasures();
 // scan the record and add to list
 scanAndFillData(scannedResult, batchSize, listBasedResult, queryMeasures);
 // re-fill dictionary and no dictionary key arrays for the newly added columns
 if (dimensionInfo.isDictionaryColumnAdded()) {
  fillDictionaryKeyArrayBatchWithLatestSchema(listBasedResult);
 }
 if (dimensionInfo.isNoDictionaryColumnAdded()) {
  fillNoDictionaryKeyArrayBatchWithLatestSchema(listBasedResult);
 }
 QueryStatistic resultPrepTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
   .get(QueryStatisticsConstants.RESULT_PREP_TIME);
 resultPrepTime.addCountStatistic(QueryStatisticsConstants.RESULT_PREP_TIME,
   resultPrepTime.getCount() + (System.currentTimeMillis() - startTime));
 return listBasedResult;
}
origin: org.apache.carbondata/carbondata-core

public void loadPage() {
 if (lazyChunkWrapper.getRawColumnChunk() == null) {
  try {
   lazyBlockletLoader.load();
  } catch (IOException e) {
   throw new RuntimeException(e);
  }
 }
 long startTime = System.currentTimeMillis();
 if (isMeasure) {
  ((MeasureRawColumnChunk) lazyChunkWrapper.getRawColumnChunk())
    .convertToColumnPageAndFillVector(pageNumber, vectorInfo, reusableDataBuffer);
 } else {
  ((DimensionRawColumnChunk) lazyChunkWrapper.getRawColumnChunk())
    .convertToDimColDataChunkAndFillVector(pageNumber, vectorInfo, reusableDataBuffer);
 }
 if (queryStatisticsModel.isEnabled()) {
  QueryStatistic pageUncompressTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
    .get(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME);
  pageUncompressTime.addCountStatistic(QueryStatisticsConstants.PAGE_UNCOMPRESS_TIME,
    pageUncompressTime.getCount() + (System.currentTimeMillis() - startTime));
 }
}
origin: org.apache.carbondata/carbondata-core

@Override
public void readBlocklet(RawBlockletColumnChunks rawBlockletColumnChunks)
  throws IOException {
 long startTime = System.currentTimeMillis();
 DimensionRawColumnChunk[] dimensionRawColumnChunks = rawBlockletColumnChunks.getDataBlock()
   .readDimensionChunks(rawBlockletColumnChunks.getFileReader(),
     blockExecutionInfo.getAllSelectedDimensionColumnIndexRange());
 rawBlockletColumnChunks.setDimensionRawColumnChunks(dimensionRawColumnChunks);
 MeasureRawColumnChunk[] measureRawColumnChunks = rawBlockletColumnChunks.getDataBlock()
   .readMeasureChunks(rawBlockletColumnChunks.getFileReader(),
     blockExecutionInfo.getAllSelectedMeasureIndexRange());
 rawBlockletColumnChunks.setMeasureRawColumnChunks(measureRawColumnChunks);
 // adding statistics for carbon read time
 QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
   .get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
 readTime.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME,
   readTime.getCount() + (System.currentTimeMillis() - startTime));
}
origin: org.apache.carbondata/carbondata-core

// excerpt: statistics updates from a filter scan (non-contiguous lines from one method)
QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM,
  totalBlockletStatistic.getCount() + 1);
// ... when the filter matches nothing, record the scan time and scanned pages, then return early
 scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
   scanTime.getCount() + (System.currentTimeMillis() - startTime));
 QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap()
   .get(QueryStatisticsConstants.PAGE_SCANNED);
 scannedPages
   .addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED, scannedPages.getCount());
 return createEmptyResult();
// ... otherwise count the blocklet and its pages as valid
QueryStatistic validScannedBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM);
validScannedBlockletStatistic
  .addCountStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM,
    validScannedBlockletStatistic.getCount() + 1);
validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED,
  validPages.getCount() + pages.cardinality());
QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.PAGE_SCANNED);
scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED,
  scannedPages.getCount() + pages.cardinality());
scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
  scanTime.getCount() + (System.currentTimeMillis() - startTime));
origin: org.apache.carbondata/carbondata-core

queryStatistic.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKS_NUM,
  blockExecutionInfoList.size());
queryModel.getStatisticsRecorder().recordStatistics(queryStatistic);
origin: org.apache.carbondata/carbondata-core

// excerpt: statistics updates from another filter scan (bitSetGroup-based; non-contiguous lines)
QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM,
  totalBlockletStatistic.getCount() + 1);
// ... when the bit set selects nothing, record the scan time and scanned pages, then return early
 scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
   scanTime.getCount() + (System.currentTimeMillis() - startTime));
 scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED,
   scannedPages.getCount() + bitSetGroup.getScannedPages());
 return createEmptyResult();
// ... otherwise count the blocklet and its pages as valid
QueryStatistic validScannedBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM);
validScannedBlockletStatistic
  .addCountStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM,
    validScannedBlockletStatistic.getCount() + 1);
validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED,
  validPages.getCount() + bitSetGroup.getValidPages());
QueryStatistic scannedPages = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.PAGE_SCANNED);
scannedPages.addCountStatistic(QueryStatisticsConstants.PAGE_SCANNED,
  scannedPages.getCount() + bitSetGroup.getScannedPages());
int[] pageFilteredRowCount = new int[bitSetGroup.getNumberOfPages()];
// ... the scan time here excludes the separately tracked dimension read time
scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
  scanTime.getCount() + (System.currentTimeMillis() - startTime - dimensionReadTime));
QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
origin: org.apache.carbondata/carbondata-core

QueryStatistic totalBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM);
totalBlockletStatistic.addCountStatistic(QueryStatisticsConstants.TOTAL_BLOCKLET_NUM,
  totalBlockletStatistic.getCount() + 1);
QueryStatistic validScannedBlockletStatistic = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM);
validScannedBlockletStatistic
  .addCountStatistic(QueryStatisticsConstants.VALID_SCAN_BLOCKLET_NUM,
    validScannedBlockletStatistic.getCount() + 1);
validPages.addCountStatistic(QueryStatisticsConstants.VALID_PAGE_SCANNED,
  validPages.getCount() + rawBlockletColumnChunks.getDataBlock().numberOfPages());
totalPagesScanned.addCountStatistic(QueryStatisticsConstants.TOTAL_PAGE_SCANNED,
  totalPagesScanned.getCount() + rawBlockletColumnChunks.getDataBlock().numberOfPages());
String blockletId = blockExecutionInfo.getBlockIdString() + CarbonCommonConstants.FILE_SEPARATOR
  // ...
scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
  scanTime.getCount() + (System.currentTimeMillis() - startTime));
return scannedResult;
origin: org.apache.carbondata/carbondata-core

QueryStatistic time = queryStatisticsModel.getStatisticsTypeAndObjMap()
  .get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
time.addCountStatistic(QueryStatisticsConstants.READ_BLOCKlET_TIME,
  time.getCount() + readTime);
isLoaded = true;
origin: org.apache.carbondata/carbondata-core

totalPagesScanned.addCountStatistic(QueryStatisticsConstants.TOTAL_PAGE_SCANNED,
  totalPagesScanned.getCount() + dataBlock.numberOfPages());
org.apache.carbondata.core.stats.QueryStatistic.addCountStatistic

Popular methods of QueryStatistic

  • <init>
  • addStatistics
    Below method will be used to add the statistic.
  • addFixedTimeStatistic
    Below method will be used to add a fixed time statistic, for example the total time taken for scan or result preparation.
  • getCount
  • getMessage
  • getStatistics
    Below method will be used to get the statistic message, which will be used for logging.
  • getTimeTaken
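
Putting the popular methods together: a hedged sketch of the lookup-and-accumulate idiom shared by the scanner and collector snippets above, assuming QueryStatisticsModel lives in the same org.apache.carbondata.core.stats package as QueryStatistic; its getStatisticsTypeAndObjMap() and isEnabled() calls are taken from the loadPage snippet above, while the wrapper class and method name here are illustrative only.

import org.apache.carbondata.core.stats.QueryStatistic;
import org.apache.carbondata.core.stats.QueryStatisticsConstants;
import org.apache.carbondata.core.stats.QueryStatisticsModel;

class ScanTimingSketch {
  // Look up the pre-registered statistic for a metric and add the elapsed time to it.
  static void addScanBlockletTime(QueryStatisticsModel queryStatisticsModel, long startTime) {
    if (!queryStatisticsModel.isEnabled()) {
      return;
    }
    QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
        .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);
    scanTime.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKlET_TIME,
        scanTime.getCount() + (System.currentTimeMillis() - startTime));
  }
}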
