/**
 * Returns the value of the field identified by {@code field}, boxed as an
 * {@code Object}, by dispatching to the matching typed getter.
 *
 * @param field which Thrift field to read
 * @return the boxed field value (may be null if the field is unset)
 * @throws java.lang.IllegalStateException if {@code field} is not a known constant
 */
public java.lang.Object getFieldValue(_Fields field) {
  switch (field) {
    case NUM_VALUES:
      return getNum_values();
    case ENCODING:
      return getEncoding();
    case DEFINITION_LEVEL_ENCODING:
      return getDefinition_level_encoding();
    case REPETITION_LEVEL_ENCODING:
      return getRepetition_level_encoding();
    case STATISTICS:
      return getStatistics();
  }
  // Unreachable for a well-formed _Fields value; guards against future enum additions.
  throw new java.lang.IllegalStateException();
}
/**
 * Returns the requested field's value as a boxed {@code Object}.
 *
 * <p>Each {@code _Fields} constant maps one-to-one onto a typed getter;
 * this method simply performs that dispatch.
 *
 * @param field the field selector
 * @return the boxed value of the selected field
 * @throws IllegalStateException if the selector matches no known field
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case NUM_VALUES:
      return getNum_values();
    case ENCODING:
      return getEncoding();
    case DEFINITION_LEVEL_ENCODING:
      return getDefinition_level_encoding();
    case REPETITION_LEVEL_ENCODING:
      return getRepetition_level_encoding();
    case STATISTICS:
      return getStatistics();
  }
  // All enum constants are handled above; reaching here indicates a new,
  // unhandled _Fields value.
  throw new IllegalStateException();
}
/**
 * Reads one V1 data page described by {@code pageHeader} and appends it to
 * {@code pages}.
 *
 * <p>The compressed page bytes are consumed via {@code getSlice(compressedPageSize)}
 * (evaluated first, exactly as in the original argument order). The three
 * format-level encodings are translated to reader encodings by name.
 *
 * @param pageHeader           Thrift header for this page
 * @param uncompressedPageSize size of the page after decompression, in bytes
 * @param compressedPageSize   size of the page as stored, in bytes
 * @param pages                output list the new page is appended to
 * @return the number of values contained in this page
 */
private long readDataPageV1(PageHeader pageHeader, int uncompressedPageSize, int compressedPageSize, List<DataPage> pages) {
  DataPageHeader header = pageHeader.getData_page_header();
  pages.add(new DataPageV1(
      getSlice(compressedPageSize),
      header.getNum_values(),
      uncompressedPageSize,
      // Statistics are decoded against this column's primitive type.
      MetadataReader.readStats(header.getStatistics(), descriptor.getColumnDescriptor().getType()),
      getParquetEncoding(Encoding.valueOf(header.getRepetition_level_encoding().name())),
      getParquetEncoding(Encoding.valueOf(header.getDefinition_level_encoding().name())),
      getParquetEncoding(Encoding.valueOf(header.getEncoding().name()))));
  return header.getNum_values();
}
valuesRead += pageHeader.getDictionary_page_header().getNum_values(); } else { valuesRead += pageHeader.getData_page_header().getNum_values(); parent.totalPageValuesRead += valuesRead;
/**
 * Decodes a single V1 data page and adds it to the supplied page list.
 *
 * <p>Note on ordering: {@code getSlice(compressedPageSize)} consumes the page's
 * compressed bytes and is invoked before any other work, matching the original
 * left-to-right constructor-argument evaluation.
 *
 * @param pageHeader           the page's Thrift header
 * @param uncompressedPageSize decompressed page size in bytes
 * @param compressedPageSize   on-disk (compressed) page size in bytes
 * @param pages                destination list for the decoded page
 * @return the value count reported by the page header
 */
private long readDataPageV1(PageHeader pageHeader, int uncompressedPageSize, int compressedPageSize, List<DataPage> pages) {
  DataPageHeader v1Header = pageHeader.getData_page_header();
  long valueCount = v1Header.getNum_values();
  pages.add(new DataPageV1(
      getSlice(compressedPageSize),
      valueCount,
      uncompressedPageSize,
      // Column statistics are interpreted using this column's declared type.
      MetadataReader.readStats(v1Header.getStatistics(), descriptor.getColumnDescriptor().getType()),
      getParquetEncoding(Encoding.valueOf(v1Header.getRepetition_level_encoding().name())),
      getParquetEncoding(Encoding.valueOf(v1Header.getDefinition_level_encoding().name())),
      getParquetEncoding(Encoding.valueOf(v1Header.getEncoding().name()))));
  return valueCount;
}
valueReadSoFar += pageHeader.data_page_header.getNum_values(); ByteBuf buf = allocator.buffer(pageHeader.compressed_page_size); lastPage = buf;
new DataPageV1( this.readAsBytesInput(compressedPageSize), dataHeaderV1.getNum_values(), uncompressedPageSize, converter.fromParquetStatistics( converter.getEncoding(dataHeaderV1.getEncoding()) )); valuesCountReadSoFar += dataHeaderV1.getNum_values(); break; case DATA_PAGE_V2:
new DataPageV1( this.readAsBytesInput(compressedPageSize), dataHeaderV1.getNum_values(), uncompressedPageSize, converter.fromParquetStatistics( converter.getEncoding(dataHeaderV1.getEncoding()) )); valuesCountReadSoFar += dataHeaderV1.getNum_values(); ++dataPageCountReadSoFar; break;
valueReadSoFar += pageHeader.data_page_header.getNum_values(); ByteBuffer destBuffer = uncompressPage(pageHeader, true); return new DataPageV1(