/**
 * Decodes a V2 data page header and appends the corresponding {@link DataPageV2} to {@code pages}.
 * <p>
 * NOTE(review): assumes the underlying input is positioned at the start of this page's
 * repetition-level bytes; the three {@code getSlice} calls consume the repetition levels,
 * definition levels, and value payload in that order — confirm against the caller.
 *
 * @param pageHeader Thrift page header whose {@code data_page_header_v2} section is read
 * @param uncompressedPageSize uncompressed size recorded in the page header
 * @param compressedPageSize on-disk size of the page; the value payload is whatever
 *        remains after the two level sections
 * @param pages output list the decoded page is appended to
 * @return the number of values recorded in the page header
 */
private long readDataPageV2(PageHeader pageHeader, int uncompressedPageSize, int compressedPageSize, List<DataPage> pages)
{
    DataPageHeaderV2 header = pageHeader.getData_page_header_v2();
    int repetitionLevelsLength = header.getRepetition_levels_byte_length();
    int definitionLevelsLength = header.getDefinition_levels_byte_length();
    // Value bytes are what is left of the compressed page after both level sections.
    int valuesLength = compressedPageSize - repetitionLevelsLength - definitionLevelsLength;
    pages.add(new DataPageV2(
            header.getNum_rows(),
            header.getNum_nulls(),
            header.getNum_values(),
            getSlice(repetitionLevelsLength),
            getSlice(definitionLevelsLength),
            getParquetEncoding(Encoding.valueOf(header.getEncoding().name())),
            getSlice(valuesLength),
            uncompressedPageSize,
            MetadataReader.readStats(
                    header.getStatistics(),
                    descriptor.getColumnDescriptor().getType()),
            header.isIs_compressed()));
    return header.getNum_values();
}
}
/**
 * Returns the value of the requested Thrift field as an {@code Object}, boxing
 * primitive-typed fields.
 * <p>
 * Uses autoboxing ({@code Integer.valueOf}/{@code Boolean.valueOf}) instead of the
 * deprecated {@code new Integer(...)}/{@code new Boolean(...)} constructors, which also
 * lets small values reuse cached instances.
 *
 * @param field the field selector
 * @return the current value of {@code field}
 * @throws IllegalStateException if {@code field} is not a known field
 */
public Object getFieldValue(_Fields field)
{
    switch (field) {
        case NUM_VALUES:
            return getNum_values();
        case NUM_NULLS:
            return getNum_nulls();
        case NUM_ROWS:
            return getNum_rows();
        case ENCODING:
            return getEncoding();
        case DEFINITION_LEVELS_BYTE_LENGTH:
            return getDefinition_levels_byte_length();
        case REPETITION_LEVELS_BYTE_LENGTH:
            return getRepetition_levels_byte_length();
        case IS_COMPRESSED:
            return isIs_compressed();
        case STATISTICS:
            return getStatistics();
    }
    throw new IllegalStateException();
}
/**
 * Returns the value of the requested Thrift field as an {@code Object}, boxing
 * primitive-typed fields.
 * <p>
 * Uses autoboxing ({@code Integer.valueOf}/{@code Boolean.valueOf}) instead of the
 * deprecated {@code new Integer(...)}/{@code new Boolean(...)} constructors, which also
 * lets small values reuse cached instances.
 *
 * @param field the field selector
 * @return the current value of {@code field}
 * @throws IllegalStateException if {@code field} is not a known field
 */
public Object getFieldValue(_Fields field)
{
    switch (field) {
        case NUM_VALUES:
            return getNum_values();
        case NUM_NULLS:
            return getNum_nulls();
        case NUM_ROWS:
            return getNum_rows();
        case ENCODING:
            return getEncoding();
        case DEFINITION_LEVELS_BYTE_LENGTH:
            return getDefinition_levels_byte_length();
        case REPETITION_LEVELS_BYTE_LENGTH:
            return getRepetition_levels_byte_length();
        case IS_COMPRESSED:
            return isIs_compressed();
        case STATISTICS:
            return getStatistics();
    }
    throw new IllegalStateException();
}
/**
 * Decodes a V2 data page header and appends the corresponding {@link DataPageV2} to {@code pages}.
 * <p>
 * NOTE(review): assumes the underlying input is positioned at the start of this page's
 * repetition-level bytes; the three {@code getBytesInput} calls consume the repetition
 * levels, definition levels, and value payload in that order — confirm against the caller.
 *
 * @param pageHeader Thrift page header whose {@code data_page_header_v2} section is read
 * @param uncompressedPageSize uncompressed size recorded in the page header
 * @param compressedPageSize on-disk size of the page; the value payload is whatever
 *        remains after the two level sections
 * @param pages output list the decoded page is appended to
 * @return the number of values recorded in the page header
 * @throws IOException if reading the page bytes fails
 */
private long readDataPageV2(PageHeader pageHeader, int uncompressedPageSize, int compressedPageSize, List<DataPage> pages)
        throws IOException
{
    DataPageHeaderV2 header = pageHeader.getData_page_header_v2();
    int repetitionLevelsLength = header.getRepetition_levels_byte_length();
    int definitionLevelsLength = header.getDefinition_levels_byte_length();
    // Value bytes are what is left of the compressed page after both level sections.
    int valuesLength = compressedPageSize - repetitionLevelsLength - definitionLevelsLength;
    pages.add(new DataPageV2(
            header.getNum_rows(),
            header.getNum_nulls(),
            header.getNum_values(),
            getBytesInput(repetitionLevelsLength),
            getBytesInput(definitionLevelsLength),
            Encoding.valueOf(header.getEncoding().name()),
            getBytesInput(valuesLength),
            uncompressedPageSize,
            ParquetMetadataReader.readStats(
                    header.getStatistics(),
                    descriptor.getColumnDescriptor().getType()),
            header.isIs_compressed()));
    return header.getNum_values();
}
}