.readByteBuffer(filePath, measureColumnChunkOffsets.get(blockletColumnIndex), measureColumnChunkLength.get(blockletColumnIndex));
.readByteBuffer(filePath, measureColumnChunkOffsets.get(columnIndex), dataLength);
buffer = fileReader.readByteBuffer(filePath, currentDimensionOffset, (int) (dimensionChunksOffset.get(endBlockletColumnIndex + 1) - currentDimensionOffset));
buffer = fileReader.readByteBuffer(filePath, currentDimensionOffset, dimensionChunksLength.get(blockletColumnIndex));
buffer = fileReader.readByteBuffer(filePath, currentMeasureOffset, (int) (measureColumnChunkOffsets.get(endColumnIndex + 1) - currentMeasureOffset));
buffer = fileReader.readByteBuffer(filePath, currentDimensionOffset, length);
/**
 * Reads the raw (still-compressed) measure column chunk for the given column.
 *
 * For the last column there is no "next offset" to subtract, so the data chunk
 * metadata is read first to discover the page length; for any other column the
 * length is simply the distance to the next column's offset.
 *
 * @param fileReader file reader used to read bytes from the carbondata file;
 *                   synchronized on because it is shared across threads
 * @param columnIndex index of the measure column to read
 * @return raw measure column chunk (single page)
 */
@Override public MeasureRawColumnChunk readRawMeasureChunk(FileReader fileReader, int columnIndex)
    throws IOException {
  int dataLength = 0;
  if (measureColumnChunkOffsets.size() - 1 == columnIndex) {
    // Last column: read only the metadata chunk to learn the data page length,
    // since there is no following offset to compute the length from.
    DataChunk2 metadataChunk = null;
    synchronized (fileReader) {
      metadataChunk = CarbonUtil.readDataChunk(ByteBuffer.wrap(fileReader
          .readByteArray(filePath, measureColumnChunkOffsets.get(columnIndex),
              measureColumnChunkLength.get(columnIndex))), 0,
          measureColumnChunkLength.get(columnIndex));
    }
    // Total = metadata length + data page length taken from the metadata.
    dataLength = measureColumnChunkLength.get(columnIndex) + metadataChunk.data_page_length;
  } else {
    long currentMeasureOffset = measureColumnChunkOffsets.get(columnIndex);
    dataLength = (int) (measureColumnChunkOffsets.get(columnIndex + 1) - currentMeasureOffset);
  }
  ByteBuffer buffer = null;
  // Reader is shared; lock to keep the seek+read atomic.
  synchronized (fileReader) {
    buffer = fileReader
        .readByteBuffer(filePath, measureColumnChunkOffsets.get(columnIndex), dataLength);
  }
  MeasureRawColumnChunk rawColumnChunk =
      new MeasureRawColumnChunk(columnIndex, buffer, 0, dataLength, this);
  rawColumnChunk.setFileReader(fileReader);
  rawColumnChunk.setPagesCount(1);
  rawColumnChunk.setRowCount(new int[] { numberOfRows });
  return rawColumnChunk;
}
/** * Below method will be used to read the chunk based on block index * * @param fileReader file reader to read the blocks from file * @param columnIndex column to be read * @return dimension column chunk */ public DimensionRawColumnChunk readRawDimensionChunk(FileReader fileReader, int columnIndex) throws IOException { int length = 0; if (dimensionChunksOffset.size() - 1 == columnIndex) { // Incase of last block read only for datachunk and read remaining while converting it. length = dimensionChunksLength.get(columnIndex); } else { long currentDimensionOffset = dimensionChunksOffset.get(columnIndex); length = (int) (dimensionChunksOffset.get(columnIndex + 1) - currentDimensionOffset); } ByteBuffer buffer = null; synchronized (fileReader) { buffer = fileReader.readByteBuffer(filePath, dimensionChunksOffset.get(columnIndex), length); } DimensionRawColumnChunk rawColumnChunk = new DimensionRawColumnChunk(columnIndex, buffer, 0, length, this); rawColumnChunk.setFileReader(fileReader); rawColumnChunk.setPagesCount(1); rawColumnChunk.setRowCount(new int[] { numberOfRows }); return rawColumnChunk; }
ByteBuffer buffer = null; synchronized (fileReader) { buffer = fileReader.readByteBuffer(filePath, currentDimensionOffset, (int) (dimensionChunksOffset.get(endColumnBlockletIndex + 1) - currentDimensionOffset));
ByteBuffer buffer = null; synchronized (fileReader) { buffer = fileReader.readByteBuffer(filePath, currentMeasureOffset, (int) (measureColumnChunkOffsets.get(endColumnIndex + 1) - currentMeasureOffset));
taskAttemptContext.getConfiguration()); ByteBuffer buffer = reader .readByteBuffer(FileFactory.getUpdatedFilePath(splitPath), ((CarbonInputSplit) inputSplit).getDetailInfo().getBlockSize() - 8, 8);
/**
 * Method to read the blocks data based on block index.
 *
 * @param fileReader file reader to read the blocks; shared across threads, so
 *                   the read is performed under its monitor (matching
 *                   readRawDimensionChunk in this reader)
 * @param columnIndex column to be read
 * @return measure data chunk (single page, still compressed)
 */
@Override public MeasureRawColumnChunk readRawMeasureChunk(FileReader fileReader, int columnIndex)
    throws IOException {
  DataChunk dataChunk = measureColumnChunks.get(columnIndex);
  ByteBuffer buffer = null;
  // FIX: synchronize on the shared reader so the seek+read is atomic — the
  // sibling readRawDimensionChunk already does this; reading unsynchronized
  // here can interleave seeks from concurrent readers on the same FileReader.
  synchronized (fileReader) {
    buffer = fileReader
        .readByteBuffer(filePath, dataChunk.getDataPageOffset(), dataChunk.getDataPageLength());
  }
  MeasureRawColumnChunk rawColumnChunk =
      new MeasureRawColumnChunk(columnIndex, buffer, 0, dataChunk.getDataPageLength(), this);
  rawColumnChunk.setFileReader(fileReader);
  rawColumnChunk.setPagesCount(1);
  rawColumnChunk.setRowCount(new int[] { numberOfRows });
  return rawColumnChunk;
}
/**
 * Reads the raw (compressed) dimension column chunk for the given column.
 *
 * @param fileReader file reader to read the blocks from file; the read is done
 *                   under its monitor because the reader is shared
 * @param columnIndex column to be read
 * @return raw dimension column chunk (single page)
 */
@Override public DimensionRawColumnChunk readRawDimensionChunk(FileReader fileReader,
    int columnIndex) throws IOException {
  DataChunk chunkMetadata = dimensionColumnChunk.get(columnIndex);
  int pageLength = chunkMetadata.getDataPageLength();
  ByteBuffer rawData;
  synchronized (fileReader) {
    rawData = fileReader.readByteBuffer(filePath, chunkMetadata.getDataPageOffset(), pageLength);
  }
  DimensionRawColumnChunk result =
      new DimensionRawColumnChunk(columnIndex, rawData, 0, pageLength, this);
  result.setFileReader(fileReader);
  result.setPagesCount(1);
  result.setRowCount(new int[] { numberOfRows });
  return result;
}
/**
 * Returns the writer/version details recorded in a carbondata file's footer,
 * formatted as a human-readable string.
 *
 * Reads the last 8 bytes of the file (the footer offset), then reads the
 * version-3 footer and extracts the "written by" / "written version" entries.
 *
 * @param dataFilePath path of the carbondata file
 * @return formatted version details, or a not-found message when the footer
 *         carries no extra info
 * @throws IOException if the file or footer cannot be read
 */
public static String getVersionDetails(String dataFilePath) throws IOException {
  long fileSize =
      FileFactory.getCarbonFile(dataFilePath, FileFactory.getFileType(dataFilePath)).getSize();
  FileReader fileReader = FileFactory.getFileHolder(FileFactory.getFileType(dataFilePath));
  ByteBuffer buffer;
  // FIX: close the reader even when the read throws; previously a failed read
  // leaked the FileReader because finish() was only reached on success.
  try {
    buffer = fileReader
        .readByteBuffer(FileFactory.getUpdatedFilePath(dataFilePath), fileSize - 8, 8);
  } finally {
    fileReader.finish();
  }
  CarbonFooterReaderV3 footerReader = new CarbonFooterReaderV3(dataFilePath, buffer.getLong());
  FileFooter3 footer = footerReader.readFooterVersion3();
  if (null != footer.getExtra_info()) {
    return footer.getExtra_info().get(CarbonCommonConstants.CARBON_WRITTEN_BY_FOOTER_INFO)
        + " in version: "
        + footer.getExtra_info().get(CarbonCommonConstants.CARBON_WRITTEN_VERSION);
  } else {
    return "Version Details are not found in carbondata file";
  }
}
}
context.getConfiguration()); ByteBuffer buffer = reader .readByteBuffer(FileFactory.getUpdatedFilePath(splitPath), inputSplit.getLength() - 8, 8); ((CarbonInputSplit) inputSplit).getDetailInfo().setBlockFooterOffset(buffer.getLong());
+ dimensionColumnChunk.rowid_page_length; synchronized (dimensionRawColumnChunk.getFileReader()) { rawData = dimensionRawColumnChunk.getFileReader().readByteBuffer(filePath, dimensionChunksOffset.get(blockIndex) + dimensionChunksLength.get(blockIndex), totalDimensionDataLength);
/**
 * Below method will be used to convert the compressed measure chunk raw data
 * to actual data.
 *
 * Note: mutates {@code this.compressor} to the compressor named in the page's
 * chunk metadata before decoding.
 *
 * @param rawColumnPage measure raw chunk holding the blocklet-level DataChunk3
 * @param pageNumber page number within the raw chunk to decode
 * @param reusableDataBuffer scratch buffer reused across decodes
 * @return decoded column page with its null bitset applied
 */
@Override public ColumnPage decodeColumnPage(
    MeasureRawColumnChunk rawColumnPage, int pageNumber,
    ReusableDataBuffer reusableDataBuffer) throws IOException, MemoryException {
  // data chunk of blocklet column
  DataChunk3 dataChunk3 = rawColumnPage.getDataChunkV3();
  // data chunk of page
  DataChunk2 pageMetadata = dataChunk3.getData_chunk_list().get(pageNumber);
  String compressorName = CarbonMetadataUtil.getCompressorNameFromChunkMeta(
      pageMetadata.getChunk_meta());
  this.compressor = CompressorFactory.getInstance().getCompressor(compressorName);
  // calculating the start point of data
  // as buffer can contain multiple column data, start point will be
  // datachunkoffset + data chunk length + page offset
  long offset = rawColumnPage.getOffSet() + measureColumnChunkLength
      .get(rawColumnPage.getColumnIndex()) + dataChunk3.getPage_offset().get(pageNumber);
  // read only this page's bytes from the file
  ByteBuffer buffer = rawColumnPage.getFileReader()
      .readByteBuffer(filePath, offset, pageMetadata.data_page_length);
  BitSet nullBitSet = QueryUtil.getNullBitSet(pageMetadata.presence, this.compressor);
  ColumnPage decodedPage = decodeMeasure(pageMetadata, buffer, 0, null, nullBitSet,
      reusableDataBuffer);
  decodedPage.setNullBits(nullBitSet);
  return decodedPage;
}
.readByteBuffer(filePath, offset, length);