@SuppressWarnings("unused")
@Override
public void writeAllTo(OutputStream out) throws IOException {
  // Stream each backing input to the target in order, tracing sizes when debugging.
  for (BytesInput chunk : inputs) {
    // nested sequences get brace markers so the debug log shows the nesting
    boolean traceNesting = DEBUG && chunk instanceof SequenceBytesIn;
    if (DEBUG) LOG.debug("write " + chunk.size() + " bytes to out");
    if (traceNesting) LOG.debug("{");
    chunk.writeAllTo(out);
    if (traceNesting) LOG.debug("}");
  }
}
@SuppressWarnings("unused")
@Override
public void writeAllTo(OutputStream out) throws IOException {
  // Forward every underlying input to the destination stream, in sequence order.
  for (BytesInput part : inputs) {
    if (DEBUG) {
      LOG.debug("write " + part.size() + " bytes to out");
      // mark the start of a nested sequence in the debug trace
      if (part instanceof SequenceBytesIn) LOG.debug("{");
    }
    part.writeAllTo(out);
    // mark the end of a nested sequence in the debug trace
    if (DEBUG && part instanceof SequenceBytesIn) LOG.debug("}");
  }
}
@Override
public void writeAllTo(OutputStream out) throws IOException {
  // Write the concatenation of all inputs to the given stream.
  for (BytesInput element : inputs) {
    if (DEBUG) LOG.debug("write " + element.size() + " bytes to out");
    // brace markers make nested sequences visible in the debug log
    if (DEBUG && element instanceof SequenceBytesIn) LOG.debug("{");
    element.writeAllTo(out);
    if (DEBUG && element instanceof SequenceBytesIn) LOG.debug("}");
  }
}
@Deprecated @Override public void writePage(BytesInput bytes, int valueCount, Encoding rlEncoding, Encoding dlEncoding, Encoding valuesEncoding) throws IOException { long uncompressedSize = bytes.size(); BytesInput compressedBytes = compressor.compress(bytes); long compressedSize = compressedBytes.size(); BooleanStatistics statistics = new BooleanStatistics(); // dummy stats object parquetMetadataConverter.writeDataPageHeader( (int)uncompressedSize, (int)compressedSize, valueCount, statistics, rlEncoding, dlEncoding, valuesEncoding, buf); this.uncompressedLength += uncompressedSize; this.compressedLength += compressedSize; this.totalValueCount += valueCount; this.pageCount += 1; compressedBytes.writeAllTo(buf); encodings.add(rlEncoding); encodings.add(dlEncoding); encodings.add(valuesEncoding); }
/**
 * Writes a data page along with its statistics.
 *
 * @param bytes the uncompressed page payload
 * @param valueCount number of values encoded in the page
 * @param statistics statistics for the values in this page; merged into the chunk totals
 * @param rlEncoding repetition level encoding
 * @param dlEncoding definition level encoding
 * @param valuesEncoding values encoding
 * @throws IOException if compressing or buffering the page fails
 */
@Override
public void writePage(BytesInput bytes, int valueCount, Statistics statistics, Encoding rlEncoding, Encoding dlEncoding, Encoding valuesEncoding) throws IOException {
  // Compress first so both sizes are known when serializing the header.
  long rawSize = bytes.size();
  BytesInput compressed = compressor.compress(bytes);
  long packedSize = compressed.size();
  parquetMetadataConverter.writeDataPageHeader(
      (int) rawSize,
      (int) packedSize,
      valueCount,
      statistics,
      rlEncoding,
      dlEncoding,
      valuesEncoding,
      buf);
  // Update the per-chunk accounting, including the running statistics.
  this.uncompressedLength += rawSize;
  this.compressedLength += packedSize;
  this.totalValueCount += valueCount;
  this.pageCount += 1;
  this.totalStatistics.mergeStatistics(statistics);
  // The header was written to buf above; the payload follows it.
  compressed.writeAllTo(buf);
  encodings.add(rlEncoding);
  encodings.add(dlEncoding);
  encodings.add(valuesEncoding);
}
public BytesInput compress(BytesInput bytes) throws IOException { final BytesInput compressedBytes; if (codec == null) { compressedBytes = bytes; } else { compressedOutBuffer.reset(); if (compressor != null) { // null compressor for non-native gzip compressor.reset(); } CompressionOutputStream cos = codec.createOutputStream(compressedOutBuffer, compressor); bytes.writeAllTo(cos); cos.finish(); cos.close(); compressedBytes = BytesInput.from(compressedOutBuffer); } return compressedBytes; }
public BytesInput compress(BytesInput bytes) throws IOException { final BytesInput compressedBytes; if (codec == null) { compressedBytes = bytes; } else { compressedOutBuffer.reset(); if (compressor != null) { // null compressor for non-native gzip compressor.reset(); } CompressionOutputStream cos = codec.createOutputStream(compressedOutBuffer, compressor); bytes.writeAllTo(cos); cos.finish(); cos.close(); compressedBytes = BytesInput.from(compressedOutBuffer); } return compressedBytes; }
/**
 * Materializes the contents of this input into a freshly allocated byte array.
 *
 * @return a new byte array materializing the contents of this input
 * @throws IOException if reading fails, or if the contents exceed the capacity
 *                     of a single Java array ({@code Integer.MAX_VALUE} bytes)
 */
public byte[] toByteArray() throws IOException {
  long totalSize = size();
  // A Java array holds at most Integer.MAX_VALUE bytes; fail fast instead of
  // silently truncating the size in the narrowing cast below.
  if (totalSize > Integer.MAX_VALUE) {
    throw new IOException("Cannot materialize " + totalSize + " bytes into a single byte array");
  }
  BAOS baos = new BAOS((int) totalSize);
  this.writeAllTo(baos);
  if (DEBUG) LOG.debug("converted " + size() + " to byteArray of " + baos.size() + " bytes");
  return baos.getBuf();
}
/**
 * Compresses the given input with this writer's codec, or returns it as-is when
 * no codec is configured.
 *
 * @param bytes the uncompressed input
 * @return the compressed bytes (backed by the shared output buffer), or {@code bytes} itself when codec is null
 * @throws IOException if the codec fails while compressing
 */
public BytesInput compress(BytesInput bytes) throws IOException {
  final BytesInput compressedBytes;
  if (codec == null) {
    compressedBytes = bytes;
  } else {
    compressedOutBuffer.reset();
    if (compressor != null) {
      compressor.reset();
    }
    CompressionOutputStream outputStream = codec.createOutputStream(compressedOutBuffer, compressor);
    // close in a finally block so the codec stream is not leaked if the write
    // or finish() throws
    try {
      bytes.writeAllTo(outputStream);
      outputStream.finish();
    } finally {
      outputStream.close();
    }
    compressedBytes = BytesInput.from(compressedOutBuffer);
  }
  return compressedBytes;
}
/**
 * Materializes the contents of this input into a freshly allocated byte array.
 *
 * @return a new byte array materializing the contents of this input
 * @throws IOException if reading fails, or if the contents exceed the capacity
 *                     of a single Java array ({@code Integer.MAX_VALUE} bytes)
 */
public byte[] toByteArray() throws IOException {
  long totalSize = size();
  // A Java array holds at most Integer.MAX_VALUE bytes; fail fast instead of
  // silently truncating the size in the narrowing cast below.
  if (totalSize > Integer.MAX_VALUE) {
    throw new IOException("Cannot materialize " + totalSize + " bytes into a single byte array");
  }
  BAOS baos = new BAOS((int) totalSize);
  this.writeAllTo(baos);
  if (DEBUG) LOG.debug("converted " + size() + " to byteArray of " + baos.size() + " bytes");
  return baos.getBuf();
}
/**
 * Materializes the contents of this input into a freshly allocated byte array.
 *
 * @return a new byte array materializing the contents of this input
 * @throws IOException if reading fails, or if the contents exceed the capacity
 *                     of a single Java array ({@code Integer.MAX_VALUE} bytes)
 */
public byte[] toByteArray() throws IOException {
  long totalSize = size();
  // A Java array holds at most Integer.MAX_VALUE bytes; fail fast instead of
  // silently truncating the size in the narrowing cast below.
  if (totalSize > Integer.MAX_VALUE) {
    throw new IOException("Cannot materialize " + totalSize + " bytes into a single byte array");
  }
  BAOS baos = new BAOS((int) totalSize);
  this.writeAllTo(baos);
  if (DEBUG) LOG.debug("converted " + size() + " to byteArray of " + baos.size() + " bytes");
  return baos.getBuf();
}
// account for the page payload plus the header that was just serialized
this.compressedLength += compressedPageSize + headerSize;
if (DEBUG) LOG.debug(out.getPos() + ": write data page content " + compressedPageSize);
// copy the already-compressed page bytes to the output
bytes.writeAllTo(out);
// record the encodings used, for the column chunk metadata
currentEncodings.add(rlEncoding);
currentEncodings.add(dlEncoding);
// account for the page payload plus the header that was just serialized
this.compressedLength += compressedPageSize + headerSize;
if (DEBUG) LOG.debug(out.getPos() + ": write data page content " + compressedPageSize);
// copy the already-compressed page bytes to the output
bytes.writeAllTo(out);
// record the encodings used, for the column chunk metadata
currentEncodings.add(rlEncoding);
currentEncodings.add(dlEncoding);
/**
 * writes a number of pages at once
 * @param bytes bytes to be written including page headers
 * @param uncompressedTotalPageSize total uncompressed size (without page headers)
 * @param compressedTotalPageSize total compressed size (without page headers)
 * @param totalStats statistics aggregated over all pages being written
 * @param encodings the encodings used by these pages, recorded for the chunk metadata
 * @throws IOException if writing to the underlying output fails
 */
void writeDataPages(BytesInput bytes, long uncompressedTotalPageSize, long compressedTotalPageSize, Statistics totalStats, List<parquet.column.Encoding> encodings) throws IOException {
  state = state.write();
  if (DEBUG) LOG.debug(out.getPos() + ": write data pages");
  // the input already includes the serialized page headers, so the header
  // overhead is whatever exceeds the compressed payload total
  long headersSize = bytes.size() - compressedTotalPageSize;
  this.uncompressedLength += uncompressedTotalPageSize + headersSize;
  this.compressedLength += compressedTotalPageSize + headersSize;
  if (DEBUG) LOG.debug(out.getPos() + ": write data pages content");
  bytes.writeAllTo(out);
  currentEncodings.addAll(encodings);
  // replaces (not merges) the current chunk statistics with the provided totals
  currentStatistics = totalStats;
}
/**
 * writes a number of pages at once
 * @param bytes bytes to be written including page headers
 * @param uncompressedTotalPageSize total uncompressed size (without page headers)
 * @param compressedTotalPageSize total compressed size (without page headers)
 * @param totalStats statistics aggregated over all pages being written
 * @param encodings the encodings used by these pages, recorded for the chunk metadata
 * @throws IOException if writing to the underlying output fails
 */
void writeDataPages(BytesInput bytes, long uncompressedTotalPageSize, long compressedTotalPageSize, Statistics totalStats, List<parquet.column.Encoding> encodings) throws IOException {
  state = state.write();
  if (DEBUG) LOG.debug(out.getPos() + ": write data pages");
  // the input already includes the serialized page headers, so the header
  // overhead is whatever exceeds the compressed payload total
  long headersSize = bytes.size() - compressedTotalPageSize;
  this.uncompressedLength += uncompressedTotalPageSize + headersSize;
  this.compressedLength += compressedTotalPageSize + headersSize;
  if (DEBUG) LOG.debug(out.getPos() + ": write data pages content");
  bytes.writeAllTo(out);
  currentEncodings.addAll(encodings);
  // replaces (not merges) the current chunk statistics with the provided totals
  currentStatistics = totalStats;
}
// account for the page payload plus the header that was just serialized
this.compressedLength += compressedPageSize + headerSize;
if (DEBUG) LOG.debug(out.getPos() + ": write data page content " + compressedPageSize);
// copy the already-compressed page bytes to the output
bytes.writeAllTo(out);
// fold this page's statistics into the running chunk statistics
currentStatistics.mergeStatistics(statistics);
// record the encoding used, for the column chunk metadata
currentEncodings.add(rlEncoding);
// account for the page payload plus the header that was just serialized
this.compressedLength += compressedPageSize + headerSize;
if (DEBUG) LOG.debug(out.getPos() + ": write data page content " + compressedPageSize);
// copy the already-compressed page bytes to the output
bytes.writeAllTo(out);
// fold this page's statistics into the running chunk statistics
currentStatistics.mergeStatistics(statistics);
// record the encoding used, for the column chunk metadata
currentEncodings.add(rlEncoding);
// flush the bytes accumulated so far into tmp, then reset the writer for the
// next batch (NOTE(review): bicw's exact role is not visible here — confirm
// against the enclosing method)
bicw.getBytes().writeAllTo(tmp);
bicw.reset();
/**
 * writes a dictionary page
 * @param dictionaryPage the dictionary page to serialize (header followed by its bytes)
 * @throws IOException if writing to the underlying output fails
 */
public void writeDictionaryPage(DictionaryPage dictionaryPage) throws IOException {
  state = state.write();
  if (DEBUG) LOG.debug(out.getPos() + ": write dictionary page: " + dictionaryPage.getDictionarySize() + " values");
  // remember where this chunk's dictionary page starts, for the chunk metadata
  currentChunkDictionaryPageOffset = out.getPos();
  int uncompressedSize = dictionaryPage.getUncompressedSize();
  int compressedPageSize = (int)dictionaryPage.getBytes().size(); // TODO: fix casts
  metadataConverter.writeDictionaryPageHeader(
      uncompressedSize,
      compressedPageSize,
      dictionaryPage.getDictionarySize(),
      dictionaryPage.getEncoding(),
      out);
  // header size is the distance the stream advanced while writing the header
  long headerSize = out.getPos() - currentChunkDictionaryPageOffset;
  this.uncompressedLength += uncompressedSize + headerSize;
  this.compressedLength += compressedPageSize + headerSize;
  if (DEBUG) LOG.debug(out.getPos() + ": write dictionary page content " + compressedPageSize);
  // page payload follows its header
  dictionaryPage.getBytes().writeAllTo(out);
  currentEncodings.add(dictionaryPage.getEncoding());
}
/**
 * writes a dictionary page
 * @param dictionaryPage the dictionary page to serialize (header followed by its bytes)
 * @throws IOException if writing to the underlying output fails
 */
public void writeDictionaryPage(DictionaryPage dictionaryPage) throws IOException {
  state = state.write();
  if (DEBUG) LOG.debug(out.getPos() + ": write dictionary page: " + dictionaryPage.getDictionarySize() + " values");
  // remember where this chunk's dictionary page starts, for the chunk metadata
  currentChunkDictionaryPageOffset = out.getPos();
  int uncompressedSize = dictionaryPage.getUncompressedSize();
  int compressedPageSize = (int)dictionaryPage.getBytes().size(); // TODO: fix casts
  metadataConverter.writeDictionaryPageHeader(
      uncompressedSize,
      compressedPageSize,
      dictionaryPage.getDictionarySize(),
      dictionaryPage.getEncoding(),
      out);
  // header size is the distance the stream advanced while writing the header
  long headerSize = out.getPos() - currentChunkDictionaryPageOffset;
  this.uncompressedLength += uncompressedSize + headerSize;
  this.compressedLength += compressedPageSize + headerSize;
  if (DEBUG) LOG.debug(out.getPos() + ": write dictionary page content " + compressedPageSize);
  // page payload follows its header
  dictionaryPage.getBytes().writeAllTo(out);
  currentEncodings.add(dictionaryPage.getEncoding());
}