/**
 * Resets the stream state by flushing any buffered, uncompressed
 * bytes as a final chunk.
 *
 * @throws IOException if writing the pending chunk fails
 */
@Override
public void resetState()
        throws IOException
{
    finish();
}
/**
 * Buffers one byte, emitting a compressed chunk once the input buffer
 * reaches its fill limit.
 *
 * @param b the byte to write (only the low 8 bits are used)
 * @throws IOException if writing a chunk to the underlying stream fails
 */
@Override
public void write(int b)
        throws IOException
{
    inputBuffer[inputOffset] = (byte) b;
    inputOffset++;
    if (inputOffset >= inputMaxSize) {
        writeNextChunk(inputBuffer, 0, this.inputOffset);
    }
}
/**
 * Creates a Hadoop-framed Snappy compression stream over the given output.
 *
 * @param outputStream destination for the compressed data
 * @return a new {@code HadoopSnappyOutputStream} sized by {@code getBufferSize()}
 * @throws IOException declared for interface compatibility
 */
@Override
public CompressionOutputStream createOutputStream(OutputStream outputStream)
        throws IOException
{
    return new HadoopSnappyOutputStream(outputStream, getBufferSize());
}
/**
 * Compresses one chunk and writes it in Hadoop's framing format:
 * the uncompressed length, then the compressed length (both big-endian
 * ints), then the compressed bytes. Resets the input buffer position.
 *
 * @param input source bytes to compress
 * @param inputOffset starting offset within {@code input}
 * @param inputLength number of bytes to compress
 * @throws IOException if writing to the underlying stream fails
 */
private void writeNextChunk(byte[] input, int inputOffset, int inputLength)
        throws IOException
{
    int compressedLength = compressor.compress(input, inputOffset, inputLength, outputBuffer, 0, outputBuffer.length);

    writeBigEndianInt(inputLength);
    writeBigEndianInt(compressedLength);
    out.write(outputBuffer, 0, compressedLength);

    this.inputOffset = 0;
}
/**
 * Creates a Hadoop-framed Snappy output stream.
 *
 * <p>As written on a single collapsed line, the trailing {@code //} comment
 * commented out the {@code inputMaxSize} and {@code outputBuffer}
 * assignments, leaving those fields uninitialized; this restores them as
 * executable statements.
 *
 * @param out destination for the compressed, framed output
 * @param bufferSize size of the internal input buffer in bytes
 */
public HadoopSnappyOutputStream(OutputStream out, int bufferSize)
{
    super(out);
    inputBuffer = new byte[bufferSize];
    // leave extra space free at end of buffers to make compression (slightly) faster
    inputMaxSize = inputBuffer.length - compressionOverhead(bufferSize);
    outputBuffer = new byte[compressor.maxCompressedLength(inputMaxSize) + SIZE_OF_LONG];
}
/**
 * Creates a Hadoop-framed Snappy compression stream, validating that the
 * supplied compressor is the expected Snappy implementation.
 *
 * @param outputStream destination for the compressed data
 * @param compressor must be a {@code HadoopSnappyCompressor}
 * @return a new {@code HadoopSnappyOutputStream} sized by {@code getBufferSize()}
 * @throws IllegalArgumentException if {@code compressor} is not a {@code HadoopSnappyCompressor}
 * @throws IOException declared for interface compatibility
 */
@Override
public CompressionOutputStream createOutputStream(OutputStream outputStream, Compressor compressor)
        throws IOException
{
    if (!(compressor instanceof HadoopSnappyCompressor)) {
        // fixed message: the check is about the compressor, not a decompressor
        throw new IllegalArgumentException("Compressor is not the Snappy compressor");
    }
    return new HadoopSnappyOutputStream(outputStream, getBufferSize());
}
/**
 * Compresses one chunk and writes it in Hadoop's framing format:
 * the uncompressed length, then the compressed length (both big-endian
 * ints), then the compressed bytes. Resets the input buffer position.
 *
 * @param input source bytes to compress
 * @param inputOffset starting offset within {@code input}
 * @param inputLength number of bytes to compress
 * @throws IOException if writing to the underlying stream fails
 */
private void writeNextChunk(byte[] input, int inputOffset, int inputLength)
        throws IOException
{
    int compressedLength = compressor.compress(input, inputOffset, inputLength, outputBuffer, 0, outputBuffer.length);

    writeBigEndianInt(inputLength);
    writeBigEndianInt(compressedLength);
    out.write(outputBuffer, 0, compressedLength);

    this.inputOffset = 0;
}
/**
 * Creates a Hadoop-framed Snappy output stream.
 *
 * <p>As written on a single collapsed line, the trailing {@code //} comment
 * commented out the {@code inputMaxSize} and {@code outputBuffer}
 * assignments, leaving those fields uninitialized; this restores them as
 * executable statements.
 *
 * @param out destination for the compressed, framed output
 * @param bufferSize size of the internal input buffer in bytes
 */
public HadoopSnappyOutputStream(OutputStream out, int bufferSize)
{
    super(out);
    inputBuffer = new byte[bufferSize];
    // leave extra space free at end of buffers to make compression (slightly) faster
    inputMaxSize = inputBuffer.length - compressionOverhead(bufferSize);
    outputBuffer = new byte[compressor.maxCompressedLength(inputMaxSize) + SIZE_OF_LONG];
}
/**
 * Flushes any remaining buffered bytes as a final compressed chunk.
 * Does nothing when the buffer is empty.
 *
 * @throws IOException if writing the final chunk fails
 */
@Override
public void finish()
        throws IOException
{
    if (inputOffset > 0) {
        writeNextChunk(inputBuffer, 0, this.inputOffset);
    }
}
/**
 * Creates a Hadoop-framed Snappy compression stream, validating that the
 * supplied compressor is the expected Snappy implementation.
 *
 * @param outputStream destination for the compressed data
 * @param compressor must be a {@code HadoopSnappyCompressor}
 * @return a new {@code HadoopSnappyOutputStream} sized by {@code getBufferSize()}
 * @throws IllegalArgumentException if {@code compressor} is not a {@code HadoopSnappyCompressor}
 * @throws IOException declared for interface compatibility
 */
@Override
public CompressionOutputStream createOutputStream(OutputStream outputStream, Compressor compressor)
        throws IOException
{
    if (!(compressor instanceof HadoopSnappyCompressor)) {
        // fixed message: the check is about the compressor, not a decompressor
        throw new IllegalArgumentException("Compressor is not the Snappy compressor");
    }
    return new HadoopSnappyOutputStream(outputStream, getBufferSize());
}
/**
 * Resets the stream state by flushing any buffered, uncompressed
 * bytes as a final chunk.
 *
 * @throws IOException if writing the pending chunk fails
 */
@Override
public void resetState()
        throws IOException
{
    finish();
}
/**
 * Buffers one byte, emitting a compressed chunk once the input buffer
 * reaches its fill limit.
 *
 * @param b the byte to write (only the low 8 bits are used)
 * @throws IOException if writing a chunk to the underlying stream fails
 */
@Override
public void write(int b)
        throws IOException
{
    inputBuffer[inputOffset] = (byte) b;
    inputOffset++;
    if (inputOffset >= inputMaxSize) {
        writeNextChunk(inputBuffer, 0, this.inputOffset);
    }
}
/**
 * Creates a Hadoop-framed Snappy compression stream over the given output.
 *
 * @param outputStream destination for the compressed data
 * @return a new {@code HadoopSnappyOutputStream} sized by {@code getBufferSize()}
 * @throws IOException declared for interface compatibility
 */
@Override
public CompressionOutputStream createOutputStream(OutputStream outputStream)
        throws IOException
{
    return new HadoopSnappyOutputStream(outputStream, getBufferSize());
}
/**
 * Flushes any remaining buffered bytes as a final compressed chunk.
 * Does nothing when the buffer is empty.
 *
 * @throws IOException if writing the final chunk fails
 */
@Override
public void finish()
        throws IOException
{
    if (inputOffset > 0) {
        writeNextChunk(inputBuffer, 0, this.inputOffset);
    }
}
@Override public void write(byte[] buffer, int offset, int length) throws IOException { while (length > 0) { int chunkSize = Math.min(length, inputMaxSize - inputOffset); // favor writing directly from the user buffer to avoid the extra copy if (inputOffset == 0 && length > inputMaxSize) { writeNextChunk(buffer, offset, chunkSize); } else { System.arraycopy(buffer, offset, inputBuffer, inputOffset, chunkSize); inputOffset += chunkSize; if (inputOffset >= inputMaxSize) { writeNextChunk(inputBuffer, 0, inputOffset); } } length -= chunkSize; offset += chunkSize; } }
@Override public void write(byte[] buffer, int offset, int length) throws IOException { while (length > 0) { int chunkSize = Math.min(length, inputMaxSize - inputOffset); // favor writing directly from the user buffer to avoid the extra copy if (inputOffset == 0 && length > inputMaxSize) { writeNextChunk(buffer, offset, chunkSize); } else { System.arraycopy(buffer, offset, inputBuffer, inputOffset, chunkSize); inputOffset += chunkSize; if (inputOffset >= inputMaxSize) { writeNextChunk(inputBuffer, 0, inputOffset); } } length -= chunkSize; offset += chunkSize; } }