/**
 * Wraps the given stream in a decompressing stream for Hadoop-framed
 * Snappy data.
 *
 * @param inputStream stream supplying the compressed bytes
 * @return a new {@link HadoopSnappyInputStream} reading from {@code inputStream}
 * @throws IOException declared for interface compatibility; not thrown here
 */
@Override
public CompressionInputStream createInputStream(InputStream inputStream)
        throws IOException
{
    return new HadoopSnappyInputStream(inputStream);
}
/**
 * Reads a single uncompressed byte.
 *
 * <p>When the current chunk buffer is exhausted, the next chunk is pulled in
 * via {@code readNextChunk}; a resulting chunk length of zero signals end of
 * stream.
 *
 * @return the next byte as an unsigned value in {@code [0, 255]}, or
 *         {@code -1} at end of stream
 * @throws IOException if reading the underlying stream fails
 */
@Override
public int read()
        throws IOException
{
    boolean exhausted = uncompressedChunkOffset >= uncompressedChunkLength;
    if (exhausted) {
        readNextChunk(uncompressedChunk, 0, uncompressedChunk.length);
        if (uncompressedChunkLength == 0) {
            return -1; // no more chunks
        }
    }
    // Mask to return the byte as an unsigned int, then advance the cursor.
    int value = uncompressedChunk[uncompressedChunkOffset] & 0xFF;
    uncompressedChunkOffset++;
    return value;
}
// NOTE(review): this line appears to be a garbled, truncated fragment of a
// readNextChunk method — the method signature is missing, the braces are
// unbalanced ("return false;" is never closed before readInput(...) making it
// unreachable as written), and the decompression logic that should follow
// readInput is cut off. This cannot compile as-is; recover the complete
// method from the original source before editing. Code left byte-identical.
uncompressedChunkLength = 0; while (uncompressedBlockLength == 0) { uncompressedBlockLength = readBigEndianInt(); if (uncompressedBlockLength == -1) { uncompressedBlockLength = 0; int compressedChunkLength = readBigEndianInt(); if (compressedChunkLength == -1) { return false; readInput(compressedChunkLength, compressed);
// NOTE(review): garbled, truncated fragment of a readNextChunk method (and a
// duplicate of an identical fragment elsewhere in this file) — no method
// signature, unbalanced braces ("return false;" is never closed before
// readInput(...), leaving that call unreachable as written), and the logic
// following readInput is cut off. Not compilable as-is; recover the complete
// method from the original source before editing. Code left byte-identical.
uncompressedChunkLength = 0; while (uncompressedBlockLength == 0) { uncompressedBlockLength = readBigEndianInt(); if (uncompressedBlockLength == -1) { uncompressedBlockLength = 0; int compressedChunkLength = readBigEndianInt(); if (compressedChunkLength == -1) { return false; readInput(compressedChunkLength, compressed);
/**
 * Returns the next uncompressed byte, refilling the chunk buffer from the
 * underlying stream when it runs dry.
 *
 * @return an unsigned byte value, or {@code -1} once no further chunk data
 *         is available
 * @throws IOException on failure reading the underlying stream
 */
@Override
public int read()
        throws IOException
{
    if (!(uncompressedChunkOffset < uncompressedChunkLength)) {
        // Current chunk fully consumed — attempt to load the next one.
        readNextChunk(uncompressedChunk, 0, uncompressedChunk.length);
        if (uncompressedChunkLength == 0) {
            return -1; // end of stream
        }
    }
    // & 0xFF converts the signed byte to the unsigned int this API returns.
    return uncompressedChunk[uncompressedChunkOffset++] & 0xFF;
}
/**
 * Creates a decompressing view over {@code inputStream} for Hadoop-framed
 * Snappy data.
 *
 * @param inputStream source of compressed bytes
 * @return a fresh {@link HadoopSnappyInputStream} over the given stream
 * @throws IOException declared to satisfy the interface; this factory itself
 *         performs no I/O
 */
@Override
public CompressionInputStream createInputStream(InputStream inputStream)
        throws IOException
{
    HadoopSnappyInputStream stream = new HadoopSnappyInputStream(inputStream);
    return stream;
}
/**
 * Reads up to {@code length} uncompressed bytes into {@code output}.
 *
 * <p>When the chunk buffer is exhausted, {@code readNextChunk} may decompress
 * the next chunk directly into the caller's buffer; in that case the whole
 * chunk is consumed in one step. Otherwise bytes are served out of the
 * internal chunk buffer.
 *
 * @param output destination buffer
 * @param offset index in {@code output} at which to start writing
 * @param length maximum number of bytes to write
 * @return the number of bytes written, or {@code -1} at end of stream
 * @throws IOException if reading the underlying stream fails
 */
@Override
public int read(byte[] output, int offset, int length)
        throws IOException
{
    if (uncompressedChunkOffset >= uncompressedChunkLength) {
        boolean decompressedDirectly = readNextChunk(output, offset, length);
        if (uncompressedChunkLength == 0) {
            return -1; // end of stream
        }
        if (decompressedDirectly) {
            // The chunk already landed in the caller's buffer — just account for it.
            uncompressedChunkOffset += uncompressedChunkLength;
            return uncompressedChunkLength;
        }
    }
    int remaining = uncompressedChunkLength - uncompressedChunkOffset;
    int count = (length < remaining) ? length : remaining;
    System.arraycopy(uncompressedChunk, uncompressedChunkOffset, output, offset, count);
    uncompressedChunkOffset += count;
    return count;
}
/**
 * Creates a decompressing stream over {@code in}, validating that the caller
 * supplied this codec's decompressor type.
 *
 * <p>The decompressor instance itself is not used by the returned stream;
 * only its type is checked.
 *
 * @param in stream supplying compressed bytes
 * @param decompressor must be a {@code HadoopSnappyDecompressor}
 * @return a new {@link HadoopSnappyInputStream} over {@code in}
 * @throws IllegalArgumentException if {@code decompressor} is not a
 *         {@code HadoopSnappyDecompressor}
 * @throws IOException declared for interface compatibility
 */
@Override
public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor)
        throws IOException
{
    if (decompressor instanceof HadoopSnappyDecompressor) {
        return new HadoopSnappyInputStream(in);
    }
    throw new IllegalArgumentException("Decompressor is not the Snappy decompressor");
}
/**
 * Fills {@code output} with up to {@code length} uncompressed bytes.
 *
 * <p>If the internal buffer is empty, the next chunk is loaded; the loader
 * may decompress straight into {@code output}, which short-circuits the copy
 * path below.
 *
 * @param output buffer receiving uncompressed bytes
 * @param offset starting position within {@code output}
 * @param length maximum bytes to deliver
 * @return bytes delivered, or {@code -1} when the stream is exhausted
 * @throws IOException on underlying read failure
 */
@Override
public int read(byte[] output, int offset, int length)
        throws IOException
{
    boolean bufferEmpty = uncompressedChunkOffset >= uncompressedChunkLength;
    if (bufferEmpty) {
        boolean wroteToCallerBuffer = readNextChunk(output, offset, length);
        if (uncompressedChunkLength == 0) {
            return -1; // nothing left
        }
        if (wroteToCallerBuffer) {
            // Entire chunk was placed directly in output; mark it consumed.
            uncompressedChunkOffset += uncompressedChunkLength;
            return uncompressedChunkLength;
        }
    }
    int copied = Math.min(length, uncompressedChunkLength - uncompressedChunkOffset);
    System.arraycopy(uncompressedChunk, uncompressedChunkOffset, output, offset, copied);
    uncompressedChunkOffset += copied;
    return copied;
}
/**
 * Creates a decompressing stream over {@code in} after verifying the
 * decompressor's type.
 *
 * <p>Only the type of {@code decompressor} matters; the returned stream does
 * not retain or use the instance.
 *
 * @param in stream of compressed bytes
 * @param decompressor required to be a {@code HadoopSnappyDecompressor}
 * @return a new {@link HadoopSnappyInputStream} wrapping {@code in}
 * @throws IllegalArgumentException if the decompressor has the wrong type
 * @throws IOException declared for interface compatibility
 */
@Override
public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor)
        throws IOException
{
    boolean validDecompressor = decompressor instanceof HadoopSnappyDecompressor;
    if (!validDecompressor) {
        throw new IllegalArgumentException("Decompressor is not the Snappy decompressor");
    }
    return new HadoopSnappyInputStream(in);
}