/**
 * Obtains a pooled {@link Compressor} for the given codec without supplying
 * an explicit configuration.
 *
 * @param codec the codec whose compressor type is requested
 * @return a compressor for {@code codec}, taken from the pool
 */
public static Compressor getCompressor(CompressionCodec codec) {
    // Delegate to the two-argument overload with a null Configuration.
    return getCompressor(codec, null);
}
/**
 * Creates a supplier that compresses chunked output with the given Hadoop codec.
 *
 * @param codec        the compression codec to use; must not be null
 * @param minChunkSize minimum chunk size for the buffered output
 * @param maxChunkSize maximum chunk size for the buffered output
 */
public HadoopCompressedSliceOutputSupplier(CompressionCodec codec, int minChunkSize, int maxChunkSize)
{
    this.codec = requireNonNull(codec, "codec is null");
    // Fix: the original re-ran requireNonNull(codec, ...) here even though the
    // assignment above already guarantees codec is non-null.
    this.compressor = CodecPool.getCompressor(this.codec);
    this.bufferedOutput = new ChunkedSliceOutput(minChunkSize, maxChunkSize);
}
public Compressor getCompressor() { CompressionCodec codec = getCodec(conf); if (codec != null) { Compressor compressor = CodecPool.getCompressor(codec); if (LOG.isTraceEnabled()) LOG.trace("Retrieved compressor " + compressor + " from pool."); if (compressor != null) { if (compressor.finished()) { // Somebody returns the compressor to CodecPool but is still using it. LOG.warn("Compressor obtained from CodecPool is already finished()"); } compressor.reset(); } return compressor; } return null; }
/**
 * Opens a writer for the given log file path, optionally wrapping the output
 * in a compression stream backed by a pooled {@link Compressor}.
 *
 * @param path  log file location to create
 * @param codec compression codec to apply, or null for uncompressed output
 * @throws IOException if the file or the compression stream cannot be created
 */
public FlexibleDelimitedFileWriter(LogFilePath path, CompressionCodec codec) throws IOException {
    Path fsPath = new Path(path.getLogFilePath());
    FileSystem fs = FileUtil.getFileSystem(path.getLogFilePath());
    this.mCountingStream = new CountingOutputStream(fs.create(fsPath));
    if (codec == null) {
        this.mWriter = new BufferedOutputStream(this.mCountingStream);
    } else {
        Compressor compressor = CodecPool.getCompressor(codec);
        try {
            this.mWriter = new BufferedOutputStream(
                    codec.createOutputStream(this.mCountingStream, compressor));
        } catch (IOException | RuntimeException e) {
            // Fix: the original leaked the pooled compressor when
            // createOutputStream threw; return it before propagating.
            CodecPool.returnCompressor(compressor);
            throw e;
        }
        // Only record the compressor once the stream owns it successfully.
        mCompressor = compressor;
    }
}
/**
 * Opens a delimited-text writer for the given log file path, optionally
 * compressing output via a pooled {@link Compressor}.
 *
 * @param path  log file location to create
 * @param codec compression codec to apply, or null for uncompressed output
 * @throws IOException if the file or the compression stream cannot be created
 */
public DelimitedTextFileWriter(LogFilePath path, CompressionCodec codec) throws IOException {
    Path fsPath = new Path(path.getLogFilePath());
    FileSystem fs = FileUtil.getFileSystem(path.getLogFilePath());
    this.mCountingStream = new CountingOutputStream(fs.create(fsPath));
    if (codec == null) {
        this.mWriter = new BufferedOutputStream(this.mCountingStream);
    } else {
        Compressor compressor = CodecPool.getCompressor(codec);
        try {
            this.mWriter = new BufferedOutputStream(
                    codec.createOutputStream(this.mCountingStream, compressor));
        } catch (IOException | RuntimeException e) {
            // Fix: the original leaked the pooled compressor when
            // createOutputStream threw; return it before propagating.
            CodecPool.returnCompressor(compressor);
            throw e;
        }
        // Only record the compressor once the stream owns it successfully.
        mCompressor = compressor;
    }
}
public Compressor getCompressor() throws IOException { CompressionCodec codec = getCodec(); if (codec != null) { Compressor compressor = CodecPool.getCompressor(codec); if (compressor != null) { if (compressor.finished()) { // Somebody returns the compressor to CodecPool but is still using // it. LOG.warn("Compressor obtained from CodecPool already finished()"); } else { if(LOG.isDebugEnabled()) { LOG.debug("Got a compressor: " + compressor.hashCode()); } } /** * Following statement is necessary to get around bugs in 0.18 where a * compressor is referenced after returned back to the codec pool. */ compressor.reset(); } return compressor; } return null; }
/**
 * Creates a compression output stream whose compressor comes from the global
 * {@link CodecPool}. On failure the compressor is returned to the pool; on
 * success the stream takes ownership of it via {@code setTrackedCompressor}.
 *
 * @param codec the codec to use to create the output stream
 * @param conf  the configuration to use if a new codec must be created
 * @param out   the output stream to wrap
 * @return the new compression output stream
 * @throws IOException if the stream cannot be created
 */
static CompressionOutputStream createOutputStreamWithCodecPool(
    CompressionCodec codec, Configuration conf, OutputStream out) throws IOException {
  Compressor compressor = CodecPool.getCompressor(codec, conf);
  CompressionOutputStream stream = null;
  try {
    stream = codec.createOutputStream(out, compressor);
  } finally {
    if (stream != null) {
      // The stream now owns the compressor and will release it on close.
      stream.setTrackedCompressor(compressor);
    } else {
      // Creation failed: hand the compressor straight back to the pool.
      CodecPool.returnCompressor(compressor);
    }
  }
  return stream;
}
// Fragment: acquire a pooled compressor for the configured codec.
// NOTE(review): the enclosing method/field declarations are not visible here.
compressor = CodecPool.getCompressor(codec, conf);
// Fragment: when a codec is configured, bind it to the Configuration, take a
// pooled compressor, and wrap the buffer in a compression stream.
// NOTE(review): this snippet is truncated mid-statement ("this.deflateOut =");
// the remainder of the assignment is not visible here.
if (this.codec != null) { ReflectionUtils.setConf(this.codec, this.conf); this.compressor = CodecPool.getCompressor(this.codec); this.deflateFilter = this.codec.createOutputStream(buffer, compressor); this.deflateOut =
/**
 * Writes every cell from the scanner to {@code os}, wrapping the stream in a
 * compression stream when {@code compressor} is given. The stream is closed
 * and any pooled Compressor returned to the CodecPool in all cases.
 *
 * @throws IOException            on write failure, or wrapped as
 *                                DoNotRetryIOException on buffer overruns
 */
private void encodeCellsTo(OutputStream os, CellScanner cellScanner, Codec codec,
    CompressionCodec compressor) throws IOException {
  Compressor pooled = null;
  try {
    if (compressor != null) {
      if (compressor instanceof Configurable) {
        ((Configurable) compressor).setConf(this.conf);
      }
      pooled = CodecPool.getCompressor(compressor);
      os = compressor.createOutputStream(os, pooled);
    }
    Codec.Encoder encoder = codec.getEncoder(os);
    while (cellScanner.advance()) {
      encoder.write(cellScanner.current());
    }
    encoder.flush();
  } catch (BufferOverflowException | IndexOutOfBoundsException e) {
    // Buffer-capacity failures are not retryable; signal that to callers.
    throw new DoNotRetryIOException(e);
  } finally {
    // Close first (flushes any compression stream), then release the compressor.
    os.close();
    if (pooled != null) {
      CodecPool.returnCompressor(pooled);
    }
  }
}
public Compressor getCompressor() { CompressionCodec codec = getCodec(); if (codec != null) { Compressor compressor = CodecPool.getCompressor(codec); if (compressor != null) { if (compressor.finished()) { // Somebody returns the compressor to CodecPool but is still using // it. log.warn("Compressor obtained from CodecPool already finished()"); } else { log.debug("Got a compressor: {}", compressor.hashCode()); } /** * Following statement is necessary to get around bugs in 0.18 where a compressor is * referenced after returned back to the codec pool. */ compressor.reset(); } return compressor; } return null; }
/**
 * Encodes all cells produced by {@code cellScanner} onto {@code os},
 * compressing when a codec is supplied. Cleanup always closes the stream and
 * returns any pooled Compressor.
 */
private void encodeCellsTo(OutputStream os, CellScanner cellScanner, Codec codec,
    CompressionCodec compressor) throws IOException {
  Compressor fromPool = null;
  try {
    if (compressor != null) {
      if (compressor instanceof Configurable) {
        ((Configurable) compressor).setConf(this.conf);
      }
      fromPool = CodecPool.getCompressor(compressor);
      os = compressor.createOutputStream(os, fromPool);
    }
    Codec.Encoder encoder = codec.getEncoder(os);
    while (cellScanner.advance()) {
      encoder.write(cellScanner.current());
    }
    encoder.flush();
  } catch (BufferOverflowException | IndexOutOfBoundsException e) {
    // Treat buffer overruns as non-retryable failures.
    throw new DoNotRetryIOException(e);
  } finally {
    // Order matters: closing flushes the (possibly compressed) stream before
    // the compressor goes back to the pool.
    os.close();
    if (fromPool != null) {
      CodecPool.returnCompressor(fromPool);
    }
  }
}
/**
 * Builds a page compressor for the given codec; when {@code codec} is null the
 * compressor and scratch buffer remain null (uncompressed pages).
 *
 * @param codecName name of the codec (recorded for page metadata)
 * @param codec     Hadoop codec instance, or null for no compression
 * @param pageSize  initial size of the compressed-output scratch buffer
 */
public BytesCompressor(CompressionCodecName codecName, CompressionCodec codec, int pageSize) {
  this.codecName = codecName;
  this.codec = codec;
  if (codec == null) {
    this.compressor = null;
    this.compressedOutBuffer = null;
  } else {
    this.compressor = CodecPool.getCompressor(codec);
    this.compressedOutBuffer = new ByteArrayOutputStream(pageSize);
  }
}
/**
 * Constructs a page compressor. A null {@code codec} yields null compressor
 * state, meaning pages are written uncompressed.
 *
 * @param codecName name of the codec (recorded for page metadata)
 * @param codec     Hadoop codec instance, or null for no compression
 * @param pageSize  initial capacity of the compressed-output buffer
 */
public BytesCompressor(CompressionCodecName codecName, CompressionCodec codec, int pageSize) {
  this.codecName = codecName;
  this.codec = codec;
  this.compressor = (codec == null) ? null : CodecPool.getCompressor(codec);
  this.compressedOutBuffer = (codec == null) ? null : new ByteArrayOutputStream(pageSize);
}
/**
 * Builds a heap-backed compressor for the named codec; compressor state stays
 * null when the name resolves to no codec (uncompressed).
 */
HeapBytesCompressor(CompressionCodecName codecName) {
  this.codecName = codecName;
  this.codec = getCodec(codecName);
  if (codec == null) {
    this.compressor = null;
    this.compressedOutBuffer = null;
  } else {
    this.compressor = CodecPool.getCompressor(codec);
    // pageSize is an enclosing-scope field; sizes the scratch buffer.
    this.compressedOutBuffer = new ByteArrayOutputStream(pageSize);
  }
}
/**
 * Creates a supplier producing compressed slice output for the given codec.
 *
 * @param codec        the compression codec to use; must not be null
 * @param minChunkSize minimum chunk size for the buffered output
 * @param maxChunkSize maximum chunk size for the buffered output
 */
public HadoopCompressedSliceOutputSupplier(CompressionCodec codec, int minChunkSize, int maxChunkSize)
{
    this.codec = requireNonNull(codec, "codec is null");
    // Fix: drop the duplicated requireNonNull(codec, ...) — the line above
    // already established codec is non-null.
    this.compressor = CodecPool.getCompressor(this.codec);
    this.bufferedOutput = new ChunkedSliceOutput(minChunkSize, maxChunkSize);
}
/**
 * Heap-based compressor for the named codec. When no codec is resolved, the
 * compressor and output buffer remain null and data passes through unchanged.
 */
HeapBytesCompressor(CompressionCodecName codecName) {
  this.codecName = codecName;
  this.codec = getCodec(codecName);
  this.compressor = (codec == null) ? null : CodecPool.getCompressor(codec);
  // pageSize is an enclosing-scope field; it sizes the scratch buffer.
  this.compressedOutBuffer = (codec == null) ? null : new ByteArrayOutputStream(pageSize);
}
/**
 * Creates a supplier whose slice output is compressed with the given codec.
 *
 * @param codec        the compression codec to use; must not be null
 * @param minChunkSize minimum chunk size for the buffered output
 * @param maxChunkSize maximum chunk size for the buffered output
 */
public HadoopCompressedSliceOutputSupplier(CompressionCodec codec, int minChunkSize, int maxChunkSize)
{
    this.codec = requireNonNull(codec, "codec is null");
    // Fix: the original repeated requireNonNull(codec, ...) on this line even
    // though the preceding assignment already guarantees non-null.
    this.compressor = CodecPool.getCompressor(this.codec);
    this.bufferedOutput = new ChunkedSliceOutput(minChunkSize, maxChunkSize);
}
// Fragment of an anonymous Callable<Boolean>: grabs a pooled compressor,
// hands it to a shared queue, and reports whether one was obtained.
// NOTE(review): BlockingQueue.put(null) throws NPE — presumably
// CodecPool.getCompressor never returns null here; confirm at the call site.
@Override public Boolean call() throws Exception { Compressor c = CodecPool.getCompressor(codec); queue.put(c); return c != null; } };
// Fragment of an anonymous Callable<Boolean> (duplicate of the snippet above
// in this collection): acquires a compressor from the pool and enqueues it.
// NOTE(review): if getCompressor could return null, queue.put(c) would throw
// NPE before the null is reported via the return value — verify in context.
@Override public Boolean call() throws Exception { Compressor c = CodecPool.getCompressor(codec); queue.put(c); return c != null; } };