/**
 * Creates a physical writer that streams ORC bytes to {@code path} on the
 * given filesystem.
 *
 * <p>Buffer sizing: if the options enforce an explicit buffer size it is used
 * as-is; otherwise a size is estimated from the stripe size and the number of
 * columns (schema maximum id + 1), capped by the configured buffer size.
 *
 * @param fs   filesystem to create the output file on
 * @param path destination path of the ORC file
 * @param opts writer options (stripe/block sizes, compression, padding, ...)
 * @throws IOException if the output stream or compression streams cannot be
 *                     created; partially acquired resources are released
 */
public PhysicalFsWriter(FileSystem fs,
                        Path path,
                        OrcFile.WriterOptions opts) throws IOException {
  this.path = path;
  long defaultStripeSize = opts.getStripeSize();
  this.addBlockPadding = opts.getBlockPadding();
  if (opts.isEnforceBufferSize()) {
    this.bufferSize = opts.getBufferSize();
  } else {
    this.bufferSize = WriterImpl.getEstimatedBufferSize(defaultStripeSize,
        opts.getSchema().getMaximumId() + 1,
        opts.getBufferSize());
  }
  this.compress = opts.getCompress();
  // maximum bytes of padding allowed per stripe, as a fraction of stripe size
  this.maxPadding = (int) (opts.getPaddingTolerance() * defaultStripeSize);
  this.blockSize = opts.getBlockSize();
  LOG.info("ORC writer created for path: {} with stripeSize: {} blockSize: {}" +
      " compression: {} bufferSize: {}", path, defaultStripeSize, blockSize,
      compress, bufferSize);
  rawWriter = fs.create(path, opts.getOverwrite(), HDFS_BUFFER_SIZE,
      fs.getDefaultReplication(path), blockSize);
  blockOffset = 0;
  codec = OrcCodecPool.getCodec(compress);
  try {
    writer = new OutStream("metadata", bufferSize, codec,
        new DirectStream(rawWriter));
    protobufWriter = CodedOutputStream.newInstance(writer);
  } catch (Throwable t) {
    // Constructor is failing: nobody will ever call close() on this object,
    // so release the pooled codec and the open HDFS stream here to avoid
    // leaking them, then rethrow (precise rethrow keeps `throws IOException`).
    OrcCodecPool.returnCodec(compress, codec);
    rawWriter.close();
    throw t;
  }
  writeVariableLengthBlocks = opts.getWriteVariableLengthBlocks();
  shims = opts.getHadoopShims();
}