@Override
protected LongOutputStreamV1 createValueOutputStream()
{
    // Test fixture: a Snappy-compressed RLEv1 long stream carrying signed values
    // on the DATA stream kind.
    boolean signed = true;
    return new LongOutputStreamV1(SNAPPY, COMPRESSION_BLOCK_SIZE, signed, DATA);
}
/**
 * Builds the LENGTH stream encoder for the given ORC flavor.
 * DWRF files use the RLE v1 writer; standard ORC uses RLE v2.
 * Lengths are non-negative, so the stream is created unsigned.
 */
static LongOutputStream createLengthOutputStream(CompressionKind compression, int bufferSize, OrcEncoding orcEncoding)
{
    return (orcEncoding == DWRF)
            ? new LongOutputStreamV1(compression, bufferSize, false, LENGTH)
            : new LongOutputStreamV2(compression, bufferSize, false, LENGTH);
}
public TimestampColumnWriter(int column, Type type, CompressionKind compression, int bufferSize, OrcEncoding orcEncoding, DateTimeZone hiveStorageTimeZone) { checkArgument(column >= 0, "column is negative"); this.column = column; this.type = requireNonNull(type, "type is null"); this.compressed = requireNonNull(compression, "compression is null") != NONE; if (orcEncoding == DWRF) { this.columnEncoding = new ColumnEncoding(DIRECT, 0); this.secondsStream = new LongOutputStreamV1(compression, bufferSize, true, DATA); this.nanosStream = new LongOutputStreamV1(compression, bufferSize, false, SECONDARY); } else { this.columnEncoding = new ColumnEncoding(DIRECT_V2, 0); this.secondsStream = new LongOutputStreamV2(compression, bufferSize, true, DATA); this.nanosStream = new LongOutputStreamV2(compression, bufferSize, false, SECONDARY); } this.presentStream = new PresentOutputStream(compression, bufferSize); this.baseTimestampInSeconds = new DateTime(2015, 1, 1, 0, 0, requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null")).getMillis() / MILLIS_PER_SECOND; }
/**
 * Writer for string columns encoded with a dictionary. The dictionary bytes go
 * to DICTIONARY_DATA, entry lengths to a LENGTH stream, and row indexes into the
 * dictionary to DATA (unsigned; RLE v1 for DWRF, RLE v2 otherwise).
 */
public SliceDictionaryColumnWriter(int column, Type type, CompressionKind compression, int bufferSize, OrcEncoding orcEncoding, DataSize stringStatisticsLimit)
{
    checkArgument(column >= 0, "column is negative");
    this.column = column;
    this.type = requireNonNull(type, "type is null");
    this.compression = requireNonNull(compression, "compression is null");
    this.bufferSize = bufferSize;
    this.orcEncoding = requireNonNull(orcEncoding, "orcEncoding is null");
    this.stringStatisticsLimitInBytes = toIntExact(requireNonNull(stringStatisticsLimit, "stringStatisticsLimit is null").toBytes());

    // Dictionary index stream: encoding version tracks the ORC flavor.
    this.dataStream = (orcEncoding == DWRF)
            ? new LongOutputStreamV1(compression, bufferSize, false, DATA)
            : new LongOutputStreamV2(compression, bufferSize, false, DATA);
    this.presentStream = new PresentOutputStream(compression, bufferSize);
    this.dictionaryDataStream = new ByteArrayOutputStream(compression, bufferSize, StreamKind.DICTIONARY_DATA);
    this.dictionaryLengthStream = createLengthOutputStream(compression, bufferSize, orcEncoding);
    this.values = new IntBigArray();
    this.statisticsBuilder = newStringStatisticsBuilder();
}
@Override
protected LongOutputStreamV1 createValueOutputStream()
{
    // Builds the stream under test: RLEv1, Snappy compression, signed longs
    // written to the DATA stream.
    return new LongOutputStreamV1(
            SNAPPY,
            COMPRESSION_BLOCK_SIZE,
            true, // signed
            DATA);
}
/**
 * Returns an unsigned long stream for LENGTH data, picking the RLE version
 * by ORC flavor (v1 for DWRF, v2 for standard ORC).
 */
static LongOutputStream createLengthOutputStream(CompressionKind compression, int bufferSize, OrcEncoding orcEncoding)
{
    if (orcEncoding != DWRF) {
        return new LongOutputStreamV2(compression, bufferSize, false, LENGTH);
    }
    return new LongOutputStreamV1(compression, bufferSize, false, LENGTH);
}
public TimestampColumnWriter(int column, Type type, CompressionKind compression, int bufferSize, OrcEncoding orcEncoding, DateTimeZone hiveStorageTimeZone) { checkArgument(column >= 0, "column is negative"); this.column = column; this.type = requireNonNull(type, "type is null"); this.compressed = requireNonNull(compression, "compression is null") != NONE; if (orcEncoding == DWRF) { this.columnEncoding = new ColumnEncoding(DIRECT, 0); this.secondsStream = new LongOutputStreamV1(compression, bufferSize, true, DATA); this.nanosStream = new LongOutputStreamV1(compression, bufferSize, false, SECONDARY); } else { this.columnEncoding = new ColumnEncoding(DIRECT_V2, 0); this.secondsStream = new LongOutputStreamV2(compression, bufferSize, true, DATA); this.nanosStream = new LongOutputStreamV2(compression, bufferSize, false, SECONDARY); } this.presentStream = new PresentOutputStream(compression, bufferSize); this.baseTimestampInSeconds = new DateTime(2015, 1, 1, 0, 0, requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null")).getMillis() / MILLIS_PER_SECOND; }
public SliceDictionaryColumnWriter(int column, Type type, CompressionKind compression, int bufferSize, OrcEncoding orcEncoding, DataSize stringStatisticsLimit) { checkArgument(column >= 0, "column is negative"); this.column = column; this.type = requireNonNull(type, "type is null"); this.compression = requireNonNull(compression, "compression is null"); this.bufferSize = bufferSize; this.orcEncoding = requireNonNull(orcEncoding, "orcEncoding is null"); this.stringStatisticsLimitInBytes = toIntExact(requireNonNull(stringStatisticsLimit, "stringStatisticsLimit is null").toBytes()); LongOutputStream result; if (orcEncoding == DWRF) { result = new LongOutputStreamV1(compression, bufferSize, false, DATA); } else { result = new LongOutputStreamV2(compression, bufferSize, false, DATA); } this.dataStream = result; this.presentStream = new PresentOutputStream(compression, bufferSize); this.dictionaryDataStream = new ByteArrayOutputStream(compression, bufferSize, StreamKind.DICTIONARY_DATA); this.dictionaryLengthStream = createLengthOutputStream(compression, bufferSize, orcEncoding); values = new IntBigArray(); this.statisticsBuilder = newStringStatisticsBuilder(); }