@Override
protected LongOutputStreamV1 createValueOutputStream()
{
    // Test fixture: a Snappy-compressed V1 integer stream carrying signed DATA values.
    return new LongOutputStreamV1(SNAPPY, COMPRESSION_BLOCK_SIZE, true, DATA);
}
flushSequence(); flushLiteralSequence(size - MIN_REPEAT_SIZE); flushRleSequence(runCount); if (size == 1 || !isValidDelta(delta)) {
@Override
protected void writeValue(LongOutputStreamV1 outputStream, Long value)
{
    // Hand the boxed value straight to the stream; encoding is the stream's concern.
    outputStream.writeLong(value);
}
private void flushSequence()
{
    // Only emit when something has been buffered.
    if (size != 0) {
        // Runs shorter than MIN_REPEAT_SIZE are written as literals;
        // longer runs qualify for run-length encoding.
        if (runCount < MIN_REPEAT_SIZE) {
            flushLiteralSequence(size);
        }
        else {
            flushRleSequence(runCount);
        }
        // Reset sequence-tracking state for the next batch of values.
        size = 0;
        runCount = 0;
        lastValue = 0;
        lastDelta = UNMATCHABLE_DELTA_VALUE;
    }
}
@Override
public void close()
{
    // Mark the stream closed first so no further writes are accepted,
    // then drain any pending sequence before releasing the buffer.
    closed = true;
    flushSequence();
    buffer.close();
}
private void flushSequence()
{
    // Only emit when something has been buffered.
    if (size != 0) {
        // Runs shorter than MIN_REPEAT_SIZE are written as literals;
        // longer runs qualify for run-length encoding.
        if (runCount < MIN_REPEAT_SIZE) {
            flushLiteralSequence(size);
        }
        else {
            flushRleSequence(runCount);
        }
        // Reset sequence-tracking state for the next batch of values.
        size = 0;
        runCount = 0;
        lastValue = 0;
        lastDelta = UNMATCHABLE_DELTA_VALUE;
    }
}
@Override
public void close()
{
    // Mark the stream closed first so no further writes are accepted,
    // then drain any pending sequence before releasing the buffer.
    closed = true;
    flushSequence();
    buffer.close();
}
static LongOutputStream createLengthOutputStream(CompressionKind compression, int bufferSize, OrcEncoding orcEncoding)
{
    // DWRF readers only understand the V1 integer encoding; standard ORC uses V2.
    // LENGTH streams hold unsigned values, hence the 'false' signed flag.
    return orcEncoding == DWRF
            ? new LongOutputStreamV1(compression, bufferSize, false, LENGTH)
            : new LongOutputStreamV2(compression, bufferSize, false, LENGTH);
}
flushSequence(); flushLiteralSequence(size - MIN_REPEAT_SIZE); flushRleSequence(runCount); if (size == 1 || !isValidDelta(delta)) {
@Override protected void writeValue(LongOutputStreamV1 outputStream, Long value) { outputStream.writeLong(value); }
public TimestampColumnWriter(int column, Type type, CompressionKind compression, int bufferSize, OrcEncoding orcEncoding, DateTimeZone hiveStorageTimeZone) { checkArgument(column >= 0, "column is negative"); this.column = column; this.type = requireNonNull(type, "type is null"); this.compressed = requireNonNull(compression, "compression is null") != NONE; if (orcEncoding == DWRF) { this.columnEncoding = new ColumnEncoding(DIRECT, 0); this.secondsStream = new LongOutputStreamV1(compression, bufferSize, true, DATA); this.nanosStream = new LongOutputStreamV1(compression, bufferSize, false, SECONDARY); } else { this.columnEncoding = new ColumnEncoding(DIRECT_V2, 0); this.secondsStream = new LongOutputStreamV2(compression, bufferSize, true, DATA); this.nanosStream = new LongOutputStreamV2(compression, bufferSize, false, SECONDARY); } this.presentStream = new PresentOutputStream(compression, bufferSize); this.baseTimestampInSeconds = new DateTime(2015, 1, 1, 0, 0, requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null")).getMillis() / MILLIS_PER_SECOND; }
public SliceDictionaryColumnWriter(int column, Type type, CompressionKind compression, int bufferSize, OrcEncoding orcEncoding, DataSize stringStatisticsLimit)
{
    checkArgument(column >= 0, "column is negative");
    this.column = column;
    this.type = requireNonNull(type, "type is null");
    this.compression = requireNonNull(compression, "compression is null");
    this.bufferSize = bufferSize;
    this.orcEncoding = requireNonNull(orcEncoding, "orcEncoding is null");
    this.stringStatisticsLimitInBytes = toIntExact(requireNonNull(stringStatisticsLimit, "stringStatisticsLimit is null").toBytes());
    // Dictionary indexes are unsigned DATA values: V1 encoding for DWRF, V2 for standard ORC.
    this.dataStream = orcEncoding == DWRF
            ? new LongOutputStreamV1(compression, bufferSize, false, DATA)
            : new LongOutputStreamV2(compression, bufferSize, false, DATA);
    this.presentStream = new PresentOutputStream(compression, bufferSize);
    this.dictionaryDataStream = new ByteArrayOutputStream(compression, bufferSize, StreamKind.DICTIONARY_DATA);
    this.dictionaryLengthStream = createLengthOutputStream(compression, bufferSize, orcEncoding);
    this.values = new IntBigArray();
    this.statisticsBuilder = newStringStatisticsBuilder();
}
static LongOutputStream createLengthOutputStream(CompressionKind compression, int bufferSize, OrcEncoding orcEncoding)
{
    // DWRF readers only understand the V1 integer encoding; standard ORC uses V2.
    // LENGTH streams hold unsigned values, hence the 'false' signed flag.
    return orcEncoding == DWRF
            ? new LongOutputStreamV1(compression, bufferSize, false, LENGTH)
            : new LongOutputStreamV2(compression, bufferSize, false, LENGTH);
}
@Override
protected LongOutputStreamV1 createValueOutputStream()
{
    // Test fixture: a Snappy-compressed V1 integer stream carrying signed DATA values.
    return new LongOutputStreamV1(SNAPPY, COMPRESSION_BLOCK_SIZE, true, DATA);
}
public TimestampColumnWriter(int column, Type type, CompressionKind compression, int bufferSize, OrcEncoding orcEncoding, DateTimeZone hiveStorageTimeZone) { checkArgument(column >= 0, "column is negative"); this.column = column; this.type = requireNonNull(type, "type is null"); this.compressed = requireNonNull(compression, "compression is null") != NONE; if (orcEncoding == DWRF) { this.columnEncoding = new ColumnEncoding(DIRECT, 0); this.secondsStream = new LongOutputStreamV1(compression, bufferSize, true, DATA); this.nanosStream = new LongOutputStreamV1(compression, bufferSize, false, SECONDARY); } else { this.columnEncoding = new ColumnEncoding(DIRECT_V2, 0); this.secondsStream = new LongOutputStreamV2(compression, bufferSize, true, DATA); this.nanosStream = new LongOutputStreamV2(compression, bufferSize, false, SECONDARY); } this.presentStream = new PresentOutputStream(compression, bufferSize); this.baseTimestampInSeconds = new DateTime(2015, 1, 1, 0, 0, requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null")).getMillis() / MILLIS_PER_SECOND; }
public SliceDictionaryColumnWriter(int column, Type type, CompressionKind compression, int bufferSize, OrcEncoding orcEncoding, DataSize stringStatisticsLimit)
{
    checkArgument(column >= 0, "column is negative");
    this.column = column;
    this.type = requireNonNull(type, "type is null");
    this.compression = requireNonNull(compression, "compression is null");
    this.bufferSize = bufferSize;
    this.orcEncoding = requireNonNull(orcEncoding, "orcEncoding is null");
    this.stringStatisticsLimitInBytes = toIntExact(requireNonNull(stringStatisticsLimit, "stringStatisticsLimit is null").toBytes());
    // Dictionary indexes are unsigned DATA values: V1 encoding for DWRF, V2 for standard ORC.
    this.dataStream = orcEncoding == DWRF
            ? new LongOutputStreamV1(compression, bufferSize, false, DATA)
            : new LongOutputStreamV2(compression, bufferSize, false, DATA);
    this.presentStream = new PresentOutputStream(compression, bufferSize);
    this.dictionaryDataStream = new ByteArrayOutputStream(compression, bufferSize, StreamKind.DICTIONARY_DATA);
    this.dictionaryLengthStream = createLengthOutputStream(compression, bufferSize, orcEncoding);
    this.values = new IntBigArray();
    this.statisticsBuilder = newStringStatisticsBuilder();
}