@Override
public long getSize()
{
    return dataSource.getSize();
}
@Override
public long getSize()
{
    return delegate.getSize();
}
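Both overrides are thin delegations from a wrapper to the underlying data source: the standard decorator move that lets CachingOrcDataSource and similar wrappers report the wrapped file's size unchanged. A minimal self-contained sketch of the pattern, with hypothetical Sized/SizedWrapper names standing in for OrcDataSource and its wrappers:

// Hypothetical decorator sketch; Sized and SizedWrapper are illustrative
// stand-ins for OrcDataSource and its caching/delegating wrappers.
interface Sized
{
    long getSize();
}

class SizedWrapper
        implements Sized
{
    private final Sized delegate;

    SizedWrapper(Sized delegate)
    {
        this.delegate = delegate;
    }

    @Override
    public long getSize()
    {
        // Size is a pure pass-through: wrapping must not change the reported size.
        return delegate.getSize();
    }
}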
private static OrcDataSource wrapWithCacheIfTiny(OrcDataSource dataSource, DataSize maxCacheSize)
{
    if (dataSource instanceof CachingOrcDataSource) {
        return dataSource;
    }
    if (dataSource.getSize() > maxCacheSize.toBytes()) {
        return dataSource;
    }
    DiskRange diskRange = new DiskRange(0, toIntExact(dataSource.getSize()));
    return new CachingOrcDataSource(dataSource, desiredOffset -> diskRange);
}
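Because the region lambda always returns the same full-file DiskRange, the first read through the cache loads the entire file into memory, which is safe only because the size was checked against maxCacheSize first. A usage sketch, where rawDataSource and the 8 MB limit are illustrative assumptions rather than values from the original (MEGABYTE is io.airlift.units.DataSize.Unit.MEGABYTE):

// Hypothetical usage: rawDataSource and the 8 MB threshold are illustrative.
// A file of 8 MB or less is wrapped and cached whole on first access;
// anything larger is returned unchanged.
OrcDataSource dataSource = wrapWithCacheIfTiny(rawDataSource, new DataSize(8, MEGABYTE));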
public OrcRecordReader createRecordReader(
        Map<Integer, Type> includedColumns,
        OrcPredicate predicate,
        long offset,
        long length,
        DateTimeZone hiveStorageTimeZone,
        AggregatedMemoryContext systemMemoryUsage,
        int initialBatchSize)
{
    return new OrcRecordReader(
            requireNonNull(includedColumns, "includedColumns is null"),
            requireNonNull(predicate, "predicate is null"),
            footer.getNumberOfRows(),
            footer.getStripes(),
            footer.getFileStats(),
            metadata.getStripeStatsList(),
            orcDataSource,
            offset,
            length,
            footer.getTypes(),
            decompressor,
            footer.getRowsInRowGroup(),
            requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null"),
            hiveWriterVersion,
            metadataReader,
            maxMergeDistance,
            tinyStripeThreshold,
            maxBlockSize,
            footer.getUserMetadata(),
            systemMemoryUsage,
            initialBatchSize);
}
long size = orcDataSource.getSize();
if (size <= MAGIC.length()) {
    throw new OrcCorruptionException(orcDataSource.getId(), "Invalid file size %s", size);
}
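MAGIC here is the three-byte ORC header ("ORC"), so a file that is not strictly longer than the magic cannot be a valid ORC file. A self-contained sketch of the same guard, assuming the standard magic and an illustrative exception type in place of OrcCorruptionException:

import java.io.IOException;

public class OrcSizeCheck
{
    // "ORC" is the standard ORC file magic; a readable file must be longer than it.
    private static final String MAGIC = "ORC";

    static void checkSize(long size)
            throws IOException
    {
        if (size <= MAGIC.length()) {
            throw new IOException(String.format("Invalid file size %s", size));
        }
    }
}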
// Earlier variant of the same helper, using Guava's Ints.checkedCast in place of Math.toIntExact.
private static OrcDataSource wrapWithCacheIfTiny(OrcDataSource dataSource, DataSize maxCacheSize)
{
    if (dataSource instanceof CachingOrcDataSource) {
        return dataSource;
    }
    if (dataSource.getSize() > maxCacheSize.toBytes()) {
        return dataSource;
    }
    DiskRange diskRange = new DiskRange(0, Ints.checkedCast(dataSource.getSize()));
    return new CachingOrcDataSource(dataSource, desiredOffset -> diskRange);
}
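The only behavioral difference from the later version is the checked narrowing cast: Guava's Ints.checkedCast and JDK 8's Math.toIntExact both reject longs outside the int range, differing only in the exception type thrown. A standalone sketch:

import com.google.common.primitives.Ints;

import static java.lang.Math.toIntExact;

public class CheckedCastDemo
{
    public static void main(String[] args)
    {
        System.out.println(toIntExact(42L));        // prints 42
        System.out.println(Ints.checkedCast(42L));  // prints 42
        // toIntExact(Long.MAX_VALUE) throws ArithmeticException;
        // Ints.checkedCast(Long.MAX_VALUE) throws IllegalArgumentException.
    }
}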
// Earlier variant of createRecordReader (compressionKind/bufferSize instead of a decompressor; no initialBatchSize).
public OrcRecordReader createRecordReader(
        Map<Integer, Type> includedColumns,
        OrcPredicate predicate,
        long offset,
        long length,
        DateTimeZone hiveStorageTimeZone,
        AbstractAggregatedMemoryContext systemMemoryUsage)
        throws IOException
{
    return new OrcRecordReader(
            requireNonNull(includedColumns, "includedColumns is null"),
            requireNonNull(predicate, "predicate is null"),
            footer.getNumberOfRows(),
            footer.getStripes(),
            footer.getFileStats(),
            metadata.getStripeStatsList(),
            orcDataSource,
            offset,
            length,
            footer.getTypes(),
            compressionKind,
            bufferSize,
            footer.getRowsInRowGroup(),
            requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null"),
            metadataReader,
            maxMergeDistance,
            maxReadSize,
            systemMemoryUsage);
}
// Earlier variant of the size check, with a plain zero bound and the data source embedded in the message.
long size = orcDataSource.getSize();
if (size <= 0) {
    throw new OrcCorruptionException("Malformed ORC file %s. Invalid file size %s", orcDataSource, size);
}