/**
 * Returns the length of this file in bytes, loading it lazily when needed.
 * <p>
 * If the length was not set and this metadata was created from an input file, the length is
 * read from that file and cached. If there is no backing file (the length column was not
 * projected when this row was read), the length is unknown and null is returned.
 * NOTE(review): the original inline comment said "throw an exception", but the code returns
 * null — the comments below are updated to match the actual behavior.
 *
 * @return the file length in bytes, or null if it cannot be determined
 */
public Long lazyLength() {
  if (length == null) {
    if (file != null) {
      // this was created from an input file and length is lazily loaded
      this.length = file.getLength();
    } else {
      // this was loaded from a file without projecting length; there is no backing
      // file to consult, so report "unknown" as null
      return null;
    }
  }
  return length;
}
/**
 * Returns the length of this file in bytes.
 *
 * @return the size of the underlying file, in bytes
 * @throws IOException if the underlying file cannot report its length
 */
@Override
public long getLength() throws IOException {
  // Pure delegation: the wrapped file is the source of truth for size.
  return file.getLength();
}
/**
 * Builds an immutable {@link ManifestFile} describing the manifest that was written.
 * <p>
 * May only be called after the writer has been closed, since the final file length and the
 * accumulated file counts are not stable until then.
 *
 * @return a new ManifestFile for the written manifest
 * @throws IllegalStateException if the writer has not been closed yet
 */
public ManifestFile toManifestFile() {
  Preconditions.checkState(closed, "Cannot build ManifestFile, writer is not closed");
  // Re-open the written file as an input file to get its final on-disk length.
  long manifestLength = file.toInputFile().getLength();
  return new GenericManifestFile(
      location, manifestLength, specId, snapshotId,
      addedFiles, existingFiles, deletedFiles, stats.summaries());
}
/**
 * Sets this builder's path and size from an input file.
 * <p>
 * When the input is a {@link HadoopInputFile}, its cached FileStatus is reused so no extra
 * file-system call is needed to look up the size.
 *
 * @param file the input file to copy location and length from
 * @return this builder, for chaining
 */
public Builder withInputFile(InputFile file) {
  // A HadoopInputFile already carries a FileStatus; delegate to the stat-based path.
  if (file instanceof HadoopInputFile) {
    HadoopInputFile hadoopFile = (HadoopInputFile) file;
    return withStatus(hadoopFile.getStat());
  }
  this.filePath = file.location();
  this.fileSizeInBytes = file.getLength();
  return this;
}
/**
 * Opens a new Avro {@link DataFileReader} over this file.
 * <p>
 * The file's stream is wrapped as a seekable Avro input using its length, then handed to the
 * configured datum reader.
 *
 * @return a reader positioned at the start of the file
 * @throws RuntimeIOException if the file cannot be opened
 */
private DataFileReader<D> newFileReader() {
  try {
    // openReader returns the FileReader interface; the concrete type is DataFileReader.
    return (DataFileReader<D>) DataFileReader.openReader(
        AvroIO.stream(file.newStream(), file.getLength()), reader);
  } catch (IOException ioe) {
    // Surface I/O failures as unchecked, tagged with the file that failed to open.
    throw new RuntimeIOException(ioe, "Failed to open file: %s", file);
  }
}
/**
 * Creates a {@link DataFile} for a Parquet file, with the given partition and metrics.
 * <p>
 * When the input is a {@link HadoopInputFile}, its cached FileStatus is reused to avoid an
 * extra file-system call for the length.
 *
 * @param file the Parquet input file
 * @param partition partition data for the file
 * @param metrics column-level metrics for the file
 * @return a DataFile describing the input
 */
public static DataFile fromParquetInputFile(InputFile file, PartitionData partition, Metrics metrics) {
  if (file instanceof HadoopInputFile) {
    return fromParquetStat(((HadoopInputFile) file).getStat(), partition, metrics);
  }
  // Format is fixed to Parquet here; only the location and length come from the file.
  return new GenericDataFile(
      file.location(), FileFormat.PARQUET, partition,
      file.getLength(), DEFAULT_BLOCK_SIZE, metrics);
}
/**
 * Creates a {@link DataFile} from an input file, with the given partition and metrics.
 * <p>
 * The file format is inferred from the file name. When the input is a
 * {@link HadoopInputFile}, its cached FileStatus is reused to avoid an extra length lookup.
 *
 * @param file the input file
 * @param partition partition data for the file
 * @param metrics column-level metrics for the file
 * @return a DataFile describing the input
 */
public static DataFile fromInputFile(InputFile file, PartitionData partition, Metrics metrics) {
  if (file instanceof HadoopInputFile) {
    return fromStat(((HadoopInputFile) file).getStat(), partition, metrics);
  }
  String path = file.location();
  // Infer the format (Avro/Parquet/ORC/...) from the file extension.
  return new GenericDataFile(
      path, FileFormat.fromFileName(path), partition,
      file.getLength(), DEFAULT_BLOCK_SIZE, metrics);
}
/**
 * Creates an unpartitioned {@link DataFile} from an input file and a row count.
 * <p>
 * The file format is inferred from the file name. When the input is a
 * {@link HadoopInputFile}, its cached FileStatus is reused to avoid an extra length lookup.
 *
 * @param file the input file
 * @param rowCount the number of rows in the file
 * @return a DataFile describing the input
 */
public static DataFile fromInputFile(InputFile file, long rowCount) {
  if (file instanceof HadoopInputFile) {
    return fromStat(((HadoopInputFile) file).getStat(), rowCount);
  }
  String path = file.location();
  // Infer the format (Avro/Parquet/ORC/...) from the file extension.
  return new GenericDataFile(
      path, FileFormat.fromFileName(path), rowCount,
      file.getLength(), DEFAULT_BLOCK_SIZE);
}
/**
 * Creates a partitioned {@link DataFile} from an input file and a row count.
 * <p>
 * The file format is inferred from the file name. When the input is a
 * {@link HadoopInputFile}, its cached FileStatus is reused to avoid an extra length lookup.
 *
 * @param file the input file
 * @param partition partition data for the file
 * @param rowCount the number of rows in the file
 * @return a DataFile describing the input
 */
public static DataFile fromInputFile(InputFile file, PartitionData partition, long rowCount) {
  if (file instanceof HadoopInputFile) {
    return fromStat(((HadoopInputFile) file).getStat(), partition, rowCount);
  }
  String path = file.location();
  // Infer the format (Avro/Parquet/ORC/...) from the file extension.
  return new GenericDataFile(
      path, FileFormat.fromFileName(path), partition, rowCount,
      file.getLength(), DEFAULT_BLOCK_SIZE);
}