/**
 * Returns the value of the field selected by {@code field}, dispatching to the
 * corresponding getter. Mirrors the standard Thrift-generated accessor contract.
 *
 * @param field the field selector to read
 * @return the current value of the requested field (may be null if unset)
 * @throws IllegalStateException if {@code field} matches no known case
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case VERSION:
      return getVersion();
    case SCHEMA:
      return getSchema();
    case NUM_ROWS:
      return getNum_rows();
    case ROW_GROUPS:
      return getRow_groups();
    case KEY_VALUE_METADATA:
      return getKey_value_metadata();
    case CREATED_BY:
      return getCreated_by();
    case COLUMN_ORDERS:
      return getColumn_orders();
  }
  // Unreachable for the enum constants handled above; kept to satisfy the
  // compiler and to fail loudly if the enum ever grows without this switch.
  throw new IllegalStateException();
}
/**
 * Returns the value of the field selected by {@code field} by delegating to the
 * matching getter (Thrift-generated accessor pattern, fully-qualified variant).
 *
 * @param field the field selector to read
 * @return the current value of the requested field (may be null if unset)
 * @throws java.lang.IllegalStateException if {@code field} matches no known case
 */
public java.lang.Object getFieldValue(_Fields field) {
  switch (field) {
    case VERSION:
      return getVersion();
    case SCHEMA:
      return getSchema();
    case NUM_ROWS:
      return getNum_rows();
    case ROW_GROUPS:
      return getRow_groups();
    case KEY_VALUE_METADATA:
      return getKey_value_metadata();
    case CREATED_BY:
      return getCreated_by();
    case COLUMN_ORDERS:
      return getColumn_orders();
  }
  // All declared enum constants are handled above; reaching here indicates an
  // enum constant added without a corresponding case.
  throw new java.lang.IllegalStateException();
}
// Wraps the already-converted schema and key/value metadata into the Hadoop-side
// FileMetaData model, preserving the creator string from the Thrift-format
// fileMetaData, and pairs it with the converted row-group blocks.
// NOTE(review): fragment of a larger converter method whose signature is not
// visible here — messageType, keyValueMetaData, fileMetaData, and blocks are
// presumably built earlier in that method; confirm against the full source.
return new ParquetMetadata(new org.apache.parquet.hadoop.metadata.FileMetaData(messageType, keyValueMetaData, fileMetaData.getCreated_by()), blocks);
// NOTE(review): the line below is a byte-identical duplicate of the line above;
// it likely belongs to a second, overloaded conversion method — verify in context.
return new ParquetMetadata(new org.apache.parquet.hadoop.metadata.FileMetaData(messageType, keyValueMetaData, fileMetaData.getCreated_by()), blocks);
// NOTE(review): mid-expression fragment — this appears to splice together
// arguments from two different calls: (a) a column-chunk conversion taking the
// decoded encodings and statistics (statistics conversion is parameterized by
// the file's created_by string, presumably for writer-version-specific
// corruption checks — confirm), and (b) a FileMetaData/ParquetMetadata
// construction over the converted schema, key/value metadata, and blocks.
// The enclosing method signature is outside this view; verify against the
// complete source before editing.
fromFormatEncodings(metaData.encodings), fromParquetStatistics( parquetMetadata.getCreated_by(), metaData.statistics, messageType.getType(path.toArray()).asPrimitiveType()), new org.apache.parquet.hadoop.metadata.FileMetaData(messageType, keyValueMetaData, parquetMetadata.getCreated_by()), blocks);
// NOTE(review): byte-identical duplicate of the fragment above; likely from a
// parallel overload of the same converter — confirm in the full file.
fromFormatEncodings(metaData.encodings), fromParquetStatistics( parquetMetadata.getCreated_by(), metaData.statistics, messageType.getType(path.toArray()).asPrimitiveType()), new org.apache.parquet.hadoop.metadata.FileMetaData(messageType, keyValueMetaData, parquetMetadata.getCreated_by()), blocks);