return new ParquetMetadata(new parquet.hadoop.metadata.FileMetaData(messageType, keyValueMetaData, fileMetaData.getCreated_by()), blocks);
/**
 * Returns the value of the requested field as a boxed {@code Object}.
 *
 * @param field the Thrift field identifier to read
 * @return the boxed field value (may be null for unset object fields)
 * @throws IllegalStateException if the field is not a known member of {@code _Fields}
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case VERSION:
      // Integer.valueOf reuses cached instances; the Integer(int) constructor is deprecated.
      return Integer.valueOf(getVersion());
    case SCHEMA:
      return getSchema();
    case NUM_ROWS:
      // Long.valueOf instead of the deprecated Long(long) constructor.
      return Long.valueOf(getNum_rows());
    case ROW_GROUPS:
      return getRow_groups();
    case KEY_VALUE_METADATA:
      return getKey_value_metadata();
    case CREATED_BY:
      return getCreated_by();
  }
  // Unreachable for a well-formed _Fields enum, but the compiler requires a terminal path.
  throw new IllegalStateException("Unknown field: " + field);
}
/**
 * Returns the value of the requested field as a boxed {@code Object}.
 *
 * @param field the Thrift field identifier to read
 * @return the boxed field value (may be null for unset object fields)
 * @throws IllegalStateException if the field is not a known member of {@code _Fields}
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
    case VERSION:
      // Integer.valueOf reuses cached instances; the Integer(int) constructor is deprecated.
      return Integer.valueOf(getVersion());
    case SCHEMA:
      return getSchema();
    case NUM_ROWS:
      // Long.valueOf instead of the deprecated Long(long) constructor.
      return Long.valueOf(getNum_rows());
    case ROW_GROUPS:
      return getRow_groups();
    case KEY_VALUE_METADATA:
      return getKey_value_metadata();
    case CREATED_BY:
      return getCreated_by();
  }
  // Unreachable for a well-formed _Fields enum, but the compiler requires a terminal path.
  throw new IllegalStateException("Unknown field: " + field);
}
return new ParquetMetadata(new parquet.hadoop.metadata.FileMetaData(messageType, keyValueMetaData, fileMetaData.getCreated_by()), blocks);
new parquet.hadoop.metadata.FileMetaData(messageType, keyValueMetaData, parquetMetadata.getCreated_by()), blocks);