/** Verifies StructType construction from both a mutable and a fixed-size Java List of fields. */
@Test
public void testCreateStructTypeFromList() {
  // Case 1: mutable ArrayList as the field source.
  List<StructField> mutableFields = new ArrayList<>();
  mutableFields.add(new StructField("id", DataTypes.StringType, true, Metadata.empty()));
  StructType fromMutable = StructType$.MODULE$.apply(mutableFields);
  Assert.assertEquals(0, fromMutable.fieldIndex("id"));

  // Case 2: fixed-size Arrays.asList view as the field source.
  List<StructField> fixedFields =
      Arrays.asList(new StructField("id", DataTypes.StringType, true, Metadata.empty()));
  StructType fromFixed = StructType$.MODULE$.apply(fixedFields);
  Assert.assertEquals(0, fromFixed.fieldIndex("id"));
}
// Recover the Spark-requested read schema that ParquetReadSupport serialized into the Hadoop
// configuration, then open a ParquetFileReader restricted to the requested Parquet columns.
// NOTE(review): assumes SPARK_ROW_REQUESTED_SCHEMA was set by the write-side of this job —
// fromString would fail on a null/absent value; confirm against the caller.
String sparkRequestedSchemaString = configuration.get(ParquetReadSupport$.MODULE$.SPARK_ROW_REQUESTED_SCHEMA()); this.sparkSchema = StructType$.MODULE$.fromString(sparkRequestedSchemaString); this.reader = new ParquetFileReader( configuration, footer.getFileMetaData(), file, blocks, requestedSchema.getColumns());
// Recover the Spark-requested read schema that ParquetReadSupport serialized into the Hadoop
// configuration, then open a ParquetFileReader restricted to the requested Parquet columns.
// NOTE(review): assumes SPARK_ROW_REQUESTED_SCHEMA was set by the write-side of this job —
// fromString would fail on a null/absent value; confirm against the caller.
String sparkRequestedSchemaString = configuration.get(ParquetReadSupport$.MODULE$.SPARK_ROW_REQUESTED_SCHEMA()); this.sparkSchema = StructType$.MODULE$.fromString(sparkRequestedSchemaString); this.reader = new ParquetFileReader( configuration, footer.getFileMetaData(), file, blocks, requestedSchema.getColumns());
/**
 * Creates a StructType with the given StructField array ({@code fields}).
 *
 * @param fields the fields of the struct; must be non-null, contain no null
 *     elements, and have pairwise-distinct names
 * @return the resulting StructType
 * @throws IllegalArgumentException if any of the above preconditions is violated
 */
public static StructType createStructType(StructField[] fields) {
  if (fields == null) {
    throw new IllegalArgumentException("fields should not be null.");
  }
  // Collect names while scanning for nulls; a duplicate name shrinks the set
  // below the array length, which the final check detects.
  Set<String> seenNames = new HashSet<>();
  for (int i = 0; i < fields.length; i++) {
    StructField current = fields[i];
    if (current == null) {
      throw new IllegalArgumentException("fields should not contain any null.");
    }
    seenNames.add(current.name());
  }
  if (seenNames.size() != fields.length) {
    throw new IllegalArgumentException("fields should have distinct names.");
  }
  return StructType$.MODULE$.apply(fields);
}
}
/**
 * Creates a StructType with the given StructField array ({@code fields}).
 *
 * @param fields the fields of the struct; must be non-null, contain no null
 *     elements, and have pairwise-distinct names
 * @return the resulting StructType
 * @throws IllegalArgumentException if any of the above preconditions is violated
 */
public static StructType createStructType(StructField[] fields) {
  if (fields == null) {
    throw new IllegalArgumentException("fields should not be null.");
  }
  // Collect names while scanning for nulls; a duplicate name shrinks the set
  // below the array length, which the final check detects.
  Set<String> seenNames = new HashSet<>();
  for (int i = 0; i < fields.length; i++) {
    StructField current = fields[i];
    if (current == null) {
      throw new IllegalArgumentException("fields should not contain any null.");
    }
    seenNames.add(current.name());
  }
  if (seenNames.size() != fields.length) {
    throw new IllegalArgumentException("fields should have distinct names.");
  }
  return StructType$.MODULE$.apply(fields);
}
}
/**
 * Creates a StructType with the given StructField array ({@code fields}).
 *
 * @param fields the fields of the struct; must be non-null, contain no null
 *     elements, and have pairwise-distinct names
 * @return the resulting StructType
 * @throws IllegalArgumentException if any of the above preconditions is violated
 */
public static StructType createStructType(StructField[] fields) {
  if (fields == null) {
    throw new IllegalArgumentException("fields should not be null.");
  }
  // Collect names while scanning for nulls; a duplicate name shrinks the set
  // below the array length, which the final check detects.
  Set<String> seenNames = new HashSet<>();
  for (int i = 0; i < fields.length; i++) {
    StructField current = fields[i];
    if (current == null) {
      throw new IllegalArgumentException("fields should not contain any null.");
    }
    seenNames.add(current.name());
  }
  if (seenNames.size() != fields.length) {
    throw new IllegalArgumentException("fields should have distinct names.");
  }
  return StructType$.MODULE$.apply(fields);
}
}
/**
 * Converts an Iceberg struct to the corresponding Spark StructType, pairing each
 * Iceberg field (by position) with its already-converted Spark type in
 * {@code fieldResults}. Field name and optionality carry over; metadata is empty.
 */
@Override
public DataType struct(Types.StructType struct, List<DataType> fieldResults) {
  List<Types.NestedField> icebergFields = struct.fields();
  List<StructField> converted = Lists.newArrayListWithExpectedSize(fieldResults.size());
  int index = 0;
  for (Types.NestedField icebergField : icebergFields) {
    // fieldResults is positionally aligned with struct.fields().
    DataType sparkType = fieldResults.get(index);
    converted.add(
        StructField.apply(icebergField.name(), sparkType, icebergField.isOptional(), Metadata.empty()));
    index += 1;
  }
  return StructType$.MODULE$.apply(converted);
}
// Recover the Spark-requested read schema that ParquetReadSupport serialized into the Hadoop
// configuration, then open a ParquetFileReader restricted to the requested Parquet columns.
// NOTE(review): assumes SPARK_ROW_REQUESTED_SCHEMA was set by the write-side of this job —
// fromString would fail on a null/absent value; confirm against the caller.
String sparkRequestedSchemaString = configuration.get(ParquetReadSupport$.MODULE$.SPARK_ROW_REQUESTED_SCHEMA()); this.sparkSchema = StructType$.MODULE$.fromString(sparkRequestedSchemaString); this.reader = new ParquetFileReader( configuration, footer.getFileMetaData(), file, blocks, requestedSchema.getColumns());
// Recover the Spark-requested read schema that ParquetReadSupport serialized into the Hadoop
// configuration, then open a ParquetFileReader restricted to the requested Parquet columns.
// NOTE(review): assumes SPARK_ROW_REQUESTED_SCHEMA was set by the write-side of this job —
// fromString would fail on a null/absent value; confirm against the caller.
String sparkRequestedSchemaString = configuration.get(ParquetReadSupport$.MODULE$.SPARK_ROW_REQUESTED_SCHEMA()); this.sparkSchema = StructType$.MODULE$.fromString(sparkRequestedSchemaString); this.reader = new ParquetFileReader( configuration, footer.getFileMetaData(), file, blocks, requestedSchema.getColumns());
/** Verifies StructType construction from both a mutable and a fixed-size Java List of fields. */
@Test
public void testCreateStructTypeFromList() {
  // Case 1: mutable ArrayList as the field source.
  List<StructField> mutableFields = new ArrayList<>();
  mutableFields.add(new StructField("id", DataTypes.StringType, true, Metadata.empty()));
  StructType fromMutable = StructType$.MODULE$.apply(mutableFields);
  Assert.assertEquals(0, fromMutable.fieldIndex("id"));

  // Case 2: fixed-size Arrays.asList view as the field source.
  List<StructField> fixedFields =
      Arrays.asList(new StructField("id", DataTypes.StringType, true, Metadata.empty()));
  StructType fromFixed = StructType$.MODULE$.apply(fixedFields);
  Assert.assertEquals(0, fromFixed.fieldIndex("id"));
}
/** Verifies StructType construction from both a mutable and a fixed-size Java List of fields. */
@Test
public void testCreateStructTypeFromList() {
  // Case 1: mutable ArrayList as the field source.
  List<StructField> mutableFields = new ArrayList<>();
  mutableFields.add(new StructField("id", DataTypes.StringType, true, Metadata.empty()));
  StructType fromMutable = StructType$.MODULE$.apply(mutableFields);
  Assert.assertEquals(0, fromMutable.fieldIndex("id"));

  // Case 2: fixed-size Arrays.asList view as the field source.
  List<StructField> fixedFields =
      Arrays.asList(new StructField("id", DataTypes.StringType, true, Metadata.empty()));
  StructType fromFixed = StructType$.MODULE$.apply(fixedFields);
  Assert.assertEquals(0, fromFixed.fieldIndex("id"));
}