/**
 * Initializes this write support by resolving the Parquet schema from the job
 * configuration and caching it for use during record writing.
 *
 * @param configuration the job configuration carrying the serialized schema
 * @return a {@link WriteContext} over the resolved schema with no extra file metadata
 */
@Override
public WriteContext init(final Configuration configuration) {
  schema = getSchema(configuration);
  // No application-level key/value metadata is attached to the file footer.
  final HashMap<String, String> extraMetaData = new HashMap<String, String>();
  return new WriteContext(schema, extraMetaData);
}
/**
 * Initializes this write support by recovering the Parquet schema that the
 * Cascading scheme serialized into the job configuration.
 *
 * @param configuration the job configuration; must contain {@code PARQUET_CASCADING_SCHEMA}
 * @return a {@link WriteContext} over the parsed schema with no extra file metadata
 * @throws BadConfigurationException if the schema key is missing from the configuration
 */
@Override
public WriteContext init(Configuration configuration) {
  String schema = configuration.get(PARQUET_CASCADING_SCHEMA);
  // Fail fast with a clear message instead of letting parseMessageType(null)
  // surface an opaque NullPointerException.
  if (schema == null) {
    throw new BadConfigurationException(
        "No Parquet schema found in the job configuration under key " + PARQUET_CASCADING_SCHEMA);
  }
  rootSchema = MessageTypeParser.parseMessageType(schema);
  return new WriteContext(rootSchema, new HashMap<String, String>());
}
/**
 * Initializes this write support from the schema string the Cascading scheme
 * placed in the job configuration.
 *
 * @param configuration the job configuration carrying the schema under
 *     {@code PARQUET_CASCADING_SCHEMA}
 * @return a {@link WriteContext} over the parsed schema with no extra file metadata
 */
@Override
public WriteContext init(Configuration configuration) {
  // Recover the serialized schema text and parse it back into a MessageType.
  final String schemaText = configuration.get(PARQUET_CASCADING_SCHEMA);
  this.rootSchema = MessageTypeParser.parseMessageType(schemaText);
  return new WriteContext(this.rootSchema, new HashMap<String, String>());
}
/**
 * Initializes this write support by resolving the protobuf message class,
 * converting it to a Parquet schema, and recording the class name and
 * serialized descriptor in the file's extra metadata so readers can
 * reconstruct the message type.
 *
 * @param configuration the job configuration; consulted for {@code PB_CLASS_WRITE}
 *     only when no protobuf class was supplied at construction time
 * @return a {@link WriteContext} over the converted schema plus protobuf metadata
 * @throws BadConfigurationException if no protobuf class was set either in the
 *     constructor or in the configuration
 */
@Override
public WriteContext init(Configuration configuration) {
  // if no protobuf descriptor was given in constructor, load descriptor from configuration (set with setProtobufClass)
  if (protoMessage == null) {
    Class<? extends Message> pbClass = configuration.getClass(PB_CLASS_WRITE, null, Message.class);
    if (pbClass != null) {
      protoMessage = pbClass;
    } else {
      String msg = "Protocol buffer class not specified.";
      String hint = " Please use method ProtoParquetOutputFormat.setProtobufClass(...) or other similar method.";
      throw new BadConfigurationException(msg + hint);
    }
  }
  // Derive the Parquet schema from the protobuf descriptor and verify the
  // field mapping before building the writer.
  MessageType rootSchema = new ProtoSchemaConverter().convert(protoMessage);
  Descriptors.Descriptor messageDescriptor = Protobufs.getMessageDescriptor(protoMessage);
  validatedMapping(messageDescriptor, rootSchema);
  this.messageWriter = new MessageWriter(messageDescriptor, rootSchema);
  // Embed the protobuf class name and serialized descriptor in the footer so
  // ProtoReadSupport can rebuild the message type on read.
  Map<String, String> extraMetaData = new HashMap<String, String>();
  extraMetaData.put(ProtoReadSupport.PB_CLASS, protoMessage.getName());
  extraMetaData.put(ProtoReadSupport.PB_DESCRIPTOR, serializeDescriptor(protoMessage));
  return new WriteContext(rootSchema, extraMetaData);
}
/**
 * Initializes this write support by lazily resolving the Avro schema, the data
 * model, and the list-encoding strategy from the job configuration, and
 * embedding the Avro schema JSON in the file's extra metadata for readers.
 *
 * @param configuration the job configuration; consulted for {@code AVRO_SCHEMA},
 *     the data model, and {@code WRITE_OLD_LIST_STRUCTURE}
 * @return a {@link WriteContext} over the converted Parquet schema plus the
 *     Avro schema metadata entry
 */
@Override
public WriteContext init(Configuration configuration) {
  // Only parse/convert the schema when it was not already supplied at construction.
  if (rootAvroSchema == null) {
    this.rootAvroSchema = new Schema.Parser().parse(configuration.get(AVRO_SCHEMA));
    this.rootSchema = new AvroSchemaConverter().convert(rootAvroSchema);
  }
  // Likewise resolve the data model lazily from the configuration.
  if (model == null) {
    this.model = getDataModel(configuration);
  }
  // Choose between the legacy two-level and the standard three-level Parquet
  // list encoding based on configuration.
  boolean writeOldListStructure = configuration.getBoolean(
      WRITE_OLD_LIST_STRUCTURE, WRITE_OLD_LIST_STRUCTURE_DEFAULT);
  if (writeOldListStructure) {
    this.listWriter = new TwoLevelListWriter();
  } else {
    this.listWriter = new ThreeLevelListWriter();
  }
  // Store the Avro schema JSON in the footer so AvroReadSupport can recover it.
  Map<String, String> extraMetaData = new HashMap<String, String>();
  extraMetaData.put(AvroReadSupport.AVRO_SCHEMA_METADATA_KEY, rootAvroSchema.toString());
  return new WriteContext(rootSchema, extraMetaData);
}
/**
 * Builds the write context from the statically known schema.
 *
 * @param config the job configuration (unused; the schema is fixed)
 * @return a {@link WriteContext} with no extra file metadata
 */
@Override
public WriteContext init(Configuration config) {
  // The schema is determined entirely by getSchema(); nothing is read from config.
  return new WriteContext(getSchema(), Collections.<String, String>emptyMap());
}
/**
 * Initializes this write support by building the Parquet schema.
 *
 * @param configuration the job configuration (unused; the schema is derived
 *     from {@code createParquetSchema()})
 * @return a {@link WriteContext} over the created schema with no extra metadata
 * @throws RuntimeException wrapping any failure raised while creating the schema
 */
@Override
public WriteContext init(Configuration configuration) {
  try {
    return new WriteContext(createParquetSchema(), new TreeMap<>());
  } catch (Exception ex) {
    // Preserve the cause and add context so schema-creation failures during
    // job setup are diagnosable from the wrapping exception alone.
    throw new RuntimeException("Failed to create Parquet schema in WriteSupport.init()", ex);
  }
}
/**
 * Builds the write context over the schema captured at construction time.
 *
 * @param configuration the job configuration (unused; the schema is already known)
 * @return a {@link WriteContext} with no extra file metadata
 */
@Override
public WriteContext init(Configuration configuration) {
  final HashMap<String, String> extraMetaData = new HashMap<String, String>();
  return new WriteContext(schema, extraMetaData);
}
/**
 * Initializes this write support by recovering the Parquet schema that the
 * Cascading scheme serialized into the job configuration.
 *
 * @param configuration the job configuration carrying the schema under
 *     {@code PARQUET_CASCADING_SCHEMA}
 * @return a {@link WriteContext} over the parsed schema with no extra file metadata
 */
@Override
public WriteContext init(Configuration configuration) {
  // NOTE(review): no null check — if PARQUET_CASCADING_SCHEMA is absent this
  // fails inside parseMessageType; presumably the scheme always sets it — confirm.
  String schema = configuration.get(PARQUET_CASCADING_SCHEMA);
  rootSchema = MessageTypeParser.parseMessageType(schema);
  return new WriteContext(rootSchema, new HashMap<String, String>());
}
/**
 * Initializes the WriteSupport.
 *
 * @param configuration The job's configuration.
 * @return A WriteContext that describes how to write the file.
 */
@Override
public WriteContext init(Configuration configuration) {
  // The root schema is fixed ahead of time; no extra footer metadata is attached.
  return new WriteContext(rootSchema, new HashMap<String, String>());
}
@Override public org.apache.parquet.hadoop.api.WriteSupport.WriteContext init(Configuration configuration) { // if present, prefer the schema passed to the constructor if (schema == null) { schema = getSchema(configuration); } return new WriteContext(schema, this.extraMetaData); }
/**
 * Initializes this write support by resolving the table schema that
 * {@code DataWritableWriteSupport} serialized into the job configuration.
 *
 * @param configuration the job configuration carrying the schema
 * @return a {@link WriteContext} over the resolved schema with no extra metadata
 */
@Override
public WriteContext init(final Configuration configuration) {
  // Cache the schema for use by the record writer.
  this.schema = DataWritableWriteSupport.getSchema(configuration);
  return new WriteContext(this.schema, new HashMap<>());
}
/**
 * Initializes the WriteSupport.
 *
 * @param configuration The job's configuration.
 * @return A WriteContext that describes how to write the file.
 */
@Override
public WriteContext init(Configuration configuration) {
  // The root schema is predetermined; attach an empty metadata map.
  return new WriteContext(rootSchema, new HashMap<>());
}
/**
 * Initializes this write support, preferring a schema supplied at construction
 * and falling back to one resolved from the job configuration.
 *
 * @param configuration the job configuration used only when no schema was preset
 * @return a {@link WriteContext} over the schema plus this instance's extra metadata
 */
@Override
public org.apache.parquet.hadoop.api.WriteSupport.WriteContext init(Configuration configuration) {
  // if present, prefer the schema passed to the constructor
  if (schema == null) {
    schema = getSchema(configuration);
  }
  return new WriteContext(schema, this.extraMetaData);
}
/**
 * Initializes this write support, embedding the Spark schema JSON in the file
 * footer when one is available so Spark readers can recover it.
 *
 * @param configuration the job configuration (unused; state was set beforehand)
 * @return a {@link WriteContext} over the schema, with the Spark schema
 *     metadata entry when {@code sparkSchema} is set
 */
@Override
public org.apache.parquet.hadoop.api.WriteSupport.WriteContext init(final Configuration configuration) {
  final Map<String, String> extraMeta = new HashMap<>();
  // Record the Spark schema under Spark's well-known footer key, if present.
  if (sparkSchema != null) {
    extraMeta.put(ParquetReadSupport.SPARK_METADATA_KEY(), sparkSchema.json());
  }
  return new WriteContext(schema, extraMeta);
}
/**
 * Initializes the wrapped write support and merges its extra metadata with the
 * key/value metadata held by this wrapper.
 *
 * <p>Fix: the previous {@code ImmutableMap.builder().putAll(..).putAll(..)}
 * merge threw {@code IllegalArgumentException} whenever the wrapped context
 * produced a key also present in {@code keyValueMetadata}. Merge through a
 * plain map instead, with the wrapped context's value winning on conflict
 * (matching the original putAll ordering), and return an unmodifiable view.
 *
 * @param configuration the job configuration, forwarded to the wrapped support
 * @return a {@link WriteContext} over this wrapper's type and the merged metadata
 */
@Override
public WriteContext init(Configuration configuration) {
  WriteContext wrappedContext = wrapped.init(configuration);
  // Wrapper metadata first, then the wrapped context's — later puts override.
  java.util.Map<String, String> metadata = new java.util.LinkedHashMap<>(keyValueMetadata);
  metadata.putAll(wrappedContext.getExtraMetaData());
  return new WriteContext(type, java.util.Collections.unmodifiableMap(metadata));
}
/**
 * Initializes this write support by resolving the table schema that
 * {@code DataWritableWriteSupport} serialized into the job configuration.
 *
 * @param configuration the job configuration carrying the schema
 * @return a {@link WriteContext} over the resolved schema with no extra metadata
 */
@Override
public WriteContext init(final Configuration configuration) {
  // Cache the schema for use by the record writer.
  schema = DataWritableWriteSupport.getSchema(configuration);
  return new WriteContext(schema, new HashMap<>());
}
/**
 * Initializes this write support from a Thrift class: derives the Thrift
 * struct descriptor and the Parquet schema, builds the extra metadata
 * (including a Pig schema when Pig is on the classpath and the class is a
 * TBase), and caches the resulting write context.
 *
 * @param thriftClass the Thrift-generated class to write
 */
protected void init(Class<T> thriftClass) {
  this.thriftClass = thriftClass;
  this.thriftStruct = getThriftStruct();
  // Convert the full struct without projection so the footer records the complete schema.
  this.schema = ThriftSchemaConverter.convertWithoutProjection(thriftStruct);
  final Map<String, String> extraMetaData = new ThriftMetaData(thriftClass.getName(), thriftStruct).toExtraMetaData();
  // adding the Pig schema as it would have been mapped from thrift
  // TODO: make this work for non-tbase types
  if (isPigLoaded() && TBase.class.isAssignableFrom(thriftClass)) {
    // Unchecked cast is guarded by the isAssignableFrom check above.
    new PigMetaData(new ThriftToPig((Class<? extends TBase<?,?>>)thriftClass).toSchema()).addToMetaData(extraMetaData);
  }
  this.writeContext = new WriteContext(schema, extraMetaData);
}
/**
 * Initializes this write support by resolving the Parquet schema from the job
 * configuration and caching it for record writing.
 *
 * @param configuration the job configuration carrying the schema
 * @return a {@link WriteContext} over the resolved schema with no extra metadata
 */
@Override
public WriteContext init(final Configuration configuration) {
  schema = getSchema(configuration);
  return new WriteContext(schema, new HashMap<String, String>());
}
/**
 * Builds the write context over the precomputed schema and metadata.
 *
 * @param configuration the job configuration (unused; both the type and the
 *     metadata were fixed before init)
 * @return a {@link WriteContext} over {@code type} and {@code metadata}
 */
@Override
public WriteContext init(Configuration configuration) {
  final WriteContext context = new WriteContext(type, metadata);
  return context;
}