@Override
public V createValue() {
  // Allocate an empty value object shaped by the reader's schema; callers
  // fill it in on each nextKeyValue(). The unchecked cast is forced by the
  // raw WritableComparable-based generic API.
  final V fresh = (V) OrcStruct.createValue(schema);
  return fresh;
}
/**
 * Wraps an already-opened vectorized row iterator as a MapReduce record reader.
 *
 * @param reader source of vectorized row batches
 * @param schema the ORC type of the rows being produced
 * @throws IOException declared for parity with the reader API
 */
public OrcMapreduceRecordReader(RecordReader reader, TypeDescription schema) throws IOException {
  this.batchReader = reader;
  this.schema = schema;
  this.batch = schema.createRowBatch();
  this.row = (V) OrcStruct.createValue(schema);
  this.rowInBatch = 0;
}
@Override
public void readFields(DataInput input) throws IOException {
  // Wire format: [tag byte][presence boolean][payload iff present].
  final byte previousTag = tag;
  tag = input.readByte();
  if (!input.readBoolean()) {
    object = null;
    return;
  }
  // Reuse the cached child value only when the union variant is unchanged;
  // otherwise allocate a value for the newly selected child type.
  if (object == null || previousTag != tag) {
    object = OrcStruct.createValue(schema.getChildren().get(tag));
  }
  object.readFields(input);
}
@Override
public void readFields(DataInput input) throws IOException {
  // Each field is preceded by a presence flag; absent fields become null.
  for (int i = 0; i < fields.length; ++i) {
    if (!input.readBoolean()) {
      fields[i] = null;
      continue;
    }
    // Lazily allocate a child value the first time this slot is populated.
    if (fields[i] == null) {
      fields[i] = createValue(schema.getChildren().get(i));
    }
    fields[i].readFields(input);
  }
}
@Override
public void configure(JobConf conf) {
  // Build the reusable shuffle value only once, from the schema string
  // carried in the job configuration.
  if (value == null) {
    final String schemaText = OrcConf.MAPRED_SHUFFLE_VALUE_SCHEMA.getString(conf);
    value = OrcStruct.createValue(TypeDescription.fromString(schemaText));
  }
}
}
@Override
public void configure(JobConf conf) {
  // Build the reusable shuffle key only once, from the schema string
  // carried in the job configuration.
  if (key == null) {
    final String schemaText = OrcConf.MAPRED_SHUFFLE_KEY_SCHEMA.getString(conf);
    key = OrcStruct.createValue(TypeDescription.fromString(schemaText));
  }
}
@Override
public void readFields(DataInput input) throws IOException {
  clear();
  // Wire format: entry count, then per entry one flag byte where bit 1
  // (mask 2) marks a non-null key and bit 0 (mask 1) a non-null value.
  final int entries = input.readInt();
  for (int i = 0; i < entries; ++i) {
    final byte flag = input.readByte();
    K mapKey = null;
    if ((flag & 2) != 0) {
      mapKey = (K) OrcStruct.createValue(keySchema);
      mapKey.readFields(input);
    }
    V mapValue = null;
    if ((flag & 1) != 0) {
      mapValue = (V) OrcStruct.createValue(valueSchema);
      mapValue.readFields(input);
    }
    put(mapKey, mapValue);
  }
}
@Override
public void readFields(DataInput input) throws IOException {
  clear();
  // Wire format: element count, then per element a presence flag followed
  // by the element payload when present; absent elements are stored as null.
  final int count = input.readInt();
  ensureCapacity(count);
  for (int i = 0; i < count; ++i) {
    if (!input.readBoolean()) {
      add(null);
      continue;
    }
    E element = (E) OrcStruct.createValue(childSchema);
    element.readFields(input);
    add(element);
  }
}
public OrcMapreduceRecordReader(Reader fileReader, Reader.Options options) throws IOException { this.batchReader = fileReader.rows(options); if (options.getSchema() == null) { schema = fileReader.getSchema(); } else { schema = options.getSchema(); } this.batch = schema.createRowBatch(); rowInBatch = 0; this.row = (V) OrcStruct.createValue(schema); }
public RowOrcOutputFormat( InternalType[] fieldTypes, String[] fieldNames, String dir, CompressionKind compression, String filePrefixName, int rowIndexStride) { Preconditions.checkArgument(fieldNames != null && fieldNames.length > 0); Preconditions.checkArgument(fieldTypes != null && fieldTypes.length == fieldNames.length); this.fieldTypes = fieldTypes; this.fieldNames = fieldNames; this.dir = dir; this.compression = compression; this.filePrefixName = filePrefixName; this.rowIndexStride = rowIndexStride; this.serializer = new OrcSerializer(fieldTypes, fieldNames); this.typeDescription = OrcSchemaConverter.convert(fieldTypes, fieldNames).toString(); this.struct = (OrcStruct) OrcStruct.createValue(TypeDescription.fromString(this.typeDescription)); }
/**
 * Opens a reader for a single file slice of a {@link CombineFileSplit}.
 *
 * @param split the combined split containing multiple file ranges
 * @param context task context supplying the Hadoop configuration
 * @param index which file of the combined split this reader handles
 * @throws IOException if the ORC reader cannot be opened
 */
public OrcRecordReader(CombineFileSplit split, TaskAttemptContext context, Integer index) throws IOException {
  this.path = split.getPath(index);
  this.offset = split.getOffset(index);
  final long length = split.getLength(index);
  this.end = offset + length;
  final Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(context.getConfiguration()));
  // Restrict reading to this file's byte range within the combined split.
  final Reader.Options options = new Reader.Options();
  options.range(offset, length);
  in = reader.rows(options);
  schema = reader.getSchema();
  this.batch = schema.createRowBatch();
  this.row = (OrcStruct) OrcStruct.createValue(schema);
  rowInBatch = 0;
}
int c = 0; for(TypeDescription child: type.getChildren()) { result.setFieldValue(c++, createValue(child));