public StructConverter(ObjectInspector inputOI, SettableStructObjectInspector outputOI) {
    if (inputOI instanceof StructObjectInspector) {
        this.inputOI = (StructObjectInspector) inputOI;
        this.outputOI = outputOI;
        inputFields = this.inputOI.getAllStructFieldRefs();
        outputFields = outputOI.getAllStructFieldRefs();
        // Convert only the fields both structs have in common; if the output has
        // some extra fields, they are left as NULL.
        int minFields = Math.min(inputFields.size(), outputFields.size());
        fieldConverters = new ArrayList<Converter>(minFields);
        for (int f = 0; f < minFields; f++) {
            fieldConverters.add(getConverter(
                    inputFields.get(f).getFieldObjectInspector(),
                    outputFields.get(f).getFieldObjectInspector()));
        }
        output = outputOI.create();
    }
    else if (!(inputOI instanceof VoidObjectInspector)) {
        throw new RuntimeException("Hive internal error: conversion of "
                + inputOI.getTypeName() + " to " + outputOI.getTypeName()
                + " not supported yet.");
    }
}
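For context, a struct converter like the one above is normally obtained through Hive's ObjectInspectorConverters.getConverter factory rather than constructed directly. Below is a minimal, self-contained usage sketch of that factory; the field names ("id", "name") and the choice of Java-vs-Writable inspectors are illustrative assumptions, not taken from the snippet above.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StructConverterUsageSketch {
    public static void main(String[] args) {
        List<String> fieldNames = Arrays.asList("id", "name");

        // Input rows use plain Java object inspectors (Integer, String).
        StructObjectInspector inputOI = ObjectInspectorFactory.getStandardStructObjectInspector(
                fieldNames,
                Arrays.<ObjectInspector>asList(
                        PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                        PrimitiveObjectInspectorFactory.javaStringObjectInspector));

        // Output rows use Writable-backed inspectors, so each field needs conversion.
        StructObjectInspector outputOI = ObjectInspectorFactory.getStandardStructObjectInspector(
                fieldNames,
                Arrays.<ObjectInspector>asList(
                        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
                        PrimitiveObjectInspectorFactory.writableStringObjectInspector));

        // getConverter dispatches to a struct-to-struct converter when both sides are structs.
        Converter converter = ObjectInspectorConverters.getConverter(inputOI, outputOI);
        Object converted = converter.convert(Arrays.asList(42, "hello"));
        System.out.println(converted);
    }
}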
public VectorExpressionWriter init(SettableStructObjectInspector objInspector) throws HiveException {
    super.init(objInspector);
    obj = initValue(null);
    vectorExtractRow = new VectorExtractRow();
    structTypeInfo = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(objInspector.getTypeName());
    return this;
}
private static DataSize writeRcFileColumnOld(File outputFile, Format format, Compression compression, Type type, Iterator<?> values)
        throws Exception {
    ObjectInspector columnObjectInspector = getJavaObjectInspector(type);
    RecordWriter recordWriter = createRcFileWriterOld(outputFile, compression, columnObjectInspector);

    // Wrap the single column being written in a one-field struct named "test".
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", columnObjectInspector);
    Object row = objectInspector.create();
    List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());

    @SuppressWarnings("deprecation")
    Serializer serializer = format.createSerializer();
    Properties tableProperties = new Properties();
    tableProperties.setProperty("columns", "test");
    tableProperties.setProperty("columns.types", objectInspector.getTypeName());
    serializer.initialize(new JobConf(false), tableProperties);

    // Serialize one row per value and append it to the RCFile.
    while (values.hasNext()) {
        Object value = values.next();
        value = preprocessWriteValueOld(type, value);
        objectInspector.setStructFieldData(row, fields.get(0), value);

        Writable record = serializer.serialize(row, objectInspector);
        recordWriter.write(record);
    }

    recordWriter.close(false);
    return new DataSize(outputFile.length(), BYTE).convertToMostSuccinctDataSize();
}
private static void writeOrcColumnPresto(File outputFile, Format format, CompressionKind compression, Type type, Iterator<?> values, OrcWriterStats stats)
        throws Exception {
    ImmutableMap.Builder<String, String> metadata = ImmutableMap.builder();
    metadata.put("columns", "test");
    metadata.put("columns.types", createSettableStructObjectInspector("test", type).getTypeName());

    OrcWriter writer = new OrcWriter(
            new OutputStreamOrcDataSink(new FileOutputStream(outputFile)),
            ImmutableList.of("test"),
            ImmutableList.of(type),
            format.getOrcEncoding(),
            compression,
            new OrcWriterOptions(),
            ImmutableMap.of(),
            HIVE_STORAGE_TIME_ZONE,
            true,
            BOTH,
            stats);

    // Accumulate all values into a single block and write them as one page.
    BlockBuilder blockBuilder = type.createBlockBuilder(null, 1024);
    while (values.hasNext()) {
        Object value = values.next();
        writeValue(type, blockBuilder, value);
    }

    writer.write(new Page(blockBuilder.build()));
    writer.close();
    writer.validate(new FileOrcDataSource(outputFile, new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), true));
}
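A possible invocation of the helper above, sketched inside the same test class. Format.ORC_12, CompressionKind.ZLIB, the BIGINT type constant, and new OrcWriterStats() are assumptions about the surrounding OrcTester harness and presto-orc APIs, not verified calls.

// Assumes this fragment lives in the same test class as writeOrcColumnPresto,
// with java.util.stream.LongStream and java.io.File imported.
File outputFile = File.createTempFile("test", ".orc");
try {
    // 1,000 BIGINT values, boxed so they match the Iterator<?> parameter.
    Iterator<?> values = LongStream.range(0, 1000).boxed().iterator();
    writeOrcColumnPresto(outputFile, Format.ORC_12, CompressionKind.ZLIB, BIGINT, values, new OrcWriterStats());
}
finally {
    outputFile.delete();
}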
public StructConverter(ObjectInspector inputOI, SettableStructObjectInspector outputOI) {
    if (inputOI instanceof StructObjectInspector) {
        this.inputOI = (StructObjectInspector) inputOI;
        this.outputOI = outputOI;
        inputFields = this.inputOI.getAllStructFieldRefs();
        outputFields = outputOI.getAllStructFieldRefs();
        // If the output has some extra fields, set them to NULL.
        int minFields = Math.min(inputFields.size(), outputFields.size());
        fieldConverters = new ArrayList<Converter>(minFields);
        for (int f = 0; f < minFields; f++) {
            fieldConverters.add(getConverter(
                    inputFields.get(f).getFieldObjectInspector(),
                    outputFields.get(f).getFieldObjectInspector()));
        }
    }
    else if (!(inputOI instanceof VoidObjectInspector)) {
        throw new UnsupportedOperationException(
                "Hive internal error: conversion of " + inputOI.getTypeName()
                        + " to " + outputOI.getTypeName() + " not supported yet.");
    }
}