private AbstractSerDe initializeSerde(Configuration conf, Properties props)
    throws SerDeException {
  String serdeName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE);
  Class<? extends AbstractSerDe> serdeClass;
  try {
    serdeClass = Class.forName(serdeName, true, JavaUtils.getClassLoader())
        .asSubclass(AbstractSerDe.class);
  } catch (ClassNotFoundException e) {
    throw new SerDeException(e);
  }
  // cast only needed for Hadoop 0.17 compatibility
  AbstractSerDe serde = ReflectionUtil.newInstance(serdeClass, null);
  Properties serdeProps = new Properties();
  if (serde instanceof DelimitedJSONSerDe) {
    serdeProps.put(SERIALIZATION_FORMAT, props.getProperty(SERIALIZATION_FORMAT));
    serdeProps.put(SERIALIZATION_NULL_FORMAT, props.getProperty(SERIALIZATION_NULL_FORMAT));
  }
  SerDeUtils.initializeSerDe(serde, conf, serdeProps, null);
  return serde;
}
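// Usage sketch (assumed wiring, not from the original source): point the
// fetch-output serde at DelimitedJSONSerDe and supply the two serialization
// keys the helper copies for that class.
Configuration conf = new Configuration();
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE,
    "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe");
Properties tableProps = new Properties();
tableProps.setProperty(serdeConstants.SERIALIZATION_FORMAT, "1"); // field-delimiter code
tableProps.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "\\N");
AbstractSerDe fetchSerde = initializeSerde(conf, tableProps);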
private void initFromProperties(final Properties properties) throws SerDeException {
  final List<String> columnNames = new ArrayList<>(Utilities.getColumnNames(properties));
  if (!columnNames.contains(DruidConstants.DEFAULT_TIMESTAMP_COLUMN)) {
    throw new SerDeException("Timestamp column ('" + DruidConstants.DEFAULT_TIMESTAMP_COLUMN
        + "') not specified in create table; list of columns is : "
        + properties.getProperty(serdeConstants.LIST_COLUMNS));
  }
  // Resolve each declared type, normalizing timestamp-with-local-time-zone
  // to the serde's configured tsTZTypeInfo.
  final List<PrimitiveTypeInfo> columnTypes = Utilities.getColumnTypes(properties)
      .stream()
      .map(TypeInfoFactory::getPrimitiveTypeInfo)
      .map(e -> e instanceof TimestampLocalTZTypeInfo ? tsTZTypeInfo : e)
      .collect(Collectors.toList());
  final List<ObjectInspector> inspectors = columnTypes.stream()
      .map(PrimitiveObjectInspectorFactory::getPrimitiveJavaObjectInspector)
      .collect(Collectors.toList());
  columns = columnNames.toArray(new String[0]);
  types = columnTypes.toArray(new PrimitiveTypeInfo[0]);
  inspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
}
String columnNameProperty = tableProperties.getProperty(serdeConstants.LIST_COLUMNS);
String columnTypeProperty = tableProperties.getProperty(serdeConstants.LIST_COLUMN_TYPES);
String columnNameDelimiter = tableProperties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER)
    ? tableProperties.getProperty(serdeConstants.COLUMN_NAME_DELIMITER)
    : String.valueOf(SerDeUtils.COMMA);
// Parse names and types (assumed parsing steps, following standard Hive
// property handling; the fragment kept only the validation).
List<String> columnNames = Collections.emptyList();
if (columnNameProperty != null && columnNameProperty.length() > 0) {
  columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
}
List<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
// Names and types must line up one-to-one.
if (columnNames.size() != columnTypes.size()) {
  throw new SerDeException(serdeName + ": columns has " + columnNames.size()
      + " elements while columns.types has " + columnTypes.size() + " elements!");
}
StructTypeInfo rowTypeInfo;
log.debug("Initializing NiFiRecordSerDe: {}", tbl.entrySet().toArray());
String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER)
    ? tbl.getProperty(serdeConstants.COLUMN_NAME_DELIMITER)
    : String.valueOf(SerDeUtils.COMMA);
try {
  populateFieldPositionMap();
} catch (MalformedRecordException | IOException e) {
  throw new SerDeException(e);
}
inputRegex = tbl.getProperty(INPUT_REGEX);
String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
boolean inputRegexIgnoreCase = "true".equalsIgnoreCase(
    tbl.getProperty(INPUT_REGEX_CASE_SENSITIVE));
if (inputRegex != null) {
  inputPattern = Pattern.compile(inputRegex, Pattern.DOTALL
      + (inputRegexIgnoreCase ? Pattern.CASE_INSENSITIVE : 0));
} else {
  inputPattern = null;
  throw new SerDeException(
      "This table does not have serde property \"input.regex\"!");
}
// Only string columns are supported (assumed loop; the fragment kept the
// add and the rejection branch).
for (int c = 0; c < numColumns; c++) {
  if (columnTypes.get(c).equals(TypeInfoFactory.stringTypeInfo)) {
    ObjectInspector oi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    columnOIs.add(oi);
  } else {
    throw new SerDeException(getClass().getName() + " doesn't allow column [" + c
        + "] named " + columnNames.get(c) + " with type " + columnTypes.get(c));
  }
}
outputRowText = new Text();
Properties props = new Properties();
props.putAll(unit.getProps().getProperties());
props.putAll(unit.getStorageProps().getProperties());
props.putAll(unit.getSerDeProps().getProperties());
try {
  // Assumed body; the fragment kept only the failure handling. A reflective
  // initialization like this would account for the checked exceptions below.
  AbstractSerDe serde = new AvroSerDe();
  serde.getClass()
      .getMethod("initialize", Configuration.class, Properties.class)
      .invoke(serde, conf, props);
} catch (SchemaParseException | InvocationTargetException | NoSuchMethodException
    | IllegalAccessException e) {
  LOG.warn("Failed to initialize AvroSerDe.");
  throw new SerDeException(e);
}
final String className = tbl.getProperty(KafkaTableProperties.SERDE_CLASS_NAME.getName(),
    KafkaTableProperties.SERDE_CLASS_NAME.getDefaultValue());
delegateSerDe = KafkaUtils.createDelegate(className);
if (!(delegateSerDe.getObjectInspector() instanceof StructObjectInspector)) {
  throw new SerDeException("Was expecting Struct Object Inspector but have "
      + delegateSerDe.getObjectInspector().getClass().getName());
}
delegateDeserializerOI = (StructObjectInspector) delegateSerDe.getObjectInspector();
// Column names and inspectors come from the delegate's struct fields.
columnNames.addAll(delegateDeserializerOI.getAllStructFieldRefs()
    .stream()
    .map(StructField::getFieldName)
    .collect(Collectors.toList()));
inspectors.addAll(delegateDeserializerOI.getAllStructFieldRefs()
    .stream()
    .map(StructField::getFieldObjectInspector)
    .collect(Collectors.toList()));
// Pick a bytes converter for the delegate's serialized class (the Text guard
// and the Avro converter assignment are assumed; the fragment kept only the
// branch bodies).
if (delegateSerDe.getSerializedClass() == Text.class) {
  bytesConverter = new TextBytesConverter();
} else if (delegateSerDe.getSerializedClass() == AvroGenericRecordWritable.class) {
  String schemaFromProperty = tbl.getProperty(
      AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName(), "");
  Preconditions.checkArgument(!schemaFromProperty.isEmpty(),
      "Avro schema is empty, cannot proceed");
  Schema schema = AvroSerdeUtils.getSchemaFor(schemaFromProperty);
  bytesConverter = new AvroBytesConverter(schema);
}
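// Illustrative table properties for the delegate selection above; the
// property keys mirror the constants used in the snippet, the values are
// examples only.
Properties tbl = new Properties();
tbl.setProperty(KafkaTableProperties.SERDE_CLASS_NAME.getName(),
    "org.apache.hadoop.hive.serde2.avro.AvroSerDe");
tbl.setProperty(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName(),
    "{\"type\":\"record\",\"name\":\"r\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}");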
public static void copyStructToArray(Object o, ObjectInspector oi,
    ObjectInspectorCopyOption objectInspectorOption, Object[] dest, int offset)
    throws SerDeException {
  if (o == null) {
    return;
  }
  if (oi.getCategory() != Category.STRUCT) {
    throw new SerDeException("Unexpected category " + oi.getCategory());
  }
  StructObjectInspector soi = (StructObjectInspector) oi;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  for (int i = 0; i < fields.size(); ++i) {
    StructField f = fields.get(i);
    dest[offset + i] = copyToStandardObject(soi.getStructFieldData(o, f),
        f.getFieldObjectInspector(), objectInspectorOption);
  }
}
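// Usage sketch (invented column layout): flatten one struct row into an
// Object[] buffer starting at offset 0.
ObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
    Arrays.asList("id", "name"),
    Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.javaLongObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector));
Object[] dest = new Object[2];
copyStructToArray(Arrays.asList(42L, "x"), rowOI, ObjectInspectorCopyOption.JAVA, dest, 0);
// dest now holds {42L, "x"} copied out as standard Java objects.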
public Writable serialize(Object obj, ObjectInspector objInspector) throws Exception {
  if (objInspector.getCategory() != ObjectInspector.Category.STRUCT) {
    throw new SerDeException(getClass().toString()
        + " can only serialize struct types, but we got: " + objInspector.getTypeName());
  }
  StructObjectInspector soi = (StructObjectInspector) objInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  List<Object> values = soi.getStructFieldsDataAsList(obj);
  // Assumed guard; the fragment kept only the exception.
  if (values.get(keyIndex) == null) {
    throw new SerDeException("HBase row key cannot be NULL");
  }
  long timestamp = -1;
  if (timestampIndex >= 0) {
    ObjectInspector inspector = fields.get(timestampIndex).getFieldObjectInspector();
    Object value = values.get(timestampIndex);
    if (inspector instanceof LongObjectInspector) {
      timestamp = ((LongObjectInspector) inspector).get(value);
    }
  }
  // The original continues by assembling the HBase row from the key,
  // timestamp, and remaining column values (omitted in this fragment).
}
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
  if (objInspector.getCategory() != Category.STRUCT) {
    throw new SerDeException(getClass().toString()
        + " can only serialize struct types, but we got: " + objInspector.getTypeName());
  }
  StructObjectInspector soi = (StructObjectInspector) objInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  StringBuilder sb = new StringBuilder();
  for (int i = 0; i < fields.size(); i++) {
    if (i > 0) {
      sb.append(separator);
    }
    Object column = soi.getStructFieldData(obj, fields.get(i));
    if (fields.get(i).getFieldObjectInspector().getCategory() == Category.PRIMITIVE) {
      // For primitive object, serialize to plain string
      sb.append(column == null ? nullString : column.toString());
    } else {
      // For complex object, serialize to JSON format
      sb.append(SerDeUtils.getJSONString(column, fields.get(i).getFieldObjectInspector()));
    }
  }
  serializeCache.set(sb.toString());
  return serializeCache;
}
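// Usage sketch (column layout and serde instance assumed for illustration):
// one primitive and one complex column, serialized with the method above.
StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
    Arrays.asList("id", "tags"),
    Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector)));
Object row = Arrays.asList(7, Arrays.asList("a", "b"));
Text out = (Text) serde.serialize(row, rowOI);
// With a tab separator this renders: 7<TAB>["a","b"] (complex column as JSON)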
LOGGER.trace("Deserializing from SerDe"); if (!(blob instanceof MapWritable)) { throw new SerDeException("Expected MapWritable. Got " + blob.getClass().getName()); throw new SerDeException("JDBC SerDe hasn't been initialized properly"); Text columnKey = new Text(); columnKey.set(hiveColumnNames[i]); Writable value = input.get(columnKey); Object rowVal;
@Override
public Writable serialize(Object o, ObjectInspector objectInspector) throws SerDeException {
  // Fields...
  StructObjectInspector inspector = (StructObjectInspector) objectInspector;
  List<? extends StructField> fields = inspector.getAllStructFieldRefs();
  for (int i = 0; i < fields.size(); i++) {
    StructField f = fields.get(i);
    String docFieldName = columnNames.get(i);
    switch (f.getFieldObjectInspector().getCategory()) {
    case PRIMITIVE:
      Object value = ObjectInspectorUtils.copyToStandardJavaObject(
          inspector.getStructFieldData(o, f), f.getFieldObjectInspector());
      mapWritable.put(new Text(docFieldName), new Text(value == null ? "" : value.toString()));
      break;
    case STRUCT:
    case MAP:
    case LIST:
    case UNION:
      throw new SerDeException("We don't yet support nested types (found "
          + f.getFieldObjectInspector().getTypeName() + ")");
    }
  }
  return mapWritable;
}
if (inputPattern == null) {
  // Assumed guard; the fragment kept only the exception.
  throw new SerDeException(
      "This table does not have serde property \"input.regex\"!");
}
Matcher m = inputPattern.matcher(rowText.toString());
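// Sketch of how a matched row is typically unpacked (assumed continuation;
// row and numColumns come from the surrounding serde, and regex group
// numbering starts at 1 in java.util.regex):
if (!m.matches()) {
  return null; // commonly: skip unparseable rows rather than failing the query
}
for (int c = 0; c < numColumns; c++) {
  row.set(c, m.group(c + 1));
}
return row;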
@Override
protected void serializeField(ByteStream.Output out, Object obj, ObjectInspector objInspector,
    LazySerDeParameters serdeParams) throws SerDeException {
  if (!objInspector.getCategory().equals(Category.PRIMITIVE)
      || (objInspector.getTypeName().equalsIgnoreCase(serdeConstants.BINARY_TYPE_NAME))) {
    // do this for all complex types and binary
    try {
      serialize(out,
          SerDeUtils.getJSONString(obj, objInspector,
              serdeParams.getNullSequence().toString()),
          PrimitiveObjectInspectorFactory.javaStringObjectInspector,
          serdeParams.getSeparators(), 1, serdeParams.getNullSequence(),
          serdeParams.isEscaped(), serdeParams.getEscapeChar(),
          serdeParams.getNeedsEscape());
    } catch (IOException e) {
      throw new SerDeException(e);
    }
  } else {
    // primitives except binary
    super.serializeField(out, obj, objInspector, serdeParams);
  }
}
case VARCHAR: // case label assumed; the fragment begins inside it
  // The length lives on BaseCharTypeInfo, which covers both CHAR and VARCHAR;
  // casting to CharTypeInfo would fail for a varchar column.
  return new HiveVarcharWritable(new HiveVarchar(value.textValue(),
      ((BaseCharTypeInfo) typeInfo).getLength()));
case STRING:
  return new Text(value.textValue());
case BOOLEAN:
  return new BooleanWritable(value.isBoolean()
      ? value.booleanValue() : Boolean.valueOf(value.textValue()));
default:
  throw new SerDeException("Unknown type: " + typeInfo.getTypeName());
// Assumed signature; the fragment starts at the throws clause.
public Writable serialize(Object obj, ObjectInspector objInspector)
    throws SerDeException {
  StructObjectInspector soi = (StructObjectInspector) objInspector;
  List<? extends StructField> fields = soi.getAllStructFieldRefs();
  List<Object> list = soi.getStructFieldsDataAsList(obj);
  if (fields.size() != numColumns) {
    throw new SerDeException("Cannot serialize the object because there are " + fields.size()
        + " fields but the table has " + numColumns + " columns.");
  }
  // Serialize each field in turn (assumed loop; the fragment kept only the
  // tail of the escaped-write call and its error handling).
  for (int c = 0; c < numColumns; c++) {
    ObjectInspector fieldOI = fields.get(c).getFieldObjectInspector();
    try {
      serialize(serializeStream, list.get(c), fieldOI, serdeParams.getSeparators(), 1,
          serdeParams.getNullSequence(), serdeParams.isEscaped(), serdeParams.getEscapeChar(),
          serdeParams.getNeedsEscape());
    } catch (IOException e) {
      throw new SerDeException(e);
    }
  }
  serializeCache.set(serializeStream.getData(), 0, serializeStream.getLength());
  return serializeCache;
}
public static int getStructSize(ObjectInspector oi) throws SerDeException {
  if (oi.getCategory() != Category.STRUCT) {
    throw new SerDeException("Unexpected category " + oi.getCategory());
  }
  return ((StructObjectInspector) oi).getAllStructFieldRefs().size();
}
/**
 * Takes a JSON string in Text form and returns an object representation of it
 * that is readable by the corresponding object inspector.
 *
 * For this implementation, since we're using the Jackson parser, we can
 * construct our own object representation, and we use HCatRecord for it.
 */
@Override
public Object deserialize(Writable blob) throws SerDeException {
  Object row;
  Text t = (Text) blob;
  try {
    row = structReader.parseStruct(
        new ByteArrayInputStream(t.getBytes(), 0, t.getLength()));
    return row;
  } catch (Exception e) {
    LOG.warn("Error [{}] parsing json text [{}].", e, t);
    throw new SerDeException(e);
  }
}
// Narrowed from "throws Exception": only SerDeException is thrown here.
public LazyHCatRecord(Object wrappedObject, ObjectInspector oi) throws SerDeException {
  if (oi.getCategory() != Category.STRUCT) {
    throw new SerDeException(getClass().toString()
        + " can only make a lazy hcat record from objects of struct types, but we got: "
        + oi.getTypeName());
  }
  this.soi = (StructObjectInspector) oi;
  this.wrappedObject = wrappedObject;
}
private Object parseStruct(JsonParser parser, StructObjectInspector oi)
    throws JsonParseException, IOException, SerDeException {
  Object[] ret = new Object[oi.getAllStructFieldRefs().size()];
  // Assumed control flow; the fragment kept only the exceptions and the
  // per-field dispatch.
  if (parser.getCurrentToken() != JsonToken.START_OBJECT) {
    throw new SerDeException("struct expected");
  }
  JsonToken currentToken;
  while ((currentToken = parser.nextToken()) != JsonToken.END_OBJECT) {
    if (currentToken != JsonToken.FIELD_NAME) {
      throw new SerDeException("unexpected token: " + currentToken);
    }
    String name = parser.getCurrentName();
    StructField field = oi.getStructFieldRef(name);
    if (field == null) {
      throw new SerDeException("undeclared field");
    }
    parser.nextToken(); // advance onto the field's value
    try {
      ret[field.getFieldID()] = parseDispatcher(parser, field.getFieldObjectInspector());
    } catch (Exception e) {
      throw new SerDeException("struct field " + name + ": " + e.getMessage(), e);
    }
  }
  return Arrays.asList(ret);
}
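// Sketch (assumed wiring): drive parseStruct with a Jackson parser positioned
// on the opening brace of one JSON object; rowInspector stands in for the
// table's StructObjectInspector.
JsonFactory factory = new JsonFactory();
try (JsonParser parser = factory.createParser("{\"id\":1,\"name\":\"x\"}")) {
  parser.nextToken(); // move onto START_OBJECT, as parseStruct expects
  Object row = parseStruct(parser, rowInspector);
}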