/**
 * Generates a Hive {@link TypeInfo} for an Avro map schema. This is made
 * slightly simpler in that Avro only allows maps with strings for keys, so
 * only the value type needs to be converted.
 *
 * @param schema      the Avro schema; must be of type {@code Schema.Type.MAP}
 * @param seenSchemas schemas already visited during conversion (cycle guard,
 *                    threaded through to {@code generateTypeInfo})
 * @return a map TypeInfo with string keys and the converted value type
 * @throws AvroSerdeException if the schema is not a map or its value type
 *                            cannot be converted
 */
private static TypeInfo generateMapTypeInfo(Schema schema, Set<Schema> seenSchemas)
    throws AvroSerdeException {
  // Fail explicitly rather than via `assert`, which is a no-op unless the JVM
  // runs with -ea; the method already declares AvroSerdeException.
  if (!schema.getType().equals(Schema.Type.MAP)) {
    throw new AvroSerdeException("Expected MAP schema but found: " + schema.getType());
  }
  Schema valueType = schema.getValueType();
  TypeInfo valueTypeInfo = generateTypeInfo(valueType, seenSchemas);
  return TypeInfoFactory.getMapTypeInfo(
      TypeInfoFactory.getPrimitiveTypeInfo("string"), valueTypeInfo);
}
/**
 * Converts a map {@code RelDataType} into the corresponding Hive map
 * {@link TypeInfo} by converting its key and value types independently.
 */
public static TypeInfo convertMapType(RelDataType rType) {
  TypeInfo keyInfo = convert(rType.getKeyType());
  TypeInfo valueInfo = convert(rType.getValueType());
  return TypeInfoFactory.getMapTypeInfo(keyInfo, valueInfo);
}
return TypeInfoFactory.getMapTypeInfo( getPrimitiveOrcTypeFromPrimitiveAvroType(Schema.Type.STRING), getOrcField(fieldSchema.getValueType()));
/**
 * Translates a relational map type to a Hive map TypeInfo; the key and value
 * components are each run through {@code convert}.
 */
public static TypeInfo convertMapType(RelDataType rType) {
  return TypeInfoFactory.getMapTypeInfo(
      convert(rType.getKeyType()),
      convert(rType.getValueType()));
}
/**
 * Verifies that map entries whose keys match the {@code es.mapping.exclude}
 * pattern are omitted from the serialized JSON.
 */
@Test
public void testMapWithFilterExclude() {
  TestSettings cfg = new TestSettings();
  cfg.setProperty("es.mapping.exclude", "xxx");
  // Parameterized types instead of raw Map/LinkedHashMap (avoids unchecked
  // warnings); LinkedHashMap keeps insertion order so the expected JSON is
  // deterministic.
  Map<Text, Text> map = new LinkedHashMap<Text, Text>();
  map.put(new Text("aaa"), new Text("bbb"));
  map.put(new Text("ccc"), new Text("ddd"));
  map.put(new Text("xxx"), new Text("zzz"));
  HiveType type = new MyHiveType(map, getMapTypeInfo(stringTypeInfo, stringTypeInfo));
  // The "xxx" entry must be filtered out.
  assertEquals("{\"aaa\":\"bbb\",\"ccc\":\"ddd\"}", hiveTypeToJson(type, cfg));
}
/**
 * Serializes a single-entry map with an integer key and asserts the exact
 * JSON output (the int key is rendered as a JSON string).
 */
@Test
public void testMap() {
  Map<IntWritable, Text> data = Collections.singletonMap(new IntWritable(1), new Text("key"));
  HiveType type = new MyHiveType(data, getMapTypeInfo(intTypeInfo, stringTypeInfo));
  assertEquals("{\"1\":\"key\"}", hiveTypeToJson(type));
}
/**
 * Verifies that only map entries whose keys match the
 * {@code es.mapping.include} pattern ({@code a*}) appear in the serialized
 * JSON.
 */
@Test
public void testMapWithFilterInclude() {
  TestSettings cfg = new TestSettings();
  cfg.setProperty("es.mapping.include", "a*");
  // Parameterized types instead of raw Map/LinkedHashMap (avoids unchecked
  // warnings); LinkedHashMap keeps insertion order so the expected JSON is
  // deterministic.
  Map<Text, Text> map = new LinkedHashMap<Text, Text>();
  map.put(new Text("aaa"), new Text("bbb"));
  map.put(new Text("ccc"), new Text("ddd"));
  map.put(new Text("axx"), new Text("zzz"));
  HiveType type = new MyHiveType(map, getMapTypeInfo(stringTypeInfo, stringTypeInfo));
  // Only keys starting with "a" survive the include filter.
  assertEquals("{\"aaa\":\"bbb\",\"axx\":\"zzz\"}", hiveTypeToJson(type, cfg));
}
return TypeInfoFactory.getMapTypeInfo(getExtendedTypeInfoFromJavaType( pt.getActualTypeArguments()[0], m), getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[1], m));
result = TypeInfoFactory.getMapTypeInfo( getTypeInfoFromObjectInspector(moi.getMapKeyObjectInspector()), getTypeInfoFromObjectInspector(moi.getMapValueObjectInspector()));
TypeInfo mapValueType = parseType(); expect(">"); return TypeInfoFactory.getMapTypeInfo(mapKeyType, mapValueType);
TypeInfo keyType = translate(type.getTypeParameters().get(0)); TypeInfo valueType = translate(type.getTypeParameters().get(1)); return getMapTypeInfo(keyType, valueType);
/**
 * Exercises SerializationEventConverter against a HiveType wrapping an
 * ordered LinkedMapWritable and checks each rendered event component.
 */
@Test
public void generateEventHiveRecord() throws Exception {
  Map<Writable, Writable> data = new LinkedMapWritable();
  data.put(new Text("one"), new IntWritable(1));
  data.put(new Text("two"), new IntWritable(2));
  data.put(new Text("three"), new IntWritable(3));

  HiveType record = new HiveType(data,
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
          TypeInfoFactory.getMapTypeInfo(
              TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo)));

  SerializationEventConverter converter = new SerializationEventConverter();
  SerializationFailure failure = new SerializationFailure(
      new IllegalArgumentException("garbage"), record, new ArrayList<String>());

  // The raw event is expected to embed the map contents in insertion order.
  String rawEvent = converter.getRawEvent(failure);
  assertThat(rawEvent, startsWith("HiveType{object={one=1, two=2, three=3}, "
      + "inspector=org.apache.hadoop.hive.serde2.objectinspector.StandardMapObjectInspector@"));

  String timestamp = converter.getTimestamp(failure);
  assertTrue(StringUtils.hasText(timestamp));
  assertTrue(DateUtils.parseDate(timestamp).getTime().getTime() > 1L);

  assertEquals("illegal_argument_exception", converter.renderExceptionType(failure));
  assertEquals("garbage", converter.renderExceptionMessage(failure));
  assertEquals("Could not construct bulk entry from record", converter.renderEventMessage(failure));
}
/**
 * Same scenario as the record test but with a plain MapWritable; only the
 * identity form of the raw event ("MapWritable@...") is asserted here.
 */
@Test
public void generateEventHiveRecordLimited() throws Exception {
  Map<Writable, Writable> data = new MapWritable();
  data.put(new Text("one"), new IntWritable(1));
  data.put(new Text("two"), new IntWritable(2));
  data.put(new Text("three"), new IntWritable(3));

  HiveType record = new HiveType(data,
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
          TypeInfoFactory.getMapTypeInfo(
              TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo)));

  SerializationEventConverter converter = new SerializationEventConverter();
  SerializationFailure failure = new SerializationFailure(
      new IllegalArgumentException("garbage"), record, new ArrayList<String>());

  String rawEvent = converter.getRawEvent(failure);
  assertThat(rawEvent, startsWith("HiveType{object=org.apache.hadoop.io.MapWritable@"));

  String timestamp = converter.getTimestamp(failure);
  assertTrue(StringUtils.hasText(timestamp));
  assertTrue(DateUtils.parseDate(timestamp).getTime().getTime() > 1L);

  assertEquals("illegal_argument_exception", converter.renderExceptionType(failure));
  assertEquals("garbage", converter.renderExceptionMessage(failure));
  assertEquals("Could not construct bulk entry from record", converter.renderEventMessage(failure));
}
}
return TypeInfoFactory.getMapTypeInfo( getPrimitiveOrcTypeFromPrimitiveFieldType(RecordFieldType.STRING.getDataType()), getOrcField(mapDataType.getValueType(), hiveFieldNames));
/**
 * Maps an Avro map schema to a Hive map TypeInfo. Avro map keys are always
 * strings, so the key side is fixed to the string TypeInfo.
 */
@Override
public TypeInfo map(Schema map, TypeInfo value) {
  TypeInfo keyInfo = TYPE_TO_TYPEINFO.get(Schema.Type.STRING);
  return TypeInfoFactory.getMapTypeInfo(keyInfo, value);
}
/**
 * Converts an Avro map schema into the corresponding Hive map TypeInfo;
 * the key TypeInfo is always the string mapping since Avro maps only
 * support string keys.
 */
@Override
public TypeInfo map(Schema map, TypeInfo value) {
  return TypeInfoFactory.getMapTypeInfo(TYPE_TO_TYPEINFO.get(Schema.Type.STRING), value);
}
/**
 * Builds the Hive TypeInfo for an Avro map. Since Avro restricts map keys to
 * strings, only the value schema has to be translated.
 */
private static TypeInfo generateMapTypeInfo(Schema schema, Set<Schema> seenSchemas)
    throws AvroSerdeException {
  assert schema.getType().equals(Schema.Type.MAP);
  // Translate the value side; keys are always the Hive string type.
  TypeInfo valueInfo = generateTypeInfo(schema.getValueType(), seenSchemas);
  return TypeInfoFactory.getMapTypeInfo(
      TypeInfoFactory.getPrimitiveTypeInfo("string"), valueInfo);
}
/**
 * Builds the Hive TypeInfo for an Avro map. Since Avro restricts map keys to
 * strings, only the value schema has to be translated.
 */
private static TypeInfo generateMapTypeInfo(Schema schema) throws AvroSerdeException {
  assert schema.getType().equals(Schema.Type.MAP);
  // Translate the value side; keys are always the Hive string type.
  TypeInfo valueInfo = generateTypeInfo(schema.getValueType());
  return TypeInfoFactory.getMapTypeInfo(
      TypeInfoFactory.getPrimitiveTypeInfo("string"), valueInfo);
}
/**
 * Produces a map TypeInfo for an Avro map schema. Avro only allows string
 * map keys, so the key TypeInfo is hard-wired to "string" and only the value
 * schema is converted.
 */
private static TypeInfo generateMapTypeInfo(Schema schema, Set<Schema> seenSchemas)
    throws AvroSerdeException {
  assert schema.getType().equals(Schema.Type.MAP);
  Schema valueSchema = schema.getValueType();
  return TypeInfoFactory.getMapTypeInfo(
      TypeInfoFactory.getPrimitiveTypeInfo("string"),
      generateTypeInfo(valueSchema, seenSchemas));
}
/**
 * Produces a map TypeInfo for an Avro map schema. Avro only allows string
 * map keys, so the key TypeInfo is hard-wired to "string" and only the value
 * schema is converted.
 */
private static TypeInfo generateMapTypeInfo(Schema schema, Set<Schema> seenSchemas)
    throws AvroSerdeException {
  assert schema.getType().equals(Schema.Type.MAP);
  Schema valueSchema = schema.getValueType();
  return TypeInfoFactory.getMapTypeInfo(
      TypeInfoFactory.getPrimitiveTypeInfo("string"),
      generateTypeInfo(valueSchema, seenSchemas));
}