header = readHeader((CsvParser) jp, project, collection); } else { List<SchemaField> vall = metastore.getCollection(project, collection); throw new RakamException(String.format("Table has %d columns but csv file has more than %d columns", indexes.length, indexes.length), HttpResponseStatus.BAD_REQUEST); record.put(indexes[idx], getValue(types.get(idx), jp)); idx += 1; break;
/**
 * Parses a JSON object literal into a flat map whose values are all of type
 * {@code mapValueType}.
 *
 * @param mapValueType  type every value in the map is converted to (via {@code getValue})
 * @param valueAsString raw JSON text expected to be an object, e.g. {@code {"a": 1}}
 * @return the parsed map, or {@code null} if the input is not a JSON object
 * @throws IOException          if the underlying parser fails
 * @throws JsonMappingException if any value is not scalar (nested objects/arrays unsupported)
 */
private Map<String, Object> getMap(FieldType mapValueType, String valueAsString)
        throws IOException {
    Map<String, Object> map = new HashMap<>();
    // try-with-resources: the parser was previously leaked on every call.
    try (JsonParser parser = jsonFactory.createParser(valueAsString)) {
        // A freshly created parser has no current token (getCurrentToken() == null),
        // so the original getCurrentToken() check always failed; advance first.
        JsonToken t = parser.nextToken();
        if (t != JsonToken.START_OBJECT) {
            // Not a JSON object; callers treat null as "no value".
            return null;
        }
        // Move onto the first field name (or END_OBJECT for {}).
        t = parser.nextToken();
        for (; t == JsonToken.FIELD_NAME; t = parser.nextToken()) {
            String key = parser.getCurrentName();
            // nextToken() positions on the field's value; only scalars are allowed.
            if (!parser.nextToken().isScalarValue()) {
                throw new JsonMappingException(String.format("Nested properties are not supported. ('%s' field)", mapValueType.name()));
            }
            map.put(key, getValue(mapValueType, parser));
        }
        return map;
    }
}
}
Metastore metastore = new InMemoryMetastore(new InMemoryApiKeyService(), new EventBus()); mapper.registerModule(new SimpleModule().addDeserializer(EventList.class, new CsvEventDeserializer(metastore, new ProjectConfig(), new TestingConfigManager(), new SchemaChecker(metastore, build), build)));
/**
 * Parses a JSON array literal into an Avro {@link GenericData.Array} whose
 * elements are all of type {@code arrayElementType}.
 *
 * @param arrayElementType type every element is converted to (via {@code getValue})
 * @param valueAsString    raw JSON text expected to be an array, e.g. {@code [1, 2]}
 * @return the parsed Avro array, or {@code null} if the input is not a JSON array
 * @throws IOException          if the underlying parser fails
 * @throws JsonMappingException if any element is not scalar (nested objects/arrays unsupported)
 */
private GenericData.Array getArray(FieldType arrayElementType, String valueAsString)
        throws IOException {
    // try-with-resources: the parser was previously leaked on every call.
    try (JsonParser parser = jsonFactory.createParser(valueAsString)) {
        List<Object> objects = new ArrayList<>();
        // A freshly created parser has no current token (getCurrentToken() == null),
        // so the original getCurrentToken() check always failed; advance first.
        JsonToken t = parser.nextToken();
        if (t != JsonToken.START_ARRAY) {
            // Not a JSON array; callers treat null as "no value".
            return null;
        }
        // Move onto the first element (or END_ARRAY for []).
        t = parser.nextToken();
        for (; t != JsonToken.END_ARRAY; t = parser.nextToken()) {
            if (!t.isScalarValue()) {
                throw new JsonMappingException(String.format("Nested properties are not supported. ('%s' field)", arrayElementType.name()));
            }
            objects.add(getValue(arrayElementType, parser));
        }
        return new GenericData.Array(generateAvroSchema(arrayElementType), objects);
    }
}