/**
 * Returns {@code true} when {@code moduleField} names the same field as {@code existing}.
 * If the names match but the types differ, the schemas are irreconcilable and an
 * {@link IllegalStateException} is thrown instead, telling the operator to either fix
 * the schema manually or disable the module.
 *
 * @param project     project the collection belongs to (used in the error message only)
 * @param collection  collection name (used in the error message only)
 * @param existing    field already present in the stored schema
 * @param moduleField field a module wants to install
 * @return true if the two fields have the same name (and, necessarily, the same type)
 * @throws IllegalStateException on a name match with a type mismatch
 */
private boolean check(String project, String collection, SchemaField existing, SchemaField moduleField) {
    if (!existing.getName().equals(moduleField.getName())) {
        return false;
    }
    if (!existing.getType().equals(moduleField.getType())) {
        // Fixed message typo: "of disable" -> "or disable".
        throw new IllegalStateException(format(
                "Module field '%s' type does not match existing field in event of project %s.%s. Existing type: %s, Module field type: %s. \n" +
                        "Please change the schema manually or disable the module.",
                existing.getName(), project, collection, existing.getType(), moduleField.getType()));
    }
    return true;
}
}
/**
 * Validates a batch of incoming fields against the module-managed schema.
 * Fails fast when a new field reuses the name of a constant field or of a
 * dependency-table field but declares a different type.
 *
 * @param fields fields about to be added to a collection schema
 * @throws IllegalStateException if any field collides by name with a different type
 */
private void checkFields(List<SchemaField> fields) {
    SchemaField[] collisions = fields.stream()
            .filter(newField -> constantFields.stream()
                    .anyMatch(f -> f.getName().equals(newField.getName()) && !f.getType().equals(newField.getType())))
            .toArray(SchemaField[]::new);
    // Guava's checkState substitutes %s placeholders into the template; the previous
    // template had none, so the colliding fields were never part of the message proper.
    checkState(collisions.length == 0,
            "Module field collides with an existing field that has another type: %s",
            Arrays.toString(collisions));

    collisions = dependentFields.values().stream()
            .flatMap(col -> col.stream())
            .filter(field -> fields.stream()
                    .anyMatch(f -> f.getName().equals(field.getName()) && !f.getType().equals(field.getType())))
            .toArray(SchemaField[]::new);
    checkState(collisions.length == 0,
            "Fields already exist in dependency table: %s",
            Arrays.toString(collisions));
}
/**
 * Loads the column metadata of {@code project} as a name-to-type map.
 * Throws on duplicate column names (toMap contract), which indicates corrupt metadata.
 *
 * @param project project whose metadata is read
 * @return map from column name to its {@link FieldType}
 */
public Map<String, FieldType> loadColumns(String project) {
    return getMetadata(project).stream()
            .collect(Collectors.toMap(SchemaField::getName, SchemaField::getType));
}
/**
 * Installs a module-provided field into {@code fields}.
 * If a field with the same name and type is already present, nothing changes.
 * If the name matches but the type differs, the existing entry is replaced
 * by the module's definition.
 *
 * @param fields   mutable schema field set, updated in place
 * @param newField module field to install
 */
private void addModuleField(Set<SchemaField> fields, SchemaField newField) {
    for (Iterator<SchemaField> it = fields.iterator(); it.hasNext(); ) {
        SchemaField candidate = it.next();
        if (!candidate.getName().equals(newField.getName())) {
            continue;
        }
        if (candidate.getType().equals(newField.getType())) {
            return; // identical field already present; keep the existing instance
        }
        it.remove(); // same name, different type: module definition wins
        break;
    }
    fields.add(newField);
}
/**
 * Wires the deserializer's collaborators and precomputes the name-to-type
 * lookup for conditional (magic) fields.
 */
@Inject
public JsonEventDeserializer(Metastore metastore, ApiKeyService apiKeyService, ConfigManager configManager,
                             SchemaChecker schemaChecker, ProjectConfig projectConfig, FieldDependency fieldDependency) {
    // Injected services.
    this.metastore = metastore;
    this.apiKeyService = apiKeyService;
    this.configManager = configManager;
    this.schemaChecker = schemaChecker;
    this.projectConfig = projectConfig;
    // Field-dependency configuration.
    this.constantFields = fieldDependency.constantFields;
    this.conditionalMagicFields = fieldDependency.dependentFields;
    // Flatten all conditional fields into an immutable name -> type index.
    conditionalFieldMapping = conditionalMagicFields.values().stream()
            .flatMap(list -> list.stream())
            .collect(toImmutableMap(SchemaField::getName, SchemaField::getType));
}
/**
 * Exports the full schema of {@code project} as a {@link Recipe}:
 * every collection with its fields (name, category, type) plus all
 * materialized view definitions.
 *
 * @param project project to export
 * @return a SPECIFIC-strategy recipe describing the project
 */
public Recipe export(String project) {
    final Map<String, Recipe.CollectionDefinition> collections =
            metastore.getCollections(project).entrySet().stream().collect(Collectors.toMap(
                    Map.Entry::getKey,
                    entry -> {
                        // One single-entry map per field, preserving field order.
                        List<Map<String, Recipe.SchemaFieldInfo>> fieldMaps = entry.getValue().stream()
                                .map(field -> ImmutableMap.of(field.getName(),
                                        new Recipe.SchemaFieldInfo(field.getCategory(), field.getType())))
                                .collect(Collectors.toList());
                        return new Recipe.CollectionDefinition(fieldMaps);
                    }));

    final List<MaterializedView> materializedViews = materializedViewService.list(project).stream()
            .map(view -> new MaterializedView(view.tableName, view.name, view.query,
                    view.updateInterval, view.incremental, view.realTime, view.options))
            .collect(Collectors.toList());

    return new Recipe(Recipe.Strategy.SPECIFIC, collections, materializedViews);
}
/**
 * Validates and normalizes a set of new fields for {@code collection}:
 * drops fields that are system-managed dependent fields, rejects underscore-prefixed
 * fields whose type collides with a magic field, rejects the reserved
 * {@code $server_time} name, and finally merges in constant and conditional
 * module fields.
 *
 * @param collection collection name (used in error messages)
 * @param newFields  fields submitted by the client
 * @return the resulting field set including module-managed fields
 * @throws RakamException on a magic-field type collision or a reserved name
 */
public HashSet<SchemaField> checkNewFields(String collection, Set<SchemaField> newFields) {
    HashSet<SchemaField> fields = new HashSet<>(newFields);
    Iterator<SchemaField> it = fields.iterator();
    while (it.hasNext()) {
        SchemaField newField = it.next();
        // BUG FIX: dependentFields is keyed by field *name*; the previous
        // containsKey(newField) passed a SchemaField and could never match,
        // so system-managed fields were never stripped.
        if (fieldDependency.dependentFields.containsKey(newField.getName())) {
            it.remove();
            continue; // removed fields need no further validation
        }
        if (newField.getName().startsWith("_")) {
            for (Map.Entry<String, List<SchemaField>> entry : fieldDependency.dependentFields.entrySet()) {
                Optional<SchemaField> collision = entry.getValue().stream()
                        // BUG FIX: compare against newField's type; the original compared
                        // e.getType() with itself, which is never unequal, so collisions
                        // were silently accepted.
                        .filter(e -> e.getName().equals(newField.getName())
                                && !e.getType().equals(newField.getType()))
                        .findAny();
                if (collision.isPresent()) {
                    throw new RakamException(format("Field %s.%s collides with one of the magic field with type %s",
                            collection, newField.getName(), collision.get().getType()), BAD_REQUEST);
                }
            }
        }
        if (newField.getName().equals("$server_time")) {
            throw new RakamException("$server_time is reserved as a system attribute", BAD_REQUEST);
        }
    }
    fieldDependency.constantFields.forEach(field -> addModuleField(fields, field));
    fieldDependency.dependentFields.forEach((fieldName, field) -> addConditionalModuleField(fields, fieldName, field));
    return fields;
}
/**
 * Serializes a {@link QueryResult} into Avro binary form: one record per
 * result row, using a schema derived from the result metadata.
 *
 * @param result query result to serialize
 * @return the Avro-encoded rows as a byte array
 * @throws RuntimeException if a record fails to serialize
 */
public static byte[] exportAsAvro(QueryResult result) {
    // Hoisted out of the row loop: metadata is invariant across rows and was
    // previously re-fetched for every row.
    List<SchemaField> metadata = result.getMetadata();
    Schema avroSchema = AvroUtil.convertAvroSchema(metadata);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter writer = new FilteredRecordWriter(avroSchema, GenericData.get());
    BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
    // The same record instance is reused for every row; each iteration
    // overwrites all positions before writing.
    GenericData.Record record = new GenericData.Record(avroSchema);
    for (List<Object> row : result.getResult()) {
        for (int i = 0; i < row.size(); i++) {
            record.put(i, getAvroValue(row.get(i), metadata.get(i).getType()));
        }
        try {
            writer.write(record, encoder);
        } catch (Exception e) {
            throw new RuntimeException("Couldn't serialize event", e);
        }
    }
    return out.toByteArray();
}
.anyMatch(e -> e.getValue().stream().anyMatch(z -> z.getType().equals(STRING)));
return f; }) .map(f -> format("\"%s\" %s", f.getName(), toSql(f.getType()))) .collect(Collectors.joining(", ")); if (queryEnd.isEmpty()) { .map(f -> format("ALTER TABLE %s.\"%s\".\"%s\" ADD COLUMN \"%s\" %s", config.getColdStorageConnector(), project, collection, f.getName(), toSql(f.getType()))) .forEach(q -> { try {
/**
 * Builds a SQL {@code select} clause whose first value is the current
 * server timestamp (epoch millis) followed by every property of the record,
 * each rendered via {@code appendValue} according to its schema type.
 *
 * @param properties Avro record holding the event values
 * @param schema     field types aligned by position with the record's fields
 * @return the assembled {@code select ...} fragment
 */
private String buildValues(GenericRecord properties, List<SchemaField> schema) {
    StringBuilder query = new StringBuilder("select ");
    // Server-side ingestion timestamp always comes first.
    appendValue(query, Instant.now().toEpochMilli(), FieldType.TIMESTAMP);
    int fieldCount = properties.getSchema().getFields().size();
    for (int index = 0; index < fieldCount; index++) {
        query.append(", ");
        appendValue(query, properties.get(index), schema.get(index).getType());
    }
    return query.toString();
}
select = String.format("'{\"_collection\": \"%s\",'||'", e.getCollection()) + cols.stream() .map(field -> { switch (field.getType()) { case LONG: case DOUBLE:
HttpResponseStatus.BAD_REQUEST); if (any.get().getType() != FieldType.STRING) { throw new RakamException("Type of column must be STRING", HttpResponseStatus.BAD_REQUEST);
.filter(field -> { if (field.getName().equals(projectConfig.getUserColumn())) { userType.set(field.getType()); return false; .filter(field -> properties.isPresent() ? properties.get().contains(field.getName()) : (field.getName().equals(projectConfig.getTimeColumn()) || field.getName().equals("_session_id"))) .filter(field -> field.getType() != BINARY) .map(field -> { if (field.getType().isNumeric()) { return format("\"%1$s\": '|| COALESCE(cast(%1$s as varchar), 'null')||'", field.getName()); if (field.getType().isArray() || field.getType().isMap()) { return format("\"%1$s\": '|| json_format(try_cast(%1$s as json)) ||'", field.getName());
int[] indexes = header.getValue(); List<FieldType> types = Arrays.stream(indexes) .mapToObj(i -> header.getKey().get(i).getType()).collect(Collectors.toList());
/**
 * Converts a {@link SchemaField} into an Avro {@link Schema.Field}
 * with no doc string and a JSON-null default value.
 *
 * @param field schema field to convert
 * @return the equivalent Avro field
 */
public static Schema.Field generateAvroField(SchemaField field) {
    Schema avroType = generateAvroSchema(field.getType());
    // NullNode supplies the field's default value in the Avro schema JSON.
    return new Schema.Field(field.getName(), avroType, null, NullNode.getInstance());
}
.stream() .map((Function<SchemaField, Map.Entry<String, Type>>) f -> new SimpleImmutableEntry<>(f.getName(), PrestoType.toType(f.getType()))) .collect(Collectors.toList());
Optional<SchemaField> existing = fields.stream().filter(e -> e.getName().equals(newField.getName())).findAny(); if (existing.isPresent()) { if (!existing.get().getType().equals(newField.getType())) { throw new IllegalStateException(String.format("Multiple entries with same key for collection %s field %s: %s,%s", collection, newField.getName(), newField.getType(), existing.get().getType())); .put("id", new AttributeValue(rangeKey)) .put("collection", new AttributeValue(collection)).put("name", new AttributeValue(newField.getName())) .put("type", new AttributeValue(newField.getType().name())).build()) ); fields.add(newField); .put("id", new AttributeValue(rangeKey)) .put("collection", new AttributeValue(collection)).put("name", new AttributeValue(newField.getName())) .put("type", new AttributeValue(newField.getType().name())).build()) ); } catch (ConditionalCheckFailedException e1) {
Row row = sheet.createRow(i + 1); for (int c = 0; c < metadata.size(); c++) { final FieldType type = metadata.get(c).getType();
if (!SUPPORTED_TYPES.contains(field.getType()) || field.getName().equals(projectConfig.getUserColumn())) { continue;