/**
 * Serializes a {@link ColumnMapping} to JSON as an array of its field
 * mappings, one array element per mapping.
 *
 * @param mapping the column mapping to serialize
 * @return a JSON array node of the mapping's field mappings
 */
private static JsonNode toJson(ColumnMapping mapping) {
  ArrayNode fieldArray = JsonNodeFactory.instance.arrayNode();
  for (FieldMapping fieldMapping : mapping.getFieldMappings()) {
    fieldArray.add(toJson(fieldMapping));
  }
  return fieldArray;
}
/**
 * Ensure that the column mappings for the fields shared between the old and
 * new schema haven't changed. Fields present only in one schema are ignored.
 *
 * @param oldSchema the schema being migrated from
 * @param newSchema the schema being migrated to
 * @return true if the mappings are compatible, false if not.
 */
private static boolean mappingCompatible(EntitySchema oldSchema,
    EntitySchema newSchema) {
  for (FieldMapping oldMapping : oldSchema.getColumnMappingDescriptor()
      .getFieldMappings()) {
    FieldMapping newMapping = newSchema.getColumnMappingDescriptor()
        .getFieldMapping(oldMapping.getFieldName());
    // A field missing from the new schema is not a mapping conflict;
    // only a changed mapping for a shared field is incompatible.
    if (newMapping != null && !oldMapping.equals(newMapping)) {
      return false;
    }
  }
  return true;
}
/**
 * Builds a new {@link ColumnMapping} from the field mappings that have been
 * added to this builder.
 *
 * @return a ColumnMapping containing this builder's field mappings
 */
public ColumnMapping build() {
  return new ColumnMapping(this.fieldMappings);
}
if (!fieldsEqual( field, this.getColumnMappingDescriptor().getFieldMapping(field.name()), entitySchemaField, other.getColumnMappingDescriptor().getFieldMapping( entitySchemaField.name()))) { return false;
/**
 * Logs a human-readable summary of a dataset: its name, URI, schema,
 * partition strategy, column mapping, and descriptor properties.
 *
 * @param console the logger the summary is written to
 * @param dataset the dataset to describe
 */
private static void printInfo(Logger console, Dataset<?> dataset) {
  DatasetDescriptor descriptor = dataset.getDescriptor();
  // Strip the embedded mapping and strategy annotations so only the plain
  // schema is printed; both are reported separately below.
  String prettySchema = ColumnMappingParser.removeEmbeddedMapping(
      PartitionStrategyParser.removeEmbeddedStrategy(descriptor.getSchema()))
      .toString(true);
  console.info("\nDataset \"{}\":", dataset.getName());
  console.info("\tURI: \"{}\"", dataset.getUri());
  console.info("\tSchema: {}", indent(prettySchema));
  if (descriptor.isPartitioned()) {
    console.info("\tPartition strategy: {}",
        indent(descriptor.getPartitionStrategy().toString(true)));
  } else {
    console.info("\tNot partitioned");
  }
  if (descriptor.isColumnMapped()) {
    console.info("\tColumn mapping: {}",
        indent(descriptor.getColumnMapping().toString(true)));
  }
  Collection<String> properties = descriptor.listProperties();
  if (!properties.isEmpty()) {
    StringBuilder propText = new StringBuilder();
    for (String name : properties) {
      propText.append("\n\t\t").append(name).append("=")
          .append(descriptor.getProperty(name));
    }
    console.info("\tProperties:{}", propText.toString());
  }
}
/**
 * Returns the column families required by this entity's column mapping,
 * delegating to the schema's mapping descriptor.
 */
@Override
public Set<String> getRequiredColumnFamilies() {
  return this.entitySchema.getColumnMappingDescriptor()
      .getRequiredColumnFamilies();
}
/**
 * Returns the columns required by this entity's column mapping, delegating
 * to the schema's mapping descriptor.
 */
@Override
public Set<String> getRequiredColumns() {
  return this.entitySchema.getColumnMappingDescriptor()
      .getRequiredColumns();
}
"}"); Assert.assertTrue(ColumnMappingParser.hasEmbeddedColumnMapping(original)); Assert.assertFalse(ColumnMappingParser.parseFromSchema(original).equals(mapping));
if (!fieldsEqual( field, this.getColumnMappingDescriptor().getFieldMapping(field.name()), entitySchemaField, other.getColumnMappingDescriptor().getFieldMapping( entitySchemaField.name()))) { return false;
.build(); String mapping = descriptor.getColumnMapping().toString(!minimize);
/**
 * Returns the column families required by this entity's column mapping,
 * delegating to the schema's mapping descriptor.
 */
@Override
public Set<String> getRequiredColumnFamilies() {
  return this.entitySchema.getColumnMappingDescriptor()
      .getRequiredColumnFamilies();
}
/**
 * Returns the columns required by this entity's column mapping, delegating
 * to the schema's mapping descriptor.
 */
@Override
public Set<String> getRequiredColumns() {
  return this.entitySchema.getColumnMappingDescriptor()
      .getRequiredColumns();
}
/**
 * Registers an AvroRecordBuilderFactory for every keyAsColumn-mapped field
 * whose Avro type is a record. Such records are spread across many columns,
 * so the composer needs a builder to reassemble them.
 */
private void initRecordBuilderFactories() {
  for (FieldMapping mapping : avroSchema.getColumnMappingDescriptor()
      .getFieldMappings()) {
    if (mapping.getMappingType() != MappingType.KEY_AS_COLUMN) {
      continue;
    }
    String fieldName = mapping.getFieldName();
    Schema fieldSchema = avroSchema.getAvroSchema().getField(fieldName)
        .schema();
    // Only record-typed fields need a builder factory.
    if (fieldSchema.getType() == Schema.Type.RECORD) {
      AvroRecordBuilderFactory<E> factory =
          buildAvroRecordBuilderFactory(fieldSchema);
      kacRecordBuilderFactories.put(fieldName, factory);
    }
  }
}
/**
 * Ensure that the column mappings for the fields shared between the old and
 * new schema haven't changed. Fields present only in one schema are ignored.
 *
 * @param oldSchema the schema being migrated from
 * @param newSchema the schema being migrated to
 * @return true if the mappings are compatible, false if not.
 */
private static boolean mappingCompatible(EntitySchema oldSchema,
    EntitySchema newSchema) {
  for (FieldMapping oldMapping : oldSchema.getColumnMappingDescriptor()
      .getFieldMappings()) {
    FieldMapping newMapping = newSchema.getColumnMappingDescriptor()
        .getFieldMapping(oldMapping.getFieldName());
    // A field missing from the new schema is not a mapping conflict;
    // only a changed mapping for a shared field is incompatible.
    if (newMapping != null && !oldMapping.equals(newMapping)) {
      return false;
    }
  }
  return true;
}
/**
 * Creates a filter matching entities whose {@code fieldName} column value
 * matches (or, when {@code isEqual} is false, does not match) the given
 * regular expression.
 *
 * @param entitySchema the entity schema containing the field's mapping
 * @param entitySerDe serde for the entity's fields
 * @param fieldName name of the field to filter on; must be COLUMN-mapped
 * @param regex regular expression applied to the column value
 * @param isEqual true to keep matching entities, false to keep non-matching
 * @throws DatasetException if the field has no mapping or is not
 *           COLUMN-mapped
 */
public RegexEntityFilter(EntitySchema entitySchema,
    EntitySerDe<?> entitySerDe, String fieldName, String regex,
    boolean isEqual) {
  FieldMapping fieldMapping = entitySchema.getColumnMappingDescriptor()
      .getFieldMapping(fieldName);
  if (fieldMapping == null) {
    // getFieldMapping returns null for unknown fields; fail with a clear
    // message instead of an NPE on the getMappingType() call below.
    throw new DatasetException(
        "No column mapping found for field: " + fieldName);
  }
  if (fieldMapping.getMappingType() != MappingType.COLUMN) {
    throw new DatasetException(
        "SingleColumnValueFilter only compatible with COLUMN mapping types.");
  }
  this.filter = constructFilter(regex, isEqual, fieldMapping);
}
/**
 * Asserts that {@code json} parses to {@code expected}, and that the
 * expected mapping's string form round-trips through the parser.
 *
 * @param expected the mapping the JSON should parse to
 * @param json a JSON column-mapping representation
 */
public static void checkParser(ColumnMapping expected, String json) {
  Assert.assertEquals(expected, ColumnMappingParser.parse(json));
  ColumnMapping roundTripped = ColumnMappingParser.parse(expected.toString());
  Assert.assertEquals("Should reparse properly", expected, roundTripped);
}
/**
 * Prepare the Table descriptor for the given entity schema: one column
 * family per family required by the schema's column mapping, plus the
 * system and observable families.
 *
 * @param tableName name of the table the descriptor is for
 * @param entitySchemaString the Avro entity schema as a JSON string
 * @return a descriptor containing all required column families
 */
private HTableDescriptor prepareTableDescriptor(String tableName,
    String entitySchemaString) {
  HTableDescriptor descriptor = new HTableDescriptor(
      Bytes.toBytes(tableName));
  AvroEntitySchema entitySchema = parser
      .parseEntitySchema(entitySchemaString);
  Set<String> familiesToAdd = entitySchema.getColumnMappingDescriptor()
      .getRequiredColumnFamilies();
  // Use Bytes.toString/Bytes.toBytes (UTF-8) rather than new String(byte[])
  // and String.getBytes(), which depend on the platform default charset.
  familiesToAdd.add(Bytes.toString(Constants.SYS_COL_FAMILY));
  familiesToAdd.add(Bytes.toString(Constants.OBSERVABLE_COL_FAMILY));
  for (String familyToAdd : familiesToAdd) {
    if (!descriptor.hasFamily(Bytes.toBytes(familyToAdd))) {
      descriptor.addFamily(new HColumnDescriptor(familyToAdd));
    }
  }
  return descriptor;
}
/**
 * Registers an AvroRecordBuilderFactory for every keyAsColumn-mapped field
 * whose Avro type is a record. Such records are spread across many columns,
 * so the composer needs a builder to reassemble them.
 */
private void initRecordBuilderFactories() {
  for (FieldMapping mapping : avroSchema.getColumnMappingDescriptor()
      .getFieldMappings()) {
    if (mapping.getMappingType() != MappingType.KEY_AS_COLUMN) {
      continue;
    }
    String fieldName = mapping.getFieldName();
    Schema fieldSchema = avroSchema.getAvroSchema().getField(fieldName)
        .schema();
    // Only record-typed fields need a builder factory.
    if (fieldSchema.getType() == Schema.Type.RECORD) {
      AvroRecordBuilderFactory<E> factory =
          buildAvroRecordBuilderFactory(fieldSchema);
      kacRecordBuilderFactories.put(fieldName, factory);
    }
  }
}
/**
 * Verifies that a ColumnMapping supplied at parse time overrides the
 * mapping derived from the entity schema itself.
 */
@Test
public void testOverrideColumnMapping() {
  ColumnMapping override = new ColumnMapping.Builder()
      .column("field1", "override", "field1")
      .counter("version", "override", "version").build();
  AvroEntitySchema avroEntitySchema =
      parser.parseEntitySchema(entitySchema, override);
  ColumnMapping result = avroEntitySchema.getColumnMappingDescriptor();
  assertEquals(2, result.getFieldMappings().size());
  assertEquals(FieldMapping.MappingType.COLUMN,
      result.getFieldMapping("field1").getMappingType());
  assertEquals(FieldMapping.MappingType.COUNTER,
      result.getFieldMapping("version").getMappingType());
}
/**
 * Creates a filter matching entities whose {@code fieldName} column value
 * matches (or, when {@code isEqual} is false, does not match) the given
 * regular expression.
 *
 * @param entitySchema the entity schema containing the field's mapping
 * @param entitySerDe serde for the entity's fields
 * @param fieldName name of the field to filter on; must be COLUMN-mapped
 * @param regex regular expression applied to the column value
 * @param isEqual true to keep matching entities, false to keep non-matching
 * @throws DatasetException if the field has no mapping or is not
 *           COLUMN-mapped
 */
public RegexEntityFilter(EntitySchema entitySchema,
    EntitySerDe<?> entitySerDe, String fieldName, String regex,
    boolean isEqual) {
  FieldMapping fieldMapping = entitySchema.getColumnMappingDescriptor()
      .getFieldMapping(fieldName);
  if (fieldMapping == null) {
    // getFieldMapping returns null for unknown fields; fail with a clear
    // message instead of an NPE on the getMappingType() call below.
    throw new DatasetException(
        "No column mapping found for field: " + fieldName);
  }
  if (fieldMapping.getMappingType() != MappingType.COLUMN) {
    throw new DatasetException(
        "SingleColumnValueFilter only compatible with COLUMN mapping types.");
  }
  this.filter = constructFilter(regex, isEqual, fieldMapping);
}