/**
 * Generates the default JSON record for the Avro schema held in the {@code body} field.
 *
 * <p>The schema text is parsed, wrapped as a {@link RawSchema}, and fed through the
 * default-record generation algorithm to produce the record's raw JSON form.
 *
 * @return the default record as a raw JSON string
 * @throws ConfigurationGenerationException if the algorithm cannot build a default record
 * @throws IOException                      if reading/serializing the schema data fails
 */
private String generateDefaultRecord() throws ConfigurationGenerationException, IOException {
  org.apache.avro.Schema schemaBody = new org.apache.avro.Schema.Parser().parse(body);
  // Re-serialize the parsed schema so RawSchema receives a canonical schema string.
  RawSchema rawSchema = new RawSchema(schemaBody.toString());
  DefaultRecordGenerationAlgorithm<RawData> algorithm =
      new DefaultRecordGenerationAlgorithmImpl<>(rawSchema, new RawDataFactory());
  return algorithm.getRootData().getRawData();
}
}
/**
 * Applies a group override configuration on top of the already-merged configuration.
 *
 * <p>The override algorithm produces a new {@link BaseData}; its JSON form is stripped of
 * UUID fields and re-wrapped as {@link RawData} using the schema of the previous merge result.
 *
 * @param endpointId          endpoint identifier, used only for log correlation
 * @param config              the override configuration body to apply
 * @param configSchema        schema DTO supplying the override schema
 * @param mergedConfiguration the current (base, raw) merge result to build upon
 * @return a pair of the newly merged base data and its UUID-free raw counterpart
 * @throws GetDeltaException if overriding or JSON processing fails
 */
private Pair<BaseData, RawData> mergeConfiguration(String endpointId, String config,
    ConfigurationSchemaDto configSchema, Pair<BaseData, RawData> mergedConfiguration)
    throws GetDeltaException {
  OverrideAlgorithm merger = configurationOverrideFactory.createConfigurationOverrideAlgorithm();
  OverrideSchema schemaToApply = new OverrideSchema(configSchema.getOverrideSchema());
  try {
    LOG.trace("Merging group configuration with configuration: {}", config);
    OverrideData overrideData = new OverrideData(schemaToApply, config);
    BaseData merged =
        merger.override(mergedConfiguration.getV1(), Collections.singletonList(overrideData));
    // Drop generated UUID fields so the raw payload stays stable across merges.
    JsonNode tree = new ObjectMapper().readTree(merged.getRawData());
    AvroUtils.removeUuids(tree);
    RawSchema rawSchema =
        new RawSchema(mergedConfiguration.getV2().getSchema().getRawSchema());
    return new Pair<>(merged, new RawData(rawSchema, tree.toString()));
  } catch (OverrideException | IOException oe) {
    LOG.error("[{}] Unexpected exception occurred while merging configuration: ", endpointId, oe);
    throw new GetDeltaException(oe);
  } finally {
    LOG.trace("[{}] getMergedConfiguration.compute end", endpointId);
  }
}
/**
 * Builds an instance of the given Avro specific-record class populated with the
 * schema's default values.
 *
 * <p>The class's static schema field (named by {@code SCHEMA}) is read reflectively,
 * a default record is generated for it, and the resulting JSON is decoded back into
 * an instance of {@code propertiesClass}.
 *
 * @param <S>             the specific-record type to build
 * @param propertiesClass class object of the specific-record type
 * @return the populated instance, or {@code null} if generation fails (best-effort:
 *         failures are logged, not propagated)
 */
private <S extends SpecificRecordBase> S buildDefaultProperties(Class<S> propertiesClass) {
  S result = null;
  try {
    // SCHEMA names a static field on the generated Avro class; null target = static access.
    Schema schema = (Schema) propertiesClass.getField(SCHEMA).get(null);
    RawSchema rawSchema = new RawSchema(schema.toString());
    DefaultRecordGenerationAlgorithm<RawData> algorithm =
        new DefaultRecordGenerationAlgorithmImpl<>(rawSchema, new RawDataFactory());
    RawData rawData = algorithm.getRootData();
    AvroJsonConverter<S> converter = new AvroJsonConverter<>(schema, propertiesClass);
    result = converter.decodeJson(rawData.getRawData());
  } catch (Exception ex) {
    // Deliberately broad: any reflection/generation/decoding failure yields null.
    LOG.error(
        "Unable to build default specific properties for class "
            + propertiesClass.getSimpleName(), ex);
  }
  return result;
}
/**
 * Generates and attaches the default record for the given (not yet saved) CTL schema.
 *
 * <p>The schema is flat-exported to a plain Avro schema string, a default record is
 * generated from it, and the result is stored on the DTO via
 * {@code setDefaultRecord}.
 *
 * @param unSavedSchema the CTL schema DTO to populate
 * @return the same DTO with its default record set
 * @throws RuntimeException if generation fails; endless type recursion is reported
 *                          with a dedicated message
 */
private CTLSchemaDto generateDefaultRecord(CTLSchemaDto unSavedSchema) {
  try {
    String schemaBody = flatExportAsString(unSavedSchema);
    LOG.debug("Generating default record for flat schema: {}", schemaBody);
    RawSchema dataSchema = new RawSchema(schemaBody);
    DefaultRecordGenerationAlgorithm<RawData> dataProcessor =
        new DefaultRecordGenerationAlgorithmImpl<RawSchema, RawData>(
            dataSchema, new RawDataFactory());
    unSavedSchema.setDefaultRecord(dataProcessor.getRootData().getRawData());
    return unSavedSchema;
  } catch (StackOverflowError ex) {
    LOG.error("Failed to generate default record. An endless recursion is detected. CTL schema "
        + "body: {}", unSavedSchema.getBody(), ex);
    // Preserve the original error as the cause so the full stack is not lost.
    throw new RuntimeException("Unable to generate default record. An endless recursion is "
        + "detected! "
        + "Please check non-optional references to nested types.", ex);
  } catch (ConfigurationGenerationException | IOException | RuntimeException ex) {
    LOG.error("Failed to generate default record for CTL schema with body: {}", unSavedSchema
        .getBody(), ex);
    // Chain the cause instead of flattening it into the message only.
    throw new RuntimeException("An unexpected exception occurred: " + ex.toString(), ex);
  }
}
// Parse the merged payload, strip UUID fields, and re-wrap it as RawData with the CTL schema.
// NOTE(review): `baseData` and `ctlSchema` are defined outside this fragment — confirm in the enclosing method.
JsonNode json = new ObjectMapper().readTree(baseData.getRawData()); AvroUtils.removeUuids(json); RawData rawData = new RawData(new RawSchema(ctlSchema), json.toString());
// Continuation of a Schema.Parser() chain started on an earlier (unseen) line; `fqn` usage is not
// visible in this fragment — presumably consumed later in the enclosing method. TODO confirm.
.parse(schema.getSchems()); String fqn = schemaBody.getFullName(); RawSchema rawSchema = new RawSchema(schemaBody.toString()); DefaultRecordGenerationAlgorithm<RawData> algorithm = new DefaultRecordGenerationAlgorithmImpl<>(rawSchema, new RawDataFactory());
// Same UUID-stripping pattern as elsewhere in this codebase: parse raw JSON, remove UUIDs,
// rebuild RawData against the CTL schema. `baseData`/`ctlSchema` come from the enclosing scope.
JsonNode json = new ObjectMapper().readTree(baseData.getRawData()); AvroUtils.removeUuids(json); RawData rawData = new RawData(new RawSchema(ctlSchema), json.toString());
// Flink source reading raw bytes from Kafka topic "data_4" (Kafka 0.8.1 consumer), pinned to
// parallelism 1. NOTE(review): this RawSchema appears to be a Flink DeserializationSchema — a
// different type from the Kaa RawSchema used in the other fragments; confirm the import.
DataStream<byte[]> dataStream4 = env.addSource(new FlinkKafkaConsumer081<>("data_4", new RawSchema(), parameterTool.getProperties())).setParallelism(1);