/**
 * Calculates which types to import to minimize the need to fully qualify names in a .pdl
 * source file.
 *
 * When multiple referenced types have the same unqualified name, only one is imported:
 * - Types from the current namespace are preferred over colliding types from other namespaces.
 * - Otherwise the first lexically encountered type wins.
 *
 * The resulting import map includes types from the current namespace. These should not be
 * written as explicit import statements in the .pdl source, but keeping them in the set is
 * essential to prevent collisions with same-named types from other namespaces.
 *
 * Any type that is not imported must be referenced by fully qualified name throughout the
 * .pdl source.
 *
 * @param schema provides the top level schema to calculate imports for.
 * @return a map of schema type names to import, keyed by local name (unordered).
 */
private Map<String, Name> computeImports(DataSchema schema) throws IOException
{
  // Accumulate into a mutable map via the recursive overload, starting at the top level.
  Map<String, Name> importsByLocalName = new HashMap<>();
  computeImports(schema, true, importsByLocalName);
  return importsByLocalName;
}
// Fragment: the enclosing method header is not visible in this chunk. NOTE(review): these
// statements look like they come from different branches of a recursive property writer
// (nested map vs. marker vs. leaf value) — confirm against the full source.

// Recurse into a nested DataMap value, extending the current property path.
writeProperties(pathParts, (DataMap) value);
// Emit a bare marker property of the form "@path" (no value assignment).
indent();
write("@");
write(pathToString(pathParts));
newline();
// Emit a leaf property of the form "@path = value".
writeProperty(pathParts, value);
// Fragment: the enclosing method header and some braces are missing from this chunk.
// Decide whether the referenced type is declared inline or referenced by name.
TypeRepresentation representation = selectTypeRepresentation(dataSchema, originallyInlined);
// Record that this type has now been emitted, so later references can be by name.
markEncountered(dataSchema);
if (representation == TypeRepresentation.DECLARED_INLINE)
// NOTE(review): the brace opening this branch appears to be lost in this chunk.
boolean requiresNewlineLayout = requiredNewlineLayout(dataSchema);
// Some inline declarations are pushed onto their own indented line — presumably those
// carrying docs/properties; verify against requiredNewlineLayout.
if (requiresNewlineLayout) {
newline();
_indentDepth++;
writeInlineSchema(dataSchema);
// Restore the indent level after the inline declaration — TODO confirm the matching brace.
if (requiresNewlineLayout) {
_indentDepth--;
// By-name reference: write the (possibly fully qualified) escaped identifier.
write(toTypeIdentifier((NamedDataSchema) dataSchema));
/**
 * Writes a single property to this encoder's writer as a "@path = value" line.
 *
 * @param path provides the property's full path.
 * @param value provides the property's value; it may be any valid pegasus Data binding type
 *              (DataList, DataMap, String, Int, Long, Float, Double, Boolean, ByteArray)
 */
private void writeProperty(List<String> path, Object value) throws IOException
{
  indent();
  // Render the whole assignment in one shot; the emitted characters are identical to
  // writing each piece with a separate call.
  write("@" + pathToString(path) + " = " + toJson(value));
  newline();
}
/**
 * Writes a documentation string to .pdl code as a properly indented javadoc-style
 * comment with delimiters and a " * " margin on each line.
 *
 * @param doc provides the documentation to write; blank input emits nothing.
 * @return true if a doc comment was emitted, false when the input was blank.
 */
private boolean writeDoc(String doc) throws IOException
{
  // Guard clause: null, empty, or whitespace-only documentation is skipped entirely.
  if (StringUtils.isBlank(doc))
  {
    return false;
  }
  writeLine("/**");
  for (String docLine : doc.split("\n"))
  {
    indent();
    write(" * " + docLine);
    newline();
  }
  writeLine(" */");
  return true;
}
// Fragment: the enclosing method header, loop/switch headers, and several braces are not
// visible in this chunk. NOTE(review): the tail looks like switch cases from a separate
// type-dispatch method fused onto the namespace/package override logic — confirm.

// The package differs from the surrounding scope only when non-empty and not equal to it.
hasPackageOverride = !StringUtils.isEmpty(namedSchema.getPackage()) && !namedSchema.getPackage().equals(surroundingPackage);
// An override requires a "{ namespace ... package ... }" wrapper block around the type.
if (hasNamespaceOverride || hasPackageOverride) {
write("{");
newline();
_indentDepth++;
indent();
if (hasNamespaceOverride) {
write("namespace ");
write(namedSchema.getNamespace());
newline();
indent();
// Track the current namespace so nested identifiers resolve against it.
_namespace = namedSchema.getNamespace();
write("package ");
write(namedSchema.getPackage());
newline();
indent();
// Track the current package for the same reason.
_package = namedSchema.getPackage();
// Dispatch on the schema's concrete type to the matching writer.
writeRecord((RecordDataSchema) schema);
break;
case ENUM:
writeEnum((EnumDataSchema) schema);
break;
case FIXED:
writeFixed((FixedDataSchema) schema);
break;
case TYPEREF:
// Fragment: the enclosing writeRecord method header, field loop, and several braces are
// not visible in this chunk.

// Emit doc and properties first; remember whether anything was written so the
// declaration line can be re-indented afterwards.
boolean hasDoc = writeDoc(schema.getDoc());
boolean hasProperties = writeProperties(schema.getProperties());
if (hasDoc || hasProperties) {
indent();
write("record ");
write(toTypeIdentifier(schema));
// Includes are written before the body unless fields are declared to come first.
List<NamedDataSchema> includes = schema.getInclude();
if (includes.size() > 0 && !schema.isFieldsBeforeIncludes())
writeIncludes(schema, includes);
write(" {");
newline();
newline();
// Per-field doc and properties precede the field declaration.
writeDoc(field.getDoc());
writeProperties(field.getProperties());
// Only a non-default (non-ASCENDING) sort order is emitted explicitly.
if (field.getOrder() != null && !field.getOrder().equals(RecordDataSchema.Field.Order.ASCENDING))
write("@order = \"");
write(field.getOrder().name());
write("\"");
// Field aliases are emitted as a quoted, comma-separated array.
write("@aliases = [");
write(field.getAliases().stream().map(s -> "\"" + s + "\"").collect(Collectors.joining(", ")));
write("]");
// Fragment: the method's opening brace and the per-symbol loop header are not visible in
// this chunk; code is annotated only.

// Writes an enum declaration, including enum-level and per-symbol doc/properties.
private void writeEnum(EnumDataSchema schema) throws IOException
boolean hasDoc = writeDoc(schema.getDoc());
// Copy the properties so the reserved keys can be removed before generic emission.
DataMap properties = new DataMap(schema.getProperties());
// "symbolProperties" and "deprecatedSymbols" are handled specially per symbol below.
DataMap propertiesMap = new DataMap(coercePropertyToDataMapOrFail(schema, "symbolProperties", properties.remove("symbolProperties")));
DataMap deprecatedMap = coercePropertyToDataMapOrFail(schema, "deprecatedSymbols", properties.remove("deprecatedSymbols"));
boolean hasProperties = writeProperties(properties);
if (hasDoc || hasProperties) {
indent();
write("enum ");
write(toTypeIdentifier(schema));
write(" {");
newline();
// Per-symbol properties are looked up by symbol name from the reserved maps.
DataMap symbolProperties = coercePropertyToDataMapOrFail(schema, "symbolProperties." + symbol, propertiesMap.get(symbol));
Object deprecated = deprecatedMap.get(symbol);
if (deprecated != null)
newline();
writeDoc(docString);
writeProperties(symbolProperties);
writeLine(symbol);
indent();
write("}");
// Fragment: the enclosing method header and the conditionals/loops guarding these
// statements (e.g. the per-import loop) are not visible in this chunk.

// Resolve imports first: toTypeIdentifier consults this map while emitting the body.
_importsByLocalName = computeImports(schema);
writeLine("namespace " + escapeIdentifier(namedSchema.getNamespace()));
_namespace = namedSchema.getNamespace();
writeLine("package " + escapeIdentifier(namedSchema.getPackage()));
_package = namedSchema.getPackage();
newline();
// Presumably executed once per computed import — confirm the loop in the full source.
writeLine("import " + escapeIdentifier(importName.getFullName()));
newline();
// Finally emit the schema body itself.
writeInlineSchema(schema);
/**
 * Encodes a schema to source text in the requested file format, preserving type references.
 *
 * @param schema provides the schema to encode.
 * @param format the target file type, either the .pdl or the .pdsc file type constant.
 * @return the encoded schema source.
 * @throws IllegalArgumentException if the format is not a supported file type.
 */
private static String encode(DataSchema schema, String format) throws IOException
{
  if (format.equals(PdlSchemaParser.FILETYPE))
  {
    // .pdl: render through the PDL encoder into an in-memory writer.
    StringWriter stringWriter = new StringWriter();
    SchemaToPdlEncoder pdlEncoder = new SchemaToPdlEncoder(stringWriter);
    pdlEncoder.setTypeReferenceFormat(AbstractSchemaEncoder.TypeReferenceFormat.PRESERVE);
    pdlEncoder.encode(schema);
    return stringWriter.toString();
  }
  if (format.equals(SchemaParser.FILETYPE))
  {
    // .pdsc: render as indented JSON; the builder is always released in finally.
    JsonBuilder jsonBuilder = new JsonBuilder(JsonBuilder.Pretty.INDENTED);
    try
    {
      SchemaToJsonEncoder jsonEncoder =
          new SchemaToJsonEncoder(jsonBuilder, AbstractSchemaEncoder.TypeReferenceFormat.PRESERVE);
      jsonEncoder.encode(schema);
      return jsonBuilder.result();
    }
    finally
    {
      jsonBuilder.closeQuietly();
    }
  }
  throw new IllegalArgumentException("Unsupported format: " + format);
}
/**
 * Gets the .pdl escaped source identifier for the given named type.
 * If the type is imported, its simple name is returned; otherwise its fully qualified
 * name is returned.
 *
 * @param schema provides the named schema to get a .pdl escaped source identifier for.
 * @return an escaped source identifier.
 */
private String toTypeIdentifier(NamedDataSchema schema)
{
  String localName = schema.getName();
  // Types in the current namespace never need qualification.
  boolean isLocalNamespace = schema.getNamespace().equals(_namespace);
  // Otherwise the simple name is usable only if this exact type won the import slot.
  boolean isImported = _importsByLocalName.containsKey(localName)
      && _importsByLocalName.get(localName).getNamespace().equals(schema.getNamespace());
  return (isLocalNamespace || isImported)
      ? escapeIdentifier(localName)
      : escapeIdentifier(schema.getFullName());
}
// Fragment: the enclosing recursive computeImports overload's header and the
// switch/instanceof dispatch selecting each branch are not visible in this chunk.
// Each statement recurses into a child schema, passing whether it was declared inline.

// Record: recurse into every field's type.
for (RecordDataSchema.Field field : recordSchema.getFields())
computeImports(field.getType(), field.isDeclaredInline(), importsAcc);
// Record includes are presumably treated as inline declarations — confirm against caller.
computeImports(include, true, importsAcc);
// Typeref: recurse into the referenced type.
computeImports(typerefSchema.getRef(), typerefSchema.isRefDeclaredInline(), importsAcc);
// Union: recurse into every member type.
for (UnionDataSchema.Member member : unionSchema.getMembers())
computeImports(member.getType(), member.isDeclaredInline(), importsAcc);
// Map: recurse into the value type.
computeImports(mapSchema.getValues(), mapSchema.isValuesDeclaredInline(), importsAcc);
// Array: recurse into the item type.
computeImports(arraySchema.getItems(), arraySchema.isItemsDeclaredInline(), importsAcc);