/**
 * Serialize a {@link DataSchema} to its JSON string representation.
 *
 * @param schema the {@link DataSchema} to serialize.
 * @param pretty the pretty printing mode to apply.
 * @return the JSON encoded string representing the {@link DataSchema}, or the
 *         exception message if an {@link IOException} occurred while encoding.
 */
public static String schemaToJson(DataSchema schema, JsonBuilder.Pretty pretty)
{
  JsonBuilder builder = null;
  try
  {
    builder = new JsonBuilder(pretty);
    new SchemaToJsonEncoder(builder).encode(schema);
    return builder.result();
  }
  catch (IOException exc)
  {
    // NOTE(review): on failure the exception message is returned in place of
    // JSON, so callers cannot distinguish an error from a valid result —
    // preserved as-is for backward compatibility.
    return exc.getMessage();
  }
  finally
  {
    if (builder != null)
    {
      builder.closeQuietly();
    }
  }
}
/**
 * Print the {@link DataSchema} to JSON with space between fields, items, names, values, ... etc.
 *
 * @return JSON representation of {@link DataSchema}.
 */
@Override
public String toString()
{
  // Delegates to the shared encoder with SPACES pretty-printing.
  return SchemaToJsonEncoder.schemaToJson(this, JsonBuilder.Pretty.SPACES);
}
// NOTE(review): fragment — these lines are pieces of a larger method (a switch
// over the schema type); the enclosing method boundaries are not visible here.

// Named schema already declared elsewhere: emit only its name and stop.
writeSchemaName(schema);
return;

encodeName(schema);
// Emit the package only when it differs from the current package context
// (presumably _currentPackage tracks the enclosing namespace — verify).
final String packageName = schema.getPackage();
if (packageName != null && !_currentPackage.equals(packageName))

// TYPEREF: encode the referenced type, preserving whether it was declared inline.
_builder.writeFieldName(REF_KEY);
TyperefDataSchema typerefDataSchema = (TyperefDataSchema) schema;
encode(typerefDataSchema.getRef(), typerefDataSchema.isRefDeclaredInline());
break;
case ENUM:

case RECORD:
RecordDataSchema recordDataSchema = (RecordDataSchema) schema;
boolean hasIncludes = isEncodeInclude() && !recordDataSchema.getInclude().isEmpty();
boolean fieldsBeforeIncludes = recordDataSchema.isFieldsBeforeIncludes();
// Preserve the declared ordering: includes are written before the fields
// unless the record declared its fields first.
if (hasIncludes && !fieldsBeforeIncludes)
  writeIncludes(recordDataSchema);
encodeFields(recordDataSchema);
// If fields were declared before the includes, write the includes afterwards.
if (hasIncludes && fieldsBeforeIncludes)
  writeIncludes(recordDataSchema);
encodeProperties(schema);
List<String> aliases = new ArrayList<String>();
for (Name name : schema.getAliases())
/**
 * Encode the specified {@link DataSchema}.
 *
 * Delegates to {@link #encode(DataSchema, boolean)}, treating the schema as
 * originally declared inline.
 *
 * @param schema to encode.
 * @throws IOException if there is an error while encoding.
 */
public void encode(DataSchema schema) throws IOException
{
  encode(schema, true);
}
// NOTE(review): fragment of a switch over the schema type — the ARRAY and MAP
// case bodies plus the UNION case; the enclosing switch is not visible here.

// ARRAY: emit "items", preserving inline vs. by-reference declaration.
_builder.writeFieldName(ITEMS_KEY);
ArrayDataSchema arrayDataSchema = (ArrayDataSchema) schema;
encode(arrayDataSchema.getItems(), arrayDataSchema.isItemsDeclaredInline());
encodeProperties(schema);
_builder.writeEndObject();
break;

// MAP: emit "values", preserving inline vs. by-reference declaration.
_builder.writeFieldName(VALUES_KEY);
MapDataSchema mapDataSchema = (MapDataSchema) schema;
encode(mapDataSchema.getValues(), mapDataSchema.isValuesDeclaredInline());
encodeProperties(schema);
_builder.writeEndObject();
break;

case UNION:
encodeUnion((UnionDataSchema) schema);
break;

default:
// Fragment: the ordered sub-steps of encoding one record field —
// type, then default value, then the optional flag, then custom properties.
encodeFieldType(field);
encodeFieldDefault(field);
encodeFieldOptional(field);
encodeFieldProperties(field);
/**
 * Encode the fields of a {@link RecordDataSchema} as a JSON array.
 *
 * This method does not output a key. The key should be emitted before calling this method.
 * If {@link #isEncodeInclude()} returns true, then only fields that are defined in the record being
 * encoded will be encoded, else all fields including those from included records will be encoded.
 *
 * @param recordDataSchema the {@link RecordDataSchema} being encoded.
 * @throws IOException if there is an error while encoding.
 */
protected void encodeFields(RecordDataSchema recordDataSchema) throws IOException
{
  Collection<RecordDataSchema.Field> fields = recordDataSchema.getFields();
  _builder.writeStartArray();
  boolean encodeInclude = isEncodeInclude();
  for (RecordDataSchema.Field field : fields)
  {
    // Identity comparison: a field belongs to this record (rather than an
    // include) iff its declaring record is this exact schema instance.
    if (!encodeInclude || recordDataSchema == field.getRecord())
    {
      encodeField(field);
    }
  }
  _builder.writeEndArray();
}
/**
 * Encode a schema's properties, embedding the full schema when this is the
 * root schema and the options request ROOT_ONLY embedding.
 *
 * @param schema whose properties are encoded.
 * @throws IOException if there is an error while encoding.
 */
@Override
protected void encodeProperties(DataSchema schema) throws IOException
{
  if (_options.getEmbeddedSchema() == EmbedSchemaMode.ROOT_ONLY)
  {
    final DataSchema rootDereferenced = _rootSchema.getDereferencedDataSchema();
    final boolean isRootSchema = (schema == rootDereferenced);
    // Unions never get an embedded schema, even at the root.
    if (isRootSchema && schema.getType() != DataSchema.Type.UNION)
    {
      encodePropertiesWithEmbeddedSchema(schema);
      return;
    }
  }
  super.encodeProperties(schema);
}
@Override protected void encodeField(RecordDataSchema.Field field) throws IOException { super.encodeField(field); // Reset the field's type and default if there is an override FieldOverride schemaOverride = _fieldOverridesProvider.getSchemaOverride(field); if (schemaOverride != null) { field.setType(schemaOverride.getSchema()); field.setDefault(schemaOverride.getValue()); } }
/**
 * Encode a {@link DataSchema}.
 *
 * Special handling is required for typerefs: every typeref is de-referenced
 * to the actual type before encoding.
 *
 * @param schema to encode.
 * @param originallyInlined whether the schema was originally declared inline.
 * @throws IOException if there is an error while encoding.
 */
@Override
protected void encode(DataSchema schema, boolean originallyInlined) throws IOException
{
  // A custom Avro schema (when present) takes precedence; otherwise encode
  // the fully de-referenced schema.
  if (!encodeCustomAvroSchema(schema))
  {
    super.encode(schema.getDereferencedDataSchema(), originallyInlined);
  }
}
// Fragment: encode each schema with a single shared encoder so that type
// references across the schemas are handled by the same encoder state.
final SchemaToJsonEncoder encoder = new SchemaToJsonEncoder(builder);
for (DataSchema schema : schemas)
  encoder.encode(schema);
/**
 * Encode a {@link DataSchema}.
 *
 * Special handling is required for typerefs: every typeref is de-referenced
 * to the actual type before encoding.
 *
 * @param schema to encode.
 * @throws IOException if there is an error while encoding.
 */
@Override
public void encode(DataSchema schema) throws IOException
{
  // A custom Avro schema (when present) takes precedence; otherwise encode
  // the fully de-referenced schema.
  if (!encodeCustomAvroSchema(schema))
  {
    super.encode(schema.getDereferencedDataSchema());
  }
}
// Fragment: serialize the filtered schema as indented JSON and write it out
// using the project's default charset.
String schemaJson = SchemaToJsonEncoder.schemaToJson(filteredSchema, JsonBuilder.Pretty.INDENTED);
fout.write(schemaJson.getBytes(RestConstants.DEFAULT_CHARSET));
// NOTE(review): close() is not in a finally block in this snippet — if
// write() throws, fout leaks; confirm the enclosing method handles cleanup.
fout.close();
/**
 * Encode a schema in the requested file format.
 *
 * @param schema the schema to encode.
 * @param format the file type, either the PDL or the JSON (pdsc) file type.
 * @return the encoded schema text.
 * @throws IOException if there is an error while encoding.
 * @throws IllegalArgumentException if the format is not supported.
 */
private static String encode(DataSchema schema, String format) throws IOException
{
  if (format.equals(PdlSchemaParser.FILETYPE))
  {
    final StringWriter writer = new StringWriter();
    final SchemaToPdlEncoder pdlEncoder = new SchemaToPdlEncoder(writer);
    // Keep type references exactly as authored.
    pdlEncoder.setTypeReferenceFormat(AbstractSchemaEncoder.TypeReferenceFormat.PRESERVE);
    pdlEncoder.encode(schema);
    return writer.toString();
  }

  if (format.equals(SchemaParser.FILETYPE))
  {
    final JsonBuilder builder = new JsonBuilder(JsonBuilder.Pretty.INDENTED);
    try
    {
      final SchemaToJsonEncoder jsonEncoder =
          new SchemaToJsonEncoder(builder, AbstractSchemaEncoder.TypeReferenceFormat.PRESERVE);
      jsonEncoder.encode(schema);
      return builder.result();
    }
    finally
    {
      builder.closeQuietly();
    }
  }

  throw new IllegalArgumentException("Unsupported format: " + format);
}
/**
 * Encode a field's type (i.e. its {@link DataSchema}).
 *
 * @param field providing the type to encode.
 * @throws IOException if there is an error while encoding.
 */
protected void encodeFieldType(RecordDataSchema.Field field) throws IOException
{
  _builder.writeFieldName(TYPE_KEY);
  // Preserve whether the field's type was declared inline or by reference.
  encode(field.getType(), field.isDeclaredInline());
}
private MapDataSchema parseMap(MapDeclarationContext map) throws ParseException { TypeAssignmentContext keyType = map.typeParams.key; TypeAssignmentContext valueType = map.typeParams.value; MapDataSchema schema = new MapDataSchema(toDataSchema(valueType)); Map<String, Object> propsToAdd = new HashMap<String, Object>(); if (keyType.typeReference() != null) { String typeName = keyType.typeReference().value; // TODO(jbetz): // Replace with https://github.com/coursera/courier/tree/with-restli-upstream-fixes // once https://github.com/linkedin/rest.li/pull/61 is accepted. if (!typeName.equals("string")) { String qualifiedKeyName = computeFullName(typeName); propsToAdd.put("keys", qualifiedKeyName); } } else if (keyType.typeDeclaration() != null) { DataSchema keySchema = parseType(keyType.typeDeclaration()); String json = SchemaToJsonEncoder.schemaToJson(keySchema, JsonBuilder.Pretty.COMPACT); try { DataMap dataMap = codec.stringToMap(json); propsToAdd.put("keys", dataMap); } catch (IOException e) { startErrorMessage(map) .append("Unexpected error parsing map: ").append(e.getMessage()).append("\n"); } } schema.setProperties(propsToAdd); return schema; }
/**
 * Write the snapshot (all referenced models plus the top-level schema) to a
 * JSON file in the given output directory.
 *
 * @param outdirFile the directory to create the file in.
 * @param fileName the base file name; the snapshot extension is appended.
 * @return the file that was written.
 * @throws IOException if encoding or writing fails.
 */
public File writeFile(File outdirFile, String fileName) throws IOException
{
  fileName += RestConstants.SNAPSHOT_FILENAME_EXTENTION;
  final File file = new File(outdirFile, fileName);
  final FileOutputStream fileOutputStream = new FileOutputStream(file);
  final JsonBuilder jsonBuilder = new JsonBuilder(JsonBuilder.Pretty.INDENTED);
  // Cover all of the encoding work with the try/finally: previously the
  // stream and builder leaked if any encode step threw before the write.
  try
  {
    SchemaToJsonEncoder encoder = new SchemaToJsonEncoder(jsonBuilder);
    jsonBuilder.writeStartObject();
    jsonBuilder.writeFieldName(Snapshot.MODELS_KEY);
    jsonBuilder.writeStartArray();
    List<NamedDataSchema> models = generateModelList();
    // Sort by full name for deterministic output across runs.
    models.sort(Comparator.comparing(NamedDataSchema::getFullName));
    for (DataSchema model : models)
    {
      encoder.encode(model);
    }
    jsonBuilder.writeEndArray();
    jsonBuilder.writeFieldName(Snapshot.SCHEMA_KEY);
    jsonBuilder.writeDataTemplate(_topLevelSchema, true);
    jsonBuilder.writeEndObject();
    // Use the project's charset explicitly; bare getBytes() depends on the
    // platform-default charset and is not portable.
    fileOutputStream.write(jsonBuilder.result().getBytes(RestConstants.DEFAULT_CHARSET));
  }
  finally
  {
    fileOutputStream.close();
    jsonBuilder.close();
  }
  return file;
}
/**
 * Write the record's included schemas as a JSON array under the include key.
 *
 * @param recordDataSchema the record whose includes are written.
 * @throws IOException if there is an error while encoding.
 */
private void writeIncludes(RecordDataSchema recordDataSchema) throws IOException
{
  _builder.writeFieldName(INCLUDE_KEY);
  _builder.writeStartArray();
  for (NamedDataSchema included : recordDataSchema.getInclude())
  {
    encode(included);
  }
  _builder.writeEndArray();
}
// Fragment: render the unsupported key schema as compact JSON so the error
// message shows exactly what was declared.
String json = SchemaToJsonEncoder.schemaToJson(keySchema, JsonBuilder.Pretty.COMPACT);
startErrorMessage(map)
    .append("Unsupported map key type declaration: ").append(json)
/**
 * Build the type string for a {@link DataSchema}.
 *
 * Primitive and named schemas are identified by their union member key;
 * everything else is serialized to JSON with minimized type references.
 *
 * @param schema the schema to describe.
 * @return the union member key, or the JSON encoding of the schema.
 * @throws RestLiInternalException if the schema cannot be encoded.
 */
/*package*/ static String buildDataSchemaType(DataSchema schema)
{
  if (schema instanceof PrimitiveDataSchema || schema instanceof NamedDataSchema)
  {
    return schema.getUnionMemberKey();
  }
  JsonBuilder builder = null;
  try
  {
    builder = new JsonBuilder(JsonBuilder.Pretty.SPACES);
    final SchemaToJsonEncoder jsonEncoder =
        new SchemaToJsonEncoder(builder, AbstractSchemaEncoder.TypeReferenceFormat.MINIMIZE);
    jsonEncoder.encode(schema);
    return builder.result();
  }
  catch (IOException e)
  {
    throw new RestLiInternalException("could not encode schema for '" + schema.toString() + "'", e);
  }
  finally
  {
    if (builder != null)
    {
      builder.closeQuietly();
    }
  }
}