/**
 * Parse data schema in JSON format to obtain a {@link DataSchema}.
 *
 * @param schemaText provides the data schema in JSON format.
 * @param schemaResolver for resolving referenced schemas
 * @return the {@link DataSchema} parsed from the data schema in JSON format.
 * @throws IllegalArgumentException if the data schema in JSON format is invalid or
 *                                  there is more than one top level schema.
 */
public static DataSchema parseSchema(String schemaText, DataSchemaResolver schemaResolver) throws IllegalArgumentException
{
  final PegasusSchemaParser parser = SchemaParserFactory.instance().create(schemaResolver);
  parser.parse(schemaText);

  if (parser.hasError())
  {
    // Capture the message once; it is both (optionally) printed and thrown.
    final String parseErrors = parser.errorMessage();
    if (debug)
    {
      out.println(parseErrors);
    }
    throw new IllegalArgumentException(parseErrors);
  }

  // Exactly one top-level schema is expected per input.
  if (parser.topLevelDataSchemas().size() != 1)
  {
    throw new IllegalArgumentException("More than one top level schemas");
  }
  return parser.topLevelDataSchemas().get(0);
}
parser.setLocation(location); parser.parse(new FilterInputStream(inputStream) if (parser.hasError()) errorMessageBuilder.append(parser.errorMessageBuilder()); errorMessageBuilder.append("Done parsing ").append(location).append(".\n");
private NamedDataSchema extractSchema(String className) { if (_schemaParser == null) { // 'online mode': resolve data schema from RecordTemplate Class SCHEMA field final StringBuilder errorMessage = new StringBuilder(); final NamedDataSchema schema = _schemaResolver.findDataSchema(className, errorMessage); if (errorMessage.length() > 0) { return null; } return schema; } else { // 'offline mode': resolve data schema from input final DataSchema schema = _schemaParser.lookupName(className); // we're currently only interested in records return schema instanceof RecordDataSchema ? (RecordDataSchema)schema : null; } }
/**
 * Validates the result of parsing a single schema file and terminates the
 * process on failure.
 *
 * Two failure modes are checked: the parser reported errors, or parsing
 * succeeded but did not yield exactly one top-level schema.
 *
 * @param resolverPath resolver path used during parsing (included in diagnostics)
 * @param file the schema file that was parsed
 * @param schemaFullname full name of the schema expected from the file
 * @param parser the parser that processed {@code file}
 */
private static void checkForErrors(String resolverPath, File file, String schemaFullname, PegasusSchemaParser parser)
{
  StringBuilder errorMessageBuilder = parser.errorMessageBuilder();
  if (errorMessageBuilder.length() > 0)
  {
    _log.error(
        "Failed to parse schema: " + file.getAbsolutePath()
        + "\nfullname: " + schemaFullname
        + "\nerrors: " + errorMessageBuilder.toString()
        + "\nresolverPath: " + resolverPath);
    System.exit(1);
  }

  int topLevelCount = parser.topLevelDataSchemas().size();
  if (topLevelCount != 1)
  {
    // Reaching here means the parser reported no errors (checked above), so the
    // previous message ("Failed to parse any schemas" plus an always-empty errors
    // section) was misleading when multiple schemas were parsed. Report the
    // actual count instead.
    _log.error(
        "Expected exactly one top level schema in: " + file.getAbsolutePath()
        + " but found " + topLevelCount
        + "\nfullname: " + schemaFullname
        + "\nresolverPath: " + resolverPath);
    System.exit(1);
  }
}
parser.parse(inputStream); List<DataSchema> topLevelDataSchemas = parser.topLevelDataSchemas(); assert(topLevelDataSchemas.size() <= 1); if (parser.hasError())
private void translateFile(File sourceFile, File destinationFile, String schemaFullname) throws IOException { // When translating files 1:1, a new resolver and parser are required for each file translated // so that a single top level output schema is matched to each input file. MultiFormatDataSchemaResolver resolver = MultiFormatDataSchemaResolver.withBuiltinFormats(_resolverPath); PegasusSchemaParser parser = AbstractSchemaParser.parserForFileExtension(_sourceFormat, resolver); parser.parse(new FileInputStream(sourceFile)); checkForErrors(_resolverPath, sourceFile, schemaFullname, parser); List<DataSchema> topLevelSchemas = parser.topLevelDataSchemas(); if (topLevelSchemas.size() == 1) { DataSchema schema = topLevelSchemas.get(0); String encoded = encode(schema, _destFormat); _log.debug("Writing " + destinationFile.getAbsolutePath()); FileUtils.writeStringToFile(destinationFile, encoded); } else { _log.error("Expected one top level schema for " + destinationFile.getAbsolutePath() + " but got " + topLevelSchemas.size()); } }
if (map == null) parser.errorMessageBuilder().append(NO_SCHEMA_LEFT); if (resultDataSchema == null) parser.errorMessageBuilder().append(INVALID_SCHEMA_LEFT + map);
/**
 * Returns the names of all schemas bound in the parser's resolver.
 *
 * @return an unmodifiable view of all parsed schema names
 */
public Set<String> getSchemaNames()
{
  final Set<String> boundNames = _schemaParser.getResolver().bindings().keySet();
  return Collections.unmodifiableSet(boundNames);
}
try parser.setLocation(new FileDataSchemaLocation(schemaSourceFile)); parser.parse(schemaStream); if (parser.hasError()) return parser.topLevelDataSchemas(); if (parser.hasError()) getMessage().append(parser.errorMessage());
/**
 * Builds a {@link DataMap} of generated data for the named record schema.
 *
 * @param parentSchemas schemas already being traversed, used to bound recursion
 * @param pegasusDataSchemaName full name of the record schema to generate data for
 * @param spec generation options; may be adjusted to prevent recursion
 * @return generated data conforming to the named record schema
 * @throws IllegalArgumentException if no schema is found for the given name
 */
private DataMap buildDataMap(ParentSchemas parentSchemas, String pegasusDataSchemaName, DataGenerationOptions spec)
{
  final DataSchema schema = _schemaParser.lookupName(pegasusDataSchemaName);

  // Fail fast: the previous version passed a possibly-null schema to
  // preventRecursionIntoAlreadyTraversedSchemas/incrementReferences before this
  // check, risking an NPE that would mask the intended IllegalArgumentException.
  if (schema == null)
  {
    throw new IllegalArgumentException(String.format("Could not find pegasus data schema '%s'", pegasusDataSchemaName));
  }

  spec = preventRecursionIntoAlreadyTraversedSchemas(parentSchemas, spec, schema);
  parentSchemas.incrementReferences(schema);
  assert(schema instanceof RecordDataSchema);
  final DataMap data = buildRecordData(parentSchemas, (RecordDataSchema) schema, spec);
  parentSchemas.decrementReferences(schema);
  return data;
}
parser.parse(avroSchemaInJson); if (parser.hasError()) throw new IllegalArgumentException(parser.errorMessage()); assert(parser.topLevelDataSchemas().size() == 1); DataSchema dataSchema = parser.topLevelDataSchemas().get(0); DataSchema resultDataSchema = null;