/**
 * Ensures the given property key exists in this container.
 *
 * @param property the key to look up
 * @throws IllegalArgumentException if the key is absent
 */
private void checkKey(String property) {
  // Guard clause: nothing to do when the key is present.
  if (containsKey(property)) {
    return;
  }
  throw new IllegalArgumentException("Property " + property + " not found");
}
/**
 * Verifies that every listed property key is present in {@code props}.
 *
 * @param props          properties bag to validate
 * @param checkPropNames keys that must be present
 * @throws HoodieNotSupportedException on the first missing key
 */
public static void checkRequiredProperties(TypedProperties props, List<String> checkPropNames) {
  // Plain loop instead of stream().forEach — no pipeline needed for a simple scan.
  for (String prop : checkPropNames) {
    if (!props.containsKey(prop)) {
      throw new HoodieNotSupportedException("Required property " + prop + " is missing");
    }
  }
}
/**
 * Returns the property value parsed as a primitive boolean, or the default when absent.
 *
 * @param property     key to look up
 * @param defaultValue value returned when the key is not present
 * @return parsed boolean value of the property, or {@code defaultValue}
 */
public boolean getBoolean(String property, boolean defaultValue) {
  // parseBoolean avoids allocating a boxed Boolean that is immediately unboxed.
  return containsKey(property) ? Boolean.parseBoolean(getProperty(property)) : defaultValue;
}
/**
 * Returns the property value parsed as a primitive double, or the default when absent.
 *
 * @param property     key to look up
 * @param defaultValue value returned when the key is not present
 * @return parsed double value of the property, or {@code defaultValue}
 * @throws NumberFormatException if the stored value is not a valid double
 */
public double getDouble(String property, double defaultValue) {
  // parseDouble avoids allocating a boxed Double that is immediately unboxed.
  return containsKey(property) ? Double.parseDouble(getProperty(property)) : defaultValue;
}
}
/**
 * Returns the property value parsed as a primitive long, or the default when absent.
 *
 * @param property     key to look up
 * @param defaultValue value returned when the key is not present
 * @return parsed long value of the property, or {@code defaultValue}
 * @throws NumberFormatException if the stored value is not a valid long
 */
public long getLong(String property, long defaultValue) {
  // parseLong avoids allocating a boxed Long that is immediately unboxed.
  return containsKey(property) ? Long.parseLong(getProperty(property)) : defaultValue;
}
/**
 * Returns the property value, or the supplied default when the key is absent.
 *
 * @param property     key to look up
 * @param defaultValue value returned when the key is not present
 * @return the stored value, or {@code defaultValue}
 */
public String getString(String property, String defaultValue) {
  // Explicit branch instead of a ternary; same lookup semantics.
  if (containsKey(property)) {
    return getProperty(property);
  }
  return defaultValue;
}
/**
 * Verifies that every listed property key is present in {@code props}.
 *
 * @param props          properties bag to validate
 * @param checkPropNames keys that must be present
 * @throws HoodieNotSupportedException on the first missing key
 */
public static void checkRequiredProperties(TypedProperties props, List<String> checkPropNames) {
  // forEach directly on the List — the intermediate stream() added nothing.
  checkPropNames.forEach(prop -> {
    if (!props.containsKey(prop)) {
      throw new HoodieNotSupportedException("Required property " + prop + " is missing");
    }
  });
}
/**
 * Returns the property value parsed as a primitive int, or the default when absent.
 *
 * @param property     key to look up
 * @param defaultValue value returned when the key is not present
 * @return parsed int value of the property, or {@code defaultValue}
 * @throws NumberFormatException if the stored value is not a valid int
 */
public int getInteger(String property, int defaultValue) {
  // parseInt avoids allocating a boxed Integer that is immediately unboxed.
  return containsKey(property) ? Integer.parseInt(getProperty(property)) : defaultValue;
}
/**
 * Reads the source (and optionally target) Avro schema from files on the
 * configured filesystem.
 *
 * <p>The streams returned by {@code fs.open(...)} are closed via
 * try-with-resources — the original code leaked them.
 *
 * @param props must contain {@code SOURCE_SCHEMA_FILE_PROP}; may contain
 *              {@code TARGET_SCHEMA_FILE_PROP}
 * @param jssc  Spark context supplying the Hadoop configuration
 * @throws HoodieIOException if either schema file cannot be read
 */
public FilebasedSchemaProvider(TypedProperties props, JavaSparkContext jssc) {
  super(props, jssc);
  DataSourceUtils.checkRequiredProperties(props, Collections.singletonList(Config.SOURCE_SCHEMA_FILE_PROP));
  this.fs = FSUtils.getFs(props.getString(Config.SOURCE_SCHEMA_FILE_PROP), jssc.hadoopConfiguration());
  try (java.io.InputStream sourceIn =
      fs.open(new Path(props.getString(Config.SOURCE_SCHEMA_FILE_PROP)))) {
    this.sourceSchema = new Schema.Parser().parse(sourceIn);
  } catch (IOException ioe) {
    throw new HoodieIOException("Error reading schema", ioe);
  }
  if (props.containsKey(Config.TARGET_SCHEMA_FILE_PROP)) {
    try (java.io.InputStream targetIn =
        fs.open(new Path(props.getString(Config.TARGET_SCHEMA_FILE_PROP)))) {
      this.targetSchema = new Schema.Parser().parse(targetIn);
    } catch (IOException ioe) {
      throw new HoodieIOException("Error reading schema", ioe);
    }
  }
}
/**
 * Reads the source (and optionally target) Avro schema from files on the
 * configured filesystem.
 *
 * <p>The streams returned by {@code fs.open(...)} are closed via
 * try-with-resources — the original code leaked them.
 *
 * @param props must contain {@code SOURCE_SCHEMA_FILE_PROP}; may contain
 *              {@code TARGET_SCHEMA_FILE_PROP}
 * @param jssc  Spark context supplying the Hadoop configuration
 * @throws HoodieIOException if either schema file cannot be read
 */
public FilebasedSchemaProvider(TypedProperties props, JavaSparkContext jssc) {
  super(props, jssc);
  DataSourceUtils.checkRequiredProperties(props, Arrays.asList(Config.SOURCE_SCHEMA_FILE_PROP));
  this.fs = FSUtils.getFs(props.getString(Config.SOURCE_SCHEMA_FILE_PROP), jssc.hadoopConfiguration());
  try (java.io.InputStream sourceIn =
      fs.open(new Path(props.getString(Config.SOURCE_SCHEMA_FILE_PROP)))) {
    this.sourceSchema = new Schema.Parser().parse(sourceIn);
  } catch (IOException ioe) {
    throw new HoodieIOException("Error reading schema", ioe);
  }
  if (props.containsKey(Config.TARGET_SCHEMA_FILE_PROP)) {
    try (java.io.InputStream targetIn =
        fs.open(new Path(props.getString(Config.TARGET_SCHEMA_FILE_PROP)))) {
      this.targetSchema = new Schema.Parser().parse(targetIn);
    } catch (IOException ioe) {
      throw new HoodieIOException("Error reading schema", ioe);
    }
  }
}