// Map/lookup key for this entry: delegates to getName(), so entries are keyed by field name.
@Override public FieldName getKey(){ return getName(); } }
// Map/lookup key for this entry: delegates to getName(), so entries are keyed by field name.
@Override public FieldName getKey(){ return getName(); } }
/**
 * Returns the explicitly assigned name, falling back to the wrapped field's own name
 * when no name has been set on this object.
 */
public FieldName getName(){

	if(this.name != null){
		return this.name;
	}

	return getField().getName();
}
/**
 * Creates an input field that pairs a model field with its mining field declaration.
 *
 * @param field The model field. Must not be {@code null}.
 * @param miningField The mining field declaration. Must not be {@code null},
 *        and must refer to the same name as {@code field}.
 * @throws NullPointerException If {@code miningField} is {@code null}.
 * @throws IllegalArgumentException If the two names do not match.
 */
public InputField(Field<?> field, MiningField miningField){
	super(field);

	Objects.requireNonNull(miningField);

	// Validate before mutating state, and report both names so mismatches are diagnosable
	if(!Objects.equals(field.getName(), miningField.getName())){
		throw new IllegalArgumentException("Field name " + field.getName() + " does not match mining field name " + miningField.getName());
	}

	setMiningField(miningField);
}
/**
 * Indexes the given fields by name, preserving encounter order.
 *
 * @throws IllegalArgumentException If two fields share the same name.
 */
static public <F extends Field<?>> Map<FieldName, F> nameMap(Collection<? extends F> fields){
	Map<FieldName, F> result = new LinkedHashMap<>();

	fields.forEach(field -> {
		FieldName name = field.getName();

		F clash = result.put(name, field);
		if(clash != null){
			throw new IllegalArgumentException("Fields " + format(field) + " and " + format(clash) + " have the same name " + name);
		}
	});

	return result;
}
/**
 * Indexes the given fields by name, preserving encounter order.
 *
 * @throws IllegalArgumentException If two fields share the same name.
 */
static public <F extends Field<?>> Map<FieldName, F> nameMap(Collection<? extends F> fields){
	Map<FieldName, F> result = new LinkedHashMap<>();

	fields.forEach(field -> {
		FieldName name = field.getName();

		F clash = result.put(name, field);
		if(clash != null){
			throw new IllegalArgumentException("Fields " + format(field) + " and " + format(clash) + " have the same name " + name);
		}
	});

	return result;
}
/**
 * Records a name-to-hash mapping for every named field encountered, then continues the visit.
 * Anonymous fields (null name) are passed through without recording a mapping.
 */
@Override
public VisitorAction visit(Field<?> field){
	FieldName name = field.getName();

	if(name == null){
		return super.visit(field);
	}

	this.mappings.put(name, hash(name));

	return super.visit(field);
}
/**
 * Creates a document feature backed by the given field.
 *
 * @param wordSeparatorRE Regular expression used to split the document into words.
 */
public DocumentFeature(SparkMLEncoder encoder, Field<?> field, String wordSeparatorRE){
	super(encoder, field.getName(), field.getDataType());

	setWordSeparatorRE(wordSeparatorRE);
}
/**
 * Registers a categorical field together with one binary indicator feature per category level.
 *
 * @param categoryNames Display names, parallel to {@code categoryValues}.
 * @param categoryValues Category level values.
 * @throws IllegalArgumentException If the two lists differ in size.
 */
public void addField(Field<?> field, List<String> categoryNames, List<String> categoryValues){
	RExpEncoder encoder = getEncoder();

	if(categoryNames.size() != categoryValues.size()){
		// Include both sizes so the mismatch is diagnosable from the exception alone
		throw new IllegalArgumentException("Expected the same number of category names and values, got " + categoryNames.size() + " names and " + categoryValues.size() + " values");
	}

	CategoricalFeature categoricalFeature;

	// A boolean field whose levels are exactly the canonical boolean values gets the specialized feature type
	if((DataType.BOOLEAN).equals(field.getDataType()) && (BooleanFeature.VALUES).equals(categoryValues)){
		categoricalFeature = new BooleanFeature(encoder, field);
	} else

	{
		categoricalFeature = new CategoricalFeature(encoder, field, categoryValues);
	}

	putFeature(field.getName(), categoricalFeature);

	// One binary (one-hot) feature per level, registered under "<fieldName><categoryName>"
	for(int i = 0; i < categoryNames.size(); i++){
		String categoryName = categoryNames.get(i);
		String categoryValue = categoryValues.get(i);

		BinaryFeature binaryFeature = new BinaryFeature(encoder, field, categoryValue);

		putFeature(FieldName.create((field.getName()).getValue() + categoryName), binaryFeature);
	}

	this.fields.add(field);
}
/**
 * Converts a PMML field to an ML model field descriptor (name and data type as strings).
 */
private MLModelField getModelField(Field<?> dataField){
	// Field<?> instead of the raw type: accepts any parameterization without unchecked warnings
	return new MLModelField(dataField.getName().getValue(), dataField.getDataType().toString());
}
/**
 * Converts a PMML field to an ML model field descriptor (name and data type as strings).
 */
private MLModelField getModelField(Field<?> dataField){
	// Field<?> instead of the raw type: accepts any parameterization without unchecked warnings
	return new MLModelField(dataField.getName().getValue(), dataField.getDataType().toString());
}
public void addField(Field<?> field){ RExpEncoder encoder = getEncoder(); Feature feature = new ContinuousFeature(encoder, field); if(field instanceof DerivedField){ DerivedField derivedField = (DerivedField)field; Expression expression = derivedField.getExpression(); if(expression instanceof Apply){ Apply apply = (Apply)expression; if(checkApply(apply, "pow", FieldRef.class, Constant.class)){ List<Expression> expressions = apply.getExpressions(); FieldRef fieldRef = (FieldRef)expressions.get(0); Constant constant = (Constant)expressions.get(1); try { int power = Integer.parseInt(constant.getValue()); feature = new PowerFeature(encoder, fieldRef.getField(), DataType.DOUBLE, power); } catch(NumberFormatException nfe){ // Ignored } } } } putFeature(field.getName(), feature); this.fields.add(field); }
/**
 * Applies the mining field's missing value treatment. An absent treatment defaults to
 * {@code AS_IS}. Substitution-style treatments produce a replacement input value;
 * {@code RETURN_INVALID} rejects the missing value outright.
 *
 * @throws MisplacedAttributeException If {@code RETURN_INVALID} is combined with a
 *         missing value replacement (the attributes are mutually exclusive).
 * @throws InvalidResultException If the treatment is {@code RETURN_INVALID}.
 * @throws UnsupportedAttributeException On an unrecognized treatment method.
 */
static public FieldValue performMissingValueTreatment(Field<?> field, MiningField miningField){
	MissingValueTreatmentMethod treatment = miningField.getMissingValueTreatment();
	if(treatment == null){
		treatment = MissingValueTreatmentMethod.AS_IS;
	}

	String missingValueReplacement = miningField.getMissingValueReplacement();

	switch(treatment){
		case AS_IS:
		case AS_MEAN:
		case AS_MODE:
		case AS_MEDIAN:
		case AS_VALUE:
			return createMissingInputValue(field, miningField);
		case RETURN_INVALID:
			// A replacement value makes no sense when the field rejects missing input
			if(missingValueReplacement != null){
				throw new MisplacedAttributeException(miningField, PMMLAttributes.MININGFIELD_MISSINGVALUEREPLACEMENT, missingValueReplacement);
			}
			throw new InvalidResultException("Field " + PMMLException.formatKey(field.getName()) + " requires user input value", miningField);
		default:
			throw new UnsupportedAttributeException(miningField, treatment);
	}
}
/**
 * Applies the mining field's invalid value treatment to a user input value.
 * {@code AS_IS} accepts the value (or its declared replacement); {@code AS_MISSING}
 * converts it to a missing value; {@code RETURN_INVALID} rejects it.
 *
 * @throws MisplacedAttributeException If {@code AS_MISSING} or {@code RETURN_INVALID}
 *         is combined with an invalid value replacement.
 * @throws InvalidResultException If the treatment is {@code RETURN_INVALID}.
 * @throws UnsupportedAttributeException On an unrecognized treatment method.
 */
static public FieldValue performInvalidValueTreatment(Field<?> field, MiningField miningField, Object value){
	InvalidValueTreatmentMethod treatment = miningField.getInvalidValueTreatment();

	String invalidValueReplacement = miningField.getInvalidValueReplacement();

	// Single dispatch: each branch first rejects an out-of-place replacement attribute,
	// then performs the treatment (same outcomes as validating and dispatching separately)
	switch(treatment){
		case AS_IS:
			// AS_IS honours a declared replacement; otherwise the value passes through
			if(invalidValueReplacement != null){
				return createInputValue(field, miningField, invalidValueReplacement);
			}
			return createInputValue(field, miningField, value);
		case AS_MISSING:
			if(invalidValueReplacement != null){
				throw new MisplacedAttributeException(miningField, PMMLAttributes.MININGFIELD_INVALIDVALUEREPLACEMENT, invalidValueReplacement);
			}
			return createMissingInputValue(field, miningField);
		case RETURN_INVALID:
			if(invalidValueReplacement != null){
				throw new MisplacedAttributeException(miningField, PMMLAttributes.MININGFIELD_INVALIDVALUEREPLACEMENT, invalidValueReplacement);
			}
			throw new InvalidResultException("Field " + PMMLException.formatKey(field.getName()) + " cannot accept user input value " + PMMLException.formatValue(value), miningField);
		default:
			throw new UnsupportedAttributeException(miningField, treatment);
	}
}