/**
 * Converts the XGBoost booster carried by {@code estimator} into a PMML mining model.
 *
 * @param estimator Estimator that exposes a booster plus XGBoost conversion options.
 * @param schema Schema of the original (non-XGBoost) feature space.
 * @return The encoded mining model.
 */
static public <E extends Estimator & HasBooster & HasXGBoostOptions> MiningModel encodeBooster(E estimator, Schema schema){
	Learner learner = getLearner(estimator);

	// Conversion options: compact tree encoding defaults to enabled,
	// tree limit defaults to "use all trees".
	Boolean compact = (Boolean)estimator.getOption(HasXGBoostOptions.OPTION_COMPACT, Boolean.TRUE);
	Integer ntreeLimit = (Integer)estimator.getOption(HasXGBoostOptions.OPTION_NTREE_LIMIT, null);

	Map<String, Object> options = new LinkedHashMap<>();
	options.put(HasXGBoostOptions.OPTION_COMPACT, compact);
	options.put(HasXGBoostOptions.OPTION_NTREE_LIMIT, ntreeLimit);

	// Translate the schema into XGBoost's feature space before encoding.
	Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);

	return learner.encodeMiningModel(options, xgbSchema);
}
/**
 * Deserializes an XGBoost learner from the input stream.
 *
 * @param is Stream positioned at the start of a serialized XGBoost model.
 * @param byteOrder Byte order of the serialized data.
 * @param charset Character set of embedded strings.
 * @return The fully loaded learner.
 * @throws IOException If reading fails, or if the stream contains trailing bytes
 *                     after the learner payload (corrupt or mismatched model file).
 */
static public Learner loadLearner(InputStream is, ByteOrder byteOrder, String charset) throws IOException {
	XGBoostDataInput input = new XGBoostDataInput(is, byteOrder, charset);

	Learner learner = new Learner();
	learner.load(input);

	// The learner must consume the stream completely; leftover bytes mean
	// the file is not a plain serialized model (or the format was misdetected).
	int eof = is.read();
	if(eof != -1){
		// Fix: the original threw a bare IOException with no detail message,
		// leaving the user with no clue why loading failed.
		throw new IOException("Expected end of stream, got byte " + eof);
	}

	return learner;
}
/**
 * Returns the number of input features that the estimator's booster was trained on.
 *
 * @param estimator Estimator that exposes a booster.
 * @return The feature count recorded by the underlying learner.
 */
static public <E extends Estimator & HasBooster & HasXGBoostOptions> int getNumberOfFeatures(E estimator){
	// Delegate straight to the learner; no intermediate local needed.
	return getLearner(estimator).getNumFeatures();
}
/**
 * Command-line driver: loads an XGBoost model file and its feature map,
 * converts them to PMML, and writes the result to the output file.
 *
 * @throws Exception If loading, conversion, or marshalling fails.
 */
private void run() throws Exception {
	ByteOrder byteOrder = ByteOrderUtil.forValue(this.byteOrder);

	// Deserialize the XGBoost model.
	Learner learner;
	try(InputStream is = new FileInputStream(this.modelInput)){
		learner = XGBoostUtil.loadLearner(is, byteOrder, this.charset);
	}

	// Load the accompanying feature map (fmap) file.
	FeatureMap featureMap;
	try(InputStream is = new FileInputStream(this.fmapInput)){
		featureMap = XGBoostUtil.loadFeatureMap(is);
	}

	if(this.missingValue != null){
		featureMap.addMissingValue(this.missingValue);
	}

	Map<String, Object> options = new LinkedHashMap<>();
	options.put(HasXGBoostOptions.OPTION_COMPACT, this.compact);
	options.put(HasXGBoostOptions.OPTION_NTREE_LIMIT, this.ntreeLimit);

	// A null target name lets the encoder synthesize one.
	FieldName targetField = (this.targetName != null ? FieldName.create(this.targetName) : null);

	PMML pmml = learner.encodePMML(targetField, this.targetCategories, featureMap, options);

	// Marshal the PMML document to the output file.
	try(OutputStream os = new FileOutputStream(this.pmmlOutput)){
		MetroJAXBUtil.marshalPMML(pmml, os);
	}
}
}
// NOTE(review): orphan fragment — reads the learner's objective function.
// Presumably used by surrounding (not visible) code to map the XGBoost
// objective onto a PMML output encoding; confirm against the enclosing method.
ObjFunction obj = learner.getObj();
/**
 * Command-line driver: converts an XGBoost model file plus feature map into a
 * PMML document, honoring the configured byte order, charset, and tree options.
 *
 * @throws Exception If any load, conversion, or marshalling step fails.
 */
private void run() throws Exception {
	ByteOrder byteOrder = ByteOrderUtil.forValue(this.byteOrder);

	// Step 1: deserialize the booster.
	Learner learner;
	try(InputStream is = new FileInputStream(this.modelInput)){
		learner = XGBoostUtil.loadLearner(is, byteOrder, this.charset);
	}

	// Step 2: load the feature map that names the booster's features.
	FeatureMap featureMap;
	try(InputStream is = new FileInputStream(this.fmapInput)){
		featureMap = XGBoostUtil.loadFeatureMap(is);
	}

	if(this.missingValue != null){
		featureMap.addMissingValue(this.missingValue);
	}

	// Step 3: assemble conversion options and encode.
	Map<String, Object> options = new LinkedHashMap<>();
	options.put(HasXGBoostOptions.OPTION_COMPACT, this.compact);
	options.put(HasXGBoostOptions.OPTION_NTREE_LIMIT, this.ntreeLimit);

	FieldName targetField = (this.targetName != null ? FieldName.create(this.targetName) : null);

	PMML pmml = learner.encodePMML(targetField, this.targetCategories, featureMap, options);

	// Step 4: write the PMML document.
	try(OutputStream os = new FileOutputStream(this.pmmlOutput)){
		MetroJAXBUtil.marshalPMML(pmml, os);
	}
}
}
/**
 * Re-parses the booster bytes embedded in the MOJO model and encodes them
 * as a PMML mining model (compact tree encoding always enabled).
 *
 * @param schema Schema of the original feature space.
 * @return The encoded mining model.
 * @throws IllegalArgumentException If the embedded booster bytes cannot be parsed.
 */
@Override
public MiningModel encodeModel(Schema schema){
	XGBoostMojoModel model = getModel();

	byte[] boosterBytes = model.getBoosterBytes();

	// The MOJO carries the booster as an opaque byte array; reload it
	// through the standard XGBoost deserialization path.
	Learner learner;
	try(InputStream is = new ByteArrayInputStream(boosterBytes)){
		learner = XGBoostUtil.loadLearner(is);
	} catch(IOException ioe){
		// Treat an unparseable payload as bad input (cause preserved).
		throw new IllegalArgumentException(ioe);
	}

	Map<String, Object> options = new LinkedHashMap<>();
	options.put(HasXGBoostOptions.OPTION_COMPACT, Boolean.TRUE);

	Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);

	return learner.encodeMiningModel(options, xgbSchema);
}
}
/**
 * Deserializes an XGBoost learner from the input stream.
 *
 * @param is Stream positioned at the start of a serialized XGBoost model.
 * @param byteOrder Byte order of the serialized data.
 * @param charset Character set of embedded strings.
 * @return The fully loaded learner.
 * @throws IOException If reading fails, or if the stream contains trailing bytes
 *                     after the learner payload (corrupt or mismatched model file).
 */
static public Learner loadLearner(InputStream is, ByteOrder byteOrder, String charset) throws IOException {
	XGBoostDataInput input = new XGBoostDataInput(is, byteOrder, charset);

	Learner learner = new Learner();
	learner.load(input);

	// The learner must consume the stream completely; leftover bytes mean
	// the file is not a plain serialized model (or the format was misdetected).
	int eof = is.read();
	if(eof != -1){
		// Fix: the original threw a bare IOException with no detail message,
		// leaving the user with no clue why loading failed.
		throw new IOException("Expected end of stream, got byte " + eof);
	}

	return learner;
}
/**
 * Encodes the R booster object as a PMML mining model, honoring the optional
 * {@code ntreelimit} attribute stored on the R object.
 *
 * @param schema Schema of the original feature space.
 * @return The encoded mining model.
 */
@Override
public MiningModel encodeModel(Schema schema){
	RGenericVector booster = getObject();

	// Optional attribute; absent means "use all trees".
	RNumberVector<?> ntreeLimit = (RNumberVector<?>)booster.getValue("ntreelimit", true);

	Learner learner = ensureLearner();

	Integer ntreeLimitValue = (ntreeLimit != null ? ValueUtil.asInteger(ntreeLimit.asScalar()) : null);

	Map<String, Object> options = new LinkedHashMap<>();
	options.put(HasXGBoostOptions.OPTION_COMPACT, this.compact);
	options.put(HasXGBoostOptions.OPTION_NTREE_LIMIT, ntreeLimitValue);

	Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);

	return learner.encodeMiningModel(options, xgbSchema);
}
/**
 * Builds a complete PMML document from this learner.
 *
 * @param targetField Name of the target field, or {@code null} to synthesize one.
 * @param targetCategories Category labels for a classification target, or {@code null}.
 * @param featureMap Feature map describing the booster's input features.
 * @param options Conversion options (compact encoding, tree limit, etc.).
 * @return The PMML document wrapping the encoded mining model.
 */
public PMML encodePMML(FieldName targetField, List<String> targetCategories, FeatureMap featureMap, Map<String, ?> options){
	XGBoostEncoder encoder = new XGBoostEncoder();

	// Fall back to a synthetic target name when the caller supplies none.
	FieldName effectiveTarget = (targetField != null ? targetField : FieldName.create("_target"));

	// The objective function knows how to encode the label for this task.
	Label label = this.obj.encodeLabel(effectiveTarget, targetCategories, encoder);

	List<Feature> features = featureMap.encodeFeatures(encoder);

	Schema schema = new Schema(label, features);

	MiningModel miningModel = encodeMiningModel(options, schema);

	return encoder.encodePMML(miningModel);
}
/**
 * Builds a PMML document for this learner: encodes the label via the objective
 * function, the features via the feature map, and wraps the resulting mining model.
 *
 * @param targetField Target field name; {@code null} triggers a synthetic "_target".
 * @param targetCategories Category labels for classification, or {@code null}.
 * @param featureMap Feature map describing the booster's input features.
 * @param options Conversion options.
 * @return The PMML document.
 */
public PMML encodePMML(FieldName targetField, List<String> targetCategories, FeatureMap featureMap, Map<String, ?> options){
	XGBoostEncoder encoder = new XGBoostEncoder();

	if(targetField == null){
		// No caller-supplied name: use the conventional placeholder.
		targetField = FieldName.create("_target");
	}

	Label label = this.obj.encodeLabel(targetField, targetCategories, encoder);
	List<Feature> features = featureMap.encodeFeatures(encoder);

	MiningModel miningModel = encodeMiningModel(options, new Schema(label, features));

	return encoder.encodePMML(miningModel);
}