static public Learner loadLearner(InputStream is) throws IOException {
	// Delegates to the three-argument overload, defaulting to the native byte order and no explicit charset
	return loadLearner(is, ByteOrder.nativeOrder(), null);
}
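A minimal usage sketch of this overload, assuming the `org.jpmml.xgboost` package layout; the file name "xgboost.model" is illustrative, not part of the library:

import java.io.FileInputStream;
import java.io.InputStream;

import org.jpmml.xgboost.Learner;
import org.jpmml.xgboost.XGBoostUtil;

public class LoadLearnerExample {

	public static void main(String... args) throws Exception {
		// "xgboost.model" is a hypothetical path to a serialized XGBoost model
		try(InputStream is = new FileInputStream("xgboost.model")){
			Learner learner = XGBoostUtil.loadLearner(is);

			System.out.println(learner);
		}
	}
}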
static private FeatureMap loadFeatureMap(RStringVector fmap) throws IOException {
	File file = new File(fmap.asScalar());

	try(InputStream is = new FileInputStream(file)){
		return XGBoostUtil.loadFeatureMap(is);
	}
}
@Override
public MiningModel encodeModel(Schema schema){
	XGBoostMojoModel model = getModel();

	byte[] boosterBytes = model.getBoosterBytes();

	Learner learner;

	try(InputStream is = new ByteArrayInputStream(boosterBytes)){
		learner = XGBoostUtil.loadLearner(is);
	} catch(IOException ioe){
		throw new IllegalArgumentException(ioe);
	}

	Map<String, Object> options = new LinkedHashMap<>();
	options.put(HasXGBoostOptions.OPTION_COMPACT, Boolean.TRUE);

	Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);

	return learner.encodeMiningModel(options, xgbSchema);
}
private void run() throws Exception {
	Learner learner;

	ByteOrder byteOrder = ByteOrderUtil.forValue(this.byteOrder);

	try(InputStream is = new FileInputStream(this.modelInput)){
		learner = XGBoostUtil.loadLearner(is, byteOrder, this.charset);
	}

	FeatureMap featureMap;

	try(InputStream is = new FileInputStream(this.fmapInput)){
		featureMap = XGBoostUtil.loadFeatureMap(is);
	}

	if(this.missingValue != null){
		featureMap.addMissingValue(this.missingValue);
	}

	Map<String, Object> options = new LinkedHashMap<>();
	options.put(HasXGBoostOptions.OPTION_COMPACT, this.compact);
	options.put(HasXGBoostOptions.OPTION_NTREE_LIMIT, this.ntreeLimit);

	PMML pmml = learner.encodePMML(this.targetName != null ? FieldName.create(this.targetName) : null, this.targetCategories, featureMap, options);

	try(OutputStream os = new FileOutputStream(this.pmmlOutput)){
		MetroJAXBUtil.marshalPMML(pmml, os);
	}
}
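A hedged command-line sketch of invoking this converter. The flag names are an assumption, derived from the field names above (modelInput, fmapInput, targetName, pmmlOutput), and the jar name is illustrative:

java -jar pmml-xgboost-executable.jar \
	--model-input xgboost.model \
	--fmap-input xgboost.fmap \
	--target-name y \
	--pmml-output xgboost.pmml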
static public <E extends Estimator & HasBooster & HasXGBoostOptions> MiningModel encodeBooster(E estimator, Schema schema){
	Learner learner = getLearner(estimator);

	Boolean compact = (Boolean)estimator.getOption(HasXGBoostOptions.OPTION_COMPACT, Boolean.TRUE);
	Integer ntreeLimit = (Integer)estimator.getOption(HasXGBoostOptions.OPTION_NTREE_LIMIT, null);

	Map<String, Object> options = new LinkedHashMap<>();
	options.put(HasXGBoostOptions.OPTION_COMPACT, compact);
	options.put(HasXGBoostOptions.OPTION_NTREE_LIMIT, ntreeLimit);

	Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);

	MiningModel miningModel = learner.encodeMiningModel(options, xgbSchema);

	return miningModel;
}
static public FeatureMap loadFeatureMap(InputStream is) throws IOException {
	FeatureMap featureMap = new FeatureMap();

	Iterator<String> lines = parseFeatureMap(is);

	for(int i = 0; lines.hasNext(); i++){
		String line = lines.next();

		// Each line holds exactly three tab-separated tokens: id, name and type
		StringTokenizer st = new StringTokenizer(line, "\t");
		if(st.countTokens() != 3){
			throw new IllegalArgumentException(line);
		}

		String id = st.nextToken();
		String name = st.nextToken();
		String type = st.nextToken();

		// The id column must be a zero-based running index
		if(Integer.parseInt(id) != i){
			throw new IllegalArgumentException(id);
		}

		featureMap.addEntry(name, type);
	}

	return featureMap;
}
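For reference, a made-up feature map file that this parser accepts, in XGBoost's tab-separated fmap convention (zero-based running id, feature name, feature type, where "q" marks a quantitative feature, "i" an indicator feature and "int" an integer feature); the feature names are illustrative:

0	age	q
1	is_member	i
2	num_orders	int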
@Override
public MiningModel encodeModel(Schema schema){
	RGenericVector booster = getObject();

	RNumberVector<?> ntreeLimit = (RNumberVector<?>)booster.getValue("ntreelimit", true);

	Learner learner = ensureLearner();

	Map<String, Object> options = new LinkedHashMap<>();
	options.put(HasXGBoostOptions.OPTION_COMPACT, this.compact);
	options.put(HasXGBoostOptions.OPTION_NTREE_LIMIT, ntreeLimit != null ? ValueUtil.asInteger(ntreeLimit.asScalar()) : null);

	Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);

	MiningModel miningModel = learner.encodeMiningModel(options, xgbSchema);

	return miningModel;
}
private Learner loadLearner(ByteOrder byteOrder, String charset){
	byte[] handle = getHandle();

	try(InputStream is = new ByteArrayInputStream(handle)){
		return XGBoostUtil.loadLearner(is, byteOrder, charset);
	} catch(IOException ioe){
		throw new RuntimeException(ioe);
	}
}
static private Learner loadLearner(RRaw raw) throws IOException {
	byte[] value = raw.getValue();

	try(InputStream is = new ByteArrayInputStream(value)){
		return XGBoostUtil.loadLearner(is);
	}
}