/**
 * Encodes the XGBoost booster owned by {@code estimator} as a PMML mining model.
 *
 * @param estimator An estimator that exposes a booster and XGBoost conversion options.
 * @param schema The schema of the surrounding pipeline; translated to an
 *               XGBoost-compatible schema before encoding.
 * @return The encoded mining model.
 */
static public <E extends Estimator & HasBooster & HasXGBoostOptions> MiningModel encodeBooster(E estimator, Schema schema){
	Learner xgbLearner = getLearner(estimator);

	// Resolve converter options; compact tree encoding is enabled by default,
	// while the tree limit has no default (null means "use all trees")
	Boolean compactOption = (Boolean)estimator.getOption(HasXGBoostOptions.OPTION_COMPACT, Boolean.TRUE);
	Integer ntreeLimitOption = (Integer)estimator.getOption(HasXGBoostOptions.OPTION_NTREE_LIMIT, null);

	Map<String, Object> encoderOptions = new LinkedHashMap<>();
	encoderOptions.put(HasXGBoostOptions.OPTION_COMPACT, compactOption);
	encoderOptions.put(HasXGBoostOptions.OPTION_NTREE_LIMIT, ntreeLimitOption);

	Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);

	return xgbLearner.encodeMiningModel(encoderOptions, xgbSchema);
}
/**
 * Encodes this MOJO-backed XGBoost model as a PMML mining model.
 *
 * <p>The booster is deserialized from its in-memory byte representation;
 * compact tree encoding is always requested.</p>
 *
 * @param schema The schema of the surrounding pipeline.
 * @return The encoded mining model.
 * @throws IllegalArgumentException If the booster bytes cannot be parsed.
 */
@Override
public MiningModel encodeModel(Schema schema){
	XGBoostMojoModel mojoModel = getModel();

	byte[] boosterBytes = mojoModel.getBoosterBytes();

	Learner xgbLearner;

	// Parse the serialized booster; the stream is closed automatically
	try(InputStream byteStream = new ByteArrayInputStream(boosterBytes)){
		xgbLearner = XGBoostUtil.loadLearner(byteStream);
	} catch(IOException ioe){
		throw new IllegalArgumentException(ioe);
	}

	Map<String, Object> encoderOptions = new LinkedHashMap<>();
	encoderOptions.put(HasXGBoostOptions.OPTION_COMPACT, Boolean.TRUE);

	Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);

	MiningModel miningModel = xgbLearner.encodeMiningModel(encoderOptions, xgbSchema);

	return miningModel;
}
}
/**
 * Encodes this R booster object as a PMML mining model.
 *
 * <p>The tree limit is taken from the booster's {@code "ntreelimit"} attribute
 * when present; the compact-encoding flag comes from this converter instance.</p>
 *
 * @param schema The schema of the surrounding pipeline.
 * @return The encoded mining model.
 */
@Override
public MiningModel encodeModel(Schema schema){
	RGenericVector booster = getObject();

	// Optional attribute - may be absent from the R object
	RNumberVector<?> ntreeLimit = (RNumberVector<?>)booster.getValue("ntreelimit", true);

	Learner xgbLearner = ensureLearner();

	Integer ntreeLimitValue = null;
	if(ntreeLimit != null){
		ntreeLimitValue = ValueUtil.asInteger(ntreeLimit.asScalar());
	}

	Map<String, Object> encoderOptions = new LinkedHashMap<>();
	encoderOptions.put(HasXGBoostOptions.OPTION_COMPACT, this.compact);
	encoderOptions.put(HasXGBoostOptions.OPTION_NTREE_LIMIT, ntreeLimitValue);

	Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema);

	return xgbLearner.encodeMiningModel(encoderOptions, xgbSchema);
}