/**
 * Records the sort metric, the secondary metrics, and the sort direction,
 * marks the sort metric as set, and persists the updated object to the DKV.
 */
private void setMetricAndDirection(String metric, String[] otherMetrics, boolean sortDecreasing) {
  sort_metric = metric;
  other_metrics = otherMetrics;
  sort_decreasing = sortDecreasing;
  have_set_sort_metric = true;
  // Publish so other readers observe the new sort configuration.
  DKV.put(this);
}
/**
 * Registers the given object with the current Scope (so it is cleaned up on
 * Scope exit) and publishes it to the DKV.
 *
 * @return the key under which the object is now stored
 */
private static <T extends Keyed> Key<T> publish(Keyed<T> keyed) {
  Scope.track_generic(keyed);
  DKV.put(keyed);
  Key<T> key = keyed._key;
  return key;
}
/**
 * Assigns a fresh key to the given Frame and stores it in the DKV.
 *
 * @return the same Frame instance, now registered in the DKV
 */
static public Frame register(Frame frame) {
  frame._key = Key.make();
  DKV.put(frame);
  return frame;
}
// Constructs the aggregation task: caches the squared radius (compared against
// squared distances, avoiding sqrt) and, when a terminate key is supplied,
// publishes an initial IcedInt(0) "not terminated" flag to the DKV so that
// terminate()/isTerminated() can coordinate early stopping.
public AggregateTask(Key<DataInfo> dataInfoKey, double radius, Key<Job> jobKey, int maxExemplars, Key terminateKey) {
  _delta = radius*radius; // squared threshold — distances are compared squared
  _dataInfoKey = dataInfoKey;
  _jobKey = jobKey;
  _maxExemplars = maxExemplars;
  _terminateKey = terminateKey;
  // 0 == not terminated; terminate() later flips the flag to 1
  if (_terminateKey!=null) DKV.put(_terminateKey, new IcedInt(0));
}
// NOTE: the body of isTerminated() continues past this chunk boundary.
private boolean isTerminated() {
/**
 * Signals termination by writing a 1-flag under the terminate key.
 * No-op when no terminate key was configured.
 */
private void terminate() {
  if (_terminateKey == null) return;
  DKV.put(_terminateKey, new IcedInt(1));
}
// Closes the given chunk: attaches it to this Vec, writes it back into the K/V
// store, and finalizes the Vec's length from the chunk count plus the last
// chunk's element count.
public void close(C1NChunk c, int cidx, Futures fs) {
  assert _len==-1; // Not closed
  c._vec = this; // Attach chunk to this vec.
  DKV.put(chunkKey(cidx),c,fs); // Write updated chunk back into K/V
  // All chunks except the last hold 1<<LOG_CHK elements; add the last chunk's length.
  _len = ((_nchunks-1L)<<H2O.LOG_CHK)+c._len;
}
/** * After all maps are done on a node, this is called to store the per-node model into DKV (for elastic averaging) * Otherwise, do nothing. */ @Override protected void closeLocal() { if (_localmodel.get_params()._elastic_averaging) { // store local model, as it will be reduced in the following, and hence averaged with other models DKV.put(_localmodel.localModelInfoKey(H2O.SELF), _localmodel, _fs); } _sharedmodel = null; //avoid serialization overhead }
// Creates the UserFeedback record for an AutoML run and publishes it to the DKV
// under a key derived from the run id.
public UserFeedback(AutoML autoML) {
  this._key = make(idForRun(autoML._key));
  this.autoML = autoML;
  UserFeedback old = DKV.getGet(this._key);
  // NOTE(review): the second disjunct reads THIS instance's feedbackEvents field
  // (null on a freshly constructed object), not old.feedbackEvents — so the
  // empty-array init and DKV.put appear to run even when an old record exists.
  // Possibly `old.feedbackEvents` was intended; confirm before changing.
  if (null == old || null == feedbackEvents) {
    feedbackEvents = new UserFeedbackEvent[0];
    DKV.put(this);
  }
}
/**
 * Publishes a deep copy of this model under the given "best model" key and
 * sanity-checks that the stored copy is present and not worse than this model.
 */
private void putMeAsBestModel(Key bestModelKey) {
  final DeepWaterModel copy = IcedUtils.deepCopy(this);
  DKV.put(bestModelKey, copy);
  assert DKV.get(bestModelKey) != null;
  assert ((DeepWaterModel)DKV.getGet(bestModelKey)).compareTo(this) <= 0;
}
@Override public Frame outputFrame(Key<Frame> key, String [] names, String [][] domains){ _predFrame = new Frame(key, names, _predFrame.vecs()); if (domains!=null) _predFrame.vec(0).setDomain(domains[0]); //only the label is ever categorical if (_predFrame._key!=null) DKV.put(_predFrame); return _predFrame; } @Override public void map(Chunk[] chks, NewChunk[] cpreds) { }
// Publishes a deep copy of this model as the current best model and — when
// elastic averaging is enabled — also republishes the elastic-average model
// info under the copy's key, so the stored best model keeps its averaged state.
private void putMeAsBestModel(Key bestModelKey) {
  DeepLearningModel bestModel = IcedUtils.deepCopy(this);
  DKV.put(bestModelKey, bestModel);
  if (model_info().get_params()._elastic_averaging) {
    DeepLearningModelInfo eamodel = DKV.getGet(model_info.elasticAverageModelInfoKey());
    // Re-key the elastic-average info to the copied model (if it exists).
    if (eamodel != null) DKV.put(bestModel.model_info().elasticAverageModelInfoKey(), eamodel);
  }
  assert (DKV.get(bestModelKey) != null);
  assert (bestModel.compareTo(this) <= 0);
}
// On failure: drop the (partial) destination result, record the error message
// in the progress entry (if one exists) so progress readers can surface it,
// then cancel the job with the original exception.
public void onException(Throwable ex) {
  UKV.remove(dest());
  Value v = DKV.get(progressKey());
  if( v != null ) {
    ChunkProgress p = v.get();
    p = p.error(ex.getMessage());
    DKV.put(progressKey(), p);
  }
  cancel(ex);
}
/**
 * Builds a DataInfo over the frame (demeaned predictors, untransformed
 * response, no intercept), registers it with the Scope for cleanup, and
 * publishes it to the DKV.
 */
private static DataInfo makeDataInfo(Frame fr, int nResponses) {
  DataInfo dinfo = new DataInfo(fr, null, nResponses, false,
          DataInfo.TransformType.DEMEAN, DataInfo.TransformType.NONE,
          true, false, false, false, false, false, null);
  dinfo = dinfo.disableIntercept();
  Scope.track_generic(dinfo);
  DKV.put(dinfo);
  return dinfo;
}
/**
 * When a fold column is in use, regroups the target-encoding map over the TE
 * column only (collapsing the fold dimension) and renames the aggregated
 * columns back to "numerator"/"denominator". Without a fold column, returns a
 * published deep copy of the map unchanged.
 */
Frame groupingIgnoringFordColumn(String foldColumnName, Frame targetEncodingMap, String teColumnName) {
  if (foldColumnName == null) {
    // No fold column: nothing to collapse — hand back an independent copy.
    Frame copy = targetEncodingMap.deepCopy(Key.make().toString());
    DKV.put(copy);
    return copy;
  }
  int teColumnIndex = targetEncodingMap.find(teColumnName);
  Frame grouped = groupByTEColumnAndAggregate(targetEncodingMap, teColumnIndex);
  renameColumn(grouped, "sum_numerator", "numerator");
  renameColumn(grouped, "sum_denominator", "denominator");
  return grouped;
}
/**
 * Converts the given columns of the frame to categorical in place and
 * republishes the frame to the DKV.
 */
private void convert2Enum(Frame f, int[] cols) {
  for (int i = 0; i < cols.length; i++) {
    int col = cols[i];
    // replace() returns the displaced vec, which must be deleted to avoid a leak
    f.replace(col, f.vec(col).toCategoricalVec()).remove();
  }
  DKV.put(f);
}
// Builds the U matrix (left singular vectors) of the SVD as a Frame: extracts
// the leading _nv columns of svdJ's V, multiplies Q by them via a distributed
// task over qfrm, and returns the result registered under the model's U key.
public Frame makeUVec(SVDModel model, String u_name, Frame u, Frame qfrm, Matrix atqJ, SingularValueDecomposition svdJ ) {
  model._output._u_key = Key.make(u_name);
  // Submatrix of V: first atqJ-column-count rows, first _nv columns.
  double[][] svdJ_u = svdJ.getV().getMatrix(0, atqJ.getColumnDimension() - 1, 0, _parms._nv - 1).getArray();
  // Temporary DataInfo wrapping Q; published so the distributed task can see it.
  DataInfo qinfo = new DataInfo(qfrm, null, true, DataInfo.TransformType.NONE, false, false, false);
  DKV.put(qinfo._key, qinfo);
  // U = Q * svdJ_u, computed chunk-wise over qinfo's adapted frame.
  BMulTask btsk = new BMulTask(_job._key, qinfo, ArrayUtils.transpose(svdJ_u));
  btsk.doAll(_parms._nv, Vec.T_NUM, qinfo._adaptedFrame);
  qinfo.remove(); // done with the temporary DataInfo
  return btsk.outputFrame(model._output._u_key, null, null);
  // DKV.remove(qinfo._key);
}
@Override
/** Verifies that tree id and class are round-tripped through the CompressedTree key. */
@Test public void testMakeTreeKey() {
  try {
    Scope.enter();
    CompressedTree ct = new CompressedTree(new byte[0], 123, 42, 17);
    Scope.track_generic(ct);
    DKV.put(ct);
    // Decode the coordinates back out of the key and check them.
    CompressedTree.TreeCoords tc = ct.getTreeCoords();
    assertEquals(42, tc._treeId);
    assertEquals(17, tc._clazz);
  } finally {
    Scope.exit();
  }
}
/** Trains a single cross-validated DRF on the prostate data with the ID column dropped. */
@Test public void testBuildSingle() {
  Scope.enter();
  try {
    Frame fr = parse_test_file(Key.make("prostate_single.hex"), "smalldata/logreg/prostate.csv");
    // Drop the row-id column; remove() deletes the detached vec.
    fr.remove("ID").remove();
    Scope.track(fr);
    DKV.put(fr);
    buildXValDRF(fr, "AGE");
  } finally {
    Scope.exit();
  }
}
/** Trains 100 DRFs concurrently on the same frame via LocalMR. */
@Test public void testBuildConcurrent() {
  Scope.enter();
  try {
    Frame fr = parse_test_file(Key.make("prostate_concurrent.hex"), "smalldata/logreg/prostate.csv");
    Scope.track(fr);
    // Drop the row-id column; remove() deletes the detached vec.
    fr.remove("ID").remove();
    DKV.put(fr);
    TrainSingleFun fun = new TrainSingleFun(fr);
    H2O.submitTask(new LocalMR(fun, 100)).join();
  } finally {
    Scope.exit();
  }
}
/**
 * Trains a GBM on the prostate data with CAPSULE as a categorical response,
 * scoring every iteration.
 *
 * @param ntrees number of trees to build
 * @return the trained model, tracked in the current Scope
 */
private GBMModel trainGbm(final int ntrees) {
  Frame f = Scope.track(parse_test_file("smalldata/logreg/prostate.csv"));
  final String response = "CAPSULE";
  // Classification needs a categorical response; delete the replaced numeric vec.
  f.replace(f.find(response), f.vec(response).toCategoricalVec()).remove();
  DKV.put(f._key, f);

  GBMModel.GBMParameters p = new GBMModel.GBMParameters();
  p._seed = 123;
  p._train = f._key;
  p._ignored_columns = new String[]{"ID"};
  p._response_column = response;
  p._ntrees = ntrees;
  p._score_each_iteration = true;

  return (GBMModel) Scope.track_generic(new GBM(p).trainModel().get());
}