/**
 * Wraps a word-to-index mapping as a DKV-registered Vocabulary.
 *
 * @param data mapping from each word (as a {@link BufferedString}) to its vocabulary index
 */
Vocabulary(IcedHashMapGeneric<BufferedString, Integer> data) {
  // Register under a freshly generated key.
  super(Key.<Vocabulary>make());
  _data = data;
}
}
/**
 * Wraps a word-frequency mapping as a DKV-registered WordCounts object.
 *
 * @param data mapping from each word (as a {@link BufferedString}) to its occurrence count
 */
WordCounts(IcedHashMap<BufferedString, IcedLong> data) {
  // Register under a freshly generated key.
  super(Key.<WordCounts>make());
  _data = data;
}
}
/**
 * Convenience constructor: delegates to the full constructor with a freshly
 * generated job key.
 *
 * @param runnable          the work to execute under this job
 * @param max_runtime_msecs maximum allowed wall-clock run time, in milliseconds
 */
public H2OJob(H2ORunnable runnable, long max_runtime_msecs) { this(runnable, Key.make(), max_runtime_msecs); }
/**
 * Assigns the given frame a freshly generated key and publishes it into the DKV.
 *
 * @param frame the frame to register
 * @return the same frame instance, now registered in the DKV
 */
static public Frame register(Frame frame) {
  // Give the frame a new identity, then make it globally visible.
  frame._key = Key.make();
  DKV.put(frame);
  return frame;
}
/**
 * Builds the DKV key for a model trained by this AutoML run.
 * The key has the shape {@code <algo>[_<counter>]_AutoML_<run timestamp>}.
 *
 * @param algoName     short name of the algorithm (e.g. "GBM")
 * @param with_counter whether to embed a per-algorithm instance counter in the key
 */
private Key<Model> modelKey(String algoName, boolean with_counter) {
  StringBuilder id = new StringBuilder(algoName);
  if (with_counter) {
    // Counter is only consumed when it is actually embedded in the key.
    id.append('_').append(nextInstanceCounter(algoName, algoInstanceCounters));
  }
  id.append("_AutoML_").append(timestampFormatForKeys.format(this.startTime));
  return Key.make(id.toString());
}
/**
 * Builds the DKV key for a hyperparameter grid searched by this AutoML run.
 * The key has the shape {@code <algo>_grid_<counter>_AutoML_<run timestamp>}.
 *
 * @param algoName short name of the algorithm the grid belongs to
 */
private Key<Grid> gridKey(String algoName) {
  StringBuilder id = new StringBuilder(algoName)
      .append("_grid_")
      .append(nextInstanceCounter(algoName, gridInstanceCounters))
      .append("_AutoML_")
      .append(timestampFormatForKeys.format(this.startTime));
  return Key.make(id.toString());
}
/**
 * Looks up an AutoML instance in the DKV by its string key.
 * Delegates to the {@code Key}-typed overload.
 *
 * @param param_name name of the API parameter the key came from (used for error reporting)
 * @param key_str    string form of the AutoML run's DKV key
 */
public static AutoML getFromDKV(String param_name, String key_str) { return getFromDKV(param_name, Key.make(key_str)); }
/**
 * Creates a leaderboard for the given AutoML project.
 *
 * @param project_name     name of the project; the leaderboard key is derived from it
 * @param userFeedback     feedback/event sink for this run
 * @param leaderboardFrame optional frame used to score leaderboard models (may be null)
 * @param sort_metric      metric name used to order models (may be null); stored lower-cased
 */
public Leaderboard(String project_name, UserFeedback userFeedback, Frame leaderboardFrame, String sort_metric) {
  this._key = make(idForProject(project_name));
  this.project_name = project_name;
  this.userFeedback = userFeedback;
  this.leaderboardFrame = leaderboardFrame;
  // Checksum lets us later detect that the frame changed underneath us.
  this.leaderboardFrameChecksum = leaderboardFrame == null ? 0 : leaderboardFrame.checksum();
  // Normalize with a fixed locale so metric-name matching is independent of the default
  // locale (e.g. the Turkish dotless-i rule would break "AUCPR" under tr_TR).
  this.sort_metric = sort_metric == null ? null : sort_metric.toLowerCase(java.util.Locale.ROOT);
}
/**
 * Creates the user-feedback event log for an AutoML run, keyed off the run's key,
 * and publishes it to the DKV if no usable event array is present.
 *
 * @param autoML the AutoML run this feedback log belongs to
 */
public UserFeedback(AutoML autoML) {
  this._key = make(idForRun(autoML._key));
  this.autoML = autoML;
  // Check whether a feedback object for this run already exists in the DKV.
  UserFeedback old = DKV.getGet(this._key);
  // NOTE(review): this condition tests the freshly-constructed instance's own
  // feedbackEvents field (which is null here unless initialized elsewhere), not
  // old.feedbackEvents — it may have been intended to check the *old* object's
  // events. Confirm intent before changing; as written the branch is effectively
  // always taken right after construction.
  if (null == old || null == feedbackEvents) {
    feedbackEvents = new UserFeedbackEvent[0];
    DKV.put(this);
  }
}
/**
 * Maps a string vector of words to numeric word2vec output columns.
 *
 * @param wordVec         string vector of input words
 * @param aggregateMethod how to combine embeddings (AVERAGE uses the aggregate task,
 *                        anything else the plain per-word transform task)
 * @return a new frame with {@code _output._vecSize} numeric columns
 * @throws IllegalArgumentException if {@code wordVec} is not a string vector
 */
public Frame transform(Vec wordVec, AggregateMethod aggregateMethod) {
  if (wordVec.get_type() != Vec.T_STR) {
    throw new IllegalArgumentException("Expected a string vector, got " + wordVec.get_type_str() + " vector.");
  }
  // One numeric output column per embedding dimension.
  byte[] outputTypes = new byte[_output._vecSize];
  Arrays.fill(outputTypes, Vec.T_NUM);
  MRTask<?> task;
  if (aggregateMethod == AggregateMethod.AVERAGE) {
    task = new Word2VecAggregateTask(this);
  } else {
    task = new Word2VecTransformTask(this);
  }
  return task.doAll(outputTypes, wordVec).outputFrame(Key.<Frame>make(), null, null);
}
/**
 * Builds a skeleton DeepWaterModel from these schema parameters.
 * The model gets a fresh destination key and an output object with no training info.
 */
@Override public DeepWaterModel createImpl() {
  DeepWaterParameters parms = parameters.createImpl();
  Key<DeepWaterModel> dest = Key.make();
  DeepWaterModelOutput output = new DeepWaterModelOutput(null);
  return new DeepWaterModel(dest, parms, output, null, null, 0);
}
}
/**
 * Builds a skeleton DeepLearningModel from these schema parameters.
 * The model gets a fresh destination key and an output object with no training info.
 */
@Override public DeepLearningModel createImpl() {
  DeepLearningModel.DeepLearningParameters parms = parameters.createImpl();
  // Explicit type witness on Key.make() for consistency with the analogous
  // DeepWater schema implementation (Key.<DeepWaterModel>make()).
  return new DeepLearningModel(Key.<DeepLearningModel>make() /*dest*/, parms, new DeepLearningModel.DeepLearningModelOutput(null), null, null, 0);
}
}
public Key<Frame> gen_representation_key(Frame fr) { if ((_parms.train() != null) && (fr.checksum() == _parms.train().checksum()) && fr._key.equals(_parms.train()._key)) // use training X factor here. return _output._representation_key; else return Key.make("GLRMLoading_"+fr._key); }
/**
 * Scores the adapted frame with the fitted CoxPH model and returns a one-column
 * frame named "lp" (the linear predictor).
 *
 * @param fr               original input frame (unused directly here)
 * @param adaptFrm         frame already adapted to the model's training columns
 * @param destination_key  key for the resulting prediction frame
 * @param j                owning job (unused here)
 * @param computeMetrics   whether metrics were requested (unused here)
 * @param customMetricFunc custom metric hook (unused here)
 */
@Override
protected Frame predictScoreImpl(Frame fr, Frame adaptFrm, String destination_key, Job j, boolean computeMetrics, CFuncRef customMetricFunc) {
  // Count how many of the configured response columns actually appear in the adapted frame.
  int presentResponses = 0;
  for (String responseCol : _parms.responseCols()) {
    if (adaptFrm.find(responseCol) != -1) presentResponses++;
  }
  DataInfo scoringInfo = _output.data_info.scoringInfo(_output._names, adaptFrm, presentResponses, false);
  CoxPHScore scoreTask = new CoxPHScore(scoringInfo, _output, _parms.isStratified());
  return scoreTask
      .doAll(Vec.T_NUM, scoringInfo._adaptedFrame)
      .outputFrame(Key.<Frame>make(destination_key), new String[]{"lp"}, null);
}
/**
 * Collapses the per-fold rows of a target-encoding map by grouping on the
 * target-encoded column and summing numerator/denominator. When no fold column
 * was used, the map needs no collapsing and an independent copy is returned.
 *
 * @param foldColumnName    name of the fold column, or null if none was used
 * @param targetEncodingMap encoding map frame to collapse
 * @param teColumnName      name of the target-encoded column to group by
 * @return a new frame (caller owns it); the input map is not modified
 */
Frame groupingIgnoringFordColumn(String foldColumnName, Frame targetEncodingMap, String teColumnName) {
  if (foldColumnName == null) {
    // No fold column: nothing to aggregate; hand back an independent copy.
    Frame mapCopy = targetEncodingMap.deepCopy(Key.make().toString());
    DKV.put(mapCopy);
    return mapCopy;
  }
  int teColumnIndex = targetEncodingMap.find(teColumnName);
  Frame grouped = groupByTEColumnAndAggregate(targetEncodingMap, teColumnIndex);
  renameColumn(grouped, "sum_numerator", "numerator");
  renameColumn(grouped, "sum_denominator", "denominator");
  return grouped;
}
/**
 * Materializes the left singular vector frame U for the randomized SVD and stores
 * it under a key derived from {@code u_name} (recorded in the model output).
 *
 * @param model  model whose output receives the new U key
 * @param u_name name used to build the U frame's DKV key
 * @param u      existing U frame — NOTE(review): appears unused in this body; confirm
 * @param qfrm   the Q frame from the QR step; U is computed from it
 * @param atqJ   the (A^T Q) matrix; only its column dimension is read here
 * @param svdJ   JAMA SVD of atqJ; its V matrix supplies the right factor
 * @return the U frame, registered under {@code model._output._u_key}
 */
public Frame makeUVec(SVDModel model, String u_name, Frame u, Frame qfrm, Matrix atqJ, SingularValueDecomposition svdJ ) {
  model._output._u_key = Key.make(u_name);
  // Right singular vectors of atqJ, truncated to the first _nv columns.
  double[][] svdJ_u = svdJ.getV().getMatrix(0, atqJ.getColumnDimension() - 1, 0, _parms._nv - 1).getArray();
  // Wrap qfrm so the distributed multiply task can see it; no transforms applied.
  DataInfo qinfo = new DataInfo(qfrm, null, true, DataInfo.TransformType.NONE, false, false, false);
  DKV.put(qinfo._key, qinfo);
  // Presumably computes U = Q * V distributed over qfrm's rows — confirm against BMulTask.
  BMulTask btsk = new BMulTask(_job._key, qinfo, ArrayUtils.transpose(svdJ_u));
  btsk.doAll(_parms._nv, Vec.T_NUM, qinfo._adaptedFrame);
  // Drop the temporary DataInfo; its adapted frame is no longer needed.
  qinfo.remove();
  return btsk.outputFrame(model._output._u_key, null, null);
  // DKV.remove(qinfo._key);
}
@Override
/**
 * Returns a new frame containing the rows of {@code fr} selected by the single-column
 * 0/1 predicate frame.
 *
 * Side effects: {@code predicateFrame} is deleted, and {@code fr} temporarily gains
 * (and then loses) a column named "predicate".
 *
 * @param fr             frame to filter; restored to its original columns on exit
 * @param predicateFrame one-column frame of selection flags; consumed by this call
 * @return the filtered frame, with {@code fr}'s original names/types/domains
 */
private static Frame selectByPredicate(Frame fr, Frame predicateFrame) {
  // Snapshot schema before the temporary column is attached.
  String[] names = fr.names().clone();
  byte[] types = fr.types().clone();
  String[][] domains = fr.domains().clone();
  fr.add("predicate", predicateFrame.anyVec());
  Frame filtered;
  try {
    filtered = new Frame.DeepSelect().doAll(types, fr).outputFrame(Key.<Frame>make(), names, domains);
  } finally {
    // Always dispose of the predicate and restore fr, even if the selection task
    // throws — otherwise fr is left with a dangling "predicate" column.
    predicateFrame.delete();
    fr.remove("predicate");
  }
  return filtered;
}
/**
 * One-time fixture: parses the 20k-row covtype dataset, converts the response
 * (last column) to a categorical, and splits it 80/20 into _train and _test.
 */
@BeforeClass public static void setup() {
  stall_till_cloudsize(1);
  _covtype = parse_test_file("smalldata/covtype/covtype.20k.data");
  // Replace the numeric response with its categorical version; drop the old vec.
  int responseIdx = _covtype.numCols() - 1;
  _covtype.replace(responseIdx, _covtype.lastVec().toCategoricalVec()).remove();
  Key trainKey = Key.make("train");
  Key testKey = Key.make("test");
  H2O.submitTask(new FrameSplitter(_covtype, new double[]{.8}, new Key[]{trainKey, testKey}, null)).join();
  _train = DKV.getGet(trainKey);
  _test = DKV.getGet(testKey);
}
/** Trains a single cross-validated DRF on the prostate dataset (response: AGE). */
@Test public void testBuildSingle() {
  Scope.enter();
  try {
    Frame prostate = parse_test_file(Key.make("prostate_single.hex"), "smalldata/logreg/prostate.csv");
    // The row-id column is not a predictor; drop it before training.
    prostate.remove("ID").remove();
    Scope.track(prostate);
    DKV.put(prostate);
    buildXValDRF(prostate, "AGE");
  } finally {
    Scope.exit();
  }
}
/** Launches 100 concurrent DRF trainings over the same prostate frame. */
@Test public void testBuildConcurrent() {
  Scope.enter();
  try {
    Frame prostate = parse_test_file(Key.make("prostate_concurrent.hex"), "smalldata/logreg/prostate.csv");
    Scope.track(prostate);
    // The row-id column is not a predictor; drop it before training.
    prostate.remove("ID").remove();
    DKV.put(prostate);
    TrainSingleFun trainFun = new TrainSingleFun(prostate);
    H2O.submitTask(new LocalMR(trainFun, 100)).join();
  } finally {
    Scope.exit();
  }
}