/**
 * Builds a KMeans2 model shell for the given training run.
 *
 * @param params  the KMeans2 job whose settings this model records
 * @param selfKey DKV key under which this model lives
 * @param dataKey key of the training frame
 * @param names   column names of the adapted frame
 * @param domains categorical domains per column
 */
public KMeans2Model(KMeans2 params, Key selfKey, Key dataKey, String names[], String domains[][]) {
  super(selfKey, dataKey, names, domains, /* priorClassDistribution */ null, /* modelClassDistribution */ null);
  parameters = params; // only for backward-compatibility of JSON response
  _ncats = params._ncats;
  // Mirror the parameters older JSON clients expect as top-level fields.
  k = params.k;
  normalized = params.normalize;
  max_iter = params.max_iter;
  // The cluster assignments live under a derived key next to the model.
  _clustersKey = Key.make(selfKey.toString() + "_clusters");
}
/**
 * Caches the model from the DKV on this node before the task runs locally.
 * The key is expected to resolve; a miss indicates the model was removed.
 */
@Override protected void setupLocal() { _model = _modelKey.get(); assert _model != null; }
@Override protected void setupLocal() { _model_mem_size = 0; for (int i=0; i< trees_so_far; ++i) { Key<CompressedTree>[] per_class = _treeKeys[i]; for (int j=0; j<per_class.length; ++j) { if (per_class[j] == null) continue; if (!per_class[j].home()) continue; // only look at homed tree keys _model_mem_size += DKV.get(per_class[j])._max; } } }
/**
 * Creates a fresh system DKV key for one compressed tree.
 *
 * @param treeId index of the tree in the ensemble
 * @param clazz  class (response category) the tree predicts
 * @return a unique system key embedding tree id, class, and a random suffix
 */
public static Key<CompressedTree> makeTreeKey(int treeId, int clazz) {
  final String keyName = "tree_" + treeId + "_" + clazz + "_" + Key.rand();
  return Key.makeSystem(keyName);
}
public DeepLearningModel(final Key destKey, final Key jobKey, final Key dataKey, final DataInfo dinfo, final DeepLearning params, final float[] priorDist) { super(destKey, dataKey, dinfo._adaptedFrame, priorDist); this.jobKey = jobKey; this._validationKey = params.validation != null ? params.validation._key : null; run_time = 0; start_time = System.currentTimeMillis(); _timeLastScoreEnter = start_time; model_info = new DeepLearningModelInfo(params, dinfo); actual_best_model_key = Key.makeSystem(Key.make().toString()); if (params.n_folds != 0) actual_best_model_key = null; Object job = UKV.get(jobKey); if (job instanceof DeepLearning) get_params().state = ((DeepLearning)UKV.get(jobKey)).state; //make the job state consistent else get_params().state = ((Job.JobHandle)UKV.get(jobKey)).state; //make the job state consistent if (!get_params().autoencoder) { errors = new Errors[1]; errors[0] = new Errors(); errors[0].validation = (params.validation != null); errors[0].num_folds = params.n_folds; } assert(Arrays.equals(_key._kb, destKey._kb)); }
/**
 * Returns the user-feedback id for an AutoML run.
 *
 * @param runKey key of the AutoML run, may be null
 * @return a dummy id when no run key is given, otherwise an id derived from the key
 */
public static String idForRun(Key<AutoML> runKey) {
  return runKey == null
      ? "AutoML_Feedback_dummy"
      : "AutoML_Feedback_" + runKey.toString();
}
/**
 * Wraps a word-to-index map under a freshly minted DKV key.
 *
 * @param data mapping from token to its vocabulary index
 */
Vocabulary(IcedHashMapGeneric<BufferedString, Integer> data) { super(Key.<Vocabulary>make()); _data = data; } }
/**
 * localSnapshot() must return exactly the user keys homed on this node:
 * system keys and remotely-homed user keys are filtered out.
 */
@Test public void testLocalKeySet() {
  final Key[] userKeys = new Key[100];
  final Key[] systemKeys = new Key[100];
  int expectedHomed = 0;
  final Futures pending = new Futures();
  try {
    for (int i = 0; i < userKeys.length; ++i) {
      userKeys[i] = Key.make("key" + i);
      DKV.put(userKeys[i], new Utils.IcedInt(i), pending, true);
      if (userKeys[i].home()) ++expectedHomed;
      systemKeys[i] = Key.makeSystem(Key.rand());
      DKV.put(systemKeys[i], new Utils.IcedInt(i), pending, true);
    }
    pending.blockForPending();
    final Key[] snapshot = H2O.KeySnapshot.localSnapshot().keys();
    Assert.assertEquals(expectedHomed, snapshot.length);
    // Every reported key must be homed locally.
    for (Key k : snapshot) Assert.assertTrue(k.home());
  } finally {
    // Clean up both key families regardless of assertion outcome.
    for (int i = 0; i < userKeys.length; ++i) {
      DKV.remove(userKeys[i], pending);
      DKV.remove(systemKeys[i], pending);
    }
    pending.blockForPending();
  }
}
// NOTE(review): this fragment appears truncated/garbled by formatting loss —
// the enclosing try block is never closed in view, and `Key k` is declared
// four times in what reads as a single loop scope, which would not compile.
// Presumably each declaration originally lived in its own loop or scope;
// verify against the full file before editing.
try {
  for (int i = 0; i < 1e2; ++i) { // 1e2 is a double literal; `i` is promoted for the comparison
    Key k = Key.makeSystem(Key.rand()); // a system key
    keys.add(k);
    DKV.put(k, new Value(k, new Utils.IcedInt(i)));
    Key k = Key.make("key" + i); // NOTE(review): redeclares k in the same apparent scope
    keys.add(k);
    DKV.put(k, new Value(k, new Utils.IcedInt(i)));
    Key k = Key.make("kei" + i); // NOTE(review): redeclares k again
    keys.add(k);
    DKV.put(k, new Value(k, new Utils.IcedInt(i)));
    Key k = Key.make(); // NOTE(review): redeclares k again; random user key
    // Re-roll once if the random key collides with the "k…" prefixes used above.
    if (k.toString().startsWith("k")) k = Key.make();
    keys.add(k);
    DKV.put(k, new Utils.IcedInt(i));
/**
 * Rebalance a frame for load balancing.
 * @param fr Input frame
 * @param local whether to only create enough chunks to max out all cores on one node only
 * @return Frame that has potentially more chunks
 */
private Frame reBalance(final Frame fr, boolean local) {
  // Target roughly 4 chunks per core (cluster-wide unless local), but never
  // more chunks than there are rows.
  int chunks = (int)Math.min( 4 * H2O.NUMCPUS * (local ? 1 : H2O.CLOUD.size()), fr.numRows());
  if (fr.anyVec().nChunks() > chunks && !reproducible) {
    // Already fragmented enough; hand back the original frame untouched.
    Log.info("Dataset already contains " + fr.anyVec().nChunks() + " chunks. No need to rebalance.");
    return fr;
  } else if (reproducible) {
    // A single chunk forces single-threaded processing => deterministic results.
    Log.warn("Reproducibility enforced - using only 1 thread - can be slow.");
    chunks = 1;
  }
  if (!quiet_mode) Log.info("ReBalancing dataset into (at least) " + chunks + " chunks.");
  // return MRUtils.shuffleAndBalance(fr, chunks, seed, local, shuffle_training_data);
  String snewKey = fr._key != null ? (fr._key.toString() + ".balanced") : Key.rand();
  Key newKey = Key.makeSystem(snewKey);
  RebalanceDataSet rb = new RebalanceDataSet(fr, newKey, chunks);
  H2O.submitTask(rb);
  rb.join(); // block until the rebalanced frame has been written to the DKV
  return UKV.get(newKey);
}
/**
 * globalSnapshot() must see every user key cluster-wide while excluding
 * system keys.
 */
@Test public void testGlobalKeySet() {
  final Key[] sysKeys = new Key[100];
  final Futures pending = new Futures();
  try {
    // 100 user keys plus 100 system keys; only the former should be visible.
    for (int i = 0; i < 100; ++i) {
      DKV.put(Key.make("key" + i), new Utils.IcedInt(i), pending, true);
    }
    for (int i = 0; i < 100; ++i) {
      sysKeys[i] = Key.makeSystem(Key.rand());
      DKV.put(sysKeys[i], new Utils.IcedInt(i), pending, true);
    }
    pending.blockForPending();
    final Key[] snapshot = H2O.KeySnapshot.globalSnapshot().keys();
    Assert.assertEquals(100, snapshot.length);
  } finally {
    // User keys are reconstructible by name; system keys were recorded.
    for (int i = 0; i < 100; ++i) {
      DKV.remove(Key.make("key" + i), pending);
      DKV.remove(sysKeys[i], pending);
    }
    pending.blockForPending();
  }
}
/**
 * A histogram built with HistogramType.QuantilesGlobal must place its bins
 * exactly at the supplied split points and map min / just-below-maxEx into
 * the first / last bin respectively.
 */
@Test public void testQuantilesRange() {
  final int nbins = 13;
  final int nbinsCats = nbins;
  final byte isInt = 0;
  final double min = 1;
  final double maxEx = 6.900000000000001; // exclusive upper bound
  final long seed = 1234;
  final SharedTreeModel.SharedTreeParameters.HistogramType histoType =
      SharedTreeModel.SharedTreeParameters.HistogramType.QuantilesGlobal;
  final double[] splitPts = new double[]{1, 1.5, 2, 2.5, 3, 4, 5, 6.1, 6.2, 6.3, 6.7, 6.8, 6.85};
  // Publish the split points so the histogram can look them up by key.
  final Key quantilesKey = Key.make();
  DKV.put(new DHistogram.HistoQuantiles(quantilesKey, splitPts));
  final DHistogram hist =
      new DHistogram("myhisto", nbins, nbinsCats, isInt, min, maxEx, 0, histoType, seed, quantilesKey);
  hist.init();
  // Bin boundaries: first at min, last strictly below the exclusive max.
  assert hist.binAt(0) == min;
  assert hist.binAt(nbins - 1) < maxEx;
  // Value-to-bin mapping at both extremes.
  assert hist.bin(min) == 0;
  assert hist.bin(maxEx - 1e-15) == nbins - 1;
  quantilesKey.remove();
}
/**
 * Returns true iff {@code job_key} currently appears in the locker list.
 * When more than one locker exists, slot 0 seems reserved (scanning starts
 * at 1); with a single locker — the write-locked case per is_wlocked() —
 * slot 0 itself is checked.
 */
private boolean is_locked(Key job_key) {
  if (_lockers == null) return false;
  final int first = (_lockers.length == 1) ? 0 : 1;
  for (int i = first; i < _lockers.length; i++) {
    final Key locker = _lockers[i];
    // Identity match also covers the both-null case.
    if (job_key == locker) return true;
    if (job_key != null && locker != null && job_key.equals(locker)) return true;
  }
  return false;
}

/** True iff write-locked: exactly one locker means the write lock is held. */
protected boolean is_wlocked() { return _lockers != null && _lockers.length == 1; }
/**
 * Deletes the cached base-model prediction frames and clears their key list.
 * When the level-one frame exists, vecs it shares with a prediction frame
 * are preserved; otherwise the key is simply removed.
 */
public void deleteBaseModelPredictions() {
  if (_output._base_model_predictions_keys == null) return;
  for (Key<Frame> predsKey : _output._base_model_predictions_keys) {
    if (_output._levelone_frame_id != null) {
      final Frame preds = predsKey.get();
      if (preds != null) {
        // Delete the frame but keep any vecs shared with the level-one frame.
        Frame.deleteTempFrameAndItsNonSharedVecs(preds, _output._levelone_frame_id);
      } else {
        predsKey.remove();
      }
    } else {
      predsKey.remove();
    }
  }
  _output._base_model_predictions_keys = null;
}
/**
 * Builds the deterministic system key under which the metrics of
 * {@code model} scored on {@code frame} are stored.
 *
 * @param model unique id of the scored model
 * @param frame unique id of the frame it was scored on
 * @return system key of the form {@code modelmetrics_<model>_on_<frame>}
 */
public static Key buildKey(UniqueId model, UniqueId frame) {
  final String name = String.format("modelmetrics_%s_on_%s", model.getId(), frame.getId());
  return Key.makeSystem(name);
}