/**
 * Fills {@code models} with the model fetched from the DKV for each key in {@code modelKeys}.
 * @param modelKeys keys to resolve
 * @param models destination array; must be at least as long as {@code modelKeys}
 * @return the (mutated) {@code models} array
 */
private static Model[] modelsForModelKeys(Key<Model>[] modelKeys, Model[] models) {
  assert models.length >= modelKeys.length;
  for (int idx = 0; idx < modelKeys.length; idx++) {
    models[idx] = getGet(modelKeys[idx]);
  }
  return models;
}
/** Resolves the default value from the DKV, or returns null when no default key is set. */
@Override
protected Value defaultValue() {
  return _defaultValue == null ? null : DKV.get(_defaultValue);
}
/**
 * Cancels this GLM job: releases the source-frame lock (standalone runs only),
 * removes progress tracking, and deletes the destination model plus any
 * cross-validation models it references.
 * @param msg cancellation reason passed up to the superclass
 */
@Override
public void cancel(String msg) {
  // Grid searches manage the source lock themselves; only unlock for standalone runs.
  if (!_grid) {
    source.unlock(self());
  }
  DKV.remove(_progressKey);
  Value v = DKV.get(destination_key);
  if (v != null) {
    GLMModel m = v.get();
    // Clean up cross-validation models hanging off the main model.
    Key[] xvals = m.xvalModels();
    if (xvals != null)
      for (Key k : xvals) DKV.remove(k);
  }
  // Fix: the original removed destination_key twice when a value existed
  // (once inside the if, once after). Remove it exactly once.
  DKV.remove(destination_key);
  super.cancel(msg);
}
/**
 * Records the sort metric, the remaining metrics, and the sort direction,
 * marks the metric as explicitly set, and persists this object to the DKV.
 */
private void setMetricAndDirection(String metric, String[] otherMetrics, boolean sortDecreasing) {
  sort_metric = metric;
  other_metrics = otherMetrics;
  sort_decreasing = sortDecreasing;
  have_set_sort_metric = true;
  DKV.put(this);
}
public Vec replace(int col, Vec nv) { if (col >= numCols()) throw new IllegalArgumentException("Trying to select column "+(col+1)+" but only "+numCols()+" present."); Vec rv = vecs()[col]; assert rv.group().equals(nv.group()); _vecs[col] = nv; _keys[col] = nv._key; if( DKV.get(nv._key)==null ) // If not already in KV, put it there DKV.put(nv._key, nv); return rv; }
public Futures delete_trees(Futures fs) { for (int tid = 0; tid < treeKeys.length; tid++) /* over all trees */ for (int cid = 0; cid < treeKeys[tid].length; cid++) /* over all classes */ // 2-binary classifiers can contain null for the second if (treeKeys[tid][cid]!=null) DKV.remove(treeKeys[tid][cid], fs); return fs; }
/**
 * Compute quantile-based threshold (in reconstruction error) to find outliers.
 * @param mse Vector containing reconstruction errors
 * @param quantile Quantile for cut-off
 * @return Threshold in MSE value for a point to be above the quantile
 */
public double calcOutlierThreshold(Vec mse, double quantile) {
  Frame mse_frame = new Frame(Key.<Frame>make(), new String[]{"Reconstruction.MSE"}, new Vec[]{mse});
  DKV.put(mse_frame._key, mse_frame);
  QuantileModel kmm = null;
  try {
    QuantileModel.QuantileParameters parms = new QuantileModel.QuantileParameters();
    parms._train = mse_frame._key;
    parms._probs = new double[]{quantile};
    Job<QuantileModel> job = new Quantile(parms).trainModel();
    kmm = job.get();
    job.remove();
    return kmm._output._quantiles[0][0];
  } finally {
    // Fix: always clean up the temporary frame and model, even when quantile
    // training throws — the original leaked both into the DKV on failure.
    if (kmm != null) kmm.delete();
    DKV.remove(mse_frame._key);
  }
}
@Test public void testRemoteBitSet() throws Exception { Scope.enter(); // Issue a slew of remote key puts Key[] keys = new Key[32]; for( int i = 0; i < keys.length; ++i ) { Key k = keys[i] = Key.make("key"+i); byte[] bits = new byte[4]; bits[0] = (byte)i; // Each value holds a shift-count Value val = new Value(k,bits); DKV.put(k,val); } DKV.write_barrier(); RemoteBitSet r = new RemoteBitSet(); r.invoke(keys); assertEquals((int)((1L<<keys.length)-1), r._x); //for( Key k : keys ) DKV.remove(k); Scope.exit(); }
/**
 * Imports all files under {@code path} from HDFS, recording successes in
 * {@code keys}/{@code files} and failures in {@code fails}, then publishes the
 * result cluster-wide.
 * @throws IOException if the HDFS folder walk fails
 */
protected void serveHdfs() throws IOException {
  // Bare s3n bucket names need a trailing slash before they can be listed.
  if (isBareS3NBucketWithoutTrailingSlash(path)) {
    path += "/";
  }
  Log.info("ImportHDFS processing (" + path + ")");
  ArrayList<String> succ = new ArrayList<String>();
  ArrayList<String> fail = new ArrayList<String>();
  PersistHdfs.addFolder2(new Path(path), succ, fail);
  keys = succ.toArray(new String[succ.size()]);
  files = keys;
  fails = fail.toArray(new String[fail.size()]);
  this.prefix = getCommonPrefix(keys);
  // Ensure the imported keys are globally visible before replying.
  DKV.write_barrier();
}
// Constructs the aggregation task. Precomputes the squared radius (_delta) so
// per-point comparisons can use squared distances (no sqrt needed), and — when a
// terminate key is supplied — seeds it in the DKV with 0 so workers can poll it.
// NOTE(review): presumably 0 means "not terminated"; confirm against isTerminated().
public AggregateTask(Key<DataInfo> dataInfoKey, double radius, Key<Job> jobKey, int maxExemplars, Key terminateKey) {
  _delta = radius*radius; // squared threshold — compare against squared distances
  _dataInfoKey = dataInfoKey;
  _jobKey = jobKey;
  _maxExemplars = maxExemplars;
  _terminateKey = terminateKey;
  if (_terminateKey!=null) DKV.put(_terminateKey, new IcedInt(0)); // seed termination flag
}
// Body continues past this view of the file.
private boolean isTerminated() {
/**
 * Returns the Leaderboard for {@code project_name} from the DKV, refreshing it
 * with the caller's feedback/frame/metric settings; creates and stores a new
 * one if none exists.
 */
static Leaderboard getOrMakeLeaderboard(String project_name, UserFeedback userFeedback, Frame leaderboardFrame, String sort_metric) {
  Leaderboard existing = DKV.getGet(Key.make(idForProject(project_name)));
  if (existing == null) {
    Leaderboard created = new Leaderboard(project_name, userFeedback, leaderboardFrame, sort_metric);
    DKV.put(created);
    return created;
  }
  // Refresh the cached instance with the caller's settings before re-publishing.
  existing.userFeedback = userFeedback;
  existing.leaderboardFrame = leaderboardFrame;
  if (sort_metric != null) {
    existing.sort_metric = sort_metric.toLowerCase();
    existing.sort_decreasing = existing.sort_metric.equals("auc"); // AUC: higher is better
  }
  existing.leaderboardFrameChecksum = leaderboardFrame == null ? 0 : leaderboardFrame.checksum();
  DKV.put(existing);
  return existing;
}
/** Removes the tracked best model from the DKV, unless it is this model itself. */
public void delete_best_model( ) {
  // Fix: compare keys by value equality rather than reference identity (!=),
  // so a non-interned but equal Key instance cannot cause self-deletion.
  if (actual_best_model_key != null && !actual_best_model_key.equals(_key))
    DKV.remove(actual_best_model_key);
}
/**
 * @return list of keys of models sorted by the default metric for the model category, fetched from the DKV
 */
Key<Model>[] getModelKeys() {
  Leaderboard fresh = DKV.getGet(this._key); // re-fetch to see the latest persisted state
  return fresh.models;
}
@Override protected void setupLocal() { _model_mem_size = 0; for (int i=0; i< trees_so_far; ++i) { Key<CompressedTree>[] per_class = _treeKeys[i]; for (int j=0; j<per_class.length; ++j) { if (per_class[j] == null) continue; if (!per_class[j].home()) continue; // only look at homed tree keys _model_mem_size += DKV.get(per_class[j])._max; } } }
/**
 * Publishes a deep copy of this model under {@code bestModelKey}, carrying over
 * the elastic-averaging model info when that feature is enabled.
 */
private void putMeAsBestModel(Key bestModelKey) {
  DeepLearningModel best = IcedUtils.deepCopy(this);
  DKV.put(bestModelKey, best);
  if (model_info().get_params()._elastic_averaging) {
    // Copy the elastic-averaging state alongside the best model, if present.
    DeepLearningModelInfo eaInfo = DKV.getGet(model_info.elasticAverageModelInfoKey());
    if (eaInfo != null)
      DKV.put(best.model_info().elasticAverageModelInfoKey(), eaInfo);
  }
  assert DKV.get(bestModelKey) != null;
  assert best.compareTo(this) <= 0;
}