public Futures delete_trees(Futures fs) {
  for (int tid = 0; tid < treeKeys.length; tid++)          /* over all trees */
    for (int cid = 0; cid < treeKeys[tid].length; cid++)   /* over all classes */
      // 2-binary classifiers can contain null for the second
      if (treeKeys[tid][cid] != null)
        DKV.remove(treeKeys[tid][cid], fs);
  return fs;
}
public void delete_best_model() {
  if (actual_best_model_key != null && actual_best_model_key != _key)
    DKV.remove(actual_best_model_key);
}
static public Value remove( Key key ) { return remove(key, null); }
// A remove is implemented as a put of null; the Futures overload lets callers batch removals
static public Value remove( Key key, Futures fs ) { return put(key, null, fs); }
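Since the `Futures` overload only enqueues the removal, a caller can issue many removes and block once at the end. A minimal usage sketch, assuming a running H2O cloud; `keysToDrop` is a hypothetical collection of keys:

// Minimal sketch, assuming a running H2O cloud; keysToDrop is hypothetical.
Futures fs = new Futures();
for (Key k : keysToDrop)
  DKV.remove(k, fs);   // each remove is issued asynchronously into fs
fs.blockForPending();  // block once for all pending removals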
protected void cleanup() {
  if (!_parms._keep_base_model_predictions) {
    _model.deleteBaseModelPredictions();
  }
  if (!_parms._keep_levelone_frame) {
    DKV.remove(_levelOneTrainingFrame._key);   // Remove Level One Training Frame from DKV
  }
  if (null != _levelOneValidationFrame) {
    DKV.remove(_levelOneValidationFrame._key); // Remove Level One Validation Frame from DKV
  }
}
@Override public void remove() {
  DKV.remove(_progress);
  super.remove();
}
private Futures onExceptionCleanup(Futures fs) {
  int nchunks = _chunk2Enum.length;
  int ncols = _setup._ncols;
  for( int i = 0; i < ncols; ++i ) {
    Key vkey = _vg.vecKey(_vecIdStart + i);
    DKV.remove(vkey, fs);                       // remove the Vec header key
    for( int c = 0; c < nchunks; ++c )
      DKV.remove(Vec.chunkKey(vkey, c), fs);    // remove each chunk of the Vec
  }
  cancel(true);
  return fs;
}
@Override protected Futures remove_impl(Futures fs) {
  if (_output.weights != null && _output.biases != null) {
    for (Key k : _output.weights) if (k != null) k.remove(fs);
    for (Key k : _output.biases)  if (k != null) k.remove(fs);
  }
  if (actual_best_model_key != null)
    DKV.remove(actual_best_model_key);
  DKV.remove(model_info().data_info()._key, fs);
  deleteElasticAverageModels();
  return super.remove_impl(fs);
}
public Futures remove( Futures fs ) {
  for( int i = 0; i < nChunks(); i++ )
    UKV.remove(chunkKey(i), fs);  // drop all chunks first
  DKV.remove(_key, fs);           // then drop the header key itself
  return fs;
}
static public Futures remove( Key key, Futures fs ) {
  if( key.isVec() ) {             // Vecs must also remove their chunk keys
    Value val = DKV.get(key);
    if (val == null) return fs;
    ((Vec)val.get()).remove(fs);
  }
  DKV.remove(key, fs);
  return fs;
}
@Override public void cancel() {
  _cancelled = true;
  for( GLM2 g : _jobs ) g.cancel();
  source.unlock(self());
  DKV.remove(destination_key);
  super.cancel();
}
private void cleanUpCache(Futures fs) {
  final Key[] cacheKeys = KeySnapshot.globalSnapshot().filter(new KeySnapshot.KVFilter() {
    @Override public boolean filter(KeySnapshot.KeyInfo k) {
      return Value.isSubclassOf(k._type, DeepWaterImageIterator.IcedImage.class) && k._key.toString().contains(CACHE_MARKER)
          || Value.isSubclassOf(k._type, DeepWaterDatasetIterator.IcedRow.class) && k._key.toString().contains(CACHE_MARKER);
    }
  }).keys();
  if (fs == null) fs = new Futures();
  for (Key k : cacheKeys) DKV.remove(k, fs);
  fs.blockForPending();
}
public T invokeOnAllNodes() {
  H2O cloud = H2O.CLOUD;
  Key[] args = new Key[cloud.size()];
  String skey = "RunOnAll" + Key.rand();
  for( int i = 0; i < args.length; ++i )
    args[i] = Key.make(skey, (byte)0, Key.DFJ_INTERNAL_USER, cloud._memary[i]);
  invoke(args);
  for( Key arg : args )
    DKV.remove(arg);  // clean up the per-node keys after the call
  return self();
}
@Override protected Futures remove_impl(Futures fs) {
  cleanUpCache(fs);
  removeNativeState();
  if (actual_best_model_key != null)
    DKV.remove(actual_best_model_key);
  if (model_info()._dataInfo != null)
    model_info()._dataInfo.remove(fs);
  return super.remove_impl(fs);
}
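Both `remove_impl` overrides above follow the same shape: drop any auxiliary keys into the shared `Futures`, then defer to the superclass. A minimal sketch of that pattern, where `_auxKey` is a hypothetical field standing in for model-specific state:

// Hedged sketch of the remove_impl override pattern; _auxKey is hypothetical.
@Override protected Futures remove_impl(Futures fs) {
  if (_auxKey != null)
    DKV.remove(_auxKey, fs);     // drop this model's auxiliary state
  return super.remove_impl(fs);  // let the base class finish its own cleanup
}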
public static Frame mmul(Frame x, Frame y) {
  MatrixMulJob mmj = new MatrixMulJob(Key.make("mmul" + ++cnt), Key.make("mmulProgress"), x, y);
  mmj.fork()._fjtask.join();
  DKV.remove(mmj._dstKey);  // do not leave garbage in KV
  mmj._z.reloadVecs();
  return mmj._z;
}
public void delete( Key job_key, float dummy ) {
  if( _key != null ) {
    Log.debug(Log.Tag.Sys.LOCKS, "lock-then-delete " + _key + " by job " + job_key);
    new PriorWriteLock(job_key).invoke(_key);
  }
  Futures fs = new Futures();
  delete_impl(fs);
  if( _key != null ) DKV.remove(_key, fs);  // Delete self also
  fs.blockForPending();
}
@Override public void lcompute() {
  int keysetSize = H2O.localKeySet().size();
  int numNodes = H2O.CLOUD._memary.length;
  int nodeIdx = H2O.SELF.index();
  Log.info("Removing " + keysetSize + " keys on this node; nodeIdx(" + nodeIdx + ") numNodes(" + numNodes + ")");
  // Now remove all keys.
  Futures fs = new Futures();
  for( Key key : H2O.localKeySet() )
    DKV.remove(key, fs);
  fs.blockForPending();
  Log.info("Keys remaining: " + H2O.store_size());
  tryComplete();
}
public Frame makeCompatible( Frame f ) {
  // Small data frames are always "compatible"
  if( anyVec() == null )  // Or it is small
    return f;             // Then must be compatible
  // Same VectorGroup is also compatible
  if( f.anyVec() == null ||
      f.anyVec().group().equals(anyVec().group()) && Arrays.equals(f.anyVec()._espc, anyVec()._espc) )
    return f;
  // Ok, here make some new Vecs with compatible layout
  Key k = Key.make();
  H2O.submitTask(new RebalanceDataSet(this, f, k)).join();
  Frame f2 = DKV.get(k).get();
  DKV.remove(k);
  return f2;
}
@Test public void registerTest() {
  Scope.enter();
  try {
    Frame fr = new TestFrameBuilder()
        .withName("testFrame")
        .build();
    Scope.track(fr);
    Key<Frame> keyBefore = fr._key;
    DKV.remove(keyBefore);
    Frame res = register(fr);
    Scope.track(res);
    assertNotSame(res._key, keyBefore);
  } finally {
    Scope.exit();
  }
}
@Test public void testBasicCRUD() {
  Key k1 = Key.make("key1");
  Value v0 = DKV.get(k1);
  assertNull(v0);                    // nothing stored yet
  Value v1 = new Value(k1, "test0 bits for Value");
  DKV.put(k1, v1);
  assertEquals(v1._key, k1);
  Value v2 = DKV.get(k1);
  assertEquals(v1, v2);              // put/get round-trip
  DKV.remove(k1);
  Value v3 = DKV.get(k1);
  assertNull(v3);                    // after remove, get returns null
}
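A hedged variant of the same round-trip using the `Futures` overload, assuming the same `DKV`, `Value`, and assertion APIs as the test above:

// Sketch: batched removal of two keys, verified via DKV.get (same API as above assumed).
Key ka = Key.make("keyA"), kb = Key.make("keyB");
DKV.put(ka, new Value(ka, "bits for A"));
DKV.put(kb, new Value(kb, "bits for B"));
Futures fs = new Futures();
DKV.remove(ka, fs);
DKV.remove(kb, fs);
fs.blockForPending();     // both removals complete here
assertNull(DKV.get(ka));  // keys are gone once pending work drains
assertNull(DKV.get(kb));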