// Cleanup hook invoked when this model is deleted: removes the model's
// internal clusters key from the store, then returns the Futures unchanged
// so the caller can continue chaining deletions.
/** Remove any Model internal Keys */ @Override public Futures delete_impl(Futures fs) { Lockable.delete(_clustersKey); return fs; } }
// Convenience overload: write-lock and delete 'k' with no owning job
// (null job_key), i.e. the delete fails if anybody at all holds the lock.
public static void delete( Key k ) { delete(k,null); } // Write-lock & delete 'k'. Will fail if 'k' is locked by anybody other than 'job_key'
// Instance convenience: delete this Lockable with no owning job key and a
// zero progress value.
public void delete( ) { delete(null,0.0f); } // Will fail if locked by anybody other than 'job_key'
// Leave the current tracking scope: pop the scope's recorded key-set and
// delete every key in it. No-op when the key stack is empty. The
// single-argument variant (exit while keeping one key alive) is declared
// but not implemented yet.
static public void exit () { Stack<HashSet<Key>> keys = _scope.get()._keys; if( keys.size()==0 ) return; for( Key key : keys.pop() ) Lockable.delete(key); } static public Key exit(Key key) { throw H2O.unimpl(); }
// HTTP request handler: deletes the value stored under the requested key.
// Throwable is caught here deliberately — this is a request boundary, and
// any failure is reported to the client as an error Response rather than
// propagated. On success the key is echoed back in the JSON payload and the
// client is redirected to the store view.
@Override protected Response serve() { try { Lockable.delete(_key.value()._key); } catch( Throwable e ) { return Response.error(e); } JsonObject response = new JsonObject(); response.addProperty(KEY, _key.toString()); return Response.redirect(response, StoreView.class, null); } }
public static void delete( Key k, Key job_key ) { if( k == null ) return; Value val = DKV.get(k); if( val == null ) return; // Or just nothing there to delete if( !val.isLockable() ) UKV.remove(k); // Simple things being deleted else ((Lockable)val.get()).delete(job_key,0.0f); // Lockable being deleted } // Will fail if locked by anybody.
@Test public void testParse() { for( int i=0; i<25; i++ ) {// One iteration to keep it fast //Key k1 = loadAndParseKey("h.hex","smalldata/fail2_24_100000_10.csv.gz"); Key k1 = load_test_file("smalldata/fail1_100x11000.csv.gz"); Lockable.delete(k1); } } }
// Parse the prostate data both as a folder of parts and as a single file,
// then verify the two parses are bit-identical. Both keys are deleted in
// the finally block regardless of outcome.
@Test public void testProstate() {
  Key fileKey = null, folderKey = null;
  try {
    folderKey = loadAndParseFolder("multipart.hex","smalldata/parse_folder_test");
    fileKey = loadAndParseFile("full.hex","smalldata/glm_test/prostate_cat_replaced.csv");
    Value fromFile = DKV.get(fileKey);
    Value fromFolder = DKV.get(folderKey);
    assertTrue("parsed values do not match!",fromFile.isBitIdentical(fromFolder));
  } finally {
    Lockable.delete(fileKey);
    Lockable.delete(folderKey);
  }
} }
// ddply over the cars dataset: a simple per-group nrow, then a multi-column
// result built with cbind of two means. The larger orange-dataset scenarios
// are intentionally left commented out because they are too slow for a
// junit run. Both hex keys are deleted in the finally block (the orange key
// is never actually parsed here, so its delete is expected to be harmless).
@Test public void testDdplyBig() { Key k0 = Key.make("cars.hex"); Key k1 = Key.make("orange.hex"); try { Frame fr0 = parseFrame(k0,"smalldata/cars.csv"); checkStr("ddply(cars.hex,c(3),nrow)"); // More complex multi-return checkStr("ddply(cars.hex,c(3),function(x) {cbind(mean(x[,2]),mean(x[,3]))})"); // A big enough file to distribute across multiple nodes. // Trimmed down to run in reasonable time. //Frame fr1 = parseFrame(k1,"smalldata/unbalanced/orange_small_train.data.zip"); //checkStr("ddply(orange.hex,c(7),nrow)"); //checkStr("ddply(orange.hex,c(206,207),function(x){ cbind( mean(x$Var6), sum(x$Var6+x$Var7) ) })"); // A more complex ddply that works as of 3/1/2014 but is slow for a junit //checkStr("ddply(orange.hex,c(206,207),function(x){"+ // "max6 = max(x$Var6);"+ // "min6 = min(x$Var6);"+ // "len = max6-min6+1;"+ // "tot = sum(x$Var7);"+ // "avg = tot/len"+ // "})"); } finally { Lockable.delete(k0); // Remove original hex frame key Lockable.delete(k1); // Remove original hex frame key } }
// Parse the iris dataset from four container formats (csv, xls, gzip, zip)
// and verify all four parses are pairwise bit-identical. Every key is
// removed in the finally block.
@Test public void testIris(){
  Key csvKey = null, xlsKey = null, gzKey = null, zipKey = null;
  try {
    csvKey = loadAndParseFile("csv.hex","smalldata/iris/iris_wheader.csv");
    xlsKey = loadAndParseFile("xls.hex","smalldata/iris/iris.xls");
    gzKey = loadAndParseFile("gzip.hex","smalldata/iris/iris_wheader.csv.gz");
    zipKey = loadAndParseFile("zip.hex","smalldata/iris/iris_wheader.csv.zip");
    Value csv = DKV.get(csvKey);
    Value xls = DKV.get(xlsKey);
    Value gz = DKV.get(gzKey);
    Value zip = DKV.get(zipKey);
    assertTrue(csv.isBitIdentical(xls));
    assertTrue(xls.isBitIdentical(gz));
    assertTrue(gz.isBitIdentical(zip));
  } finally {
    Lockable.delete(csvKey);
    Lockable.delete(xlsKey);
    Lockable.delete(gzKey);
    Lockable.delete(zipKey);
  }
} }
// Load the multi-megabyte h2o.jar as a Vec, build a byte histogram over it,
// and check that the histogram counts sum back to the file length.
@Test public void testMultiMbFile() throws Exception {
  File jar = find_test_file("target/h2o.jar");
  Key jarKey = load_test_file(jar);
  ByteHisto histo = new ByteHisto();
  histo.doAll((Vec)DKV.get(jarKey).get());
  int total = 0;
  for( int count : histo._x ) total += count;
  assertEquals(jar.length(),total);
  Lockable.delete(jarKey);
}
@Test public void testInvokeThrow() { File file = find_test_file("target/h2o.jar"); Key h2okey = load_test_file(file); NFSFileVec nfs = DKV.get(h2okey).get(); try { for(int i = 0; i < H2O.CLOUD._memary.length; ++i){ ByteHistoThrow bh = new ByteHistoThrow(); bh._throwAt = H2O.CLOUD._memary[i].toString(); try { bh.doAll(nfs); // invoke should throw DistributedException wrapped up in RunTimeException fail("should've thrown"); } catch(RuntimeException e){ assertTrue(e.getMessage().contains("test")); } catch(Throwable ex){ ex.printStackTrace(); fail("Expected RuntimeException, got " + ex.toString()); } } } finally { // currently canceled RPC calls do not properly wait for all other nodes... // so once a map() call fails, other map calls can lazily load data after we call delete() try { Thread.sleep(100); } catch( InterruptedException ignore ) {} Lockable.delete(h2okey); } }
@Test public void testGetThrow() { File file = find_test_file("target/h2o.jar"); Key h2okey = load_test_file(file); NFSFileVec nfs = DKV.get(h2okey).get(); try { for(int i = 0; i < H2O.CLOUD._memary.length; ++i){ ByteHistoThrow bh = new ByteHistoThrow(); bh._throwAt = H2O.CLOUD._memary[i].toString(); try { bh.dfork(nfs).get(); // invoke should throw DistributedException wrapped up in RunTimeException fail("should've thrown"); } catch(ExecutionException e){ assertTrue(e.getMessage().contains("test")); } catch(Throwable ex){ ex.printStackTrace(); fail("Expected ExecutionException, got " + ex.toString()); } } } finally { // currently canceled RPC calls do not properly wait for all other nodes... // so once a map() call fails, other map calls can lazily load data after we call delete() try { Thread.sleep(100); } catch( InterruptedException ignore ) {} Lockable.delete(h2okey); } }
// Clean up the loaded file key. NOTE(review): bare fragment — the enclosing
// method is not visible in this chunk.
Lockable.delete(h2okey);
// Parse a file containing single-quoted fields and verify the resulting
// frame has the expected column and row counts; the key is removed in the
// finally block.
@Test public void testSingleQuotes() {
  Key quoteKey = Key.make("q.hex");
  try {
    Frame parsed = TestUtil.parseFrame(quoteKey,"smalldata/test/test_quote.csv");
    Assert.assertEquals(parsed.numCols(),11);
    Assert.assertEquals(parsed.numRows(), 7);
  } finally {
    Lockable.delete(quoteKey);
  }
}
// Roll back a failed readPut: delete the partially-built frame key, remove
// the freshly created vector key, and log the cleanup. NOTE(review): bare
// fragment — enclosing method not visible; logging success through Log.err
// looks odd but cannot be judged without the surrounding context.
Lockable.delete(key); DKV.remove(newVecKey); Log.err("Frame::readPut cleaned up new frame and vector successfully");
// Delete the value under 'k'. NOTE(review): bare fragment — enclosing
// context not visible in this chunk.
Lockable.delete(k);
// If the caller asked for source deletion, delete every source file key
// under the job's lock; otherwise fetch each as a Lockable for further
// handling. NOTE(review): fragment cut mid-branch — the body of the else
// loop continues outside the visible chunk.
if( delete_on_done ) for( Key k : fkeys ) Lockable.delete(k,job.self()); else for( Key k : fkeys ) { Lockable l = UKV.get(k);
@Test public void rbindTest() { Key dest1 = Key.make("f1"); float[] ratios = arf(0.5f); Frame[] splits = null; File file1 = TestUtil.find_test_file("smalldata/tnc3_10.csv"); //File file = TestUtil.find_test_file("smalldata/iris/iris_wheader.csv"); //File file = TestUtil.find_test_file("smalldata/cars.csv"); Key fkey1 = NFSFileVec.make(file1); Frame f = ParseDataset2.parse(dest1,new Key[]{fkey1}); FrameSplitter fs = new FrameSplitter(f, ratios); H2O.submitTask(fs).join(); splits = fs.getResult(); Frame rbinded_frame; Env ev = Exec2.exec("rbind("+splits[0]._key+","+splits[1]._key+")" ); try { rbinded_frame = ev.popAry(); } finally { if (ev!=null) ev.remove_and_unlock(); } assertEquals(rbinded_frame.numRows(),f.numRows()); rbinded_frame.delete(); Lockable.delete(dest1); for (Frame s : splits) if (s != null) s.delete(); }
// NOTE(review): bare fragment — enclosing context not visible in this chunk.
Lockable.delete(dest); // Remove original hex frame key