// Solves the system for each right-hand side in ys, in parallel: task fi
// seeds a unit right-hand side (ys[fi][fi] = 1) and then delegates to the
// single-vector solve() overload for that column.
public void solve(final double [][] ys){
  RecursiveAction [] ras = new RecursiveAction[ys.length];
  for(int i = 0; i < ras.length; ++i) {
    final int fi = i; // effectively-final copy for capture by the anonymous class
    ras[i] = new RecursiveAction() {
      @Override protected void compute() {
        ys[fi][fi] = 1;  // unit right-hand side for column fi
        solve(ys[fi]);   // single-vector solve, done in-place on ys[fi]
      }
    };
  }
  ForkJoinTask.invokeAll(ras); // fork all columns, block until every one completes
}
public double [][] getInv(){
/**
 * Computes the diagonal of the inverse in parallel.
 * For each index a unit vector is solved and only the matching component of
 * the solution is kept, so the full inverse is never materialized.
 *
 * @return array holding the diagonal entries of the inverse
 */
public double [] getInvDiag(){
  final int n = _xx.length + _diag.length;
  final double [] res = new double[n];
  RecursiveAction [] tasks = new RecursiveAction[n];
  for(int idx = 0; idx < n; ++idx) {
    final int col = idx; // effectively-final copy for the anonymous class
    tasks[idx] = new RecursiveAction() {
      @Override protected void compute() {
        // Solve A * x = e_col and keep the col-th component of x.
        double [] unit = new double[n];
        unit[col] = 1;
        solve(unit);
        res[col] = unit[col];
      }
    };
  }
  ForkJoinTask.invokeAll(tasks);
  return res;
}
// Closes all NewChunk buffers in parallel and releases them for GC.
// Safe to call twice: the second call returns immediately (_nvs == null).
@Override public FVecDataOut close(final Futures fs){
  if( _nvs == null ) return this; // Might call close twice
  // Sanity check: every column must have parsed the same number of lines.
  for(NewChunk nc:_nvs)
    assert nc._len == _nLines:"incompatible lengths after parsing chunk, " + _nLines + " != " + nc._len;
  RecursiveAction [] rs = new RecursiveAction[_nvs.length];
  for(int i = 0; i < _nvs.length; ++i) {
    final int fi = i; // effectively-final copy for the anonymous class
    rs[i] = new RecursiveAction() {
      @Override protected void compute() {
        _nvs[fi].close(_cidx, fs);
        _nvs[fi] = null; // Free for GC
      }
    };
  }
  ForkJoinTask.invokeAll(rs); // close all columns concurrently, wait for completion
  _nChunks++;
  _nvs = null; // Free for GC
  return this;
}
@Override public FVecDataOut nextChunk(){
public void shrink() { if(_jobKey != null && !Job.isRunning(_jobKey)) throw new Job.JobCancelledException(); // for ( Col c: _c) c.shrink(); // sort columns in parallel: c.shrink() calls single-threaded Arrays.sort() RecursiveAction [] ras = new RecursiveAction[_c.length]; int i=0; for ( final Col c: _c) { ras[i++] = new RecursiveAction() { @Override public void compute() { c.shrink(); } }; } ForkJoinTask.invokeAll(ras); }
/**
 * Materializes matrix _m as a Frame: one Vec per column, filled in parallel
 * by FillVec tasks, then published to the DKV under _key.
 */
@Override public void compute2() {
  final int nCols = _m.cols();
  // Shrink the chunk size as the row count grows, but never below 2^1 rows/chunk.
  final int log_rows_per_chunk =
      Math.max(1, FileVec.DFLT_LOG2_CHUNK_SIZE - (int) Math.floor(Math.log(_m.rows()) / Math.log(2.)));
  Vec[] columns = new Vec[nCols];
  FillVec[] fillers = new FillVec[nCols];
  for (int c = 0; c < nCols; ++c) {
    columns[c] = makeCon(0, _m.rows(), log_rows_per_chunk);
    fillers[c] = new FillVec(_m, columns[c], c);
  }
  ForkJoinTask.invokeAll(fillers); // fill all columns concurrently
  DKV.put(_key, new Frame(_key, columns, true));
  tryComplete();
}
// Builds one UDPDropTester per ordered pair of distinct cloud nodes and runs
// them all in parallel. NOTE(review): assumes dropTests was sized to
// CLOUD.size() * (CLOUD.size() - 1) by the enclosing scope — confirm.
@Override public void compute2() {
  int k = 0;
  for(int i = 0; i < H2O.CLOUD.size(); ++i)
    for(int j = 0; j < H2O.CLOUD.size(); ++j){
      if(i == j) continue; // skip self-to-self
      dropTests[k++] = new UDPDropTester(H2O.CLOUD._memary[i],H2O.CLOUD._memary[j],msg_sizes,10);
    }
  ForkJoinTask.invokeAll(dropTests); // run all pairwise drop tests, wait for completion
  tryComplete();
} }).join();
ForkJoinTask.invokeAll(rs); // fork every task in rs and block until all complete
ForkJoinTask.invokeAll(fjts); // fork every task in fjts and block until all complete
// Run every queued task to completion, then hand back the (now finished) list.
invokeAll(tasks.toArray(new ForkJoinTask<?>[tasks.size()]));
return tasks;
// Finds the best split across the candidate columns of an undecided node.
// Small workloads are evaluated serially inline; larger ones are submitted to
// the fork/join pool. Returns the split with the lowest squared error, or
// null when no column yields a usable split.
public Split bestCol(UndecidedNode u, DHistogram hs[], Constraints cs) {
  DTree.Split best = null;
  if( hs == null ) return null;
  final int maxCols = u._scoreCols == null /* all cols */ ? hs.length : u._scoreCols.length;
  List<FindSplits> findSplits = new ArrayList<>();
  //total work is to find the best split across sum_over_cols_to_split(nbins)
  long nbinsSum = 0;
  for( int i=0; i<maxCols; i++ ) {
    int col = u._scoreCols == null ? i : u._scoreCols[i];
    if( hs[col]==null || hs[col].nbins() <= 1 ) continue; // nothing to split on
    nbinsSum += hs[col].nbins();
  }
  // for small work loads, do a serial loop, otherwise, submit work to FJ thread pool
  final boolean isSmall = (nbinsSum <= 1024); //heuristic - 50 cols with 20 nbins, or 1 column with 1024 bins, etc.
  for( int i=0; i<maxCols; i++ ) {
    int col = u._scoreCols == null ? i : u._scoreCols[i];
    if( hs[col]==null || hs[col].nbins() <= 1 ) continue;
    FindSplits fs = new FindSplits(hs, cs, col, u._nid);
    findSplits.add(fs);
    if (isSmall) fs.compute(); // run inline; invokeAll below is skipped
  }
  if (!isSmall) jsr166y.ForkJoinTask.invokeAll(findSplits);
  // Reduce: keep the split with the smallest squared error.
  for( FindSplits fs : findSplits) {
    DTree.Split s = fs._s;
    if( s == null ) continue;
    if (best == null || s.se() < best.se()) best = s;
  }
  return best;
}
ForkJoinTask.invokeAll(dataInhaleJobs); // run all inhale jobs and block until all complete
// Solves the system for each right-hand side in ys, in parallel: task fi
// seeds a unit right-hand side (ys[fi][fi] = 1) and then delegates to the
// single-vector solve() overload for that column.
public void solve(final double [][] ys){
  RecursiveAction [] ras = new RecursiveAction[ys.length];
  for(int i = 0; i < ras.length; ++i) {
    final int fi = i; // effectively-final copy for capture by the anonymous class
    ras[i] = new RecursiveAction() {
      @Override protected void compute() {
        ys[fi][fi] = 1;  // unit right-hand side for column fi
        solve(ys[fi]);   // single-vector solve, done in-place on ys[fi]
      }
    };
  }
  ForkJoinTask.invokeAll(ras); // fork all columns, block until every one completes
}
public double [][] getInv(){
/**
 * Computes the diagonal of the inverse in parallel, one fork/join task per
 * entry: each task solves against a unit vector and retains only the matching
 * component of the solution.
 *
 * @return the diagonal of the inverse
 */
public double [] getInvDiag(){
  final int size = _xx.length + _diag.length;
  final double [] diagOut = new double[size];
  RecursiveAction [] work = new RecursiveAction[size];
  for(int k = 0; k < size; ++k) {
    final int row = k; // effectively-final copy for the anonymous class
    work[k] = new RecursiveAction() {
      @Override protected void compute() {
        double [] rhs = new double[size];
        rhs[row] = 1;          // unit vector e_row
        solve(rhs);            // rhs now holds column row of the inverse
        diagOut[row] = rhs[row];
      }
    };
  }
  ForkJoinTask.invokeAll(work);
  return diagOut;
}
/**
 * Converts matrix _m into a Frame keyed by _key: allocates one Vec per
 * column, fills them all concurrently via FillVec tasks, and stores the
 * resulting Frame in the DKV.
 */
@Override public void compute2() {
  final int width = _m.cols();
  // Larger matrices get smaller chunks; floor of 1 keeps at least 2 rows/chunk.
  final int log_rows_per_chunk =
      Math.max(1, FileVec.DFLT_LOG2_CHUNK_SIZE - (int) Math.floor(Math.log(_m.rows()) / Math.log(2.)));
  Vec[] vecs = new Vec[width];
  FillVec[] jobs = new FillVec[width];
  for (int col = 0; col < width; ++col) {
    vecs[col] = makeCon(0, _m.rows(), log_rows_per_chunk);
    jobs[col] = new FillVec(_m, vecs[col], col);
  }
  ForkJoinTask.invokeAll(jobs); // populate every column in parallel
  Frame frame = new Frame(_key, vecs, true);
  DKV.put(_key, frame);
  tryComplete();
}
// Run every queued task to completion, then hand back the (now finished) list.
invokeAll(tasks.toArray(new ForkJoinTask<?>[tasks.size()]));
return tasks;
// Finds the best split across the candidate columns of an undecided node.
// Small workloads are evaluated serially inline; larger ones are submitted to
// the fork/join pool. Returns the split with the lowest squared error, or
// null when no column yields a usable split.
public Split bestCol(UndecidedNode u, DHistogram hs[], Constraints cs) {
  DTree.Split best = null;
  if( hs == null ) return null;
  final int maxCols = u._scoreCols == null /* all cols */ ? hs.length : u._scoreCols.length;
  List<FindSplits> findSplits = new ArrayList<>();
  //total work is to find the best split across sum_over_cols_to_split(nbins)
  long nbinsSum = 0;
  for( int i=0; i<maxCols; i++ ) {
    int col = u._scoreCols == null ? i : u._scoreCols[i];
    if( hs[col]==null || hs[col].nbins() <= 1 ) continue; // nothing to split on
    nbinsSum += hs[col].nbins();
  }
  // for small work loads, do a serial loop, otherwise, submit work to FJ thread pool
  final boolean isSmall = (nbinsSum <= 1024); //heuristic - 50 cols with 20 nbins, or 1 column with 1024 bins, etc.
  for( int i=0; i<maxCols; i++ ) {
    int col = u._scoreCols == null ? i : u._scoreCols[i];
    if( hs[col]==null || hs[col].nbins() <= 1 ) continue;
    FindSplits fs = new FindSplits(hs, cs, col, u._nid);
    findSplits.add(fs);
    if (isSmall) fs.compute(); // run inline; invokeAll below is skipped
  }
  if (!isSmall) jsr166y.ForkJoinTask.invokeAll(findSplits);
  // Reduce: keep the split with the smallest squared error.
  for( FindSplits fs : findSplits) {
    DTree.Split s = fs._s;
    if( s == null ) continue;
    if (best == null || s.se() < best.se()) best = s;
  }
  return best;
}