/** Deserialize a ping reply: record arrival time, retry count and payload.
 *  Idempotent - a duplicate reply for an already-completed ping is ignored. */
@Override public synchronized UDPPing read(AutoBuffer ab) {
  if( _done ) return this;            // Already completed; drop duplicates
  _done = true;
  _t2 = System.currentTimeMillis();   // Arrival timestamp
  _retries = ab.get4();
  _payload = ab.getA1();
  return this;
}
public static double classify( AutoBuffer ts, double[] ds, double badat, boolean regression ) { ts.get4(); // Skip tree-id ts.get8(); // Skip seed ts.get1(); // Skip producer id byte b; while( (b = (byte) ts.get1()) != '[' ) { // While not a leaf indicator assert b == '(' || b == 'S' || b == 'E'; int col = ts.get2(); // Column number in model-space float fcmp = ts.get4f(); // Float to compare against float fdat = Double.isNaN(ds[col]) ? fcmp - 1 : (float)ds[col]; int skip = (ts.get1()&0xFF); if( skip == 0 ) skip = ts.get3(); if (b == 'E') { if (fdat != fcmp) ts.position(ts.position() + skip); } else { // Picking right subtree? then skip left subtree if( fdat > fcmp ) ts.position(ts.position() + skip); } } if(regression) return ts.get4f(); return ts.get1()&0xFF; // Return the leaf's class }
/** Classify this serialized tree - withOUT inflating it to a full tree. Use row 'row' in the dataset 'ary' (with pre-fetched bits 'databits') Returns classes from 0 to N-1*/ public static float classify( AutoBuffer ts, Chunk[] chks, int row, int modelDataMap[], short badData, boolean regression ) { ts.get4(); // Skip tree-id ts.get8(); // Skip seed ts.get1(); // Skip producer id byte b; while( (b = (byte) ts.get1()) != '[' ) { // While not a leaf indicator assert b == '(' || b == 'S' || b == 'E'; int col = modelDataMap[ts.get2()]; // Column number in model-space mapped to data-space float fcmp = ts.get4f(); // Float to compare against if( chks[col].isNA0(row) ) return badData; float fdat = (float)chks[col].at0(row); int skip = (ts.get1()&0xFF); if( skip == 0 ) skip = ts.get3(); if (b == 'E') { if (fdat != fcmp) ts.position(ts.position() + skip); } else { // Picking right subtree? then skip left subtree if( fdat > fcmp ) ts.position(ts.position() + skip); } } if (regression) { return ts.get4f(); } return (float)((short) ( ts.get1()&0xFF )); // Return the leaf's class }
protected T loadHeader(AutoBuffer ab) { int smId = ab.get4(); // type hash String smCN = ab.getStr(); // type name // Load it Class klazz = null; T m = null; try { klazz = Class.forName(smCN); m = (T) klazz.newInstance(); } catch( Exception e ) { throw new IllegalArgumentException("Cannot instantiate the type " + smCN, e); } int amId = id(m); if (amId != smId) throw new IllegalArgumentException("Trying to load incompatible model! Actual model id = " + amId + ", stored id = " + smId+", type="+smCN); return m; } }
case 1: skip = ab.get2(); break; case 2: skip = ab.get3(); break; case 3: skip = ab.get4(); break; case 16: skip = _nclass < 256?1:2; break; // Small leaf case 48: skip = 4; break; // skip the prediction
/** Deserialize: read the 4-byte element count, then pull that many
 *  elements off the buffer and append them to this list. */
@Override public IcedArrayList<T> read(AutoBuffer bb) {
  final int count = bb.get4();
  for( int left = count; left > 0; left-- )
    add(bb.<T>get());
  return this;
}
case 1: skip = _ts.get2(); break; case 2: skip = _ts.get3(); break; case 3: skip = _ts.get4(); break; case 48: skip = 4; break; // skip is always 4 for direct leaves (see DecidedNode.size() and LeafNode.size() methods) default: assert false:"illegal lmask value " + lmask;
/** Position the buffer at the root node by skipping the fixed tree
 *  header: 4-byte tree ID, 8-byte seed, 1-byte producer id. */
public TreeVisitor( AutoBuffer tbits, boolean regression ) {
  _regression = regression;
  _ts = tbits;
  tbits.get4();                       // Skip tree ID
  tbits.get8();                       // Skip seed
  tbits.get1();                       // Skip producer id
}
case 1: skip = _ts.get2(); break; case 2: skip = _ts.get3(); break; case 3: skip = _ts.get4(); break; case 16: skip = _ct._nclass < 256?1:2; break; // Small leaf case 48: skip = 4; break; // skip is always 4 for direct leaves (see DecidedNode.size() and LeafNode.size() methods)
/** Decode a variable-length int: a 1-byte tag where 0..253 encodes the
 *  value plus one inline, 255 flags a sign-extended 2-byte short, and
 *  254 flags a full 4-byte int. */
int getInt( ) {
  int tag = get1();
  switch( tag ) {
  case 254: return get4();            // Full 4-byte int follows
  case 255: return (short)get2();     // 2-byte value, sign-extended
  default:  assert tag <= 253; return tag-1; // Inline, shifted by one
  }
}
/** Deserialize: read the 4-byte pair count, then read that many
 *  key/value pairs off the buffer (key first) into this map. */
@Override public IcedHashMap<K,V> read(AutoBuffer bb) {
  final int pairs = bb.get4();
  for( int left = pairs; left > 0; left-- ) {
    K key = bb.<K>get();              // Key is serialized before its value
    V val = bb.<V>get();
    put(key, val);
  }
  return this;
}
/** Deserialize this Enum's categorical map.
 *  Wire format: 1-byte killed flag; 4-byte max id; then repeated
 *  (2-byte length, 'length' raw bytes, 4-byte id) entries, terminated
 *  by a length field of 65535. */
@Override public Enum read( AutoBuffer ab ) {
  assert _map == null || _map.size()==0; // Must not already hold entries
  _map = null;
  if( ab.get1() == 1 ) return this;   // Killed?  Leave the map null
  _maxId = ab.get4();
  _map = new NonBlockingHashMap<ValueString, Integer>();
  int len = 0;
  while( (len = ab.get2()) != 65535 ) // Read until end-of-map marker
    _map.put(new ValueString(ab.getA1(len)),ab.get4());
  return this;
} }
/** Deserialize: read the parent's state, then the group->value map as a
 *  4-byte entry count followed by (Group, 8-byte double) pairs.  A zero
 *  count leaves _grp2val null. */
@Override public GroupTask read( AutoBuffer ab ) {
  super.read(ab);                     // Parent fields first
  int len = ab.get4();                // Number of map entries
  if( len == 0 ) return this;         // Empty map serialized as just the count
  _grp2val= new NonBlockingHashMap<Group,Double>();
  for( int i=0; i<len; i++ )
    _grp2val.put(ab.get(Group.class),ab.get8d());
  return this;
}
@Override public void copyOver( Freezable dt ) {
/** After the model body is loaded, stream in ntrees groups of compressed
 *  trees and publish each one under its pre-recorded key.  For every slot
 *  the key and the tree must be both null or both non-null. */
@Override protected AutoBuffer postLoad(Model m, AutoBuffer ab) {
  int ntrees = ab.get4();             // Number of tree groups that follow
  Futures fs = new Futures();
  for (int i=0; i<ntrees; i++) {
    CompressedTree[] ts = ab.getA(CompressedTree.class);
    for (int j=0; j<ts.length; j++) {
      Key k = ((TreeModel) m).treeKeys[i][j];
      // Fixed typo in the assertion message ("Incosistency" -> "Inconsistency")
      assert k == null && ts[j] == null || k != null && ts[j] != null : "Inconsistency in model serialization: key is null but model is not null, OR vice versa!";
      if (k!=null) {
        UKV.put(k, ts[j], fs);        // Publish asynchronously; gather the futures
      }
    }
  }
  fs.blockForPending();               // Wait for all puts before returning
  return ab;
} };
/** Read a long[] written in sparse form: leading-zero count, non-zero
 *  middle count, trailing-zero count, then a 1-byte element width
 *  (1, 2, 4 or 8) followed by the middle values.  The width-8 case is
 *  bulk-copied through a LongBuffer view, refilling the backing byte
 *  buffer as needed.  Returns null when the length marker is -1. */
public long[] getA8( ) {
  _arys++;
  // Get the lengths of lead & trailing zero sections, and the non-zero
  // middle section.
  int x = getInt(); if( x == -1 ) return null; // Leading zeros; -1 encodes a null array
  int y = getInt();                   // Non-zero in the middle
  int z = y==0 ? 0 : getInt();        // Trailing zeros
  long[] buf = MemoryManager.malloc8(x+y+z);
  switch( get1() ) { // 1,2,4 or 8 for how the middle section is passed
  case 1: for( int i=x; i<x+y; i++ ) buf[i] = get1(); return buf;
  case 2: for( int i=x; i<x+y; i++ ) buf[i] = (short)get2(); return buf; // Sign-extended
  case 4: for( int i=x; i<x+y; i++ ) buf[i] = get4(); return buf;
  case 8: break;                      // Bulk path below
  default: throw H2O.fail();
  }
  int sofar = x;
  while( sofar < x+y ) {
    LongBuffer as = _bb.asLongBuffer(); // View over the currently-buffered bytes
    int more = Math.min(as.remaining(), x+y - sofar);
    as.get(buf, sofar, more);
    sofar += more;
    _bb.position(_bb.position() + as.position()*8); // Advance past the longs consumed
    if( sofar < x+y ) getSp(Math.min(_bb.capacity()-7, (x+y-sofar)*8)); // Refill for the remainder
  }
  return buf;
}
public double[] getA8d( ) {
/** Deserialize a word-count map and merge it into the shared WORDS map.
 *  Wire format: repeated (2-byte length, 'length' raw bytes, 4-byte
 *  count) entries, terminated by a length field of 65535.  Counts for
 *  words already present are merged via VStr.inc.  Also prints timing
 *  to stdout. */
@Override public WordCount read(AutoBuffer ab) {
  super.read(ab);                     // Parent fields first
  final long start = System.currentTimeMillis();
  int cnt=0;                          // Words read, for the timing report
  _words = WORDS;                     // All instances share the global map
  int len = 0;
  while( (len = ab.get2()) != 65535 ) { // Read until end-of-map marker
    VStr vs = new VStr(ab.getA1(len),(short)0);
    vs._len = (short)len;
    vs._cnt = ab.get4();
    VStr vs2 = WORDS.putIfAbsent(vs,vs); // Non-null result means the word was already present
    if( vs2 != null ) vs2.inc(vs._cnt); // Inc count on added word
    cnt++;
  }
  final long t = System.currentTimeMillis() - start;
  System.out.println("WC Read takes "+t+"msec for "+cnt+" words");
  return this;
}
/** Share the global WORDS map rather than copying per-instance state. */
@Override public void copyOver(Freezable wc) { _words = ((WordCount)wc)._words; }
/** Deserialize: parent state, then the gather-rows flag, column array and
 *  uniquifier, then the group map as a 4-byte count of Group keys.  Each
 *  group gets a fresh placeholder NewChunk (the chunk contents are not
 *  serialized).  A zero count leaves _groups null. */
@Override public ddplyPass1 read( AutoBuffer ab ) {
  super.read(ab);                     // Parent fields first
  assert _groups == null;             // Must not already hold groups
  _gatherRows = ab.getZ();
  _cols = ab.getA4();
  _uniq = ab.get();
  int len = ab.get4();                // Number of groups
  if( len == 0 ) return this;         // Empty map serialized as just the count
  _groups = new NonBlockingHashMap<Group,NewChunk>();
  for( int i=0; i<len; i++ )
    _groups.put(ab.get(Group.class),new NewChunk(null,-99));
  return this;
}
@Override public void copyOver( Freezable dt ) {
/** After the model body is loaded, stream in ntrees serialized trees:
 *  publish each tree's raw bytes under its t_key, and each non-null
 *  per-class compressed DTree under its dtreeKeys entry. */
@Override protected AutoBuffer postLoad(Model m, AutoBuffer ab) {
  int ntrees = ab.get4();             // Number of trees that follow
  Futures fs = new Futures();
  for (int i = 0; i < ntrees; ++i) {
    DKV.put(t_keys[i],new Value(t_keys[i],ab.getA1()), fs); // Raw tree bytes
    for (int j = 0; j < nclasses(); ++j) {
      if (dtreeKeys[i][j] == null) continue; // Slot was empty at save time
      UKV.put(dtreeKeys[i][j], new Value(dtreeKeys[i][j], ab.get(DTree.TreeModel.CompressedTree.class)), fs);
    }
  }
  fs.blockForPending();               // Wait for all puts before returning
  return ab;
} };