/**
 * Writes this leaf into the compressed tree stream: a single 4-byte float
 * prediction. The auxiliary buffer is unused for leaves.
 */
@Override protected AutoBuffer compress(AutoBuffer ab, AutoBuffer abAux) {
  final float prediction = _pred;
  assert !Double.isNaN(prediction); // a leaf must carry a valid prediction
  return ab.put4f(prediction);
}

/** On-stream footprint of a leaf: one 4-byte float. */
@Override protected int size() {
  return 4;
}
/**
 * Serializes this leaf's prediction into the compressed tree stream.
 * The stream stores predictions as 4-byte floats, so the double is narrowed.
 */
@Override protected AutoBuffer compress(AutoBuffer ab) {
  assert !Double.isNaN(_pred); // never serialize an unset prediction
  final float asFloat = (float) _pred;
  return ab.put4f(asFloat);
}

/** @return fixed size in bytes of this node in the compressed stream. */
@Override protected int size() {
  return 4;
}
@Override AutoBuffer compress(AutoBuffer ab) { assert !Float.isNaN(classify( null )); // a little hacky here return ab.put4f(classify( null )); } @Override int dtreeSize() { return 4; }
/**
 * Emits this leaf's prediction (narrowed to float) into the compressed tree.
 */
@Override protected AutoBuffer compress(AutoBuffer ab) {
  final double prediction = pred();
  assert !Double.isNaN(prediction); // prediction must be set before compression
  return ab.put4f((float) prediction);
}

/** Compressed size of a leaf node: a single 4-byte float. */
@Override protected int size() {
  return 4;
}
// Serializes this node as a leaf: a '[' marker byte, then either the float
// payload _c (when _class == -1) or a single-byte class id.
@Override void write( AutoBuffer bs ) {
  bs.put1('['); // Leaf indicator
  if (_class == -1) {
    bs.put4f(_c); // no single class chosen: write the float value
  } else {
    bs.put1(_class); // class id — assumes it fits in one byte; TODO confirm range
  }
}
// NOTE(review): definition continues past this view; its size must mirror
// the bytes emitted by write() above.
@Override int size_impl( ) {
// Fragment of the internal-node serializer (method header not visible here).
// Main stream: the split point — a 4-byte float for numeric splits
// (_equal == 0 or 1), otherwise a compressed bitset for categorical splits.
if (_split._equal == 0 || _split._equal == 1) ab.put4f(_splat);
else if(_split._equal == 2) _split._bs.compress2(ab);
else _split._bs.compress3(ab);
// Auxiliary stream: per-node bookkeeping used when reconstructing the tree.
abAux.put4(_nid);
abAux.put4(_tree.node(_nids[0]).numNodes()); // number of nodes in the left subtree; this used to be 'parent node id'
abAux.put4f((float)_split._n0); // presumably per-child row counts — TODO confirm
abAux.put4f((float)_split._n1);
abAux.put4f((float)_split._p0); // presumably per-child predictions — TODO confirm
abAux.put4f((float)_split._p1);
abAux.put4f((float)_split._se0); // presumably per-child squared errors — TODO confirm
abAux.put4f((float)_split._se1);
abAux.put4(_nids[0]); // child node ids (left, right)
abAux.put4(_nids[1]);
@Override void write( AutoBuffer bs ) { bs.put1('E'); // Node indicator assert Short.MIN_VALUE <= _column && _column < Short.MAX_VALUE; bs.put2((short)_column); bs.put4f(split_value()); int skip = _l.size(); // Drop down the amount to skip over the left column if( skip <= 254 ) bs.put1(skip); else { bs.put1(0); bs.put3(skip); } _l.write(bs); _r.write(bs); } public boolean isIn(Row row) { return row.getEncodedColumnValue(_column) == _split; }
// Fragment: re-emits a node while rewriting a compressed tree (enclosing
// method header not visible here).
result.put1(_nodeType);
result.put2((short) _col);
result.put4f(splitValue);
if (skip <= 255) {
  // Only subtree node types carry a skip byte.
  if (leftType == 'S' || leftType == 'E') result.put1(skip); // leaf will have no skip size because its size is fixed.
  // Copy the leaf payload: a float for regression, a widened class byte otherwise.
  if (regression) { result.put4f(ab.get4f());} else { result.put4f((float)ab.get1());}
} else { /* running out of the buffer*/ return result;}
// Serializes this split node: 'S' marker, 2-byte column index, 4-byte split
// value, the encoded skip size of the left subtree, then both children.
@Override void write( AutoBuffer bs ) {
  bs.put1('S'); // Node indicator
  assert Short.MIN_VALUE <= _column && _column < Short.MAX_VALUE;
  bs.put2((short) _column);
  bs.put4f(split_value());
  int skip = _l.size(); // Drop down the amount to skip over the left column
  if( skip <= 254 ) bs.put1(skip);
  else {
    bs.put1(0); // escape marker: the real skip follows in 3 bytes
    // Guard: the skip must fit in the 3-byte escape encoding or the tree is unusable.
    if (! ((-1<<24) <= skip && skip < (1<<24))) throw H2O.fail("Trees have grown too deep. Use BigData RF or limit the tree depth for your model. For more information, contact support: support@h2o.ai");
    bs.put3(skip);
  }
  _l.write(bs);
  _r.write(bs);
}
// NOTE(review): definition continues past this view; it must mirror the byte
// layout emitted by write() above.
@Override public int size_impl( ) {
// Fragment: emit the split column and split point (method header not visible).
assert _column != -1; // a split column must have been chosen before serialization
ab.put2((short)_column);
ab.put4f(_originalSplit); // assuming we only have _equal == 0 or 1 which is binary split
// Fragment: emit the split column and split point, then branch on the left
// child's kind (the else-branch continues past this view).
assert _column != -1; // a split column must have been chosen before serialization
ab.put2((short)_column);
ab.put4f(_originalSplit); // assuming we only have _equal == 0 or 1 which is binary split
if( _l instanceof LeafNode ) { /* don't have skip size if left child is leaf.*/} else {
// Fragment mid if/else chain: numeric split point as a 4-byte float, versus
// a categorical bitset branch (surrounding condition and body not visible).
ab.put4f(_splat);
else if(_split._equal == 2) {
/**
 * Compresses this leaf node: the 4-byte float prediction goes to the main
 * stream; nothing is written to the auxiliary stream for leaves.
 */
@Override protected AutoBuffer compress(AutoBuffer ab, AutoBuffer abAux) {
  assert !Double.isNaN(_pred); // never serialize an unset prediction
  return ab.put4f(_pred);
}

/** Compressed size of a leaf: a single 4-byte float. */
@Override protected int size() {
  return 4;
}
// Fragment of the internal-node serializer (method header not visible here).
// Main stream: the split point — a 4-byte float for numeric splits
// (_equal == 0 or 1), otherwise a compressed bitset for categorical splits.
if (_split._equal == 0 || _split._equal == 1) ab.put4f(_splat);
else if(_split._equal == 2) _split._bs.compress2(ab);
else _split._bs.compress3(ab);
// Auxiliary stream: per-node bookkeeping used when reconstructing the tree.
abAux.put4(_nid);
abAux.put4(_tree.node(_nids[0]).numNodes()); // number of nodes in the left subtree; this used to be 'parent node id'
abAux.put4f((float)_split._n0); // presumably per-child row counts — TODO confirm
abAux.put4f((float)_split._n1);
abAux.put4f((float)_split._p0); // presumably per-child predictions — TODO confirm
abAux.put4f((float)_split._p1);
abAux.put4f((float)_split._se0); // presumably per-child squared errors — TODO confirm
abAux.put4f((float)_split._se1);
abAux.put4(_nids[0]); // child node ids (left, right)
abAux.put4(_nids[1]);