// Slow-path append for UUID (16-byte) columns: grow the paired long/double
// backing arrays before the next value is stored.
private void append2slowUUID() {
  final int CHUNK_SZ = 1 << H2O.LOG_CHK;
  // Never grow past the maximum number of rows a single chunk may hold.
  if( _sparseLen > CHUNK_SZ ) throw new ArrayIndexOutOfBoundsException(_sparseLen);
  if( _ds==null && _ls!=null ) { // This can happen for columns with all NAs and then a UUID
    _xs=null;
    alloc_doubles(_sparseLen);
    // UUIDs use both arrays: _ls holds the low half, _ds the (bit-cast) high
    // half.  Back-fill the existing all-NA prefix with the C16 NA sentinels.
    Arrays.fill(_ls,C16Chunk._LO_NA);
    Arrays.fill(_ds,Double.longBitsToDouble(C16Chunk._HI_NA));
  }
  if( _ls != null && _ls.length > 0 ) {
    // Grow both halves in lock-step by doubling the used length.
    _ls = MemoryManager.arrayCopyOf(_ls,_sparseLen<<1);
    _ds = MemoryManager.arrayCopyOf(_ds,_sparseLen<<1);
  } else {
    // First allocation: start with a small capacity.
    alloc_mantissa(4);
    alloc_doubles(4);
  }
  assert _sparseLen == 0 || _ls.length > _sparseLen:"_ls.length = " + _ls.length + ", _sparseLen = " + _sparseLen;
} // Slow-path append data
public double[] denormalizeBeta(double [] beta) { int N = fullN()+1; assert (beta.length % N) == 0:"beta len = " + beta.length + " expected multiple of" + N; int nclasses = beta.length/N; beta = MemoryManager.arrayCopyOf(beta,beta.length); if (_predictor_transform == DataInfo.TransformType.STANDARDIZE) { for(int c = 0; c < nclasses; ++c) { int off = N*c; double norm = 0.0; // Reverse any normalization on the intercept // denormalize only the numeric coefs (categoricals are not normalized) final int numoff = numStart(); for (int i = numoff; i < N-1; i++) { double b = beta[off + i] * _normMul[i - numoff]; norm += b * _normSub[i - numoff]; // Also accumulate the intercept adjustment beta[off + i] = b; } beta[off + N - 1] -= norm; } } return beta; }
else _id = MemoryManager.arrayCopyOf(_id,_sparseLen<<1); _ls = MemoryManager.arrayCopyOf(_ls,_sparseLen<<1); _xs = MemoryManager.arrayCopyOf(_xs,_sparseLen<<1); } else { alloc_mantissa(4);
// Slow-path append for double columns: grow the backing arrays, possibly
// switching to the sparse representation first when zeros dominate.
private void append2slowd() {
  final int CHUNK_SZ = 1 << H2O.LOG_CHK;
  // Never grow past the maximum number of rows a single chunk may hold.
  if( _sparseLen > CHUNK_SZ ) throw new ArrayIndexOutOfBoundsException(_sparseLen);
  assert _ls==null; // double storage and long storage never coexist here
  if(_ds != null && _ds.length > 0){
    if(_id == null){ // check for sparseness
      int nzs = 0; // assume one non-zero for the element currently being stored
      for(double d:_ds)if(d != 0)++nzs;
      // Switch to sparse storage when the non-zero count (plus the incoming
      // element) is small enough relative to the logical length to pay off.
      if((nzs+1)*_sparseRatio < _len) set_sparse(nzs);
    } else _id = MemoryManager.arrayCopyOf(_id, _sparseLen << 1); // already sparse: grow the index array in lock-step
    _ds = MemoryManager.arrayCopyOf(_ds,_sparseLen<<1); // double the value array
  } else {
    // First allocation: start with a small capacity.
    alloc_doubles(4);
    if (sparse()) alloc_indices(4);
  }
  assert _sparseLen == 0 || _ds.length > _sparseLen :"_ds.length = " + _ds.length + ", _sparseLen = " + _sparseLen;
} // Slow-path append data
_xs = MemoryManager.arrayCopyOf(_xs,_xs.length<<1); _ls = MemoryManager.arrayCopyOf(_ls,_xs.length); System.arraycopy(nc._ls,0,_ls,_sparseLen,nc._sparseLen); System.arraycopy(nc._xs,0,_xs,_sparseLen,nc._sparseLen); if(_id != null) { assert nc._id != null; _id = MemoryManager.arrayCopyOf(_id,_xs.length); System.arraycopy(nc._id,0,_id,_sparseLen,nc._sparseLen); for(int i = _sparseLen; i < _sparseLen + nc._sparseLen; ++i) _id[i] += _len;
public void solve(double[] result) { System.arraycopy(_xy, 0, result, 0, _xy.length); _chol.solve(result); double gerr = Double.POSITIVE_INFINITY; if (_addedL2) { // had to add l2-pen to turn the gram to be SPD double[] oldRes = MemoryManager.arrayCopyOf(result, result.length); for (int i = 0; i < 1000; ++i) { solve(oldRes, result); double[] g = gradient(result)._gradient; gerr = Math.max(-ArrayUtils.minValue(g), ArrayUtils.maxValue(g)); if (gerr < 1e-4) return; System.arraycopy(result, 0, oldRes, 0, result.length); } Log.warn("Gram solver did not converge, gerr = " + gerr); } }
map = FrameUtils.asInts(v); int[] sortedMap = MemoryManager.arrayCopyOf(map, map.length); Arrays.sort(sortedMap); for (int i = 1; i < sortedMap.length; ++i)
public double[] denormalizeBeta(double [] beta) { int N = fullN()+1; assert (beta.length % N) == 0:"beta len = " + beta.length + " expected multiple of" + N; int nclasses = beta.length/N; beta = MemoryManager.arrayCopyOf(beta,beta.length); if (_predictor_transform == DataInfo.TransformType.STANDARDIZE) { for(int c = 0; c < nclasses; ++c) { int off = N*c; double norm = 0.0; // Reverse any normalization on the intercept // denormalize only the numeric coefs (categoricals are not normalized) final int numoff = numStart(); for (int i = numoff; i < N-1; i++) { double b = beta[off + i] * _normMul[i - numoff]; norm += b * _normSub[i - numoff]; // Also accumulate the intercept adjustment beta[off + i] = b; } beta[off + N - 1] -= norm; } } return beta; }
// Solve the normal equations for the coefficient vector, writing the answer
// into result (must be at least _xy.length long).
public void solve(double[] result) {
  // Start from the right-hand side X'y and solve in place via the Cholesky factor.
  System.arraycopy(_xy, 0, result, 0, _xy.length);
  _chol.solve(result);
  double gerr = Double.POSITIVE_INFINITY;
  if (_addedL2) { // had to add l2-pen to turn the gram to be SPD
    // The penalized solve is only approximate, so refine: repeatedly re-solve
    // from the previous answer until the gradient is small enough.
    double[] oldRes = MemoryManager.arrayCopyOf(result, result.length);
    for (int i = 0; i < 1000; ++i) {
      solve(oldRes, result);
      double[] g = gradient(result)._gradient;
      // inf-norm of the gradient: max over |g_i|
      gerr = Math.max(-ArrayUtils.minValue(g), ArrayUtils.maxValue(g));
      if (gerr < 1e-4) return; // converged
      System.arraycopy(result, 0, oldRes, 0, result.length);
    }
    Log.warn("Gram solver did not converge, gerr = " + gerr);
  }
}
map = FrameUtils.asInts(v); int[] sortedMap = MemoryManager.arrayCopyOf(map, map.length); Arrays.sort(sortedMap); for (int i = 1; i < sortedMap.length; ++i)