/** Reads a length-prefixed byte array from the buffer; a length prefix of -1
 *  encodes a null array (matching the convention used by getStr/getAA1). */
public byte[] getA1( ) {
  _arys++;                          // bump the array-read statistics counter
  int len = getInt();               // length prefix written by the matching putA1
  return len == -1 ? null : getA1(len);
}
// Reads exactly 'len' raw bytes. (Body continues beyond this view.)
public byte[] getA1( int len ) {
/** Deserializes a ping reply exactly once: records the receive timestamp,
 *  the retry count, and the echoed payload. Subsequent calls are no-ops. */
@Override public synchronized UDPPing read(AutoBuffer ab) {
  if( !_done ) {
    _done = true;
    _t2 = System.currentTimeMillis();   // arrival time of the reply
    _retries = ab.get4();
    _payload = ab.getA1();
  }
  return this;
}
/** Reads a length-prefixed string; a length of -1 encodes null.
 *  Decodes the bytes as UTF-8 explicitly (consistent with the UTF_8-based
 *  getStr elsewhere in this codebase) instead of the platform default
 *  charset, which can differ between nodes in a distributed deployment and
 *  silently corrupt round-tripped strings. */
public String getStr( ) {
  int len = getInt();               // length prefix; -1 means null
  return len == -1 ? null : new String(getA1(len), java.nio.charset.StandardCharsets.UTF_8);
}
/** Reconstructs a Key from its serialized backing bytes. */
@Override public final Key read(AutoBuffer bb) {
  byte[] kb = bb.getA1();
  return make(kb);
}
/** Writes this Key's backing byte array into the buffer. */
@Override public final AutoBuffer write(AutoBuffer bb) {
  return bb.putA1(_kb);
}
/** Reads a 2-D byte array encoded as runs: x leading nulls and y non-null
 *  rows packed into one long, then (when y > 0) a trailing-null count z,
 *  then the y non-null rows themselves. Returns null for a null array. */
public byte[][] getAA1( ) {
  _arys++;                          // bump the array-read statistics counter
  long xy = getZA();                // packed (x,y) header; -1 encodes a null array
  if( xy == -1 ) return null;
  int x=(int)(xy>>32); // Leading nulls
  int y=(int)xy; // Middle non-zeros
  int z = y==0 ? 0 : getInt(); // Trailing nulls
  byte[][] ary = new byte[x+y+z][]; // null slots stay null by default
  for( int i=x; i<x+y; i++ ) ary[i] = getA1();
  return ary;
}
// (Body continues beyond this view.)
public short[][] getAA2( ) {
/** Deserializes an H2Okey: 4 raw IPv4 address bytes followed by a 2-byte port. */
static H2Okey read( AutoBuffer ab ) {
  byte[] addr = ab.getA1(4);
  InetAddress inet;
  try {
    inet = InetAddress.getByAddress(addr);
  } catch( UnknownHostException e ) {
    // getByAddress only throws on illegal lengths; 4 bytes makes this unreachable
    throw Log.errRTExcept(e);
  }
  return new H2Okey(inet, ab.get2());
} // Canonical ordering based on inet & port
/** Deserializes this Value's metadata and payload from the wire.
 *  Read order must mirror the matching write(): persist byte, type short,
 *  then the raw payload bytes. */
public Value read(AutoBuffer bb) {
  assert _key == null; // Not set yet
  _persist = (byte) bb.get1();
  _type = (short) bb.get2();
  _mem = bb.getA1();
  _max = _mem.length;               // payload is fully in memory after a read
  _pojo = null;                     // decoded form is rebuilt lazily on demand
  // On remote nodes _rwlock is initialized to 0 (signaling a remote PUT is
  // in progress) flips to -1 when the remote PUT is done, or +1 if a notify
  // needs to happen.
  _rwlock.set(-1); // Set as 'remote put is done'
  touch();                          // presumably refreshes access tracking — see touch()
  return this;
}
/** Loads a Value's bytes back from its local spill file.
 *  Returns null when the file is shorter than the expected size (a racey
 *  delete of a spilled value); wraps real I/O failures in RuntimeException. */
@Override public byte[] load(Value v) {
  File f = getFile(v);
  if( f.length() < v._max ) { // Should be fully on disk...
    // or it's a racey delete of a spilled value
    assert !v.isPersisted() : f.length() + " " + v._max + " " + v._key;
    return null; // No value
  }
  try {
    FileInputStream s = new FileInputStream(f);
    try {
      AutoBuffer ab = new AutoBuffer(s.getChannel(), true, Value.ICE);
      byte[] b = ab.getA1(v._max);  // read exactly the expected payload size
      ab.close();
      return b;
    } finally {
      s.close();                    // closing the stream also closes its channel
    }
  } catch( IOException e ) { // Broken disk / short-file???
    throw new RuntimeException(Log.err("File load failed: ", e));
  }
}
public final WordCountTask read_impl(AutoBuffer ab) { _counts = new IcedHashMap<>(); int len; while ((len = ab.get2()) != 65535) { // Read until end-of-map marker byte[] bs = ab.getA1(len); long cnt = ab.get8(); _counts.put(new BufferedString(new String(bs)), new IcedLong(cnt)); } return this; }
/** Loads a Value's bytes from its backing NFS file, seeking first to the
 *  chunk offset for DVEC keys. Best-effort: any IOException is swallowed
 *  and null is returned (broken disk / short file is recoverable upstream). */
@Override public byte[] load(Value v) {
  long skip = 0;
  Key k = v._key;
  // Convert a chunk into a long-offset from the base file.
  if( k._kb[0] == Key.DVEC ) skip = water.fvec.NFSFileVec.chunkOffset(k); // The offset
  try {
    FileInputStream s = null;
    try {
      s = new FileInputStream(getFileForKey(k));
      FileChannel fc = s.getChannel();
      fc.position(skip);            // seek to this chunk's start within the file
      AutoBuffer ab = new AutoBuffer(fc, true, Value.NFS);
      byte[] b = ab.getA1(v._max);  // read exactly the expected payload size
      ab.close();
      assert v.isPersisted();
      return b;
    } finally {
      if( s != null ) s.close();
    }
  } catch( IOException e ) { // Broken disk / short-file???
    H2O.ignore(e);                  // deliberate best-effort swallow; caller handles null
    return null;
  }
}
@Override public Enum read( AutoBuffer ab ) { assert _map == null || _map.size()==0; _map = null; if( ab.get1() == 1 ) return this; // Killed? _maxId = ab.get4(); _map = new NonBlockingHashMap<ValueString, Integer>(); int len = 0; while( (len = ab.get2()) != 65535 ) // Read until end-of-map marker _map.put(new ValueString(ab.getA1(len)),ab.get4()); return this; } }
@Override public WordCount read(AutoBuffer ab) { super.read(ab); final long start = System.currentTimeMillis(); int cnt=0; _words = WORDS; int len = 0; while( (len = ab.get2()) != 65535 ) { // Read until end-of-map marker VStr vs = new VStr(ab.getA1(len),(short)0); vs._len = (short)len; vs._cnt = ab.get4(); VStr vs2 = WORDS.putIfAbsent(vs,vs); if( vs2 != null ) vs2.inc(vs._cnt); // Inc count on added word cnt++; } final long t = System.currentTimeMillis() - start; System.out.println("WC Read takes "+t+"msec for "+cnt+" words"); return this; } @Override public void copyOver(Freezable wc) { _words = ((WordCount)wc)._words; }
/** Re-publishes per-tree payloads from the buffer into the store after a
 *  model load: a 4-byte tree count, then per tree one raw byte array plus
 *  one CompressedTree per non-null class slot. Read order must mirror the
 *  matching save. Blocks until all async puts complete. */
@Override protected AutoBuffer postLoad(Model m, AutoBuffer ab) {
  int ntrees = ab.get4();           // tree count written by the matching save
  Futures fs = new Futures();       // collects the async put completions
  for (int i = 0; i < ntrees; ++i) {
    DKV.put(t_keys[i],new Value(t_keys[i],ab.getA1()), fs);
    for (int j = 0; j < nclasses(); ++j) {
      if (dtreeKeys[i][j] == null) continue; // this (tree,class) slot was never saved
      UKV.put(dtreeKeys[i][j], new Value(dtreeKeys[i][j], ab.get(DTree.TreeModel.CompressedTree.class)), fs);
    }
  }
  fs.blockForPending();             // wait for every put before returning
  return ab;
}
};
/** Reads a 4-byte length then that many UTF-8 bytes; a length of -1 encodes null. */
public String getStr( ) {
  final int len = ab.get4();
  if( len == -1 ) return null;
  return new String(ab.getA1(len), UTF_8);
}
public final WordCountTask read_impl(AutoBuffer ab) { _counts = new IcedHashMap<>(); int len; while ((len = ab.get2()) != 65535) { // Read until end-of-map marker byte[] bs = ab.getA1(len); long cnt = ab.get8(); _counts.put(new BufferedString(new String(bs)), new IcedLong(cnt)); } return this; }