/**
 * Binds this deserializer to the given stream, wrapping it in a
 * {@link HadoopTupleInputStream} unless it already is one.
 *
 * @param in the raw stream to read tuples from
 */
public void open( InputStream in )
  {
  boolean alreadyWrapped = in instanceof HadoopTupleInputStream;

  inputStream = alreadyWrapped
    ? (HadoopTupleInputStream) in
    : new HadoopTupleInputStream( in, elementReader );
  }
return readString(); return readNullFloat(); if( type == Double.class ) return readNullDouble(); if( type == Integer.class ) return readNullVInt(); if( type == Long.class ) return readNullVLong(); if( type == Boolean.class ) return readNullBoolean(); if( type == Short.class ) return readNullShort(); return readFloat(); if( type == Double.TYPE ) return readDouble(); if( type == Integer.TYPE ) return readVInt(); if( type == Long.TYPE ) return readVLong(); if( type == Boolean.TYPE ) return readBoolean(); if( type == Short.TYPE ) return readShort(); return readTuple(); if( type == TuplePair.class ) return readTuplePair();
@Override public int compare( HadoopTupleInputStream lhsStream, HadoopTupleInputStream rhsStream ) { try { // pop off element type, its assumed we know it as we have a stream comparator // to delegate too int lhsToken = lhsStream.readToken(); int rhsToken = rhsStream.readToken(); if( lhsToken == HadoopTupleOutputStream.WRITABLE_TOKEN ) lhsStream.readString(); if( rhsToken == HadoopTupleOutputStream.WRITABLE_TOKEN ) rhsStream.readString(); InputStream lhs = lhsToken == 0 ? null : lhsStream.getInputStream(); InputStream rhs = rhsToken == 0 ? null : rhsStream.getInputStream(); return comparator.compare( lhs, rhs ); } catch( Exception exception ) { throw new CascadingException( "unable to compare Tuples, likely a CoGroup is being attempted on fields of " + "different types or custom comparators are incorrectly set on Fields", exception ); } } }
/**
 * Decodes the next stream element for the given type token. Tokens 0 through
 * 10 map to the built-in tuple element kinds; any other token is delegated to
 * the pluggable element reader.
 *
 * @param type the type token previously read from the stream
 * @return the decoded element, or null for token 0
 * @throws IOException on stream read failure
 */
protected final Object readType( int type ) throws IOException
  {
  switch( type )
    {
    case 0:
      return null;
    case 1:
      return readString();
    case 2:
      return readFloat();
    case 3:
      return readDouble();
    case 4:
      return readVInt();
    case 5:
      return readVLong();
    case 6:
      return readBoolean();
    case 7:
      return readShort();
    case 8:
      return readTuple();
    case 9:
      return readTuplePair();
    case 10:
      return readIndexTuple();
    default:
      // custom serialized types carry their own registered token
      return elementReader.read( type, this );
    }
  }
@Override public int compare( BufferedInputStream lhsStream, BufferedInputStream rhsStream ) { HadoopTupleInputStream lhsInput = new HadoopTupleInputStream( lhsStream, new TupleSerialization().getElementReader() ); HadoopTupleInputStream rhsInput = new HadoopTupleInputStream( rhsStream, new TupleSerialization().getElementReader() ); try { // explicit for debugging purposes String s1 = (String) lhsInput.readString(); String s2 = (String) rhsInput.readString(); return reverse ? s2.compareTo( s1 ) : s1.compareTo( s2 ); } catch( IOException exception ) { throw new CascadingException( exception ); } }
@Override public int compare( BufferedInputStream lhsStream, BufferedInputStream rhsStream ) { if( lhsStream == null && rhsStream == null ) return 0; if( lhsStream == null ) return !reverse ? -1 : 1; if( rhsStream == null ) return !reverse ? 1 : -1; HadoopTupleInputStream lhsInput = new HadoopTupleInputStream( lhsStream, new TupleSerialization().getElementReader() ); HadoopTupleInputStream rhsInput = new HadoopTupleInputStream( rhsStream, new TupleSerialization().getElementReader() ); try { // explicit for debugging purposes Long l1 = (Long) lhsInput.readVLong(); Long l2 = (Long) rhsInput.readVLong(); return reverse ? l2.compareTo( l1 ) : l1.compareTo( l2 ); } catch( Exception exception ) { throw new CascadingException( exception ); } }
public T deserialize( IndexTuple tuple ) throws IOException { if( tuple == null ) tuple = createTuple(); int ordinal = inputStream.readVInt(); tuple.setIndex( ordinal ); Class[] types = getTypesFor( ordinal ); // in both cases, we need to fill a new Tuple instance if( types == null ) tuple.setTuple( inputStream.readUnTyped( new Tuple() ) ); else tuple.setTuple( inputStream.readTyped( types, new Tuple() ) ); return (T) tuple; }
/**
 * Reads the next element's type token, encoded as a variable-length int.
 *
 * @return the type token
 * @throws IOException on stream read failure
 */
public int readToken() throws IOException
  {
  int token = readVInt();

  return token;
  }
/**
 * Compares two tuple element streams via the configured
 * {@link StreamComparator}. For non-primitive field types each element
 * carries a leading token, where 0 marks a null element passed to the
 * delegate as a null stream; primitive fields carry no token and the raw
 * streams are handed straight through.
 */
@Override
public int compare( HadoopTupleInputStream lhsStream, HadoopTupleInputStream rhsStream )
  {
  try
    {
    InputStream left = lhsStream.getInputStream();
    InputStream right = rhsStream.getInputStream();

    // primitives are written without a null token, so only consume one
    // for object (possibly null) field types
    if( !type.isPrimitive() )
      {
      if( lhsStream.readToken() == 0 )
        left = null;

      if( rhsStream.readToken() == 0 )
        right = null;
      }

    return comparator.compare( left, right );
    }
  catch( Exception exception )
    {
    throw new CascadingException( "unable to compare Tuples, likely a CoGroup is being attempted on fields of " +
      "different types or custom comparators are incorrectly set on Fields", exception );
    }
  }
}
/**
 * Closes the underlying tuple stream, if open, and always clears the
 * reference so a closed deserializer cannot be reused by accident.
 *
 * @throws IOException if closing the underlying stream fails
 */
public void close() throws IOException
  {
  if( inputStream == null )
    return;

  try
    {
    inputStream.close();
    }
  finally
    {
    inputStream = null;
    }
  }
}
/**
 * Compares the remaining elements of two untyped tuple streams. Tuples of
 * unequal length are ordered by length; otherwise elements are compared
 * pairwise until the first difference.
 *
 * @param comparators per-position stream comparators, cycled when there are
 *                    fewer comparators than elements
 * @return a negative, zero, or positive ordering value
 * @throws IOException on stream read failure
 */
final int compareUnTypedTuples( Comparator[] comparators ) throws IOException
  {
  int leftSize = lhsStream.getNumElements();
  int rightSize = rhsStream.getNumElements();

  // unequal lengths decide the ordering outright
  if( leftSize != rightSize )
    return leftSize - rightSize;

  for( int i = 0; i < leftSize; i++ )
    {
    int result;

    try
      {
      // comparators cycle when fewer are supplied than there are elements
      result = ( (StreamComparator) comparators[ i % comparators.length ] ).compare( lhsStream, rhsStream );
      }
    catch( Exception exception )
      {
      throw new CascadingException( "unable to compare stream elements in position: " + i, exception );
      }

    if( result != 0 )
      return result;
    }

  return 0;
  }
}
/**
 * Decodes the next stream element for the given type token. Tokens 0 through
 * 10 map to the built-in tuple element kinds; any other token is delegated to
 * the pluggable element reader.
 *
 * @param type the type token previously read from the stream
 * @return the decoded element, or null for token 0
 * @throws IOException on stream read failure
 */
protected final Object readType( int type ) throws IOException
  {
  switch( type )
    {
    case 0:
      return null;
    case 1:
      return readString();
    case 2:
      return readFloat();
    case 3:
      return readDouble();
    case 4:
      return readVInt();
    case 5:
      return readVLong();
    case 6:
      return readBoolean();
    case 7:
      return readShort();
    case 8:
      return readTuple();
    case 9:
      return readTuplePair();
    case 10:
      return readIndexTuple();
    default:
      // custom serialized types carry their own registered token
      return elementReader.read( type, this );
    }
  }
@Override public int compare( BufferedInputStream lhsStream, BufferedInputStream rhsStream ) { HadoopTupleInputStream lhsInput = new HadoopTupleInputStream( lhsStream, new TupleSerialization().getElementReader() ); HadoopTupleInputStream rhsInput = new HadoopTupleInputStream( rhsStream, new TupleSerialization().getElementReader() ); try { // explicit for debugging purposes String s1 = (String) lhsInput.readString(); String s2 = (String) rhsInput.readString(); return reverse ? s2.compareTo( s1 ) : s1.compareTo( s2 ); } catch( IOException exception ) { throw new CascadingException( exception ); } }
@Override public int compare( BufferedInputStream lhsStream, BufferedInputStream rhsStream ) { if( lhsStream == null && rhsStream == null ) return 0; if( lhsStream == null ) return !reverse ? -1 : 1; if( rhsStream == null ) return !reverse ? 1 : -1; HadoopTupleInputStream lhsInput = new HadoopTupleInputStream( lhsStream, new TupleSerialization().getElementReader() ); HadoopTupleInputStream rhsInput = new HadoopTupleInputStream( rhsStream, new TupleSerialization().getElementReader() ); try { // explicit for debugging purposes Long l1 = (Long) lhsInput.readVLong(); Long l2 = (Long) rhsInput.readVLong(); return reverse ? l2.compareTo( l1 ) : l1.compareTo( l2 ); } catch( Exception exception ) { throw new CascadingException( exception ); } }
public T deserialize( IndexTuple tuple ) throws IOException { if( tuple == null ) tuple = createTuple(); int ordinal = inputStream.readVInt(); tuple.setIndex( ordinal ); Class[] types = getTypesFor( ordinal ); // in both cases, we need to fill a new Tuple instance if( types == null ) tuple.setTuple( inputStream.readUnTyped( new Tuple() ) ); else tuple.setTuple( inputStream.readTyped( types, new Tuple() ) ); return (T) tuple; }
/**
 * Reads the element count for the current tuple, encoded as a
 * variable-length int.
 *
 * @return the number of elements that follow
 * @throws IOException on stream read failure
 */
public int getNumElements() throws IOException
  {
  int numElements = readVInt();

  return numElements;
  }