/**
 * Binds this serializer to the given stream, wrapping it in a
 * {@link HadoopTupleOutputStream} unless it already is one.
 *
 * @param out the raw stream to serialize tuples into
 */
public void open( OutputStream out )
  {
  outputStream = out instanceof HadoopTupleOutputStream
    ? (HadoopTupleOutputStream) out
    : new HadoopTupleOutputStream( out, elementWriter );
  }
public void close() throws IOException { try { if( outputStream != null ) // my never be opened outputStream.close(); } finally { outputStream = null; } } }
/**
 * Writes the given {@link Tuple} to the bound output stream.
 *
 * @param tuple the tuple to serialize
 * @throws IOException if the write fails
 */
public void serialize( Tuple tuple ) throws IOException
  {
  outputStream.writeTuple( tuple );
  }
}
/**
 * Writes the tuple with the pre-resolved field writers when they are
 * available, otherwise falls back to untyped serialization.
 *
 * @param tuple the tuple to serialize
 * @throws IOException if the write fails
 */
@Override
public void serialize( T tuple ) throws IOException
  {
  if( writers != null )
    outputStream.writeWith( writers, tuple );
  else
    outputStream.writeUnTyped( tuple );
  }
/**
 * Writes the ordinal index first, then the wrapped tuple — typed when
 * declared types are known for that index, untyped otherwise.
 *
 * @param tuple the index tuple to serialize
 * @throws IOException if the write fails
 */
public void serialize( T tuple ) throws IOException
  {
  int index = tuple.getIndex();

  outputStream.writeIntInternal( index );

  Class[] types = getTypesFor( index );

  if( types != null )
    outputStream.writeTyped( types, tuple.getTuple() );
  else
    outputStream.writeUnTyped( tuple.getTuple() );
  }
/**
 * Serializes an {@link IndexTuple} as its ordinal index followed by its
 * tuple body.
 *
 * @param indexTuple the index tuple to write
 * @throws IOException if the write fails
 */
public void writeIndexTuple( IndexTuple indexTuple ) throws IOException
  {
  writeIntInternal( indexTuple.getIndex() );

  writeTuple( indexTuple.getTuple() );
  }
}
/**
 * Resolves the declared field types into concrete element writers.
 * <p/>
 * Does nothing when no fields are declared; fails fast when this
 * serialization requires types but none were declared for the fields.
 *
 * @param fields the declared fields, may be null
 */
protected void setWriters( Fields fields )
  {
  if( fields == null )
    return;

  Class[] classes = elementWriter.getTupleSerialization().getTypesFor( fields );

  // merged guard: only an error when types are both required and absent
  if( elementWriter.getTupleSerialization().areTypesRequired() && classes == null )
    throw new IllegalStateException( "types are required to perform serialization, declared fields: " + fields );

  writers = HadoopTupleOutputStream.getWritersFor( elementWriter, classes );
  }
/**
 * Writes the ordinal index first, then the wrapped tuple — typed when
 * declared types are known for that index, untyped otherwise.
 *
 * @param tuple the index tuple to serialize
 * @throws IOException if the write fails
 */
public void serialize( T tuple ) throws IOException
  {
  int index = tuple.getIndex();

  outputStream.writeIntInternal( index );

  Class[] types = getTypesFor( index );

  if( types != null )
    outputStream.writeTyped( types, tuple.getTuple() );
  else
    outputStream.writeUnTyped( tuple.getTuple() );
  }
/**
 * Writes the tuple with the pre-resolved field writers when they are
 * available, otherwise falls back to untyped serialization.
 *
 * @param tuple the tuple to serialize
 * @throws IOException if the write fails
 */
@Override
public void serialize( T tuple ) throws IOException
  {
  if( writers != null )
    outputStream.writeWith( writers, tuple );
  else
    outputStream.writeUnTyped( tuple );
  }
/**
 * Serializes an {@link IndexTuple} as its ordinal index followed by its
 * tuple body.
 *
 * @param indexTuple the index tuple to write
 * @throws IOException if the write fails
 */
public void writeIndexTuple( IndexTuple indexTuple ) throws IOException
  {
  writeIntInternal( indexTuple.getIndex() );

  writeTuple( indexTuple.getTuple() );
  }
}
/**
 * Resolves the declared field types into concrete element writers.
 * <p/>
 * Does nothing when no fields are declared; fails fast when this
 * serialization requires types but none were declared for the fields.
 *
 * @param fields the declared fields, may be null
 */
protected void setWriters( Fields fields )
  {
  if( fields == null )
    return;

  Class[] classes = elementWriter.getTupleSerialization().getTypesFor( fields );

  // merged guard: only an error when types are both required and absent
  if( elementWriter.getTupleSerialization().areTypesRequired() && classes == null )
    throw new IllegalStateException( "types are required to perform serialization, declared fields: " + fields );

  writers = HadoopTupleOutputStream.getWritersFor( elementWriter, classes );
  }
/**
 * Binds this serializer to the given stream, wrapping it in a
 * {@link HadoopTupleOutputStream} unless it already is one.
 *
 * @param out the raw stream to serialize tuples into
 */
public void open( OutputStream out )
  {
  outputStream = out instanceof HadoopTupleOutputStream
    ? (HadoopTupleOutputStream) out
    : new HadoopTupleOutputStream( out, elementWriter );
  }
/**
 * Serializes a {@link TuplePair}: the grouping (lhs) tuple first, then
 * the sorting (rhs) tuple, each typed only when writers were resolved
 * for that half.
 *
 * @param tuple the tuple pair to serialize
 * @throws IOException if the write fails
 */
public void serialize( TuplePair tuple ) throws IOException
  {
  if( keyWriters != null )
    outputStream.writeWith( keyWriters, tuple.getLhs() );
  else
    outputStream.writeUnTyped( tuple.getLhs() );

  if( sortWriters != null )
    outputStream.writeWith( sortWriters, tuple.getRhs() );
  else
    outputStream.writeUnTyped( tuple.getRhs() );
  }
}
/**
 * Resolves element writers for both the grouping and sorting halves of
 * a {@link TuplePair}, failing fast when types are required but missing
 * for either half.
 *
 * @param elementWriter the element writer supplying type information
 */
public TuplePairSerializer( TupleSerialization.SerializationElementWriter elementWriter )
  {
  super( elementWriter );

  Class[] keyClasses = elementWriter.getTupleSerialization().getKeyTypes();
  Class[] sortClasses = elementWriter.getTupleSerialization().getSortTypes();

  boolean typesRequired = elementWriter.getTupleSerialization().areTypesRequired();

  if( typesRequired && keyClasses == null )
    throw new IllegalStateException( "types are required to perform serialization, grouping declared fields: " + elementWriter.getTupleSerialization().getKeyFields() );

  if( typesRequired && sortClasses == null )
    throw new IllegalStateException( "types are required to perform serialization, sorting declared fields: " + elementWriter.getTupleSerialization().getSortFields() );

  keyWriters = HadoopTupleOutputStream.getWritersFor( elementWriter, keyClasses );
  sortWriters = HadoopTupleOutputStream.getWritersFor( elementWriter, sortClasses );
  }
public void close() throws IOException { try { if( outputStream != null ) // my never be opened outputStream.close(); } finally { outputStream = null; } } }
/**
 * Writes the given {@link Tuple} to the bound output stream.
 *
 * @param tuple the tuple to serialize
 * @throws IOException if the write fails
 */
public void serialize( Tuple tuple ) throws IOException
  {
  outputStream.writeTuple( tuple );
  }
}
/**
 * Lazily creates the reusable serialization plumbing: the byte buffer,
 * the tuple stream wrapping it, and the tuple serializer itself. Safe
 * to call repeatedly — each piece is built only once.
 */
private void initSerializer()
  {
  init();

  if( bytesOutputStream == null )
    bytesOutputStream = new ByteArrayOutputStream( BUFFER_SIZE );

  if( tupleOutputStream == null )
    tupleOutputStream = new HadoopTupleOutputStream( bytesOutputStream, serialization.getElementWriter() );

  if( tupleSerializer == null )
    tupleSerializer = serialization.getSerializer( Tuple.class );
  }
/**
 * Serializes a {@link TuplePair}: the grouping (lhs) tuple first, then
 * the sorting (rhs) tuple, each typed only when writers were resolved
 * for that half.
 *
 * @param tuple the tuple pair to serialize
 * @throws IOException if the write fails
 */
public void serialize( TuplePair tuple ) throws IOException
  {
  if( keyWriters != null )
    outputStream.writeWith( keyWriters, tuple.getLhs() );
  else
    outputStream.writeUnTyped( tuple.getLhs() );

  if( sortWriters != null )
    outputStream.writeWith( sortWriters, tuple.getRhs() );
  else
    outputStream.writeUnTyped( tuple.getRhs() );
  }
}
/**
 * Resolves element writers for both the grouping and sorting halves of
 * a {@link TuplePair}, failing fast when types are required but missing
 * for either half.
 *
 * @param elementWriter the element writer supplying type information
 */
public TuplePairSerializer( TupleSerialization.SerializationElementWriter elementWriter )
  {
  super( elementWriter );

  Class[] keyClasses = elementWriter.getTupleSerialization().getKeyTypes();
  Class[] sortClasses = elementWriter.getTupleSerialization().getSortTypes();

  boolean typesRequired = elementWriter.getTupleSerialization().areTypesRequired();

  if( typesRequired && keyClasses == null )
    throw new IllegalStateException( "types are required to perform serialization, grouping declared fields: " + elementWriter.getTupleSerialization().getKeyFields() );

  if( typesRequired && sortClasses == null )
    throw new IllegalStateException( "types are required to perform serialization, sorting declared fields: " + elementWriter.getTupleSerialization().getSortFields() );

  keyWriters = HadoopTupleOutputStream.getWritersFor( elementWriter, keyClasses );
  sortWriters = HadoopTupleOutputStream.getWritersFor( elementWriter, sortClasses );
  }
/**
 * Closes the stream, then unconditionally returns the compressor to the
 * {@link CodecPool} for reuse — even when the close itself fails.
 *
 * @throws IOException if closing the underlying stream fails
 */
@Override
public void close() throws IOException
  {
  try
    {
    super.close();
    }
  finally
    {
    if( finalCompressor != null )
      CodecPool.returnCompressor( finalCompressor );
    }
  }
};