@Override protected HadoopGroupByClosure createClosure() { // todo: collapse keyFields here if an array size > 1 return new HadoopGroupByClosure( flowProcess, keyFields, valuesFields ); }
/**
 * Returns the tuple iterator for the given group position.
 * <p>
 * A group-by has exactly one incoming value stream, so only position 0 is valid.
 *
 * @param pos the group position, must be 0
 * @return an iterator over the tuples of the sole value stream
 * @throws IllegalArgumentException if {@code pos} is not 0
 */
@Override
public Iterator<Tuple> getIterator( int pos )
  {
  if( pos == 0 )
    return makeIterator( 0, getValueIterator( 0 ) );

  throw new IllegalArgumentException( "invalid group position: " + pos );
  }
/** Returns the current grouping {@link Tuple} held by the closure. */
public Tuple getGroup() { return closure.getGrouping(); }
public void accept( Tuple key, Iterator<Tuple>[] values ) { key = unwrapGrouping( key ); closure.reset( key, values ); // Buffer is using JoinerClosure directly if( !isBufferJoin ) tupleEntryIterator.reset( splice.getJoiner().getIterator( closure ) ); else tupleEntryIterator.reset( values ); keyEntry.setTuple( closure.getGroupTuple( key ) ); next.receive( this, 0, grouping ); }
/**
 * Resets this closure with a new grouping tuple and value iterators, then rebuilds
 * internal state via {@code build()}.
 *
 * @param grouping the new grouping tuple
 * @param values   one tuple iterator per incoming stream
 */
@Override
public void reset( Tuple grouping, Iterator<Tuple>[] values )
  {
  super.reset( grouping, values );

  build();
  }
/**
 * Returns the argument tuples at the given position by delegating to the closure.
 * <p>
 * Reformatted to match the file's house style (spaces inside parentheses), which
 * every other method here follows.
 *
 * @param pos the position of the argument stream
 * @return an iterator over the argument tuples at the given position
 */
protected Iterator<Tuple> getArgumentsIterator( int pos )
  {
  return closure.getIterator( pos );
  }
public void accept( Tuple key, Iterator<Tuple>[] values ) { key = unwrapGrouping( key ); closure.reset( key, values ); // Buffer is using JoinerClosure directly if( !isBufferJoin ) tupleEntryIterator.reset( splice.getJoiner().getIterator( closure ) ); else tupleEntryIterator.reset( values ); keyEntry.setTuple( closure.getGroupTuple( key ) ); next.receive( this, 0, grouping ); }
/**
 * Resets this closure with a new grouping tuple and value iterators, then rebuilds
 * internal state via {@code build()}.
 *
 * @param grouping the new grouping tuple
 * @param values   one tuple iterator per incoming stream
 */
@Override
public void reset( Tuple grouping, Iterator<Tuple>[] values )
  {
  super.reset( grouping, values );

  build();
  }
/**
 * Returns the tuple iterator for the given group position.
 * <p>
 * A group-by has exactly one incoming value stream, so only position 0 is valid.
 *
 * @param pos the group position, must be 0
 * @return an iterator over the tuples of the sole value stream
 * @throws IllegalArgumentException if {@code pos} is not 0
 */
@Override
public Iterator<Tuple> getIterator( int pos )
  {
  if( pos == 0 )
    return makeIterator( 0, getValueIterator( 0 ) );

  throw new IllegalArgumentException( "invalid group position: " + pos );
  }
@Override protected HadoopGroupByClosure createClosure() { // todo: collapse keyFields here if an array size > 1 return new HadoopGroupByClosure( flowProcess, keyFields, valuesFields ); }
/**
 * Emits a result tuple, prepending the current grouping tuple before handing the
 * combined tuple to the operation call's output collector.
 * <p>
 * Reformatted to match the file's house style (spaces inside parentheses), which
 * every other method here follows.
 *
 * @param result the result tuple appended to the current grouping and emitted
 */
protected void emit( Tuple result )
  {
  call.getOutputCollector().add( closure.getGrouping().append( result ) );
  }