@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop reducer.
	valueIterator.set(values.iterator());
	reduceCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	reducer.reduce(key, valueIterator, reduceCollector, reporter);
}
@Override
public void flatMap(final Tuple2<KEYIN, VALUEIN> value, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Bridge the Flink collector into the Hadoop OutputCollector wrapper,
	// then hand the unwrapped key/value pair to the Hadoop mapper.
	outputCollector.setFlinkCollector(out);
	final KEYIN key = value.f0;
	final VALUEIN val = value.f1;
	mapper.map(key, val, outputCollector, reporter);
}
/**
 * Sets the Flink iterator to wrap and caches the group's first key/value pair.
 *
 * @param iterator The Flink iterator to wrap.
 */
@Override
public void set(final Iterator<Tuple2<KEY, VALUE>> iterator) {
	this.iterator = iterator;
	// Probe the new iterator directly instead of calling this.hasNext():
	// a stale atFirst flag left over from a previously-unconsumed group
	// would make hasNext() report true for an empty iterator and trigger
	// a NoSuchElementException on next() below.
	if (iterator.hasNext()) {
		final Tuple2<KEY, VALUE> tuple = iterator.next();
		// Copy the key defensively — the runtime may reuse the tuple
		// (and its fields) on subsequent next() calls.
		this.curKey = keySerializer.copy(tuple.f0);
		this.firstValue = tuple.f1;
		this.atFirst = true;
	} else {
		this.atFirst = false;
	}
}
// NOTE(review): this is the interior of a test method and appears garbled by
// extraction — the leading 'new' expression has lost its assignment target
// (presumably 'valIt = ...'), and the closing braces of the for-loops are
// missing. All tokens are preserved exactly as found; confirm against the
// original test source before relying on the structure implied here.
new HadoopTupleUnwrappingIterator<IntWritable, IntWritable>(new WritableSerializer <IntWritable>(IntWritable.class));
// Bind the iterator to a fresh pass over the tuple list.
valIt.set(tList.iterator());
// The current key must be available immediately after set().
Assert.assertTrue(valIt.getCurrentKey().get() == expectedKey);
for (int expectedValue : expectedValues) {
// hasNext() is called twice on purpose — it must be idempotent.
Assert.assertTrue(valIt.hasNext());
Assert.assertTrue(valIt.hasNext());
Assert.assertTrue(valIt.next().get() == expectedValue);
// getCurrentKey() must keep returning the same key while values are consumed.
Assert.assertTrue(valIt.getCurrentKey().get() == expectedKey);
// Once exhausted, hasNext() must stay false on repeated calls,
// and the current key must remain readable.
Assert.assertFalse(valIt.hasNext());
Assert.assertFalse(valIt.hasNext());
Assert.assertTrue(valIt.getCurrentKey().get() == expectedKey);
// Re-setting with a fresh iterator must allow a second, identical pass.
valIt.set(tList.iterator());
Assert.assertTrue(valIt.getCurrentKey().get() == expectedKey);
for (int expectedValue : expectedValues) {
Assert.assertTrue(valIt.hasNext());
Assert.assertTrue(valIt.hasNext());
Assert.assertTrue(valIt.next().get() == expectedValue);
Assert.assertTrue(valIt.getCurrentKey().get() == expectedKey);
Assert.assertFalse(valIt.hasNext());
Assert.assertFalse(valIt.hasNext());
Assert.assertTrue(valIt.getCurrentKey().get() == expectedKey);
// Third re-set: the iterator must again report a next element.
valIt.set(tList.iterator());
Assert.assertTrue(valIt.hasNext());
@SuppressWarnings("unchecked")
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hand the Hadoop job configuration to both user functions.
	this.reducer.configure(jobConf);
	this.combiner.configure(jobConf);

	this.reporter = new HadoopDummyReporter();
	this.combineCollector = new HadoopOutputCollector<>();
	this.reduceCollector = new HadoopOutputCollector<>();

	// Derive a serializer for the reducer's input key type so the
	// unwrapping iterator can make defensive copies of the current key.
	final Class<KEYIN> inKeyClass = (Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
	final TypeSerializer<KEYIN> keySerializer = TypeExtractor.getForClass(inKeyClass).createSerializer(getRuntimeContext().getExecutionConfig());
	this.valueIterator = new HadoopTupleUnwrappingIterator<>(keySerializer);
}
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hand the Hadoop job configuration to the wrapped mapper and set up
	// the per-task helper objects.
	this.mapper.configure(jobConf);
	this.reporter = new HadoopDummyReporter();
	this.outputCollector = new HadoopOutputCollector<>();
}
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop reducer.
	valueIterator.set(values.iterator());
	reduceCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	reducer.reduce(key, valueIterator, reduceCollector, reporter);
}
@SuppressWarnings("unchecked")
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hand the Hadoop job configuration to the wrapped reducer.
	this.reducer.configure(jobConf);
	this.reporter = new HadoopDummyReporter();

	// Determine the reducer's input key type and build a serializer for
	// it, so the unwrapping iterator can copy the current key defensively.
	final Class<KEYIN> inKeyClass = (Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
	final TypeSerializer<KEYIN> keySerializer = TypeExtractor.getForClass(inKeyClass).createSerializer(getRuntimeContext().getExecutionConfig());
	this.valueIterator = new HadoopTupleUnwrappingIterator<>(keySerializer);
	this.reduceCollector = new HadoopOutputCollector<>();
}
@Override
public void flatMap(final Tuple2<KEYIN, VALUEIN> value, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Bridge the Flink collector into the Hadoop OutputCollector wrapper,
	// then hand the unwrapped key/value pair to the Hadoop mapper.
	outputCollector.setFlinkCollector(out);
	final KEYIN key = value.f0;
	final VALUEIN val = value.f1;
	mapper.map(key, val, outputCollector, reporter);
}
@Override
public void combine(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYIN, VALUEIN>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop combiner (a Reducer operating on KEYIN/VALUEIN).
	valueIterator.set(values.iterator());
	combineCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	combiner.reduce(key, valueIterator, combineCollector, reporter);
}
@Override
public void flatMap(final Tuple2<KEYIN, VALUEIN> value, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Bridge the Flink collector into the Hadoop OutputCollector wrapper,
	// then hand the unwrapped key/value pair to the Hadoop mapper.
	outputCollector.setFlinkCollector(out);
	final KEYIN key = value.f0;
	final VALUEIN val = value.f1;
	mapper.map(key, val, outputCollector, reporter);
}
@Override
public void combine(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYIN, VALUEIN>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop combiner (a Reducer operating on KEYIN/VALUEIN).
	valueIterator.set(values.iterator());
	combineCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	combiner.reduce(key, valueIterator, combineCollector, reporter);
}
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop reducer.
	valueIterator.set(values.iterator());
	reduceCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	reducer.reduce(key, valueIterator, reduceCollector, reporter);
}
@Override
public void combine(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYIN, VALUEIN>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop combiner (a Reducer operating on KEYIN/VALUEIN).
	valueIterator.set(values.iterator());
	combineCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	combiner.reduce(key, valueIterator, combineCollector, reporter);
}
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop reducer.
	valueIterator.set(values.iterator());
	reduceCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	reducer.reduce(key, valueIterator, reduceCollector, reporter);
}
@Override
public void combine(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYIN, VALUEIN>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop combiner (a Reducer operating on KEYIN/VALUEIN).
	valueIterator.set(values.iterator());
	combineCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	combiner.reduce(key, valueIterator, combineCollector, reporter);
}
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop reducer.
	valueIterator.set(values.iterator());
	reduceCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	reducer.reduce(key, valueIterator, reduceCollector, reporter);
}
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop reducer.
	valueIterator.set(values.iterator());
	reduceCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	reducer.reduce(key, valueIterator, reduceCollector, reporter);
}
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop reducer.
	valueIterator.set(values.iterator());
	reduceCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	reducer.reduce(key, valueIterator, reduceCollector, reporter);
}
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Re-key the unwrapping iterator onto this group, route the Hadoop
	// collector's output into the given Flink collector, then delegate
	// to the wrapped Hadoop reducer.
	valueIterator.set(values.iterator());
	reduceCollector.setFlinkCollector(out);
	final KEYIN key = valueIterator.getCurrentKey();
	reducer.reduce(key, valueIterator, reduceCollector, reporter);
}