/**
 * Initializes the wrapped Hadoop Mapper before any records are processed.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Bridges the Hadoop OutputCollector API onto a Flink Collector.
	this.outputCollector = new HadoopOutputCollector<>();
	// Hand the Hadoop job configuration to the user's mapper.
	this.mapper.configure(jobConf);
}
/**
 * Processes one input tuple by delegating to the wrapped Hadoop Mapper.
 *
 * @param value input record as a (key, value) tuple.
 * @param out Flink collector receiving the mapper's output.
 * @throws Exception if the wrapped mapper throws.
 */
@Override
public void flatMap(final Tuple2<KEYIN, VALUEIN> value, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Route anything the Hadoop mapper emits into the Flink collector.
	outputCollector.setFlinkCollector(out);
	final KEYIN key = value.f0;
	final VALUEIN val = value.f1;
	mapper.map(key, val, outputCollector, reporter);
}
/**
 * Reduces a group of tuples by delegating to the wrapped Hadoop Reducer.
 *
 * @param values all tuples of the current group.
 * @param out Flink collector receiving the reducer's output.
 * @throws Exception if the wrapped reducer throws.
 */
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Unwrap the (key, value) tuples; the iterator also captures the group key.
	valueIterator.set(values.iterator());
	// Forward the reducer's emissions to the Flink collector.
	reduceCollector.setFlinkCollector(out);
	reducer.reduce(valueIterator.getCurrentKey(), valueIterator, reduceCollector, reporter);
}
/**
 * Initializes the wrapped Hadoop Reducer and Combiner and builds the
 * key serializer needed to detect key boundaries while iterating.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@SuppressWarnings("unchecked")
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hand the Hadoop job configuration to both user functions.
	this.reducer.configure(jobConf);
	this.combiner.configure(jobConf);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Determine the reducer's input key type so a Flink serializer can be created for it.
	final Class<KEYIN> keyClass =
			(Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
	final TypeSerializer<KEYIN> serializer =
			TypeExtractor.getForClass(keyClass).createSerializer(getRuntimeContext().getExecutionConfig());
	this.valueIterator = new HadoopTupleUnwrappingIterator<>(serializer);
	this.combineCollector = new HadoopOutputCollector<>();
	this.reduceCollector = new HadoopOutputCollector<>();
}
/**
 * Reduces a group of tuples by delegating to the wrapped Hadoop Reducer.
 *
 * @param values all tuples of the current group.
 * @param out Flink collector receiving the reducer's output.
 * @throws Exception if the wrapped reducer throws.
 */
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Unwrap the (key, value) tuples; the iterator also captures the group key.
	valueIterator.set(values.iterator());
	// Forward the reducer's emissions to the Flink collector.
	reduceCollector.setFlinkCollector(out);
	reducer.reduce(valueIterator.getCurrentKey(), valueIterator, reduceCollector, reporter);
}
/**
 * Initializes the wrapped Hadoop Reducer and builds the key serializer
 * needed to detect key boundaries while iterating.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@SuppressWarnings("unchecked")
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hand the Hadoop job configuration to the user's reducer.
	this.reducer.configure(jobConf);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Bridges the Hadoop OutputCollector API onto a Flink Collector.
	this.reduceCollector = new HadoopOutputCollector<>();
	// Determine the reducer's input key type so a Flink serializer can be created for it.
	final Class<KEYIN> keyClass =
			(Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
	final TypeSerializer<KEYIN> serializer =
			TypeExtractor.getForClass(keyClass).createSerializer(getRuntimeContext().getExecutionConfig());
	this.valueIterator = new HadoopTupleUnwrappingIterator<>(serializer);
}
/**
 * Pre-aggregates a group of tuples by delegating to the wrapped Hadoop
 * combiner (a Reducer whose output types equal its input types).
 *
 * @param values all tuples of the current group.
 * @param out Flink collector receiving the combiner's output.
 * @throws Exception if the wrapped combiner throws.
 */
@Override
public void combine(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYIN, VALUEIN>> out) throws Exception {
	// Unwrap the (key, value) tuples; the iterator also captures the group key.
	valueIterator.set(values.iterator());
	// Forward the combiner's emissions to the Flink collector.
	combineCollector.setFlinkCollector(out);
	combiner.reduce(valueIterator.getCurrentKey(), valueIterator, combineCollector, reporter);
}
/**
 * Initializes the wrapped Hadoop Mapper before any records are processed.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Bridges the Hadoop OutputCollector API onto a Flink Collector.
	this.outputCollector = new HadoopOutputCollector<>();
	// Hand the Hadoop job configuration to the user's mapper.
	this.mapper.configure(jobConf);
}
/**
 * Processes one input tuple by delegating to the wrapped Hadoop Mapper.
 *
 * @param value input record as a (key, value) tuple.
 * @param out Flink collector receiving the mapper's output.
 * @throws Exception if the wrapped mapper throws.
 */
@Override
public void flatMap(final Tuple2<KEYIN, VALUEIN> value, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Route anything the Hadoop mapper emits into the Flink collector.
	outputCollector.setFlinkCollector(out);
	final KEYIN key = value.f0;
	final VALUEIN val = value.f1;
	mapper.map(key, val, outputCollector, reporter);
}
/**
 * Initializes the wrapped Hadoop Mapper before any records are processed.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Bridges the Hadoop OutputCollector API onto a Flink Collector.
	this.outputCollector = new HadoopOutputCollector<>();
	// Hand the Hadoop job configuration to the user's mapper.
	this.mapper.configure(jobConf);
}
/**
 * Processes one input tuple by delegating to the wrapped Hadoop Mapper.
 *
 * @param value input record as a (key, value) tuple.
 * @param out Flink collector receiving the mapper's output.
 * @throws Exception if the wrapped mapper throws.
 */
@Override
public void flatMap(final Tuple2<KEYIN, VALUEIN> value, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Route anything the Hadoop mapper emits into the Flink collector.
	outputCollector.setFlinkCollector(out);
	final KEYIN key = value.f0;
	final VALUEIN val = value.f1;
	mapper.map(key, val, outputCollector, reporter);
}
/**
 * Initializes the wrapped Hadoop Mapper before any records are processed.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Bridges the Hadoop OutputCollector API onto a Flink Collector.
	this.outputCollector = new HadoopOutputCollector<>();
	// Hand the Hadoop job configuration to the user's mapper.
	this.mapper.configure(jobConf);
}
/**
 * Processes one input tuple by delegating to the wrapped Hadoop Mapper.
 *
 * @param value input record as a (key, value) tuple.
 * @param out Flink collector receiving the mapper's output.
 * @throws Exception if the wrapped mapper throws.
 */
@Override
public void flatMap(final Tuple2<KEYIN, VALUEIN> value, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Route anything the Hadoop mapper emits into the Flink collector.
	outputCollector.setFlinkCollector(out);
	final KEYIN key = value.f0;
	final VALUEIN val = value.f1;
	mapper.map(key, val, outputCollector, reporter);
}
/**
 * Initializes the wrapped Hadoop Reducer and Combiner and builds the
 * key serializer needed to detect key boundaries while iterating.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@SuppressWarnings("unchecked")
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hand the Hadoop job configuration to both user functions.
	this.reducer.configure(jobConf);
	this.combiner.configure(jobConf);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Determine the reducer's input key type so a Flink serializer can be created for it.
	final Class<KEYIN> keyClass =
			(Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
	final TypeSerializer<KEYIN> serializer =
			TypeExtractor.getForClass(keyClass).createSerializer(getRuntimeContext().getExecutionConfig());
	this.valueIterator = new HadoopTupleUnwrappingIterator<>(serializer);
	this.combineCollector = new HadoopOutputCollector<>();
	this.reduceCollector = new HadoopOutputCollector<>();
}
/**
 * Pre-aggregates a group of tuples by delegating to the wrapped Hadoop
 * combiner (a Reducer whose output types equal its input types).
 *
 * @param values all tuples of the current group.
 * @param out Flink collector receiving the combiner's output.
 * @throws Exception if the wrapped combiner throws.
 */
@Override
public void combine(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYIN, VALUEIN>> out) throws Exception {
	// Unwrap the (key, value) tuples; the iterator also captures the group key.
	valueIterator.set(values.iterator());
	// Forward the combiner's emissions to the Flink collector.
	combineCollector.setFlinkCollector(out);
	combiner.reduce(valueIterator.getCurrentKey(), valueIterator, combineCollector, reporter);
}
/**
 * Initializes the wrapped Hadoop Reducer and Combiner and builds the
 * key serializer needed to detect key boundaries while iterating.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@SuppressWarnings("unchecked")
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hand the Hadoop job configuration to both user functions.
	this.reducer.configure(jobConf);
	this.combiner.configure(jobConf);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Determine the reducer's input key type so a Flink serializer can be created for it.
	final Class<KEYIN> keyClass =
			(Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
	final TypeSerializer<KEYIN> serializer =
			TypeExtractor.getForClass(keyClass).createSerializer(getRuntimeContext().getExecutionConfig());
	this.valueIterator = new HadoopTupleUnwrappingIterator<>(serializer);
	this.combineCollector = new HadoopOutputCollector<>();
	this.reduceCollector = new HadoopOutputCollector<>();
}
/**
 * Reduces a group of tuples by delegating to the wrapped Hadoop Reducer.
 *
 * @param values all tuples of the current group.
 * @param out Flink collector receiving the reducer's output.
 * @throws Exception if the wrapped reducer throws.
 */
@Override
public void reduce(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYOUT, VALUEOUT>> out) throws Exception {
	// Unwrap the (key, value) tuples; the iterator also captures the group key.
	valueIterator.set(values.iterator());
	// Forward the reducer's emissions to the Flink collector.
	reduceCollector.setFlinkCollector(out);
	reducer.reduce(valueIterator.getCurrentKey(), valueIterator, reduceCollector, reporter);
}
/**
 * Initializes the wrapped Hadoop Reducer and Combiner and builds the
 * key serializer needed to detect key boundaries while iterating.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@SuppressWarnings("unchecked")
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hand the Hadoop job configuration to both user functions.
	this.reducer.configure(jobConf);
	this.combiner.configure(jobConf);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Determine the reducer's input key type so a Flink serializer can be created for it.
	final Class<KEYIN> keyClass =
			(Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
	final TypeSerializer<KEYIN> serializer =
			TypeExtractor.getForClass(keyClass).createSerializer(getRuntimeContext().getExecutionConfig());
	this.valueIterator = new HadoopTupleUnwrappingIterator<>(serializer);
	this.combineCollector = new HadoopOutputCollector<>();
	this.reduceCollector = new HadoopOutputCollector<>();
}
/**
 * Pre-aggregates a group of tuples by delegating to the wrapped Hadoop
 * combiner (a Reducer whose output types equal its input types).
 *
 * @param values all tuples of the current group.
 * @param out Flink collector receiving the combiner's output.
 * @throws Exception if the wrapped combiner throws.
 */
@Override
public void combine(final Iterable<Tuple2<KEYIN, VALUEIN>> values, final Collector<Tuple2<KEYIN, VALUEIN>> out) throws Exception {
	// Unwrap the (key, value) tuples; the iterator also captures the group key.
	valueIterator.set(values.iterator());
	// Forward the combiner's emissions to the Flink collector.
	combineCollector.setFlinkCollector(out);
	combiner.reduce(valueIterator.getCurrentKey(), valueIterator, combineCollector, reporter);
}
/**
 * Initializes the wrapped Hadoop Reducer and builds the key serializer
 * needed to detect key boundaries while iterating.
 *
 * @param parameters Flink configuration, forwarded to the superclass.
 * @throws Exception if superclass initialization fails.
 */
@SuppressWarnings("unchecked")
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);
	// Hand the Hadoop job configuration to the user's reducer.
	this.reducer.configure(jobConf);
	// Hadoop reporters are not supported in this wrapper; install a no-op stand-in.
	this.reporter = new HadoopDummyReporter();
	// Bridges the Hadoop OutputCollector API onto a Flink Collector.
	this.reduceCollector = new HadoopOutputCollector<>();
	// Determine the reducer's input key type so a Flink serializer can be created for it.
	final Class<KEYIN> keyClass =
			(Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
	final TypeSerializer<KEYIN> serializer =
			TypeExtractor.getForClass(keyClass).createSerializer(getRuntimeContext().getExecutionConfig());
	this.valueIterator = new HadoopTupleUnwrappingIterator<>(serializer);
}