executor.execute(new ReduceStateByKeyReducer( reduceStateByKey, reduceStateByKey.getName() + "#part-" + (i++), q, output, keyExtractor, valueExtractor,
executor.execute(new ReduceStateByKeyReducer( reduceStateByKey, reduceStateByKey.getName() + "#part-" + (i++), q, output, keyExtractor, valueExtractor,
/** Omitting {@code named(...)} must fall back to the operator's default name. */
@Test
public void testBuild_ImplicitName() {
  Flow flow = Flow.create("TEST");
  Dataset<String> input = Util.createMockDataset(flow, 2);

  // Build without an explicit name; the result dataset itself is not needed here.
  ReduceStateByKey.of(input)
      .keyBy(s -> s)
      .valueBy(s -> 1L)
      .stateFactory(WordCountState::new)
      .mergeStatesBy(WordCountState::combine)
      .output();

  ReduceStateByKey operator = (ReduceStateByKey) flow.operators().iterator().next();
  assertEquals("ReduceStateByKey", operator.getName());
}
input .flatMapToPair(new CompositeKeyExtractor(keyExtractor, valueExtractor, windowing)) .setName(operator.getName() + "::extract-key-value"); tuples .repartitionAndSortWithinPartitions(groupingPartitioner, comparator) .setName(operator.getName() + "::sort"); new LazyAccumulatorProvider( context.getAccumulatorFactory(), context.getSettings()))) .setName(operator.getName() + "::apply-udf");
/**
 * Fully-specified builder chain: verifies the operator is registered with the
 * flow, carries the explicit name, and exposes every configured component.
 */
@Test
public void testBuild() {
  Flow flow = Flow.create("TEST");
  Dataset<String> input = Util.createMockDataset(flow, 2);
  Time<String> windowing = Time.of(Duration.ofHours(1));

  Dataset<Pair<String, Long>> result =
      ReduceStateByKey.named("ReduceStateByKey1")
          .of(input)
          .keyBy(s -> s)
          .valueBy(s -> 1L)
          .stateFactory(WordCountState::new)
          .mergeStatesBy(WordCountState::combine)
          .windowBy(windowing)
          .output();

  // Exactly one operator should have been added to the flow.
  assertEquals(flow, result.getFlow());
  assertEquals(1, flow.size());

  ReduceStateByKey operator = (ReduceStateByKey) flow.operators().iterator().next();
  assertEquals(flow, operator.getFlow());
  assertEquals("ReduceStateByKey1", operator.getName());
  assertNotNull(operator.getKeyExtractor());
  assertNotNull(operator.getValueExtractor());
  assertNotNull(operator.getStateMerger());
  assertNotNull(operator.getStateFactory());
  assertEquals(result, operator.output());
  // Windowing must be the very instance passed to windowBy, not a copy.
  assertSame(windowing, operator.getWindowing());
}