/**
 * Carries an event over into the next sliding window unless it has now been seen
 * {@code slideFactor} times, in which case it is discarded (it has passed through
 * every slide of the window).
 *
 * <p>Fixes the misspelled parameter name {@code newEvenstSeenSoFar}.
 *
 * @param newEventsSeenSoFar accumulator for events to carry into the next slide,
 *     together with their updated seen-count
 * @param windowValue the event paired with the number of slides it was seen in before this one
 */
private void preserveOrDiscardIfSeenSlideFactorTimes(
        List<Tuple2<Event, Integer>> newEventsSeenSoFar, Tuple2<Event, Integer> windowValue) {
    // f1 is the count from previous slides; this window is one more sighting.
    int timesSeen = windowValue.f1 + 1;
    if (timesSeen != slideFactor) {
        // Not yet seen slideFactor times -> preserve with the incremented count.
        newEventsSeenSoFar.add(Tuple2.of(windowValue.f0, timesSeen));
    }
    // else: seen exactly slideFactor times -> drop it, nothing to carry over.
}
/**
 * Test parameters: every supported savepoint migration version crossed with both
 * the memory and the RocksDB state backend.
 */
@Parameterized.Parameters(name = "Migrate Savepoint / Backend: {0}")
public static Collection<Tuple2<MigrationVersion, String>> parameters() {
    final MigrationVersion[] migrationVersions = {
        MigrationVersion.v1_3,
        MigrationVersion.v1_4,
        MigrationVersion.v1_5,
        MigrationVersion.v1_6
    };
    final String[] stateBackends = {
        StateBackendLoader.MEMORY_STATE_BACKEND_NAME,
        StateBackendLoader.ROCKSDB_STATE_BACKEND_NAME
    };

    final List<Tuple2<MigrationVersion, String>> combinations = new java.util.ArrayList<>();
    for (MigrationVersion migrationVersion : migrationVersions) {
        for (String stateBackend : stateBackends) {
            combinations.add(Tuple2.of(migrationVersion, stateBackend));
        }
    }
    return combinations;
}
/**
 * Schedules {@code callback} to run periodically, starting {@code initialDelay} after the
 * current (test-controlled) time and repeating every {@code period}.
 *
 * @throws IllegalStateException if this service has been terminated
 */
@Override
public ScheduledFuture<?> scheduleAtFixedRate(ProcessingTimeCallback callback, long initialDelay, long period) {
    if (isTerminated) {
        throw new IllegalStateException("terminated");
    }
    if (isQuiesced) {
        // While quiesced nothing new is scheduled; hand back an inert task instead.
        return new CallbackTask(null);
    }

    final PeriodicCallbackTask periodicTask = new PeriodicCallbackTask(callback, period);
    final long firstFiringTime = currentTime + initialDelay;
    priorityQueue.offer(Tuple2.<Long, CallbackTask>of(firstFiringTime, periodicTask));
    return periodicTask;
}
/**
 * Registers {@code target} to fire once at the given (test-controlled) {@code timestamp}.
 *
 * @throws IllegalStateException if this service has been terminated
 */
@Override
public ScheduledFuture<?> registerTimer(long timestamp, ProcessingTimeCallback target) {
    if (isTerminated) {
        throw new IllegalStateException("terminated");
    }
    if (isQuiesced) {
        // While quiesced nothing new is scheduled; hand back an inert task instead.
        return new CallbackTask(null);
    }

    final CallbackTask oneShotTask = new CallbackTask(target);
    priorityQueue.offer(Tuple2.of(timestamp, oneShotTask));
    return oneShotTask;
}
/**
 * Writes the key and namespace serializers of the timers snapshot, each paired with its
 * configuration snapshot, to the given output view.
 *
 * <p>NOTE(review): "resilience" here presumably means the restore path can fall back to the
 * config snapshot if a serializer fails to deserialize — confirm against
 * {@code TypeSerializerSerializationUtil}.
 *
 * @param out the output view to write the serializers and their config snapshots to
 * @throws IOException if writing to {@code out} fails
 */
@Override
protected void writeKeyAndNamespaceSerializers(DataOutputView out) throws IOException {
    // write key / namespace serializers, and their configuration snapshots
    TypeSerializerSerializationUtil.writeSerializersAndConfigsWithResilience(
        out,
        Arrays.asList(
            Tuple2.of(timersSnapshot.getKeySerializer(), timersSnapshot.getKeySerializerConfigSnapshot()),
            Tuple2.of(timersSnapshot.getNamespaceSerializer(), timersSnapshot.getNamespaceSerializerConfigSnapshot())));
}
}
@Override public void run(SourceContext<Tuple2<String, Integer>> ctx) throws Exception { ctx.collect(Tuple2.of("a", 0)); ctx.collect(Tuple2.of("a", 1)); ctx.collect(Tuple2.of("b", 3)); ctx.collect(Tuple2.of("c", 6)); ctx.collect(Tuple2.of("c", 7)); ctx.collect(Tuple2.of("c", 8)); // source is finite, so it will have an implicit MAX watermark when it finishes }
/**
 * Records the rescale request (job id and requested parallelism) so the test can
 * observe it, and immediately acknowledges.
 */
@Override
public CompletableFuture<Acknowledge> rescaleJob(JobID jobId, int newParallelism) {
    final Tuple2<JobID, Integer> rescaleRequest = Tuple2.of(jobId, newParallelism);
    rescaleJobFuture.complete(rescaleRequest);
    return CompletableFuture.completedFuture(Acknowledge.get());
}
}
/**
 * Emits up to {@code NUM_ELEMENTS} records of the form (i, DATA_PREFIX + i),
 * stopping early if the source is cancelled (i.e. {@code running} becomes false).
 */
@Override
public void run(SourceFunction.SourceContext<Tuple2<Integer, String>> ctx) throws Exception {
    int index = 0;
    while (index < NUM_ELEMENTS && running) {
        ctx.collect(Tuple2.of(index, DATA_PREFIX + index));
        index++;
    }
}
/**
 * Verifies that rebalance partitioning assigned the inputs round-robin: after sorting by
 * payload (restoring INPUT order), the subtask indices must cycle through the parallelism
 * starting from whichever subtask received the first element.
 */
private static void verifyRebalancePartitioning(List<Tuple2<Integer, String>> rebalancePartitionResult) {
    // Sort by payload string so entries line up with INPUT's order.
    sort(rebalancePartitionResult, Comparator.comparing(entry -> entry.f1));

    // The subtask that got the first element fixes the round-robin offset.
    final int offset = rebalancePartitionResult.get(0).f0;

    final List<Tuple2<Integer, String>> expected =
        new java.util.ArrayList<>(rebalancePartitionResult.size());
    for (int index = 0; index < rebalancePartitionResult.size(); index++) {
        expected.add(Tuple2.of((offset + index) % PARALLELISM, INPUT.get(index)));
    }

    assertEquals(expected, rebalancePartitionResult);
}
/**
 * Pairs every registered REST handler with the message headers it serves.
 *
 * @param localAddressFuture unused here; part of the overridden contract
 * @return one (specification, handler) pair per registered handler
 */
@Override
protected List<Tuple2<RestHandlerSpecification, ChannelInboundHandler>> initializeHandlers(final CompletableFuture<String> localAddressFuture) {
    final List<Tuple2<RestHandlerSpecification, ChannelInboundHandler>> specsWithHandlers = new ArrayList<>();

    for (final AbstractRestHandler restHandler : abstractRestHandlers) {
        specsWithHandlers.add(Tuple2.of(restHandler.getMessageHeaders(), restHandler));
    }

    return specsWithHandlers;
}
/**
 * Emits NUM_INPUT rounds of records; each round produces one record per subtask index,
 * all carrying the 1-based round number. Each round is emitted atomically with respect
 * to checkpoints by holding the checkpoint lock.
 */
@Override
public void run(SourceContext<Tuple2<Integer, Long>> ctx) throws Exception {
    final Object checkpointLock = ctx.getCheckpointLock();

    while (count < NUM_INPUT) {
        // Emit the whole round and advance the counter under the lock so a
        // checkpoint never splits a round.
        synchronized (checkpointLock) {
            for (int subtaskIndex = 0; subtaskIndex < PARALLELISM; subtaskIndex++) {
                ctx.collect(Tuple2.of(subtaskIndex, count + 1));
            }
            count++;
        }
    }
}
/**
 * Routes a timed-out partial match to the side output identified by {@code timedOutTag},
 * pairing the partial match with the timestamp at which it timed out.
 *
 * @param match the partial match (pattern name to matched events) that timed out
 * @param ctx context giving access to the timeout timestamp and side outputs
 */
@Override
public void processTimedOutMatch(
    Map<String, List<Event>> match, PatternProcessFunction.Context ctx) throws Exception {
    ctx.output(timedOutTag, Tuple2.of(match, ctx.timestamp()));
}
}
/**
 * Flattens a state's outgoing transitions into (target state name, action) pairs.
 *
 * @param state the NFA state whose transitions are unfolded
 * @return the set of (target name, action) pairs for comparison in tests
 */
private <T> Set<Tuple2<String, StateTransitionAction>> unfoldTransitions(final State<T> state) {
    final Set<Tuple2<String, StateTransitionAction>> unfolded = new HashSet<>();

    for (StateTransition<T> stateTransition : state.getStateTransitions()) {
        final String targetStateName = stateTransition.getTargetState().getName();
        unfolded.add(Tuple2.of(targetStateName, stateTransition.getAction()));
    }

    return unfolded;
}
/** Requesting an accessor for a non-existent field of a flat tuple must fail. */
@Test(expected = CompositeType.InvalidFieldReferenceException.class)
public void testIllegalFlatTuple() {
    final Tuple2<String, Integer> tuple = Tuple2.of("aa", 5);
    final TupleTypeInfo<Tuple2<String, Integer>> tupleTypeInfo =
        (TupleTypeInfo<Tuple2<String, Integer>>) TypeExtractor.getForObject(tuple);

    // "illegal" is not a valid field expression for Tuple2 -> expected to throw.
    FieldAccessorFactory.getAccessor(tupleTypeInfo, "illegal", null);
}
/**
 * Accumulates incoming values in keyed state; every time a full batch of
 * {@code numberElements} records has been processed, emits (subtask index, running sum)
 * and signals the completion latch.
 */
@Override
public void flatMap(Integer value, Collector<Tuple2<Integer, Integer>> out) throws Exception {
    final int updatedCount = counter.value() + 1;
    counter.update(updatedCount);

    final int updatedSum = sum.value() + value;
    sum.update(updatedSum);

    if (updatedCount % numberElements == 0) {
        // One full batch processed: publish the sum for this subtask and notify the test.
        out.collect(Tuple2.of(getRuntimeContext().getIndexOfThisSubtask(), updatedSum));
        workCompletedLatch.countDown();
    }
}
/** An interval join with a null upper bound must be rejected eagerly. */
@Test(expected = NullPointerException.class)
public void testFailsWithoutUpperBound() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    env.setParallelism(1);

    final DataStream<Tuple2<String, Integer>> leftStream = env.fromElements(Tuple2.of("1", 1));
    final DataStream<Tuple2<String, Integer>> rightStream = env.fromElements(Tuple2.of("1", 1));

    leftStream
        .keyBy(new Tuple2KeyExtractor())
        .intervalJoin(rightStream.keyBy(new Tuple2KeyExtractor()))
        .between(Time.milliseconds(0), null);
}
/** An interval join with a null lower bound must be rejected eagerly. */
@Test(expected = NullPointerException.class)
public void testFailsWithoutLowerBound() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    env.setParallelism(1);

    final DataStream<Tuple2<String, Integer>> leftStream = env.fromElements(Tuple2.of("1", 1));
    final DataStream<Tuple2<String, Integer>> rightStream = env.fromElements(Tuple2.of("1", 1));

    leftStream
        .keyBy(new Tuple2KeyExtractor())
        .intervalJoin(rightStream.keyBy(new Tuple2KeyExtractor()))
        .between(null, Time.milliseconds(1));
}