@Test(expected = IllegalArgumentException.class)
public void testEmptyStringRejectedWithTypeInfo() {
    // An empty id must be rejected even when an explicit TypeInformation is supplied.
    new OutputTag<>("", BasicTypeInfo.INT_TYPE_INFO);
}
}
@Test(expected = NullPointerException.class)
public void testNullRejected() {
    // A null id is never allowed for an OutputTag.
    new OutputTag<Integer>(null);
}
@Test(expected = NullPointerException.class)
public void testNullRejectedWithTypeInfo() {
    // A null id is rejected regardless of the supplied TypeInformation.
    new OutputTag<>(null, BasicTypeInfo.INT_TYPE_INFO);
}
@Test(expected = IllegalArgumentException.class)
public void testEmptyStringRejected() {
    // The id of an OutputTag must be a non-empty string.
    new OutputTag<Integer>("");
}
/** * Gets the {@link DataStream} that contains the elements that are emitted from an operation * into the side output with the given {@link OutputTag}. * * @see org.apache.flink.streaming.api.functions.ProcessFunction.Context#output(OutputTag, Object) */ public <X> DataStream<X> getSideOutput(OutputTag<X> sideOutputTag) { if (wasSplitApplied) { throw new UnsupportedOperationException("getSideOutput() and split() may not be called on the same DataStream. " + "As a work-around, please add a no-op map function before the split() call."); } sideOutputTag = clean(requireNonNull(sideOutputTag)); // make a defensive copy sideOutputTag = new OutputTag<X>(sideOutputTag.getId(), sideOutputTag.getTypeInfo()); TypeInformation<?> type = requestedSideOutputs.get(sideOutputTag); if (type != null && !type.equals(sideOutputTag.getTypeInfo())) { throw new UnsupportedOperationException("A side output with a matching id was " + "already requested with a different type. This is not allowed, side output " + "ids need to be unique."); } requestedSideOutputs.put(sideOutputTag, sideOutputTag.getTypeInfo()); SideOutputTransformation<X> sideOutputTransformation = new SideOutputTransformation<>(this.getTransformation(), sideOutputTag); return new DataStream<>(this.getExecutionEnvironment(), sideOutputTransformation); } }
final OutputTag<L> outputTag = new OutputTag<>(UUID.randomUUID().toString(), timeoutTypeInfo);
false); final OutputTag<L> outputTag = new OutputTag<>(UUID.randomUUID().toString(), timedOutTypeInfo);
Event middle1Event3 = new Event(41, "a", 4.0); OutputTag<Event> lateDataTag = new OutputTag<Event>("late-data", TypeInformation.of(Event.class));
new OutputTag<Tuple2<Map<String, List<Event>>, Long>>("timedOut") {}; final KeyedOneInputStreamOperatorTestHarness<Integer, Event, Map<String, List<Event>>> harness = new KeyedOneInputStreamOperatorTestHarness<>(
/** Builds the {@link OutputTag} for this output from the builder's name and configured result type. */
public OutputTag<OutT> output() {
    // The result type must have been set on the builder before the tag can be created.
    return new OutputTag<>(builder.name, requireNonNull(builder.resultType));
}
}
/** Builds the {@link OutputTag} for this output from the builder's name and configured result type. */
public OutputTag<OutT> output() {
    // The result type must have been set on the builder before the tag can be created.
    return new OutputTag<>(builder.name, requireNonNull(builder.resultType));
}
}
/**
 * Creates one side-output tag per input spec, identified by the spec's stream name and typed
 * with the spec's declared result type.
 */
private static List<OutputTag<?>> createSideOutputTags(List<InputAndSpec<?, ?>> specs) {
    final List<OutputTag<?>> tags = new ArrayList<>(specs.size());
    for (InputAndSpec<?, ?> spec : specs) {
        tags.add(new OutputTag<>(spec.streamName, spec.streamSpec.resultType));
    }
    return tags;
}
/**
 * Creates one side-output tag per input spec, identified by the spec's stream name and typed
 * with the spec's declared result type.
 */
private static List<OutputTag<?>> createSideOutputTags(List<InputAndSpec<?, ?>> specs) {
    final List<OutputTag<?>> tags = new ArrayList<>(specs.size());
    for (InputAndSpec<?, ?> spec : specs) {
        tags.add(new OutputTag<>(spec.streamName, spec.streamSpec.resultType));
    }
    return tags;
}
/** * Gets the {@link DataStream} that contains the elements that are emitted from an operation * into the side output with the given {@link OutputTag}. * * @see org.apache.flink.streaming.api.functions.ProcessFunction.Context#output(OutputTag, Object) */ public <X> DataStream<X> getSideOutput(OutputTag<X> sideOutputTag) { if (wasSplitApplied) { throw new UnsupportedOperationException("getSideOutput() and split() may not be called on the same DataStream. " + "As a work-around, please add a no-op map function before the split() call."); } sideOutputTag = clean(requireNonNull(sideOutputTag)); // make a defensive copy sideOutputTag = new OutputTag<X>(sideOutputTag.getId(), sideOutputTag.getTypeInfo()); TypeInformation<?> type = requestedSideOutputs.get(sideOutputTag); if (type != null && !type.equals(sideOutputTag.getTypeInfo())) { throw new UnsupportedOperationException("A side output with a matching id was " + "already requested with a different type. This is not allowed, side output " + "ids need to be unique."); } requestedSideOutputs.put(sideOutputTag, sideOutputTag.getTypeInfo()); SideOutputTransformation<X> sideOutputTransformation = new SideOutputTransformation<>(this.getTransformation(), sideOutputTag); return new DataStream<>(this.getExecutionEnvironment(), sideOutputTransformation); } }
/** * Gets the {@link DataStream} that contains the elements that are emitted from an operation * into the side output with the given {@link OutputTag}. * * @see org.apache.flink.streaming.api.functions.ProcessFunction.Context#output(OutputTag, Object) */ public <X> DataStream<X> getSideOutput(OutputTag<X> sideOutputTag) { if (wasSplitApplied) { throw new UnsupportedOperationException("getSideOutput() and split() may not be called on the same DataStream. " + "As a work-around, please add a no-op map function before the split() call."); } sideOutputTag = clean(requireNonNull(sideOutputTag)); // make a defensive copy sideOutputTag = new OutputTag<X>(sideOutputTag.getId(), sideOutputTag.getTypeInfo()); TypeInformation<?> type = requestedSideOutputs.get(sideOutputTag); if (type != null && !type.equals(sideOutputTag.getTypeInfo())) { throw new UnsupportedOperationException("A side output with a matching id was " + "already requested with a different type. This is not allowed, side output " + "ids need to be unique."); } requestedSideOutputs.put(sideOutputTag, sideOutputTag.getTypeInfo()); SideOutputTransformation<X> sideOutputTransformation = new SideOutputTransformation<>(this.getTransformation(), sideOutputTag); return new DataStream<>(this.getExecutionEnvironment(), sideOutputTransformation); } }
/** * Gets the {@link DataStream} that contains the elements that are emitted from an operation * into the side output with the given {@link OutputTag}. * * @see org.apache.flink.streaming.api.functions.ProcessFunction.Context#output(OutputTag, Object) */ public <X> DataStream<X> getSideOutput(OutputTag<X> sideOutputTag) { sideOutputTag = clean(requireNonNull(sideOutputTag)); // make a defensive copy sideOutputTag = new OutputTag<X>(sideOutputTag.getId(), sideOutputTag.getTypeInfo()); TypeInformation<?> type = requestedSideOutputs.get(sideOutputTag); if (type != null && !type.equals(sideOutputTag.getTypeInfo())) { throw new UnsupportedOperationException("A side output with a matching id was " + "already requested with a different type. This is not allowed, side output " + "ids need to be unique."); } requestedSideOutputs.put(sideOutputTag, sideOutputTag.getTypeInfo()); SideOutputTransformation<X> sideOutputTransformation = new SideOutputTransformation<>(this.getTransformation(), sideOutputTag); return new DataStream<>(this.getExecutionEnvironment(), sideOutputTransformation); } }
new DepositsThenTransactionsSource(recordsPerSecond)); final OutputTag<TransactionEvent> transactionsSideOutput = new OutputTag<>( "transactions side output", TypeInformation.of(TransactionEvent.class));
/**
 * Builds the test pipeline: the source is routed by a {@link ProcessFunction} into two
 * side outputs ("filter" and "process"); the "filter" stream is broadcast as control input
 * and the "process" stream is keyed and connected to it via a stateful
 * {@code BroadcastProcessor}.
 *
 * @param source the raw text input; every element must start with "filter " or "process "
 *     followed by an integer, otherwise the pipeline fails at runtime
 * @return the output stream of the stateful broadcast-connect operator (uid "stateful")
 */
public DataStream<String> constructTestPipeline(DataStream<String> source) {
    OutputTag<Integer> filtered = new OutputTag<>("filter", BasicTypeInfo.INT_TYPE_INFO);
    OutputTag<Integer> process = new OutputTag<>("process", BasicTypeInfo.INT_TYPE_INFO);

    SingleOutputStreamOperator<String> input =
            source.process(
                    new ProcessFunction<String, String>() {
                        private static final long serialVersionUID = 1L;

                        // Offsets are derived from the prefixes themselves (previously the
                        // magic numbers 7 and 8), so prefix and offset cannot drift apart.
                        private static final String FILTER_PREFIX = "filter ";
                        private static final String PROCESS_PREFIX = "process ";

                        @Override
                        public void processElement(String s, Context ctx, Collector<String> out)
                                throws Exception {
                            if (s.startsWith(FILTER_PREFIX)) {
                                ctx.output(
                                        filtered,
                                        Integer.parseInt(s.substring(FILTER_PREFIX.length())));
                            } else if (s.startsWith(PROCESS_PREFIX)) {
                                ctx.output(
                                        process,
                                        Integer.parseInt(s.substring(PROCESS_PREFIX.length())));
                            } else {
                                throw new RuntimeException("oOoO");
                            }
                        }
                    });

    BroadcastStream<Integer> broadcast = input.getSideOutput(filtered).broadcast(bcstate);
    return input.getSideOutput(process)
            .keyBy(i -> i)
            .connect(broadcast)
            .process(new BroadcastProcessor(bcstate))
            .uid("stateful");
}
.put( additionalOutput, new OutputTag<>(additionalOutput.getId(), TypeInformation.of(Integer.class))) .build(); ImmutableMap<TupleTag<?>, Coder<WindowedValue<?>>> tagsToCoders =
.put( additionalOutput, new OutputTag<>(additionalOutput.getId(), TypeInformation.of(Integer.class))) .build(); ImmutableMap<TupleTag<?>, Coder<WindowedValue<?>>> tagsToCoders =