FlinkAssignContext(WindowFn<InputT, W> fn, WindowedValue<InputT> value) { fn.super(); if (Iterables.size(value.getWindows()) != 1) { throw new IllegalArgumentException( String.format( "%s passed to window assignment must be in a single window, but it was in %s: %s", WindowedValue.class.getSimpleName(), Iterables.size(value.getWindows()), value.getWindows())); } this.value = value; }
/** Returns the single window the wrapped value belongs to (guaranteed by the constructor). */
@Override
public BoundedWindow window() {
  Iterable<? extends BoundedWindow> windows = value.getWindows();
  return Iterables.getOnlyElement(windows);
}
}
/**
 * Reads whether the backing Flink partitioned state is empty for this namespace.
 *
 * @return {@code true} when the key iterable is {@code null} or contains no elements
 */
@Override
public Boolean read() {
  try {
    Iterable<T> keys =
        flinkStateBackend
            .getPartitionedState(
                namespace.stringKey(), StringSerializer.INSTANCE, flinkStateDescriptor)
            .keys();
    if (keys == null) {
      return true;
    }
    return Iterables.isEmpty(keys);
  } catch (Exception e) {
    throw new RuntimeException("Error isEmpty from state.", e);
  }
}
// Fold the elements that were just pushed back into the accumulated pushed-back buffer.
// NOTE(review): fragment — the enclosing method is not visible in this view.
Iterables.addAll(newPushedBack, justPushedBack);
// Copy all incoming elements into the mutable inputs collection.
// NOTE(review): fragment — the enclosing method is not visible in this view.
Iterables.addAll(inputs, elements);
/** Returns the sole output of the transform currently being translated, cast to {@code T}. */
@SuppressWarnings("unchecked")
public <T extends PValue> T getOutput(PTransform<?, T> transform) {
  // Exactly one output is expected here; getOnlyElement throws otherwise.
  PValue onlyOutput = Iterables.getOnlyElement(currentTransform.getOutputs().values());
  return (T) onlyOutput;
}
/** Returns the single main (non-additional) input of the current transform, cast to {@code T}. */
@SuppressWarnings("unchecked")
<T extends PValue> T getInput(PTransform<T, ?> transform) {
  // Side inputs are excluded; exactly one main input is expected.
  PValue mainInput =
      Iterables.getOnlyElement(TransformInputs.nonAdditionalInputs(currentTransform));
  return (T) mainInput;
}
/** Returns the sole output of the transform currently being translated, cast to {@code T}. */
@SuppressWarnings("unchecked")
<T extends PValue> T getOutput(PTransform<?, T> transform) {
  // Exactly one output is expected here; getOnlyElement throws otherwise.
  PValue onlyOutput = Iterables.getOnlyElement(currentTransform.getOutputs().values());
  return (T) onlyOutput;
}
}
/** Returns the single main (non-additional) input of the current transform, cast to {@code T}. */
@SuppressWarnings("unchecked")
public <T extends PValue> T getInput(PTransform<T, ?> transform) {
  // Side inputs are excluded; exactly one main input is expected.
  PValue mainInput =
      Iterables.getOnlyElement(TransformInputs.nonAdditionalInputs(currentTransform));
  return (T) mainInput;
}
/**
 * Translates a Reshuffle into Flink's {@code rebalance()}, which redistributes elements
 * round-robin across downstream parallel instances.
 */
private <K, V> void translateReshuffle(
    String id, RunnerApi.Pipeline pipeline, StreamingTranslationContext context) {
  RunnerApi.PTransform transform = pipeline.getComponents().getTransformsOrThrow(id);
  String inputId = Iterables.getOnlyElement(transform.getInputsMap().values());
  String outputId = Iterables.getOnlyElement(transform.getOutputsMap().values());
  DataStream<WindowedValue<KV<K, V>>> input = context.getDataStreamOrThrow(inputId);
  context.addDataStream(outputId, input.rebalance());
}
/**
 * Translates a Reshuffle for the batch runner into Flink's {@code rebalance()}, which
 * redistributes elements round-robin across downstream parallel instances.
 */
private static <K, V> void translateReshuffle(
    PTransformNode transform, RunnerApi.Pipeline pipeline, BatchTranslationContext context) {
  String inputId = Iterables.getOnlyElement(transform.getTransform().getInputsMap().values());
  String outputId = Iterables.getOnlyElement(transform.getTransform().getOutputsMap().values());
  DataSet<WindowedValue<KV<K, V>>> input = context.getDataSetOrThrow(inputId);
  context.addDataSet(outputId, input.rebalance());
}
/**
 * Builds the replacement transform that creates a streaming Flink view from the original
 * CreatePCollectionView application.
 *
 * @param transform the applied transform being replaced; must have a single input collection
 * @return a replacement pairing the original input with a {@code CreateStreamingFlinkView}
 */
@Override
public PTransformReplacement<PCollection<ElemT>, PCollection<ElemT>> getReplacementTransform(
    AppliedPTransform<
            PCollection<ElemT>,
            PCollection<ElemT>,
            PTransform<PCollection<ElemT>, PCollection<ElemT>>>
        transform) {
  // Fix: scope the unavoidable unchecked cast with @SuppressWarnings on the smallest scope
  // (the cast is safe because the replaced transform declares a single PCollection<ElemT> input).
  @SuppressWarnings("unchecked")
  PCollection<ElemT> collection =
      (PCollection<ElemT>) Iterables.getOnlyElement(transform.getInputs().values());
  PCollectionView<ViewT> view;
  try {
    view = CreatePCollectionViewTranslation.getView(transform);
  } catch (IOException e) {
    // Replacement factories have no checked-exception contract; wrap and preserve the cause.
    throw new RuntimeException(e);
  }
  CreateStreamingFlinkView<ElemT, ViewT> createFlinkView = new CreateStreamingFlinkView<>(view);
  return PTransformReplacement.of(collection, createFlinkView);
}
/**
 * Translates the Flink-specific streaming impulse source. The transform's payload is a JSON
 * object with optional fields {@code interval_ms} (default 100) and {@code message_count}
 * (default 0).
 */
private void translateStreamingImpulse(
    String id, RunnerApi.Pipeline pipeline, StreamingTranslationContext context) {
  RunnerApi.PTransform pTransform = pipeline.getComponents().getTransformsOrThrow(id);

  TypeInformation<WindowedValue<byte[]>> outputTypeInfo =
      new CoderTypeInformation<>(
          WindowedValue.getFullCoder(ByteArrayCoder.of(), GlobalWindow.Coder.INSTANCE));

  final int intervalMillis;
  final int messageCount;
  try {
    JsonNode config =
        new ObjectMapper().readTree(pTransform.getSpec().getPayload().toByteArray());
    intervalMillis = config.path("interval_ms").asInt(100);
    messageCount = config.path("message_count").asInt(0);
  } catch (IOException e) {
    throw new RuntimeException("Failed to parse configuration for streaming impulse", e);
  }

  SingleOutputStreamOperator<WindowedValue<byte[]>> source =
      context
          .getExecutionEnvironment()
          .addSource(
              new StreamingImpulseSource(intervalMillis, messageCount),
              StreamingImpulseSource.class.getSimpleName())
          .returns(outputTypeInfo);

  context.addDataStream(Iterables.getOnlyElement(pTransform.getOutputsMap().values()), source);
}
int currentStart = 0;
// NOTE(review): fragment — the enclosing loop and the body of the if below are outside this view.
// Both values are assumed to occupy exactly one window each; getOnlyElement throws otherwise.
IntervalWindow currentWindow =
    (IntervalWindow) Iterables.getOnlyElement(elements.get(0).getWindows());
IntervalWindow nextWindow = (IntervalWindow) Iterables.getOnlyElement(nextValue.getWindows());
// Overlap between consecutive windows is handled by the (truncated) branch that follows.
if (currentWindow.intersects(nextWindow)) {
private <T> void translateAssignWindows( String id, RunnerApi.Pipeline pipeline, StreamingTranslationContext context) { RunnerApi.Components components = pipeline.getComponents(); RunnerApi.PTransform transform = components.getTransformsOrThrow(id); RunnerApi.WindowIntoPayload payload; try { payload = RunnerApi.WindowIntoPayload.parseFrom(transform.getSpec().getPayload()); } catch (InvalidProtocolBufferException e) { throw new IllegalArgumentException(e); } //TODO: https://issues.apache.org/jira/browse/BEAM-4296 // This only works for well known window fns, we should defer this execution to the SDK // if the WindowFn can't be parsed or just defer it all the time. WindowFn<T, ? extends BoundedWindow> windowFn = (WindowFn<T, ? extends BoundedWindow>) WindowingStrategyTranslation.windowFnFromProto(payload.getWindowFn()); String inputCollectionId = Iterables.getOnlyElement(transform.getInputsMap().values()); String outputCollectionId = Iterables.getOnlyElement(transform.getOutputsMap().values()); Coder<WindowedValue<T>> outputCoder = instantiateCoder(outputCollectionId, components); TypeInformation<WindowedValue<T>> resultTypeInfo = new CoderTypeInformation<>(outputCoder); DataStream<WindowedValue<T>> inputDataStream = context.getDataStreamOrThrow(inputCollectionId); FlinkAssignWindows<T, ? extends BoundedWindow> assignWindowsFunction = new FlinkAssignWindows<>(windowFn); DataStream<WindowedValue<T>> resultDataStream = inputDataStream .flatMap(assignWindowsFunction) .name(transform.getUniqueName()) .returns(resultTypeInfo); context.addDataStream(outputCollectionId, resultDataStream); }
/**
 * Translates the Impulse primitive into a single-element Flink source emitting one empty byte
 * array in the global window. The source stays alive after the final watermark unless the
 * pipeline options request shutdown.
 */
private void translateImpulse(
    String id, RunnerApi.Pipeline pipeline, StreamingTranslationContext context) {
  RunnerApi.PTransform pTransform = pipeline.getComponents().getTransformsOrThrow(id);

  TypeInformation<WindowedValue<byte[]>> outputTypeInfo =
      new CoderTypeInformation<>(
          WindowedValue.getFullCoder(ByteArrayCoder.of(), GlobalWindow.Coder.INSTANCE));

  // Keep the source running unless the user explicitly opted into shutdown-on-final-watermark.
  boolean keepSourceAlive = !context.getPipelineOptions().isShutdownSourcesOnFinalWatermark();
  SingleOutputStreamOperator<WindowedValue<byte[]>> source =
      context
          .getExecutionEnvironment()
          .addSource(new ImpulseSourceFunction(keepSourceAlive), "Impulse")
          .returns(outputTypeInfo);

  context.addDataStream(Iterables.getOnlyElement(pTransform.getOutputsMap().values()), source);
}
/**
 * Translates the Impulse primitive for the batch runner into a Flink {@link DataSource} backed
 * by {@code ImpulseInputFormat}, producing one empty byte array in the global window.
 */
private static void translateImpulse(
    PTransformNode transform, RunnerApi.Pipeline pipeline, BatchTranslationContext context) {
  TypeInformation<WindowedValue<byte[]>> outputTypeInfo =
      new CoderTypeInformation<>(
          WindowedValue.getFullCoder(ByteArrayCoder.of(), GlobalWindow.Coder.INSTANCE));

  DataSource<WindowedValue<byte[]>> impulseSource =
      new DataSource<>(
              context.getExecutionEnvironment(),
              new ImpulseInputFormat(),
              outputTypeInfo,
              transform.getTransform().getUniqueName())
          .name("Impulse");

  String outputId = Iterables.getOnlyElement(transform.getTransform().getOutputsMap().values());
  context.addDataSet(outputId, impulseSource);
}
// NOTE(review): fragment — these two statements come from a truncated translation method; the
// call that the second line belongs to is cut off before its start.
String inputPCollectionId = Iterables.getOnlyElement(pTransform.getInputsMap().values());
// Registers the translated stream under the transform's single output id (call site truncated).
Iterables.getOnlyElement(pTransform.getOutputsMap().values()), outputDataStream);
// NOTE(review): fragment — the opening of this if/else and the surrounding coder construction
// are cut off before this view begins.
WindowedValue.getFullCoder(
    (Coder<T>) VoidCoder.of(), GlobalWindow.Coder.INSTANCE)));
context.addDataStream(Iterables.getOnlyElement(transform.getOutputsMap().values()), result);
} else {
// Presumably registers a deliberately-unused/empty output stream — TODO confirm with callers.
DataStream<T> result = null;
context.addDataStream(Iterables.getOnlyElement(transform.getOutputsMap().values()), result);