/**
 * Materializes the single input channel and emits its entire contents as one group.
 *
 * @param inputs          exactly one {@code CollectionChannel.Instance}
 * @param outputs         exactly one {@code CollectionChannel.Instance}
 * @param javaExecutor    the executing {@code JavaExecutor} (unused here)
 * @param operatorContext the optimization context of this operator
 * @return the lineage/channel bookkeeping of an eagerly executed operator
 */
@Override
public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
        ChannelInstance[] inputs,
        ChannelInstance[] outputs,
        JavaExecutor javaExecutor,
        OptimizationContext.OperatorContext operatorContext) {
    assert inputs.length == 1;
    assert outputs.length == 1;

    // Pull the materialized data quanta from the input channel.
    final CollectionChannel.Instance input = (CollectionChannel.Instance) inputs[0];
    final Collection<?> data = input.provideCollection();

    // The whole input collection becomes a single group element.
    final Collection<Iterable<?>> singleGroup = new ArrayList<>(1);
    singleGroup.add(data);

    final CollectionChannel.Instance output = (CollectionChannel.Instance) outputs[0];
    output.accept(singleGroup);

    return ExecutionOperator.modelEagerExecution(inputs, outputs, operatorContext);
}
((CollectionChannel.Instance) inputs[0]).provideCollection().size();
/**
 * Counts the data quanta in the input channel and emits the count as a single quantum.
 *
 * @param inputs          one {@code JavaChannelInstance} carrying the data to count
 * @param outputs         one {@code CollectionChannel.Instance} receiving the count
 * @param javaExecutor    the executing {@code JavaExecutor} (unused here)
 * @param operatorContext the optimization context of this operator
 * @return the lineage/channel bookkeeping of an eagerly executed operator
 */
@Override
public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
        ChannelInstance[] inputs,
        ChannelInstance[] outputs,
        JavaExecutor javaExecutor,
        OptimizationContext.OperatorContext operatorContext) {
    assert inputs.length == this.getNumInputs();
    assert outputs.length == this.getNumOutputs();

    // Prefer the O(1) size of an already materialized collection; otherwise the
    // stream must be consumed to obtain the count.
    final JavaChannelInstance input = (JavaChannelInstance) inputs[0];
    final long count = input instanceof CollectionChannel.Instance
            ? ((CollectionChannel.Instance) input).provideCollection().size()
            : input.provideStream().count();

    ((CollectionChannel.Instance) outputs[0]).accept(Collections.singleton(count));

    return ExecutionOperator.modelEagerExecution(inputs, outputs, operatorContext);
}
ChannelInstance materializedInput, probingInput; if (inputs[0] instanceof CollectionChannel.Instance) { final Collection<InputType0> collection = ((CollectionChannel.Instance) inputs[0]).provideCollection(); final Stream<InputType1> stream = ((JavaChannelInstance) inputs[1]).provideStream(); output.<Tuple2<InputType0, InputType1>>accept( final Collection<InputType1> collection = ((CollectionChannel.Instance) inputs[1]).provideCollection(); output.<Tuple2<InputType0, InputType1>>accept( stream.flatMap(e0 -> collection.stream().map(
Stream<Input> stream1; if (inputs[0] instanceof CollectionChannel.Instance) { final Collection<Input> collection = ((CollectionChannel.Instance) inputs[0]).provideCollection(); stream0 = collection.stream(); stream1 = ((JavaChannelInstance) inputs[1]).provideStream(); } else if (inputs[1] instanceof CollectionChannel.Instance) { stream0 = ((JavaChannelInstance) inputs[0]).provideStream(); final Collection<Input> collection = ((CollectionChannel.Instance) inputs[1]).provideCollection(); stream1 = collection.stream(); } else {
final Collection<ConvergenceType> convergenceCollection = convergenceInput.provideCollection(); try { endloop = stoppingCondition.call(convergenceCollection);
convergenceCollection = ((CollectionChannel.Instance) inputs[ITERATION_CONVERGENCE_INPUT_INDEX]).provideCollection();
final Collection<Input> collection = ((CollectionChannel.Instance) inputs[0]).provideCollection(); stream0 = collection.stream(); } else {
assert inputs[CONVERGENCE_INPUT_INDEX] != null; convergenceCollection = ((CollectionChannel.Instance) inputs[CONVERGENCE_INPUT_INDEX]).provideCollection(); executionLineageNode.addPredecessor(inputs[CONVERGENCE_INPUT_INDEX].getLineage()); endloop = stoppingCondition.test(convergenceCollection);
convergenceCollection = ((CollectionChannel.Instance) inputs[ITERATION_CONVERGENCE_INPUT_INDEX]).provideCollection(); executionLineageNode.addPredecessor(inputs[ITERATION_CONVERGENCE_INPUT_INDEX].getLineage());
/**
 * Sources an RDD either from this operator's statically configured collection or,
 * failing that, from the (optional) input channel.
 *
 * @param inputs          at most one {@code CollectionChannel.Instance}
 * @param outputs         one {@code RddChannel.Instance} receiving the parallelized data
 * @param sparkExecutor   the executing {@code SparkExecutor}
 * @param operatorContext the optimization context of this operator
 * @return the lineage/channel bookkeeping of a lazily executed operator
 */
@Override
public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
        ChannelInstance[] inputs,
        ChannelInstance[] outputs,
        SparkExecutor sparkExecutor,
        OptimizationContext.OperatorContext operatorContext) {
    assert inputs.length <= 1;
    assert outputs.length == this.getNumOutputs();

    // The statically configured collection takes precedence over the input channel.
    final Collection<Type> data;
    if (this.collection == null) {
        final CollectionChannel.Instance input = (CollectionChannel.Instance) inputs[0];
        data = input.provideCollection();
        assert data != null : String.format("Instance of %s is not providing a collection.", input.getChannel());
    } else {
        data = this.collection;
    }

    // Parallelize into an RDD using the executor's default degree of parallelism.
    final List<Type> list = RheemCollections.asList(data);
    final RddChannel.Instance output = (RddChannel.Instance) outputs[0];
    final JavaRDD<Type> rdd = sparkExecutor.sc.parallelize(list, sparkExecutor.getNumDefaultPartitions());
    this.name(rdd);
    output.accept(rdd, sparkExecutor);

    return ExecutionOperator.modelLazyExecution(inputs, outputs, operatorContext);
}
/** * Utility method to forward a {@link JavaChannelInstance} to another. * * @param input that should be forwarded * @param output to that should be forwarded */ static void forward(ChannelInstance input, ChannelInstance output) { // Do the forward. if (output instanceof CollectionChannel.Instance) { ((CollectionChannel.Instance) output).accept(((CollectionChannel.Instance) input).provideCollection()); } else if (output instanceof StreamChannel.Instance) { ((StreamChannel.Instance) output).accept(((JavaChannelInstance) input).provideStream()); } else { throw new RheemException(String.format("Cannot forward %s to %s.", input, output)); } // Manipulate the lineage. output.getLineage().addPredecessor(input.getLineage()); }
/**
 * Materializes the input collection and registers it as a Spark broadcast variable.
 *
 * @param inputs          one {@code CollectionChannel.Instance} with the data to broadcast
 * @param outputs         one {@code BroadcastChannel.Instance} receiving the broadcast handle
 * @param sparkExecutor   the executing {@code SparkExecutor}
 * @param operatorContext the optimization context of this operator
 * @return the lineage/channel bookkeeping of an eagerly executed operator
 */
@Override
public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
        ChannelInstance[] inputs,
        ChannelInstance[] outputs,
        SparkExecutor sparkExecutor,
        OptimizationContext.OperatorContext operatorContext) {
    assert inputs.length == this.getNumInputs();
    assert outputs.length == this.getNumOutputs();

    // Materialize the input and hand it to Spark's broadcast machinery.
    final Collection<?> payload = ((CollectionChannel.Instance) inputs[0]).provideCollection();
    final Broadcast<?> broadcast = sparkExecutor.sc.broadcast(payload);

    ((BroadcastChannel.Instance) outputs[0]).accept(broadcast);

    return ExecutionOperator.modelEagerExecution(inputs, outputs, operatorContext);
}
/**
 * Runs PageRank over the materialized edge list and streams out (vertex, rank) pairs.
 *
 * @param inputs          one {@code CollectionChannel.Instance} of (source, target) edge tuples
 * @param outputs         one {@code StreamChannel.Instance} receiving the rank stream
 * @param javaExecutor    the executing {@code JavaExecutor} (unused here)
 * @param operatorContext the optimization context of this operator
 * @return the lineage/channel bookkeeping of a quasi-eagerly executed operator
 */
@Override
public Tuple<Collection<ExecutionLineageNode>, Collection<ChannelInstance>> evaluate(
        ChannelInstance[] inputs,
        ChannelInstance[] outputs,
        JavaExecutor javaExecutor,
        OptimizationContext.OperatorContext operatorContext) {
    final CollectionChannel.Instance input = (CollectionChannel.Instance) inputs[0];
    final StreamChannel.Instance output = (StreamChannel.Instance) outputs[0];

    // Compute the ranks from the edge list...
    final Collection<Tuple2<Long, Long>> edgeList = input.provideCollection();
    final TLongFloatMap ranks = this.pageRank(edgeList);

    // ...and expose them as a stream of (vertex, rank) tuples.
    final Stream<Tuple2<Long, Float>> rankStream = this.stream(ranks);
    output.accept(rankStream);

    return ExecutionOperator.modelQuasiEagerExecution(inputs, outputs, operatorContext);
}
/**
 * Retrieves a named broadcast by scanning this operator's broadcast input slots.
 *
 * @param name the name of the requested broadcast
 * @param <T>  the element type expected by the caller (unchecked cast)
 * @return the materialized broadcast collection
 * @throws RheemException if no broadcast input slot with the given name exists
 */
@Override
@SuppressWarnings("unchecked")
public <T> Collection<T> getBroadcast(String name) {
    final int numInputs = this.operator.getNumInputs();
    for (int index = 0; index < numInputs; index++) {
        final InputSlot<?> slot = this.operator.getInput(index);
        // Only broadcast slots whose name matches are eligible.
        if (slot.isBroadcast() && slot.getName().equals(name)) {
            final CollectionChannel.Instance channelInstance = (CollectionChannel.Instance) this.inputs[index];
            return (Collection<T>) channelInstance.provideCollection();
        }
    }
    throw new RheemException("No such broadcast found: " + name);
}