/** Returns the output fields of the underlying wrapped stream. */
@Override
public Fields getOutputFields() {
    final Fields wrappedFields = _stream.getOutputFields();
    return wrappedFields;
}
/**
 * Validates that every field in {@code projFields} is present in this stream's output fields.
 *
 * @param projFields the fields being selected; {@code null} means "no projection" and is skipped
 * @throws IllegalArgumentException if any requested field does not exist in the stream
 */
private void projectionValidation(Fields projFields) {
    if (projFields == null) {
        return;
    }
    Fields allFields = this.getOutputFields();
    for (String field : projFields) {
        if (!allFields.contains(field)) {
            // Fix: original message contained the duplicated word "fields fields".
            throw new IllegalArgumentException("Trying to select non-existent field: '" + field
                    + "' from stream containing fields: <" + allFields + ">");
        }
    }
}
} // closing brace of the enclosing class (carried over from the original collapsed line)
/**
 * Returns a stream consisting of the elements of this stream that match the given filter.
 *
 * @param filter the filter applied to each trident tuple to decide whether it is included
 * @return the new, filtered stream
 */
public Stream filter(Filter filter) {
    Fields allFields = getOutputFields();
    return each(allFields, filter);
}
/**
 * For each stream, computes its output fields with the corresponding join fields removed.
 * The i-th result pairs {@code streams.get(i)} with {@code joinFields.get(i)}.
 */
private static List<Fields> strippedInputFields(List<Stream> streams, List<Fields> joinFields) {
    List<Fields> stripped = new ArrayList<>(streams.size());
    int idx = 0;
    for (Stream stream : streams) {
        stripped.add(TridentUtils.fieldsSubtract(stream.getOutputFields(), joinFields.get(idx)));
        idx++;
    }
    return stripped;
}
/** Merges the given streams, using the first stream's output fields as the merged schema. */
public Stream merge(List<Stream> streams) {
    Fields firstStreamFields = streams.get(0).getOutputFields();
    return merge(firstStreamFields, streams);
}
/**
 * Builds a partition-aggregate stream over all output fields, optionally validating
 * that {@code inputFieldName} exists first (a null name skips validation).
 */
private <T> Stream comparableAggregateStream(String inputFieldName, Aggregator<T> aggregator) {
    if (inputFieldName != null) {
        projectionValidation(new Fields(inputFieldName));
    }
    Fields allFields = getOutputFields();
    return partitionAggregate(allFields, aggregator, allFields);
}
/**
 * Returns a stream consisting of the result of applying the given mapping function
 * to the values of this stream.
 *
 * @param function a mapping function applied to each value in this stream
 * @return the new stream
 */
public Stream map(MapFunction function) {
    Fields fields = getOutputFields();
    projectionValidation(fields);
    ProcessorNode mapNode = new ProcessorNode(
            _topology.getUniqueStreamId(),
            _name,
            fields,
            fields,
            new MapProcessor(fields, new MapFunctionExecutor(function)));
    return _topology.addSourcedNode(this, mapNode);
}
/**
 * Returns a stream consisting of the results of replacing each value of this stream with
 * the contents produced by applying the provided mapping function to each value: a
 * one-to-many transformation whose resulting elements are flattened into a new stream.
 *
 * @param function a mapping function applied to each value in this stream, producing new values
 * @return the new stream
 */
public Stream flatMap(FlatMapFunction function) {
    Fields fields = getOutputFields();
    projectionValidation(fields);
    ProcessorNode flatMapNode = new ProcessorNode(
            _topology.getUniqueStreamId(),
            _name,
            fields,
            fields,
            new MapProcessor(fields, new FlatMapFunctionExecutor(function)));
    return _topology.addSourcedNode(this, flatMapNode);
}
/**
 * Returns a stream consisting of the trident tuples of this stream, additionally performing
 * the provided action on each tuple as it is consumed. Mostly useful for debugging: observe
 * tuples as they flow past a certain point in a pipeline.
 *
 * @param action the action performed on each trident tuple as it is consumed from the stream
 * @return the new stream
 */
public Stream peek(Consumer action) {
    Fields fields = getOutputFields();
    projectionValidation(fields);
    ProcessorNode peekNode = new ProcessorNode(
            _topology.getUniqueStreamId(),
            _name,
            fields,
            fields,
            new MapProcessor(fields, new ConsumerExecutor(action)));
    return _topology.addSourcedNode(this, peekNode);
}
/**
 * Applies the aggregator in two stages: a per-partition aggregation, then a global
 * re-aggregation of the partial results, keeping the input's output fields throughout.
 */
@Override
public Stream apply(Stream input) {
    Fields fields = input.getOutputFields();
    Stream partials = input.partitionAggregate(fields, _agg, fields);
    return partials.global().partitionAggregate(fields, _agg, fields);
}
/**
 * Queries the given state for each tuple, appending {@code functionFields} to the stream.
 * The query node is colocated with the state's node.
 *
 * @throws IllegalArgumentException if any of {@code inputFields} is not in this stream
 */
public Stream stateQuery(TridentState state, Fields inputFields, QueryFunction function, Fields functionFields) {
    projectionValidation(inputFields);
    String stateId = state._node.stateInfo.id;
    Fields combinedOutput = TridentUtils.fieldsConcat(getOutputFields(), functionFields);
    Node queryNode = new ProcessorNode(
            _topology.getUniqueStreamId(),
            _name,
            combinedOutput,
            functionFields,
            new StateQueryProcessor(stateId, inputFields, function));
    _topology._colocate.get(stateId).add(queryNode);
    return _topology.addSourcedNode(this, queryNode);
}
/**
 * Applies {@code function} to {@code inputFields} of each tuple, appending
 * {@code functionFields} to the stream's output.
 *
 * @throws IllegalArgumentException if any of {@code inputFields} is not in this stream
 */
@Override
public Stream each(Fields inputFields, Function function, Fields functionFields) {
    projectionValidation(inputFields);
    Fields combinedFields = TridentUtils.fieldsConcat(getOutputFields(), functionFields);
    ProcessorNode eachNode = new ProcessorNode(
            _topology.getUniqueStreamId(),
            _name,
            combinedFields,
            functionFields,
            new EachProcessor(inputFields, function));
    return _topology.addSourcedNode(this, eachNode);
}
//creates brand new tuples with brand new fields
/**
 * ## Repartitioning Operation
 *
 * Repartitions this stream using a custom partitioning function that implements
 * {@link org.apache.storm.grouping.CustomStreamGrouping}.
 *
 * @param grouping the custom grouping to partition the stream with
 * @return the repartitioned stream
 */
public Stream partition(Grouping grouping) {
    if (_node instanceof PartitionNode) {
        // Current node is already a partition node: insert a pass-through each()
        // so the new grouping is applied to a fresh processor node.
        return each(new Fields(), new TrueFilter()).partition(grouping);
    }
    return _topology.addSourcedNode(this,
            new PartitionNode(_node.streamId, _name, getOutputFields(), grouping));
}
/**
 * Validates that every field in {@code projFields} is present in this stream's output fields.
 *
 * @param projFields the fields being selected; {@code null} means "no projection" and is skipped
 * @throws IllegalArgumentException if any requested field does not exist in the stream
 */
private void projectionValidation(Fields projFields) {
    if (projFields == null) {
        return;
    }
    Fields allFields = this.getOutputFields();
    for (String field : projFields) {
        if (!allFields.contains(field)) {
            // Fix: original message contained the duplicated word "fields fields".
            throw new IllegalArgumentException("Trying to select non-existent field: '" + field
                    + "' from stream containing fields: <" + allFields + ">");
        }
    }
}
} // closing brace of the enclosing class (carried over from the original collapsed line)
/**
 * For each stream, computes its output fields with the corresponding join fields removed.
 * The i-th result pairs {@code streams.get(i)} with {@code joinFields.get(i)}.
 */
private static List<Fields> strippedInputFields(List<Stream> streams, List<Fields> joinFields) {
    List<Fields> stripped = new ArrayList<>(streams.size());
    int idx = 0;
    for (Stream stream : streams) {
        stripped.add(TridentUtils.fieldsSubtract(stream.getOutputFields(), joinFields.get(idx)));
        idx++;
    }
    return stripped;
}
/**
 * Validates that every field in {@code projFields} is present in this stream's output fields.
 *
 * @param projFields the fields being selected; {@code null} means "no projection" and is skipped
 * @throws IllegalArgumentException if any requested field does not exist in the stream
 */
private void projectionValidation(Fields projFields) {
    if (projFields == null) {
        return;
    }
    Fields allFields = this.getOutputFields();
    for (String field : projFields) {
        if (!allFields.contains(field)) {
            // Fix: original message contained the duplicated word "fields fields".
            throw new IllegalArgumentException("Trying to select non-existent field: '" + field
                    + "' from stream containing fields: <" + allFields + ">");
        }
    }
}
} // closing brace of the enclosing class (carried over from the original collapsed line)
/**
 * Builds a partition-aggregate stream over all output fields, optionally validating
 * that {@code inputFieldName} exists first (a null name skips validation).
 */
private <T> Stream comparableAggregateStream(String inputFieldName, Aggregator<T> aggregator) {
    if (inputFieldName != null) {
        projectionValidation(new Fields(inputFieldName));
    }
    Fields allFields = getOutputFields();
    return partitionAggregate(allFields, aggregator, allFields);
}
/**
 * Applies the aggregator in two stages: a per-partition aggregation, then a global
 * re-aggregation of the partial results, keeping the input's output fields throughout.
 */
@Override
public Stream apply(Stream input) {
    Fields fields = input.getOutputFields();
    Stream partials = input.partitionAggregate(fields, _agg, fields);
    return partials.global().partitionAggregate(fields, _agg, fields);
}
/**
 * Queries the given state for each tuple, appending {@code functionFields} to the stream.
 * The query node is colocated with the state's node.
 *
 * @throws IllegalArgumentException if any of {@code inputFields} is not in this stream
 */
public Stream stateQuery(TridentState state, Fields inputFields, QueryFunction function, Fields functionFields) {
    projectionValidation(inputFields);
    String stateId = state._node.stateInfo.id;
    Fields combinedOutput = TridentUtils.fieldsConcat(getOutputFields(), functionFields);
    Node queryNode = new ProcessorNode(
            _topology.getUniqueStreamId(),
            _name,
            combinedOutput,
            functionFields,
            new StateQueryProcessor(stateId, inputFields, function));
    _topology._colocate.get(stateId).add(queryNode);
    return _topology.addSourcedNode(this, queryNode);
}
/**
 * Applies {@code function} to {@code inputFields} of each tuple, appending
 * {@code functionFields} to the stream's output.
 *
 * @throws IllegalArgumentException if any of {@code inputFields} is not in this stream
 */
@Override
public Stream each(Fields inputFields, Function function, Fields functionFields) {
    projectionValidation(inputFields);
    Fields combinedFields = TridentUtils.fieldsConcat(getOutputFields(), functionFields);
    ProcessorNode eachNode = new ProcessorNode(
            _topology.getUniqueStreamId(),
            _name,
            combinedFields,
            functionFields,
            new EachProcessor(inputFields, function));
    return _topology.addSourcedNode(this, eachNode);
}
//creates brand new tuples with brand new fields