.from(event) .sourceStream(input.getSourceStreamId()) .fieldsAndValues(mappedEventMap) .build(); .from(e) .sourceStream(outputStream) .fieldsAndValues(newFieldsAndValues) .build(); collector.emit(outputStream, input, new Values(toEmit));
.fieldsAndValues(fieldsMap) .dataSourceId("notificationsTestBolt") .header(header) .build(); collector.emit(consoleNotificationStream, new Values(streamlineEvent)); .fieldsAndValues(fieldsMap) .dataSourceId("notificationsTestBolt") .header(header) .build(); collector.emit(emailNotificationStream, new Values(streamlineEvent));
/**
 * Reads the configured file line by line and emits one StreamlineEvent per
 * well-formed line. Each line is expected to hold two delimiter-separated
 * values (first name, last name); malformed lines are logged and skipped.
 *
 * Fix: Files.newBufferedReader already returns a BufferedReader — the
 * original wrapped it in a second, redundant BufferedReader.
 */
@Override
public void nextTuple() {
    try (BufferedReader br = Files.newBufferedReader(Paths.get(path), StandardCharsets.UTF_8)) {
        String line;
        while ((line = br.readLine()) != null) {
            String[] result = line.split(delimiter);
            if (result.length != 2) {
                LOG.error("Format of input file not as expected. Expecting {} separated first name and last name", delimiter);
            } else {
                List<Object> values = new ArrayList<>();
                values.add(StreamlineEventImpl.builder()
                        .sourceStream(outputStream)
                        .put(FIRST_NAME, result[0].trim())
                        .put(LAST_NAME, result[1].trim())
                        .build());
                // Random UUID serves as the message id for acking.
                spoutOutputCollector.emit(outputStream, values, UUID.randomUUID());
            }
        }
    } catch (IOException e) {
        // Pass the exception to the logger so the stack trace is recorded,
        // not just the path.
        LOG.error("Got exception while reading file at {}", path, e);
        throw new RuntimeException(e);
    }
}
"Added PMML output (field,val)=({},{}) to StreamlineEvent"); final StreamlineEvent scoredEvent = eventBuilder.build(); LOG.debug("Scored StreamlineEvent {}", scoredEvent); finalEventBuilder.putAll(scoredEvent); finalEventBuilder.putAll(nonScoredFieldsEvent); streamsToEvents.put(stream.getId(), Collections.singletonList(finalEventBuilder.dataSourceId(modelId).build()));
Map<String, Object> record = recordOptional.get(); StreamlineEventImpl streamlineEvent = StreamlineEventImpl.builder() .fieldsAndValues(record) .dataSourceId(testRunSource.getId()) .build(); LOG.debug("Emitting event {} to stream {}", streamlineEvent, outputStream); collector.emit(outputStream, new Values(streamlineEvent), streamlineEvent.getId());
/**
 * Converts joined/correlated values into a StreamlineEvent and injects the
 * correlation metadata carried by {@code input}.
 *
 * @param input      the correlated values; a null input yields a null result
 * @param inputEvent the originating event whose dataSourceId/header/stream
 *                   are carried over when output fields are configured; used
 *                   as-is when no output fields are configured
 */
@Override
public StreamlineEvent convert(CorrelatedValues input, StreamlineEvent inputEvent) {
    // Lazily create the injector on first use.
    if (this.eventCorrelationInjector == null) {
        this.eventCorrelationInjector = new EventCorrelationInjector();
    }
    if (input == null) {
        return null;
    }
    StreamlineEvent converted;
    if (outputFields == null || outputFields.isEmpty()) {
        // No explicit output fields: pass the input event through unchanged.
        converted = inputEvent;
    } else {
        // Map each configured output field name to the value at the same index.
        StreamlineEventImpl.Builder eventBuilder = StreamlineEventImpl.builder();
        for (int idx = 0; idx < outputFields.size(); idx++) {
            eventBuilder.put(outputFields.get(idx), input.get(idx));
        }
        if (inputEvent == null) {
            converted = eventBuilder.build();
        } else {
            converted = eventBuilder
                    .dataSourceId(inputEvent.getDataSourceId())
                    .header(inputEvent.getHeader())
                    .sourceStream(inputEvent.getSourceStream())
                    .build();
        }
    }
    return eventCorrelationInjector.injectCorrelationInformation(converted, input.getCorrelated());
}
protected ArrayList<Object> doProjectionStreamLine(ArrayList<Tuple> tuplesRow, FieldSelector[] projectionKeys) { String finalOutputFieldNames[] = new String[rawCommaSeparatedOutputKeys.length]; for ( int i = 0; i < rawCommaSeparatedOutputKeys.length; ++i) { finalOutputFieldNames[i] = getAliasOrKeyName(rawCommaSeparatedOutputKeys[i]); } StreamlineEventImpl.Builder eventBuilder = StreamlineEventImpl.builder(); // Todo: note to self: may be able to optimize this ... perhaps inner loop can be outside to avoid rescanning tuples for ( int i = 0; i < projectionKeys.length; i++ ) { for ( Tuple cell : tuplesRow ) { Object field = lookupField(projectionKeys[i], cell) ; if (field != null) { eventBuilder.put(finalOutputFieldNames[i], field); break; } } } ArrayList<Object> resultRow = new ArrayList<>(); StreamlineEventImpl slEvent = eventBuilder.dataSourceId("multiple sources").build(); resultRow.add(slEvent); return resultRow; }
public List<Object> next() throws IOException, ParseException { List<Object> lineTuple = super.next(); if (lineTuple==null) return null; String jsonLine = (String) lineTuple.get(0); if ( jsonLine.trim().isEmpty() ) return next(); try { //1- convert Json to Map<> HashMap<String, Object> jsonMap = new ObjectMapper().readValue(jsonLine, HashMap.class); //2- make StreamlineEvent from map StreamlineEventImpl slEvent = StreamlineEventImpl.builder().putAll(jsonMap).dataSourceId("HdfsSpout").build(); //3- create tuple from StreamlineEvent return Collections.singletonList(slEvent); } catch (JsonProcessingException e) { throw new ParseException("Json parsing error at location : " + getFileOffset().toString(), e); } } }
/** * NOTE: Streamline specific convenience method. Creates output tuple as a StreamlineEvent * @param tuple1 can be null * @param tuple2 can be null * @return */ @Override protected List<Object> doProjection(Tuple tuple1, Tuple tuple2) { StreamlineEventImpl.Builder eventBuilder = StreamlineEventImpl.builder(); for ( int i = 0; i < outputFields.length; i++ ) { FieldSelector outField = outputFields[i]; Object field = outField.findField(tuple1) ; if (field==null) field = outField.findField(tuple2); String outputKeyName = dropStreamLineEventPrefix(outField.outputName ); eventBuilder.put(outputKeyName, field); // adds null if field is not found in both tuples } StreamlineEventImpl slEvent = eventBuilder.dataSourceId("multiple sources").build(); return Collections.singletonList(slEvent); }
/**
 * Merges all split events of a group back into one StreamlineEvent, pooling
 * both the regular fields and (where present) the auxiliary fields.
 * Later split events overwrite earlier ones on key collision.
 */
@Override
public StreamlineEvent join(EventGroup eventGroup) {
    Map<String, Object> mergedFields = new HashMap<>();
    Map<String, Object> mergedAuxiliary = new HashMap<>();
    for (StreamlineEvent partial : eventGroup.getSplitEvents()) {
        Map<String, Object> auxiliary = partial.getAuxiliaryFieldsAndValues();
        if (auxiliary != null) {
            mergedAuxiliary.putAll(auxiliary);
        }
        mergedFields.putAll(partial);
    }
    return StreamlineEventImpl.builder()
            .fieldsAndValues(mergedFields)
            .dataSourceId(eventGroup.getDataSourceId())
            .auxiliaryFieldsAndValues(mergedAuxiliary)
            .build();
}
}
/**
 * Reconstructs a StreamlineEvent from its toString() representation by
 * parsing the JSON payload that follows the toString prefix.
 *
 * @throws RuntimeException wrapping any parse failure
 */
public static StreamlineEvent fromString(String s) {
    try {
        // Everything after the prefix is a JSON-encoded map of the event parts.
        String json = s.substring(s.indexOf(TO_STRING_PREFIX) + TO_STRING_PREFIX.length());
        Map<String, Object> parsed = new ObjectMapper().readValue(json, new TypeReference<Map<String, Object>>() {});
        return StreamlineEventImpl.builder()
                .header((Map<String, Object>) parsed.get("header"))
                .sourceStream((String) parsed.get("sourceStream"))
                .auxiliaryFieldsAndValues((Map<String, Object>) parsed.get("auxiliaryFieldsAndValues"))
                .dataSourceId((String) parsed.get("dataSourceId"))
                .putAll((Map<String, Object>) parsed.get("fieldsAndValues"))
                .build();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Returns a new StreamlineEvent with the given headers added to the existing headers.
 * All the other fields are copied from this event.
 *
 * Fix: the {@code @param} doc wrongly described the argument as a "map of
 * fieldsAndValues"; it is a map of header entries.
 *
 * @param headers the map of header keys and values to add or overwrite
 * @return the new StreamlineEvent
 */
@Override
public StreamlineEvent addHeaders(Map<String, Object> headers) {
    // Copy-on-write merge: start from the current header, new entries win.
    Map<String, Object> mergedHeader = new HashMap<>(this.getHeader());
    mergedHeader.putAll(headers);
    return StreamlineEventImpl.builder().from(this)
            .header(mergedHeader)
            .build();
}
/** Rebuilds the event so its source stream matches the stream the tuple arrived on. */
private StreamlineEvent getStreamlineEventWithStream(StreamlineEvent event, Tuple tuple) {
    StreamlineEventImpl.Builder builder = StreamlineEventImpl.builder().from(event);
    return builder.sourceStream(tuple.getSourceStreamId()).build();
}
public void process(Tuple inputTuple, StreamlineEvent event) throws Exception { LOG.debug("Normalizing received StreamlineEvent: [{}] with tuple: [{}]", event, inputTuple); //todo this bolt will be replaced with custom baseprocessor bolt. StreamlineEventImpl eventWithStream = StreamlineEventImpl.builder() .from(event) .sourceStream(inputTuple.getSourceStreamId()) .build(); List<Result> outputEvents = normalizationProcessorRuntime.process(eventWithStream); LOG.debug("Emitting events to collector: [{}]", outputEvents); for (Result outputEvent : outputEvents) { for (StreamlineEvent e : outputEvent.events) { collector.emit(outputEvent.stream, inputTuple, new Values(e)); } } }
/** Returns a copy of the event whose source stream is the given stream name. */
private StreamlineEvent getStreamlineEvent(StreamlineEvent event, String stream) {
    StreamlineEventImpl.Builder copier = StreamlineEventImpl.builder().from(event);
    return copier.sourceStream(stream).build();
}
/** Copies the event, stamping it with the tuple's source stream id. */
private StreamlineEvent getStreamlineEventWithStream(StreamlineEvent event, Tuple tuple) {
    String streamId = tuple.getSourceStreamId();
    return StreamlineEventImpl.builder().from(event).sourceStream(streamId).build();
}
/**
 * Normalizes the event's fields and returns a copy of the event carrying
 * the normalized field map.
 *
 * @throws NormalizationException propagated from {@code normalize}
 */
public final StreamlineEvent execute(StreamlineEvent event) throws NormalizationException {
    Map<String, Object> normalized = normalize(event);
    return StreamlineEventImpl.builder()
            .from(event)
            .fieldsAndValues(normalized)
            .build();
}
/**
 * Rebuilds the event on the tuple's source stream, then wraps it so the
 * original event's id is preserved (builder copies do not retain the id).
 */
private StreamlineEvent getStreamlineEventWithStream(StreamlineEvent event, Tuple tuple) {
    StreamlineEventImpl rebuilt = StreamlineEventImpl.builder()
            .from(event)
            .sourceStream(tuple.getSourceStreamId())
            .build();
    return new IdPreservedStreamlineEvent(rebuilt, event.getId());
}
/**
 * Adds the transform's fixed headers (if any) plus provenance metadata
 * (datasource id, event id, timestamp) to the event's header and returns
 * the resulting event as a singleton list.
 */
@Override
public List<StreamlineEvent> execute(StreamlineEvent input) {
    Map<String, Object> header = new HashMap<>(input.getHeader());
    Map<String, Object> fixedHeader = addHeaderTransform.getFixedHeader();
    if (fixedHeader != null) {
        header.putAll(fixedHeader);
    }
    // Stamp provenance and timing information onto the outgoing event.
    header.put(HEADER_FIELD_DATASOURCE_IDS, Collections.singletonList(input.getDataSourceId()));
    header.put(HEADER_FIELD_EVENT_IDS, Collections.singletonList(input.getId()));
    header.put(HEADER_FIELD_TIMESTAMP, System.currentTimeMillis());
    StreamlineEvent withHeaders = StreamlineEventImpl.builder().from(input).header(header).build();
    return Collections.singletonList(withHeaders);
}