/**
 * Reconstructs a {@link StreamlineEvent} from its {@code toString()} representation.
 * <p>
 * The input must contain {@code TO_STRING_PREFIX} followed by a JSON object with the keys
 * {@code header}, {@code sourceStream}, {@code auxiliaryFieldsAndValues},
 * {@code dataSourceId} and {@code fieldsAndValues}.
 *
 * @param s the serialized event string
 * @return the deserialized event
 * @throws IllegalArgumentException if {@code s} does not contain {@code TO_STRING_PREFIX}
 * @throws RuntimeException if the JSON payload cannot be parsed (wraps the IOException)
 */
@SuppressWarnings("unchecked")
public static StreamlineEvent fromString(String s) {
    int prefixIdx = s.indexOf(TO_STRING_PREFIX);
    if (prefixIdx < 0) {
        // Previously indexOf() == -1 silently produced a garbage substring offset;
        // fail fast with a clear message instead.
        throw new IllegalArgumentException("Expected '" + TO_STRING_PREFIX + "' in input: " + s);
    }
    ObjectMapper mapper = new ObjectMapper();
    try {
        Map<String, Object> event = mapper.readValue(
                s.substring(prefixIdx + TO_STRING_PREFIX.length()),
                new TypeReference<Map<String, Object>>() {});
        return StreamlineEventImpl.builder()
                .header((Map<String, Object>) event.get("header"))
                .sourceStream((String) event.get("sourceStream"))
                .auxiliaryFieldsAndValues((Map<String, Object>) event.get("auxiliaryFieldsAndValues"))
                .dataSourceId((String) event.get("dataSourceId"))
                .putAll((Map<String, Object>) event.get("fieldsAndValues"))
                .build();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Merges configured default values into the incoming event: every key from the merge
 * transform's defaults that the input does not already contain is added to the output.
 *
 * @param input the incoming event
 * @return a single-element list holding the merged event, carrying the input's data source id
 */
@Override
public List<StreamlineEvent> execute(StreamlineEvent input) {
    StreamlineEventImpl.Builder merged = StreamlineEventImpl.builder();
    merged.putAll(input);
    // Defaults never override a value the input already carries.
    mergeTransform.getDefaults().forEach((key, value) -> {
        if (!input.containsKey(key)) {
            merged.put(key, value);
        }
    });
    merged.dataSourceId(input.getDataSourceId());
    return Collections.<StreamlineEvent>singletonList(merged.build());
}
public List<Object> next() throws IOException, ParseException { List<Object> lineTuple = super.next(); if (lineTuple==null) return null; String jsonLine = (String) lineTuple.get(0); if ( jsonLine.trim().isEmpty() ) return next(); try { //1- convert Json to Map<> HashMap<String, Object> jsonMap = new ObjectMapper().readValue(jsonLine, HashMap.class); //2- make StreamlineEvent from map StreamlineEventImpl slEvent = StreamlineEventImpl.builder().putAll(jsonMap).dataSourceId("HdfsSpout").build(); //3- create tuple from StreamlineEvent return Collections.singletonList(slEvent); } catch (JsonProcessingException e) { throw new ParseException("Json parsing error at location : " + getFileOffset().toString(), e); } } }
/**
 * Deserializes the payload of the given Kafka record into key/value pairs, wraps them in a
 * {@link StreamlineEvent}, and returns a tuple routed to the configured output stream.
 * (A {@code KafkaTuple} is itself a {@code List<Object>}, satisfying the return type.)
 *
 * @param consumerRecord the record whose value buffer carries the serialized payload
 * @return the tuple to emit
 */
@Override
public List<Object> apply(ConsumerRecord<Object, ByteBuffer> consumerRecord) {
    ByteBufferInputStream payload = new ByteBufferInputStream(consumerRecord.value());
    @SuppressWarnings("unchecked")
    Map<String, Object> keyValues =
            (Map<String, Object>) deserializer().deserialize(payload, readerSchemaVersion);
    StreamlineEvent event = StreamlineEventImpl.builder()
            .putAll(keyValues)
            .dataSourceId(dataSourceId)
            .build();
    KafkaTuple tuple = new KafkaTuple(event);
    tuple.routedTo(outputStream);
    return tuple;
}
/**
 * Initializes this builder from an existing event, copying its header, source stream,
 * data source id, auxiliary fields, and all of its field/value pairs.
 *
 * @param other the event to copy from
 * @return this builder, for chaining
 */
public Builder from(StreamlineEvent other) {
    header(other.getHeader());
    sourceStream(other.getSourceStream());
    dataSourceId(other.getDataSourceId());
    auxiliaryFieldsAndValues(other.getAuxiliaryFieldsAndValues());
    return putAll(other);
}
/**
 * Returns a new Streamline event with the given fieldsAndValues added to the existing
 * fieldsAndValues. This event is not modified.
 *
 * @param fieldsAndValues the map of fieldsAndValues to add; must not be null
 * @return the new StreamlineEvent
 * @throws NullPointerException if {@code fieldsAndValues} is null
 */
@Override
public StreamlineEvent addFieldsAndValues(Map<String, Object> fieldsAndValues) {
    // Fixed: the null-check message previously said "keyValues", which does not
    // match the parameter name and misled callers debugging the NPE.
    Objects.requireNonNull(fieldsAndValues, "fieldsAndValues is null");
    return builder().from(this).putAll(fieldsAndValues).build();
}