/** Extracts the message id from the configured msg-id field of the tuple. */
private long getMsgId(Tuple input) {
    final Long msgId = input.getLongByField(msgIdFieldName);
    return msgId;
}
/**
 * Routes an incoming tuple into the window manager.
 *
 * <p>When a tuple-timestamp field is configured ({@code isTupleTs()}), the
 * timestamp is read from that field and the tuple is tracked against the
 * watermark generator; tuples behind the watermark are considered late and
 * dropped with an INFO log. Without a timestamp field the tuple is added
 * using processing time semantics (no explicit ts).
 */
@Override
public void execute(Tuple input) {
    if (isTupleTs()) {
        long ts = input.getLongByField(tupleTsFieldName);
        if (waterMarkEventGenerator.track(input.getSourceGlobalStreamid(), ts)) {
            windowManager.add(input, ts);
        } else {
            // Fixed grammar of the original message ("This will not processed.").
            LOG.info("Received a late tuple {} with ts {}. This will not be processed.", input, ts);
        }
    } else {
        windowManager.add(input);
    }
}
// NOTE(review): incomplete fragment — the enclosing method and the close of
// this if-block are outside this view. It reads the checkpoint transaction id
// from the tuple and, when shouldProcessTransaction() accepts the (action,
// txid) pair, logs it at DEBUG; the actual processing presumably follows in
// code not visible here — confirm against the full file.
long txid = input.getLongByField(CheckpointSpout.CHECKPOINT_FIELD_TXID); if (shouldProcessTransaction(action, txid)) { LOG.debug("Processing action {}, txid {}", action, txid);
/** Reads the increment amount from the configured field of the tuple. */
@Override
public long mapToIncrementAmount(Tuple tuple) {
    final Long amount = tuple.getLongByField(incrementAmountField);
    return amount;
}
/** Returns the message id stored under {@code msgIdFieldName}. */
private long getMsgId(Tuple input) {
    return input.getLongByField(msgIdFieldName).longValue();
}
/**
 * Orders tuples ascending by their TIMESTAMP field.
 *
 * <p>Uses {@link Long#compare(long, long)} instead of the original's
 * autoboxed {@code Long.compareTo}, making the primitive comparison explicit
 * and avoiding one box allocation per comparison. A null timestamp still
 * fails with an NPE, as before (now at the unboxing site).
 */
@Override
public int compareTo(ComparableTuple o) {
    return Long.compare(
            this.tuple.getLongByField(Field.TIMESTAMP),
            o.tuple.getLongByField(Field.TIMESTAMP));
}
}
/**
 * Maps the tuple to its output key, lazily creating and then reusing a
 * single LongWritable instance to avoid a per-tuple allocation.
 */
@Override
public Writable key(Tuple tuple) {
    if (this.key == null) {
        this.key = new LongWritable();
    }
    long keyValue = tuple.getLongByField(this.keyField);
    this.key.set(keyValue);
    return this.key;
}
/**
 * Aggregates metrics over the window [startTime, startTime +
 * aggregationDuration seconds), acking the tuple on success and failing it
 * (for replay by the spout) on failure.
 */
@Override
public void execute(Tuple tuple) {
    // Primitive longs: the original used boxed Long for pure arithmetic,
    // causing pointless (un)boxing on every tuple.
    long startTime = tuple.getLongByField("startTime");
    LOG.info("get startTime {}", startTime);
    // 1000L forces the multiply into long arithmetic; with an int
    // aggregationDuration the original's `* 1000` could overflow in int
    // before widening.
    long endTime = startTime + stormConfig.aggregationDuration * 1000L;
    if (metricsAggregateContainer.aggregate(startTime, endTime)) {
        collector.ack(tuple);
        LOG.info("succeed startTime {}", startTime);
    } else {
        collector.fail(tuple);
        LOG.warn("failed startTime {}", startTime);
    }
}
// Joins per-plug median tuples with the global median for the same timestamp.
// A tuple from the GLOBAL_MEDIAN component records (timestamp -> global median
// load) in globalMedianBacklog, then drains buffered per-plug tuples whose
// TIMESTAMP equals the one just received; any other tuple is processed as a
// per-plug median directly. NOTE(review): the drain condition only matches an
// exactly equal timestamp — buffered tuples with older timestamps would remain
// queued; presumably upstream delivers timestamps in order, confirm.
@Override public void execute(Tuple tuple) { String component = tuple.getSourceComponent(); if (component.equals(Component.GLOBAL_MEDIAN)) { long timestamp = tuple.getLongByField(Field.TIMESTAMP); double globalMedianLoad = tuple.getDoubleByField(Field.GLOBAL_MEDIAN_LOAD); globalMedianBacklog.put(timestamp, globalMedianLoad); // ordered based on the timestamps while (!unprocessedMessages.isEmpty() && unprocessedMessages.peek().tuple.getLongByField(Field.TIMESTAMP).equals(timestamp)) { Tuple perPlugMedianTuple = unprocessedMessages.poll().tuple; processPerPlugMedianTuple(perPlugMedianTuple); } } else { processPerPlugMedianTuple(tuple); } }
/**
 * Packages the error details carried by the tuple into a JSON object and
 * forwards it on the Kafka error stream, keyed by the originating bolt.
 */
@Override
public void execute(Tuple input) {
    final String sourceBolt = input.getStringByField(Constants.ERROR_SOURCE_BOLT);

    final JsonObject error = new JsonObject();
    error.addProperty(Constants.ERROR_TIMESTAMP, input.getLongByField(Constants.ERROR_TIMESTAMP));
    error.addProperty(Constants.ERROR_MESSAGE, input.getStringByField(Constants.ERROR_MESSAGE));
    error.addProperty(Constants.ERROR_EXCEPTION, input.getStringByField(Constants.ERROR_EXCEPTION));
    error.addProperty(Constants.ERROR_SOURCE, input.getStringByField(Constants.ERROR_SOURCE));
    error.addProperty(Constants.ERROR_SOURCE_BOLT, sourceBolt);
    error.addProperty(Constants.ERROR_EXCEPTION_MESSAGE, input.getStringByField(Constants.ERROR_EXCEPTION_MESSAGE));

    if (debug) {
        logger.info(error.toString());
    }
    collector.emit(Constants.KAFKA_ERROR_STREAM, input, new Values(sourceBolt, error.toString()));
    collector.ack(input);
}
/**
 * Serializes the error fields of the tuple into JSON and emits the result on
 * the Kafka error stream together with the name of the bolt that raised it.
 */
@Override
public void execute(Tuple input) {
    final String bolt = input.getStringByField(Constants.ERROR_SOURCE_BOLT);
    final JsonObject json = new JsonObject();

    // Property order is preserved in the serialized string, so it matches
    // the field order used elsewhere for these error events.
    json.addProperty(Constants.ERROR_TIMESTAMP, input.getLongByField(Constants.ERROR_TIMESTAMP));
    json.addProperty(Constants.ERROR_MESSAGE, input.getStringByField(Constants.ERROR_MESSAGE));
    json.addProperty(Constants.ERROR_EXCEPTION, input.getStringByField(Constants.ERROR_EXCEPTION));
    json.addProperty(Constants.ERROR_SOURCE, input.getStringByField(Constants.ERROR_SOURCE));
    json.addProperty(Constants.ERROR_SOURCE_BOLT, bolt);
    json.addProperty(Constants.ERROR_EXCEPTION_MESSAGE, input.getStringByField(Constants.ERROR_EXCEPTION_MESSAGE));

    final String payload = json.toString();
    if (debug) {
        logger.info(payload);
    }
    collector.emit(Constants.KAFKA_ERROR_STREAM, input, new Values(bolt, payload));
    collector.ack(input);
}
@Override public void execute(Tuple input) { long timestamp = input.getLongByField(Field.TIMESTAMP); String id = input.getStringByField(Field.ID); double dataInstanceAnomalyScore = input.getDoubleByField(Field.DATAINST_ANOMALY_SCORE); Queue<Double> slidingWindow = slidingWindowMap.get(id); if (slidingWindow == null) { slidingWindow = new LinkedList<>(); } // update sliding window slidingWindow.add(dataInstanceAnomalyScore); if (slidingWindow.size() > this.windowLength) { slidingWindow.poll(); } slidingWindowMap.put(id, slidingWindow); double sumScore = 0.0; for (double score : slidingWindow) { sumScore += score; } collector.emit(new Values(id, sumScore, timestamp, input.getValue(3), dataInstanceAnomalyScore)); collector.ack(input); }
/**
 * Adds the tuple to the window manager. With a configured tuple-timestamp
 * field the tuple is tracked against the watermark generator and dropped
 * (with an INFO log) when late; otherwise it is added without an explicit
 * timestamp.
 */
@Override
public void execute(Tuple input) {
    if (isTupleTs()) {
        long ts = input.getLongByField(tupleTsFieldName);
        if (waterMarkEventGenerator.track(input.getSourceGlobalStreamid(), ts)) {
            windowManager.add(input, ts);
        } else {
            // Fixed grammar of the original message ("This will not processed.").
            LOG.info("Received a late tuple {} with ts {}. This will not be processed.", input, ts);
        }
    } else {
        windowManager.add(input);
    }
}
@Override public void execute(Tuple tuple) { JsonObject obj = new JsonObject(); obj.addProperty(Constants.FIELD_TIMESTAMP, tuple.getLongByField(Constants.FIELD_TIMESTAMP)); obj.addProperty(Constants.FIELD_AGGREGATION_WINDOW, tuple.getIntegerByField(Constants.FIELD_AGGREGATION_WINDOW)); obj.addProperty(Constants.FIELD_RULE_ACTION_ID, tuple.getStringByField(Constants.FIELD_RULE_ACTION_ID)); obj.addProperty(Constants.FIELD_AGGREGATION_KEY, tuple.getStringByField(Constants.FIELD_AGGREGATION_KEY)); if (tuple.contains(Constants.FIELD_STATE_TRACK)) { obj.addProperty(Constants.FIELD_STATE_TRACK, tuple.getBooleanByField(Constants.FIELD_STATE_TRACK)); } else if (tuple.contains(Constants.FIELD_AGGREGATION_VALUE)) { obj.addProperty(Constants.FIELD_AGGREGATION_VALUE, tuple.getValueByField(Constants.FIELD_AGGREGATION_VALUE).toString()); } else { // invalid event collector.fail(tuple); return; } collector.emit(tuple, new Values(tuple.getStringByField(Constants.FIELD_RULE_ACTION_ID) + "_" + tuple.getStringByField(Constants.FIELD_AGGREGATION_KEY), gson.toJson(obj))); collector.ack(tuple); }
// Micro-batches observations by timestamp: while tuples share a timestamp they
// accumulate in observationList; when a tuple with a newer timestamp arrives,
// the completed batch is scored via dataInstanceScorer and one
// (id, score, batchTimestamp, obj) tuple is emitted per ScorePackage before
// the new batch starts. NOTE(review): the final batch is only flushed when a
// later timestamp arrives — the last batch at shutdown appears unemitted;
// confirm a terminal/flush signal exists elsewhere.
@Override public void execute(Tuple input) { long timestamp = input.getLongByField(Field.TIMESTAMP); if (timestamp > previousTimestamp) { // a new batch of observation, calculate the scores of old batch and then emit if (!observationList.isEmpty()) { List<ScorePackage> scorePackageList = dataInstanceScorer.getScores(observationList); for (ScorePackage scorePackage : scorePackageList) { collector.emit(new Values(scorePackage.getId(), scorePackage.getScore(), previousTimestamp, scorePackage.getObj())); } observationList.clear(); } previousTimestamp = timestamp; } observationList.add(input.getValueByField(Field.OBSERVATION)); collector.ack(input); }
/**
 * Counts tuples per minute, bounding the number of tracked minutes with a
 * ring buffer: when the buffer is full the oldest minute's count is evicted.
 * Emits (minute, runningCount) anchored to the input tuple, then acks.
 */
@Override
public void execute(Tuple input) {
    final long minute = input.getLongByField(Field.TIMESTAMP_MINUTES);
    MutableLong count = counts.get(minute);
    if (count != null) {
        count.increment();
    } else {
        // First tuple for this minute: make room if necessary, start at 1.
        if (buffer.isFull()) {
            long evictedMinute = (Long) buffer.remove();
            counts.remove(evictedMinute);
        }
        count = new MutableLong(1);
        counts.put(minute, count);
        buffer.add(minute);
    }
    collector.emit(input, new Values(minute, count.longValue()));
    collector.ack(input);
}
/** Verifies that a well-formed state-track aggregation tuple is serialized and acked. */
@Test
public void testBasicSerialization() {
    AggregationSerializerBolt bolt = new AggregationSerializerBolt();
    bolt.prepare(new HashMap<String, String>(), null, collector);

    // Stub a tuple carrying a state-track event.
    when(tuple.getLongByField(Constants.FIELD_TIMESTAMP)).thenReturn(143322L);
    when(tuple.getStringByField(Constants.FIELD_RULE_ACTION_ID)).thenReturn("34_22");
    when(tuple.getStringByField(Constants.FIELD_AGGREGATION_KEY)).thenReturn("host1");
    when(tuple.getIntegerByField(Constants.FIELD_AGGREGATION_WINDOW)).thenReturn(100);
    when(tuple.contains(Constants.FIELD_STATE_TRACK)).thenReturn(true);
    when(tuple.getBooleanByField(Constants.FIELD_STATE_TRACK)).thenReturn(true);

    bolt.execute(tuple);

    verify(collector, times(1)).ack(tuple);
}
@Override public List<StreamEvent> map(Tuple tuple) throws Exception { long timestamp; if (tuple.getFields().contains(TIMESTAMP_FIELD)) { try { timestamp = tuple.getLongByField("timestamp"); } catch (Exception ex) { // if timestamp is not null LOGGER.error(ex.getMessage(), ex); timestamp = 0; } } else { timestamp = System.currentTimeMillis(); } Object[] values = new Object[tuple.getFields().size()]; for (int i = 0; i < tuple.getFields().size(); i++) { values[i] = tuple.getValue(i); } StreamEvent event = new StreamEvent(); event.setTimestamp(timestamp); event.setStreamId(streamId); event.setData(values); return Collections.singletonList(event); } }
// Feeds one tuple into the aggregation engine and micro-batches acks: tuples
// are buffered and acked together once bufferSize is reached (via
// flushAckAndClearBuffer). An AggregationRejectException is reported as an
// error tuple but still acked (the input is not replayable/retryable); an
// IOException from the engine fails the whole buffered batch
// (failAndClearBuffer) so the batch is replayed. The aggregationHit metric is
// scoped by the rule id parsed from FIELD_RULE_ACTION_ID. NOTE(review): the
// metric is incremented before engine.aggregate(), so rejected tuples are
// still counted as hits — confirm that is intended.
protected void aggregate(Tuple tuple) { try { aggregationHit.scope(Utils.separateRuleActionId(tuple.getStringByField(Constants.FIELD_RULE_ACTION_ID)) .getKey().toString()).incr(); engine.aggregate(tuple.getLongByField(Constants.FIELD_TIMESTAMP), tuple.getIntegerByField(Constants.FIELD_AGGREGATION_WINDOW), tuple.getStringByField(Constants.FIELD_RULE_ACTION_ID), tuple.getStringByField(Constants.FIELD_AGGREGATION_KEY), tuple.getStringByField(Constants.FIELD_AGGREGATION_VALUE)); buffer.add(tuple); if (buffer.size() >= bufferSize) { flushAckAndClearBuffer(); } } catch (AggregationRejectException e) { StormContextUtil.emitErrorTuple(collector, tuple, MarkovianAggregationBolt.class, "", "Aggregation rejected", e); collector.ack(tuple); } catch (IOException e) { failAndClearBuffer(); StormContextUtil.emitErrorTuple(collector, tuple, MarkovianAggregationBolt.class, "", "Aggregation flush failed", e); } }
@Override public void execute(Tuple tuple) { int operation = tuple.getIntegerByField(Field.SLIDING_WINDOW_ACTION); double value = tuple.getDoubleByField(Field.VALUE); long timestamp = tuple.getLongByField(Field.TIMESTAMP); if (operation == SlidingWindowAction.ADD){ double median = medianCalc.getMedian(value); if (lastUpdatedTs < timestamp) { // the sliding window has moved lastUpdatedTs = timestamp; collector.emit(new Values(timestamp, median)); } } else { medianCalc.remove(value); } }