@Override
public Object valAt(Object o) {
    // Look up a field value by Keyword or String name. Unknown field names
    // (signalled by getValueByField via IllegalArgumentException) yield null,
    // as do lookup keys of any other type.
    try {
        if (o instanceof Keyword) {
            return getValueByField(((Keyword) o).getName());
        }
        if (o instanceof String) {
            return getValueByField((String) o);
        }
    } catch (IllegalArgumentException ignored) {
        // Field not present in this tuple; fall through to the null return.
    }
    return null;
}
private IKvState<K, V> getSpecifiedKeyRangeState(Tuple input, Fields fields) {
    // Build the routing key: a single field's value when one field is
    // configured, otherwise a list of all the configured fields' values.
    final Object routingKey;
    if (fields.size() == 1) {
        routingKey = input.getValueByField(fields.get(0));
    } else {
        List<Object> compoundKey = Lists.newArrayList();
        for (String fieldName : fields) {
            compoundKey.add(input.getValueByField(fieldName));
        }
        routingKey = compoundKey;
    }
    return keyRangeState.getRangeStateByKey(routingKey);
}
}
private void updateMetrics(Tuple input) {
    // Cache the metrics uploaded by one worker and log how many arrived.
    final String workerSlot = (String) input.getValueByField(TopologyMaster.FIELD_METRIC_WORKER);
    final WorkerUploadMetrics uploaded =
            (WorkerUploadMetrics) input.getValueByField(TopologyMaster.FIELD_METRIC_METRICS);
    topologyMetricContext.addToMemCache(workerSlot, uploaded.get_allMetrics());
    metricLogger.info("received metrics from:{}, size:{}",
            workerSlot, uploaded.get_allMetrics().get_metrics_size());
}
@Override
public void execute(Tuple tuple) {
    // Buffer tuples keyed by their join-field values until one tuple has
    // arrived from every source stream, then emit the joined result.
    List<Object> joinKey = tuple.select(_idFields);
    GlobalStreamId source =
            new GlobalStreamId(tuple.getSourceComponent(), tuple.getSourceStreamId());

    Map<GlobalStreamId, Tuple> parts = _pending.get(joinKey);
    if (parts == null) {
        parts = new HashMap<GlobalStreamId, Tuple>();
        _pending.put(joinKey, parts);
    }
    if (parts.containsKey(source)) {
        throw new RuntimeException("Received same side of single join twice");
    }
    parts.put(source, tuple);

    if (parts.size() != _numSources) {
        return; // still waiting on at least one stream for this key
    }

    // All sides present: assemble the output fields from whichever stream
    // each one lives on, anchor the emit on every contributing tuple, ack all.
    _pending.remove(joinKey);
    List<Object> joinResult = new ArrayList<Object>();
    for (String outField : _outFields) {
        GlobalStreamId loc = _fieldLocations.get(outField);
        joinResult.add(parts.get(loc).getValueByField(outField));
    }
    _collector.emit(new ArrayList<Tuple>(parts.values()), joinResult);
    for (Tuple part : parts.values()) {
        _collector.ack(part);
    }
}
// Pull the checkpoint action and transaction id off the checkpoint stream, then
// gate further handling on shouldProcessTransaction.
// NOTE(review): fragment — the body of this if-block continues beyond this chunk.
Action action = (Action) input.getValueByField(CheckpointSpout.CHECKPOINT_FIELD_ACTION); long txid = input.getLongByField(CheckpointSpout.CHECKPOINT_FIELD_TXID); if (shouldProcessTransaction(action, txid)) {
// For tuples arriving on the topology-master control stream, extract the control
// event and check whether it is a transaction event destined for a control-message
// aware spout. NOTE(review): fragment — three blocks opened here close beyond this
// chunk; the non-control and non-transaction paths are not visible.
Tuple tuple = event; if (event.getSourceStreamId().equals(Common.TOPOLOGY_MASTER_CONTROL_STREAM_ID)) { TopoMasterCtrlEvent ctrlEvent = (TopoMasterCtrlEvent) tuple.getValueByField("ctrlEvent"); if (ctrlEvent.isTransactionEvent()) { if (spout instanceof ICtrlMsgSpout) {
/**
 * Copies every field of the tuple into a field-name -> value map.
 *
 * @param tuple the tuple to convert
 * @return a mutable map with one entry per tuple field
 */
private static Map<String, Object> tupleToMap(Tuple tuple) {
    // Typed instead of the raw Map/HashMap: removes unchecked-warning noise
    // at call sites; presized to the field count to avoid rehashing.
    Map<String, Object> values = new HashMap<>(tuple.getFields().size());
    for (String field : tuple.getFields()) {
        values.put(field, tuple.getValueByField(field));
    }
    return values;
}
@Override
public String getMessageFromTuple(Tuple tuple) {
    // Lazily create the JSON serializer the first time a message is requested.
    if (gson == null) {
        gson = new Gson();
    }
    final DEngineEvent engineEvent = (DEngineEvent) tuple.getValueByField(Constants.FIELD_EVENT);
    // Only the event's headers are serialized, not the whole event.
    return gson.toJson(engineEvent.getHeaders());
}
@Override
public void execute(Tuple input) {
    // Forward (lang, word) pairs downstream, dropping ignored words.
    final String language = (String) input.getValueByField("lang");
    final String token = (String) input.getValueByField("word");
    if (IGNORE_LIST.contains(token)) {
        return; // filtered out
    }
    collector.emit(new Values(language, token));
}
@Override
public void execute(Tuple tuple) {
    // Collect incoming events; the add is synchronized because the shared
    // list is guarded by its own monitor.
    if (!tuple.contains(EVENT)) {
        return;
    }
    final Event received = (Event) tuple.getValueByField(EVENT);
    synchronized (eventsReceived) {
        eventsReceived.add(received);
    }
}
/**
 * Maps every tuple field except the row-key field and the increment-amount
 * field to its string representation, in field-declaration order.
 *
 * @param tuple the input tuple
 * @return the column values as strings (a null field value becomes "null")
 */
@Override
public List<String> mapToColumnList(Tuple tuple) {
    List<String> result = new ArrayList<String>();
    for (String fieldName : tuple.getFields()) {
        if (fieldName.equals(rowKeyField) || fieldName.equals(incrementAmountField)) {
            continue; // key and amount are handled separately, not as columns
        }
        // String.valueOf fixes the NPE the old value.toString() threw when a
        // field carried a null value.
        result.add(String.valueOf(tuple.getValueByField(fieldName)));
    }
    return result;
}
@Override
public void execute(Tuple input) {
    // Print every field/value pair of the tuple using the configured format,
    // then ack it.
    for (final String fieldName : input.getFields()) {
        System.out.println(String.format(fieldValueFormat, fieldName, input.getValueByField(fieldName)));
    }
    collector.ack(input);
}
@Override
public void execute(Tuple input) {
    // Dump each field of the incoming tuple to stdout before acking.
    for (final String name : input.getFields()) {
        final Object fieldValue = input.getValueByField(name);
        final String line = String.format(fieldValueFormat, name, fieldValue);
        System.out.println(line);
    }
    collector.ack(input);
}
@Override
public void execute(Tuple input) {
    // Best-effort processing: a malformed tuple is logged and still acked,
    // so it is never replayed.
    try {
        processTweet((String) input.getValueByField("tweet"));
    } catch (final Exception e) {
        logger.error("Failed to read tweet from tuple: ", e);
    }
    collector.ack(input);
}
private void updateMetrics(Tuple input) {
    // Stash one worker's uploaded metrics into the in-memory cache and
    // record the batch size in the metric log.
    String slot = (String) input.getValueByField(TopologyMaster.FIELD_METRIC_WORKER);
    WorkerUploadMetrics report = (WorkerUploadMetrics) input.getValueByField(TopologyMaster.FIELD_METRIC_METRICS);
    topologyMetricContext.addToMemCache(slot, report.get_allMetrics());
    metricLogger.info("received metrics from:{}, size:{}", slot, report.get_allMetrics().get_metrics_size());
}
@Override
final protected void execute(Tuple input, RecordCollector collector) {
    // Pull the log record off the tuple, let the subclass hook process it,
    // then forward it downstream.
    final LogRecord logRecord = (LogRecord) input.getValueByField(RECORD);
    process(logRecord);
    collector.emit(logRecord);
}
@Override
public Composite mapToRowKey(Tuple tuple) {
    // Concatenate the configured row-key fields into a Composite key,
    // substituting the literal "[NULL]" for any missing value.
    Composite rowKey = new Composite();
    for (String fieldName : this.rowKeyFields) {
        Object value = tuple.getValueByField(fieldName);
        String component = (value == null) ? "[NULL]" : value.toString();
        rowKey.addComponent(component, StringSerializer.get());
    }
    return rowKey;
}
@Override
public void execute(Tuple tuple) {
    // Deliver the alert via HTTP callback; a delivery failure is reported as
    // an error tuple rather than failing the input, which is always acked.
    final Alert alert = (Alert) tuple.getValueByField(Constants.FIELD_ALERT);
    try {
        httpService.sendHttpCallback(alert);
    } catch (AlertDeliveryException e) {
        StormContextUtil.emitErrorTuple(collector, tuple, HttpBolt.class, alert.toString(),
                "Failed to make http callback", e);
    }
    collector.ack(tuple);
}
// Counts the attempt, deserializes the payload carried in FIELD_0, anchors it to
// the input tuple, and hands it to the router. On any exception: bump fail_count,
// log, and fail the tuple. NOTE(review): the success path does not ack here —
// presumably acking happens downstream after routing; confirm against the
// topology's acking strategy.
@Override public void execute(Tuple input) { try { this.streamContext.counter().incr("execute_count"); this.router.nextEvent(deserialize(input.getValueByField(AlertConstants.FIELD_0)).withAnchor(input)); } catch (Exception ex) { this.streamContext.counter().incr("fail_count"); LOG.error(ex.getMessage(), ex); this.collector.fail(input); } }
public FlowInfo(Tuple tuple) {
    // Rehydrate flow-tracking state from the tuple, advancing the
    // flow-operation index by one for this hop.
    flowId = tuple.getStringByField(FLOW_ID);
    event = (Event) tuple.getValueByField(EVENT);
    idx = tuple.getIntegerByField(FLOW_OP_IDX) + 1;
    streamName = tuple.getStringByField(STREAM_NAME);
    previousStream = tuple.getStringByField(LAST_STREAM);
    // Partition is optional on the tuple.
    if (tuple.contains(PARTITION)) {
        partition = tuple.getStringByField(PARTITION);
    }
}