/**
 * Builds a {@link ComponentMetric} from one Storm component's stats map.
 * Missing counters default to zero so callers never observe null values.
 */
private ComponentMetric extractMetric(String componentName, Map<String, ?> componentMap) {
    Long input = getLongValueOrDefault(componentMap, STATS_JSON_EXECUTED_TUPLES, 0L);
    Long output = getLongValueOrDefault(componentMap, STATS_JSON_EMITTED_TUPLES, 0L);
    Long failed = getLongValueOrDefault(componentMap, STATS_JSON_FAILED_TUPLES, 0L);
    Double latency = getDoubleValueFromStringOrDefault(componentMap, STATS_JSON_PROCESS_LATENCY, 0.0d);
    // Strip the Storm-side decoration from the component name before reporting.
    String streamlineName = StormTopologyUtil.extractStreamlineComponentName(componentName);
    return new ComponentMetric(streamlineName, input, output, failed, latency);
}
private Map<Long, Double> queryTopologyMetrics(String stormTopologyName, StormMappedMetric mappedMetric, long from, long to) { Map<Long, Double> metrics = Collections.emptyMap(); // empty if time-series querier is not set if (timeSeriesQuerier != null) { metrics = timeSeriesQuerier.getTopologyLevelMetrics(stormTopologyName, mappedMetric.getStormMetricName(), mappedMetric.getAggregateFunction(), from, to); } return new TreeMap<>(metrics); }
/**
 * Converts each component stats map into a ComponentMetric and stores it in
 * {@code metricMap} keyed by the extracted streamline component id.
 */
private void extractMetrics(Map<String, ComponentMetric> metricMap, List<Map<String, ?>> components, String topologyJsonID) {
    for (Map<String, ?> componentMap : components) {
        String stormName = (String) componentMap.get(topologyJsonID);
        metricMap.put(StormTopologyUtil.extractStreamlineComponentId(stormName),
                extractMetric(stormName, componentMap));
    }
}
/**
 * Collects Kafka logsize/offset/lag time series for the given source component.
 * (Method name's lowercase 'k' is fixed by the overridden interface.)
 *
 * @throws IllegalStateException when the Kafka topic name cannot be resolved
 *                               from the source configuration
 */
@Override
public Map<String, Map<Long, Double>> getkafkaTopicOffsets(TopologyLayout topology, Component component, long from, long to, String asUser) {
    String stormTopologyName = StormTopologyUtil.findOrGenerateTopologyName(client, topology.getId(), topology.getName(), asUser);
    String stormComponentName = getComponentName(component);
    String topicName = findKafkaTopicName(component);
    if (topicName == null) {
        throw new IllegalStateException("Cannot find Kafka topic name from source config - topology name: " + topology.getName() + " / source : " + component.getName());
    }
    Map<String, Map<Long, Double>> kafkaOffsets = new HashMap<>();
    for (StormMappedMetric metric : new StormMappedMetric[]{StormMappedMetric.logsize, StormMappedMetric.offset, StormMappedMetric.lag}) {
        kafkaOffsets.put(metric.name(),
                queryKafkaMetrics(stormTopologyName, stormComponentName, metric, topicName, from, to));
    }
    return kafkaOffsets;
}
@Override public TimeSeriesComponentMetric getComponentStats(TopologyLayout topology, Component component, long from, long to, String asUser) { String stormTopologyName = StormTopologyUtil.findOrGenerateTopologyName(client, topology.getId(), topology.getName(), asUser); String stormComponentName = getComponentName(component); StormMappedMetric[] stats; if (component instanceof Source) { stats = STATS_METRICS_SOURCE; } else { stats = STATS_METRICS; } // empty map if time-series DB is not set to the namespace Map<String, Map<Long, Double>> componentStats = ParallelStreamUtil.execute(() -> Arrays.asList(stats) .parallelStream() .collect(toMap(m -> m.name(), m -> queryComponentMetrics(stormTopologyName, stormComponentName, m, from, to))), FORK_JOIN_POOL); return buildTimeSeriesComponentMetric(component.getName(), componentStats); }
// NOTE(review): fragment of a larger method — the braces opened here close outside this view.
// Scans per-window topology stats and tracks the smallest (shortest) time window seen.
Map<String, ?> responseMap = getTopologyInfo(topologyId, asUser);
for (Map<String, ?> topoStats : topologyStatsList) {
    String windowStr = (String) topoStats.get(TOPOLOGY_JSON_WINDOW);
    // Window strings (e.g. ":all-time", "600") are normalized to seconds, capped by uptime.
    Long window = convertWindowString(windowStr, uptimeSeconds);
    if (smallestWindow > window) {
        smallestWindow = window;
        // NOTE(review): the values below read from 'topologyStatsMap', not the loop
        // variable 'topoStats' — presumably topologyStatsMap is (re)assigned to the
        // selected window's stats somewhere outside this view; confirm, otherwise
        // these would reflect a stale window.
        Long acked = getLongValueOrDefault(topologyStatsMap, STATS_JSON_ACKED_TUPLES, 0L);
        Long failedRecords = getLongValueOrDefault(topologyStatsMap, STATS_JSON_FAILED_TUPLES, 0L);
        Double completeLatency = getDoubleValueFromStringOrDefault(topologyStatsMap, STATS_JSON_COMPLETE_LATENCY, 0.0d);
        Long emittedTotal = getLongValueOrDefault(topologyStatsMap, STATS_JSON_EMITTED_TUPLES, 0L);
        Long transferred = getLongValueOrDefault(topologyStatsMap, STATS_JSON_TRANSFERRED_TUPLES, 0L);
        // Errors are aggregated across every spout and bolt via separate REST calls.
        Long errorsTotal = getErrorCountFromAllComponents(topologyId, spouts, bolts, asUser);
/**
 * Fetches topology-level time-series stats in parallel, one query per metric
 * in the source metric set, and assembles them into a TimeSeriesComponentMetric.
 */
@Override
public TimeSeriesComponentMetric getTopologyStats(TopologyLayout topology, long from, long to, String asUser) {
    String stormTopologyName = StormTopologyUtil.findOrGenerateTopologyName(client, topology.getId(), topology.getName(), asUser);
    Map<String, Map<Long, Double>> stats = ParallelStreamUtil.execute(() ->
            Arrays.stream(STATS_METRICS_SOURCE)
                    .parallel()
                    .collect(toMap(StormMappedMetric::name,
                            m -> queryTopologyMetrics(stormTopologyName, m, from, to))),
            FORK_JOIN_POOL);
    return buildTimeSeriesComponentMetric(topology.getName(), stats);
}
/**
 * Assembles a TimeSeriesComponentMetric from per-metric time series.
 * Core series (input/output/failed/processedTime/waitQueue) default to empty
 * maps when absent; optional series go into {@code misc} only when present.
 *
 * Fix: the ackedRecords entry was previously put into {@code misc}
 * unconditionally, storing a null map when the key was absent. Readers access
 * misc via {@code Map.getOrDefault}, which returns a present-but-null value
 * rather than the default, leading to a latent NPE — so guard it with
 * containsKey, consistent with completeLatency and executeTime.
 */
private TimeSeriesComponentMetric buildTimeSeriesComponentMetric(String name, Map<String, Map<Long, Double>> stats) {
    Map<String, Map<Long, Double>> misc = new HashMap<>();
    if (stats.containsKey(StormMappedMetric.ackedRecords.name())) {
        misc.put(StormMappedMetric.ackedRecords.name(), stats.get(StormMappedMetric.ackedRecords.name()));
    }
    if (stats.containsKey(StormMappedMetric.completeLatency.name())) {
        misc.put(StormMappedMetric.completeLatency.name(), stats.get(StormMappedMetric.completeLatency.name()));
    }
    if (stats.containsKey(StormMappedMetric.executeTime.name())) {
        misc.put(StormMappedMetric.executeTime.name(), stats.get(StormMappedMetric.executeTime.name()));
    }
    return new TimeSeriesComponentMetric(name,
            stats.getOrDefault(StormMappedMetric.inputRecords.name(), Collections.emptyMap()),
            stats.getOrDefault(StormMappedMetric.outputRecords.name(), Collections.emptyMap()),
            stats.getOrDefault(StormMappedMetric.failedRecords.name(), Collections.emptyMap()),
            stats.getOrDefault(StormMappedMetric.processedTime.name(), Collections.emptyMap()),
            stats.getOrDefault(StormMappedMetric.recordsInWaitQueue.name(), Collections.emptyMap()),
            misc);
}
/**
 * Returns the complete-latency time series for one component of the topology.
 */
@Override
public Map<Long, Double> getCompleteLatency(TopologyLayout topology, Component component, long from, long to, String asUser) {
    String stormTopologyName = StormTopologyUtil.findOrGenerateTopologyName(client, topology.getId(), topology.getName(), asUser);
    return queryComponentMetrics(stormTopologyName, getComponentName(component),
            StormMappedMetric.completeLatency, from, to);
}
/**
 * {@inheritDoc}
 */
@Override
@SuppressWarnings("unchecked")
public Map<String, ComponentMetric> getMetricsForTopology(TopologyLayout topology, String asUser) {
    String topologyId = StormTopologyUtil.findStormTopologyId(client, topology.getId(), asUser);
    if (StringUtils.isEmpty(topologyId)) {
        throw new TopologyNotAliveException("Topology not found in Storm Cluster - topology id: " + topology.getId());
    }
    // One REST call covers both spouts and bolts; merge both into a single map.
    Map<String, ?> topologyInfo = getTopologyInfo(topologyId, asUser);
    Map<String, ComponentMetric> metricMap = new HashMap<>();
    extractMetrics(metricMap, (List<Map<String, ?>>) topologyInfo.get(TOPOLOGY_JSON_SPOUTS), TOPOLOGY_JSON_SPOUT_ID);
    extractMetrics(metricMap, (List<Map<String, ?>>) topologyInfo.get(TOPOLOGY_JSON_BOLTS), TOPOLOGY_JSON_BOLT_ID);
    return metricMap;
}
// NOTE(review): fragment of a constructor/initializer — the builder chain continues
// past this view.
timeSeriesMetrics = new StormTopologyTimeSeriesMetricsImpl(client);
// Cache for topology-info REST responses, bounded by entry count.
topologyRetrieveCache = CacheBuilder.newBuilder()
        .maximumSize(MAX_SIZE_TOPOLOGY_CACHE)
// NOTE(review): fragment — the 'if' body continues past this view.
// Raw Map as returned by the component-info REST call; raw type kept as-is.
Map componentStats = getComponentInfo(topologyId, componentId, asUser);
List<?> componentErrors = (List<?>) componentStats.get(TOPOLOGY_JSON_COMPONENT_ERRORS);
if (componentErrors != null && !componentErrors.isEmpty()) {
private Map<Long, Double> queryComponentMetrics(String stormTopologyName, String sourceId, StormMappedMetric mappedMetric, long from, long to) { Map<Long, Double> metrics = Collections.emptyMap(); // empty if time-series querier is not set if (timeSeriesQuerier != null) { metrics = timeSeriesQuerier.getMetrics(stormTopologyName, sourceId, mappedMetric.getStormMetricName(), mappedMetric.getAggregateFunction(), from, to); } return new TreeMap<>(metrics); }
/**
 * Aggregates a complete-latency time series into a single value, weighting
 * each point by the matching ack count from the misc metrics.
 */
@VisibleForTesting
static double aggregateCompleteLatency(TopologyTimeSeriesMetrics.TimeSeriesComponentMetric metrics) {
    Map<String, Map<Long, Double>> misc = metrics.getMisc();
    Map<Long, Double> latencySeries = misc.getOrDefault(StormMappedMetric.completeLatency.name(), Collections.emptyMap());
    Map<Long, Double> ackSeries = misc.getOrDefault(METRIC_NAME_ACK_COUNT, Collections.emptyMap());
    return calculateWeightedAverage(latencySeries, ackSeries);
}
private Map<Long, Double> queryKafkaMetrics(String stormTopologyName, String sourceId, StormMappedMetric mappedMetric, String kafkaTopic, long from, long to) { Map<Long, Double> metrics = Collections.emptyMap(); // empty if time-series querier is not set if (timeSeriesQuerier != null) { metrics = timeSeriesQuerier.getMetrics(stormTopologyName, sourceId, String.format(mappedMetric.getStormMetricName(), kafkaTopic), mappedMetric.getAggregateFunction(), from, to); } return new TreeMap<>(metrics); }
/**
 * Aggregates an execute-latency time series into a single value, weighting
 * each point by the matching input-record count.
 */
@VisibleForTesting
static double aggregateExecuteLatency(TopologyTimeSeriesMetrics.TimeSeriesComponentMetric metrics) {
    Map<Long, Double> executeSeries =
            metrics.getMisc().getOrDefault(StormMappedMetric.executeTime.name(), Collections.emptyMap());
    return calculateWeightedAverage(executeSeries, metrics.getInputRecords());
}