/**
 * Queries the C3 service for per-topic inbound rate data over the window ending near
 * {@code timeInMs} and returns the parsed workloads.
 *
 * @param timeInMs     reference end time of the query window, epoch milliseconds
 * @param windowInMs   length of the query window in milliseconds
 * @param c3Host       C3 service hostname
 * @param c3Port       C3 service port; {@code 0} disables querying and yields an empty map
 * @param kafkaCluster Kafka cluster ("tier") name passed to C3
 * @param topics       topics to retrieve workloads for; queried in batches of
 *                     {@code DEFAULT_BATCH_TOPICS}
 * @return map from topic name to its retrieved {@link TopicWorkload}; topics with no data
 *         are simply absent (never {@code null})
 * @throws IOException if the HTTP query to C3 fails
 */
public static Map<String, TopicWorkload> retrieveTopicInRate(long timeInMs, long windowInMs,
    String c3Host, int c3Port, String kafkaCluster, List<String> topics) throws IOException {
  Map<String, TopicWorkload> workloads = new HashMap<>();
  if (c3Port == 0) {
    // Port 0 is the "C3 disabled" sentinel; skip querying entirely.
    return workloads;
  }
  // Stay DEFAULT_QUERY_MINIMUM_END_TO_CURRENT_SEC behind "now" and align the end to a
  // 600-second (10-minute) boundary so repeated queries hit the same aggregation buckets.
  long endSec = (timeInMs / 1000 - DEFAULT_QUERY_MINIMUM_END_TO_CURRENT_SEC) / 600 * 600;
  long startSec = endSec - windowInMs / 1000L;
  LOGGER.info("Retrieve workload for [{}, {}]", startSec, endSec);
  // Query in batches so each request URL stays within a manageable length.
  for (int i = 0; i < topics.size(); i += DEFAULT_BATCH_TOPICS) {
    List<String> batch = topics.subList(i, Math.min(i + DEFAULT_BATCH_TOPICS, topics.size()));
    StringBuilder query = new StringBuilder();
    query.append("startSec=").append(startSec)
        .append("&endSec=").append(endSec)
        .append("&tier=").append(kafkaCluster)
        .append("&topicList=")
        // java.lang String.join replaces the commons-lang StringUtils.join equivalent.
        .append(String.join(",", batch));
    String jsonStr = makeQuery(c3Host, c3Port, query.toString());
    extractJsonResults(jsonStr, batch, workloads);
  }
  return workloads;
}
// NOTE(review): this appears to be a truncated fragment of a unit-test method body (likely a
// C3QueryUtils test): the enclosing method signature is not visible, and the first and last
// C3QueryUtils.extractJsonResults(...) calls are syntactically cut off — their trailing
// topic-list/workloads arguments and closing parentheses are missing. Restore the full test
// method from source control before editing; left byte-identical here.
C3QueryUtils.extractJsonResults( "{\"topic1\":[{\"endTimeSec\":1485991200,\"startTimeSec\":1485988200,\"totalBytes\":30000,\"totalCount\":3000}]," + "\"topic2\":[{\"endTimeSec\":1485991200,\"invalidCount\":0,\"maxLatencyFromCreation\":3750,\"meanLatencyFromCreation\":1576.2152157017917,\"p99LatencyFromCreation\":3529,\"startTimeSec\":1485988200,\"totalBytes\":3000000,\"totalCount\":1500}]}", C3QueryUtils.extractJsonResults("", Arrays.asList("topic1", "topic2"), workloads); Assert.assertTrue(workloads.isEmpty()); C3QueryUtils.extractJsonResults("{}", Arrays.asList("topic1", "topic2"), workloads); Assert.assertTrue(workloads.isEmpty()); C3QueryUtils.extractJsonResults("{\"topic1\":[]}", Arrays.asList("topic1", "topic2"), workloads); Assert.assertTrue(workloads.isEmpty()); C3QueryUtils.extractJsonResults( "{\"topic1\":[{,\"startTimeSec\":1485988200,\"totalBytes\":30000,\"totalCount\":3000}]," + "\"topic2\":[{\"endTimeSec\":1485991200,\"invalidCount\":0,\"maxLatencyFromCreation\":3750,\"meanLatencyFromCreation\":1576.2152157017917,\"p99LatencyFromCreation\":3529,\"startTimeSec\":1485988200,\"totalBytes\":3000000,\"totalCount\":1500}]}", C3QueryUtils.extractJsonResults( "{\"topic1\":[{\"endTimeSec\":1485988200,\"startTimeSec\":1485988200,\"totalBytes\":30000,\"totalCount\":3000}]," + "\"topic2\":[{\"endTimeSec\":1485991200,\"invalidCount\":0,\"maxLatencyFromCreation\":3750,\"meanLatencyFromCreation\":1576.2152157017917,\"p99LatencyFromCreation\":3529,\"startTimeSec\":1485988200,\"totalBytes\":3000000,\"totalCount\":1500}]}",
private void retrieveWorkload(long timeInMs, long windowInMs, Map<String, Integer> topicsPartitions) throws IOException { long current = System.currentTimeMillis(); Map<String, TopicWorkload> topicWorkloads = C3QueryUtils.retrieveTopicInRate(timeInMs, windowInMs, _helixMirrorMakerManager.getControllerConf().getC3Host(), _helixMirrorMakerManager.getControllerConf().getC3Port(), _srcKafkaCluster, new ArrayList<>(topicsPartitions.keySet())); synchronized (_topicWorkloadMap) { for (Map.Entry<String, TopicWorkload> entry : topicWorkloads.entrySet()) { String topic = entry.getKey(); TopicWorkload workload = entry.getValue(); Integer partitions = topicsPartitions.get(topic); if (partitions != null) { workload.setParitions(partitions); LinkedList<TopicWorkload> tws = _topicWorkloadMap.get(topic); if (tws == null) { tws = new LinkedList<>(); _topicWorkloadMap.put(topic, tws); } if (tws.isEmpty() || tws.getLast().getLastUpdate() < workload.getLastUpdate()) { tws.add(workload); } // purge the data points out of the valid window while (!tws.isEmpty() && (current - tws.getFirst().getLastUpdate() > _maxValidTimeMillis)) { tws.removeFirst(); } } } } }