/**
 * Builds the set of topics eligible for whitelisting: topics present on both
 * the source and destination clusters that are not yet managed by the
 * mirror-maker, plus topics whose partition counts mismatch, minus anything
 * blacklisted or matching the exclusion pattern.
 *
 * @return the filtered set of candidate topic names
 */
private Set<String> getCandidateTopicsToWhitelist() {
  Set<String> candidateTopics = new HashSet<String>(_srcKafkaTopicObserver.getAllTopics());
  // Keep only topics that also exist on the destination cluster.
  candidateTopics.retainAll(_destKafkaTopicObserver.getAllTopics());
  // Drop topics that are already whitelisted in Helix.
  candidateTopics.removeAll(_helixMirrorMakerManager.getTopicLists());
  // Re-add topics whose partition counts diverged so they get reconsidered.
  candidateTopics.addAll(getPartitionMismatchedTopics());
  loadBlacklistedTopics();
  LOGGER.info("BlacklistedTopics={} and ExcludingPattern={}", _blacklistedTopics,
      _patternToExcludeTopics);
  // Iterate over a snapshot so we can remove from the live set directly.
  for (String topic : new HashSet<String>(candidateTopics)) {
    if (_blacklistedTopics.contains(topic)) {
      LOGGER.info("Exclude topic={} by blacklist", topic);
      candidateTopics.remove(topic);
    } else if (topic.matches(_patternToExcludeTopics)) {
      LOGGER.info("Exclude topic={} by pattern", topic);
      candidateTopics.remove(topic);
    }
  }
  return candidateTopics;
}
/**
 * ZK child-change callback: reconciles the local topic/partition cache with
 * the current list of topic znodes. If a full cache refresh was not already
 * triggered by {@code tryToRefreshCache()}, incrementally removes topics that
 * disappeared and adds newly observed topics with their partition counts.
 */
@Override public void handleChildChange(String parentPath, List<String> currentChilds) throws Exception {
  // Only do the incremental update when the periodic full refresh didn't run.
  if (!tryToRefreshCache()) {
    synchronized (_lock) {
      // Topics present in ZK but not yet in the local cache.
      Set<String> newAddedTopics = new HashSet<String>(currentChilds);
      Set<String> currentServingTopics = getAllTopics();
      newAddedTopics.removeAll(currentServingTopics);
      // Evict topics that no longer exist in ZK.
      for (String existedTopic : currentServingTopics) {
        if (!currentChilds.contains(existedTopic)) {
          _topicPartitionInfoMap.remove(existedTopic);
        }
      }
      // Batch-fetch partition assignments for the new topics from Kafka's ZK.
      scala.collection.mutable.Map<String, scala.collection.Map<Object, Seq<Object>>> partitionAssignmentForTopics =
          _zkUtils.getPartitionAssignmentForTopics(
              JavaConversions.asScalaBuffer(ImmutableList.copyOf(newAddedTopics)));
      for (String topic : newAddedTopics) {
        try {
          // scala Option.get() throws if the topic has no assignment yet;
          // handled by the catch below so one bad topic doesn't abort the rest.
          scala.collection.Map<Object, Seq<Object>> partitionsMap =
              partitionAssignmentForTopics.get(topic).get();
          TopicPartition tp = new TopicPartition(topic, partitionsMap.size());
          _topicPartitionInfoMap.put(topic, tp);
        } catch (Exception e) {
          LOGGER.warn("Failed to get topicPartition info for {} from kafka zk: {}", topic, e);
        }
      }
      // Adjust the counter by the delta so it tracks the current cache size.
      _kafkaTopicsCounter.inc(_topicPartitionInfoMap.size() - _kafkaTopicsCounter.getCount());
    }
  }
}
return; for (String existedTopic : getAllTopics()) { if (!servingTopics.contains(existedTopic)) { _topicPartitionInfoMap.remove(existedTopic);
@Test public void testAutoTopic() { for (int i = 0; i < 10; ++i) { Assert.assertEquals(helixMirrorMakerManager.getTopicLists().size(), i); String topicName = "testTopic" + i; // Create Kafka topic KafkaStarterUtils.createTopic(topicName, KafkaStarterUtils.DEFAULT_ZK_STR); try { Thread.sleep(5000); } catch (Exception e) { } Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1 + i); for (int j = 0; j <= i; ++j) { Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); } } } }
@Test public void testKafkaBrokerTopicObserver() { Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1); Assert.assertEquals(kafkaBrokerTopicObserver.getTopicPartition("testTopic0").getPartition(), 1); for (int i = 1; i < 10; ++i) { String topicName = "testTopic" + i; // Create Kafka topic KafkaStarterUtils.createTopic(topicName, KafkaStarterUtils.DEFAULT_ZK_STR); try { Thread.sleep(5000); } catch (Exception e) { } Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1 + i); for (int j = 0; j <= i; ++j) { Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); } } } }
Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1);
/**
 * Builds the set of topics eligible for whitelisting: topics present on both
 * the source and destination clusters that are not yet managed by the
 * mirror-maker, plus topics whose partition counts mismatch, minus anything
 * blacklisted or matching the exclusion pattern.
 *
 * @return the filtered set of candidate topic names
 */
private Set<String> getCandidateTopicsToWhitelist() {
  Set<String> candidateTopics = new HashSet<String>(_srcKafkaTopicObserver.getAllTopics());
  // Keep only topics that also exist on the destination cluster.
  candidateTopics.retainAll(_destKafkaTopicObserver.getAllTopics());
  // Drop topics that are already whitelisted in Helix.
  candidateTopics.removeAll(_helixMirrorMakerManager.getTopicLists());
  // Re-add topics whose partition counts diverged so they get reconsidered.
  candidateTopics.addAll(getPartitionMismatchedTopics());
  loadBlacklistedTopics();
  LOGGER.info("BlacklistedTopics={} and ExcludingPattern={}", _blacklistedTopics,
      _patternToExcludeTopics);
  // Use an explicit Iterator so exclusions can be removed while iterating.
  Iterator<String> itr = candidateTopics.iterator();
  while (itr.hasNext()) {
    String topic = itr.next();
    if (_blacklistedTopics.contains(topic)) {
      LOGGER.info("Exclude topic={} by blacklist", topic);
      itr.remove();
    } else if (topic.matches(_patternToExcludeTopics)) {
      LOGGER.info("Exclude topic={} by pattern", topic);
      itr.remove();
    }
  }
  return candidateTopics;
}
for (String existedTopic : getAllTopics()) { if (!servingTopics.contains(existedTopic)) { _topicPartitionInfoMap.remove(existedTopic);
LOGGER.info("starting to refresh topic list due to zk child change"); Set<String> newAddedTopics = new HashSet<>(currentChilds); Set<String> currentServingTopics = getAllTopics(); newAddedTopics.removeAll(currentServingTopics); for (String existedTopic : currentServingTopics) {
/**
 * Attempts to add a single topic to the local topic/partition cache, looking
 * up its partition assignment from Kafka's ZK. No-ops when the topic has no
 * partition assignment yet, or is already cached.
 *
 * @param topic the Kafka topic name to add
 */
private void tryAddTopic(String topic) {
  scala.collection.mutable.Map<String, scala.collection.Map<Object, Seq<Object>>> partitionAssignmentForTopics =
      _zkUtils.getPartitionAssignmentForTopics(JavaConversions.asScalaBuffer(ImmutableList.of(topic)));
  // Look the assignment up once instead of re-querying the map three times.
  scala.Option<scala.collection.Map<Object, Seq<Object>>> assignmentOpt =
      partitionAssignmentForTopics.get(topic);
  if (assignmentOpt.isEmpty() || assignmentOpt.get().size() == 0) {
    LOGGER.info("try to refresh for topic {} but found no topic partition for it", topic);
    return;
  }
  synchronized (_lock) {
    LOGGER.info("starting to refresh for adding topic {}", topic);
    // Another thread may have added it while we were fetching from ZK.
    if (!getAllTopics().contains(topic)) {
      try {
        _topicPartitionInfoMap.put(topic,
            new TopicPartition(topic, assignmentOpt.get().size()));
      } catch (Exception e) {
        LOGGER.warn("Failed to get topicPartition info for {} from kafka zk: {}", topic, e);
      }
    }
    LOGGER.info("finished refreshing for adding topic {}", topic);
  }
}
@Test public void testAutoTopic() { for (int i = 0; i < 10; ++i) { Assert.assertEquals(helixMirrorMakerManager.getTopicLists().size(), i); String topicName = "testTopic" + i; // Create Kafka topic KafkaStarterUtils.createTopic(topicName, KafkaStarterUtils.DEFAULT_ZK_STR); try { Thread.sleep(5000); } catch (Exception e) { } Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1 + i); for (int j = 0; j <= i; ++j) { Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); } } } }
@Test public void testKafkaBrokerTopicObserver() { Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1); Assert.assertEquals(kafkaBrokerTopicObserver.getTopicPartition("testTopic0").getPartition(), 1); for (int i = 1; i < 10; ++i) { String topicName = "testTopic" + i; // Create Kafka topic KafkaStarterUtils.createTopic(topicName, KafkaStarterUtils.DEFAULT_ZK_STR); try { Thread.sleep(5000); } catch (Exception e) { } Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1 + i); for (int j = 0; j <= i; ++j) { Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); } } } }
Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1);