/**
 * Builds the validation manager for the source Kafka cluster, lazily creating
 * and caching the source-cluster topic observer so other managers can share it.
 *
 * @return a new SourceKafkaClusterValidationManager, or null when source Kafka
 *     validation is disabled in the config
 */
private SourceKafkaClusterValidationManager getSourceKafkaClusterValidationManager() {
  // Feature-gated: skip construction entirely when validation is disabled.
  if (!_config.getEnableSrcKafkaValidation()) {
    LOGGER.info("Not init SourceKafkaClusterValidationManager!");
    return null;
  }
  if (!_kafkaBrokerTopicObserverMap.containsKey(SRC_KAFKA_CLUSTER)) {
    _kafkaBrokerTopicObserverMap.put(SRC_KAFKA_CLUSTER,
        new KafkaBrokerTopicObserver(SRC_KAFKA_CLUSTER, _config.getSrcKafkaZkPath()));
  }
  KafkaBrokerTopicObserver srcObserver = _kafkaBrokerTopicObserverMap.get(SRC_KAFKA_CLUSTER);
  return new SourceKafkaClusterValidationManager(srcObserver, _helixMirrorMakerManager,
      _config.getEnableAutoTopicExpansion());
}
// Verifies that KafkaBrokerTopicObserver picks up newly created topics via its
// ZK child-change watch and reports correct partition counts.
// NOTE(review): the empty catch swallows InterruptedException without
// re-interrupting — acceptable in a test, but consider restoring the flag.
@Test public void testKafkaBrokerTopicObserver() {
  // One topic ("testTopic0") is expected to exist before this test runs.
  Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1);
  Assert.assertEquals(kafkaBrokerTopicObserver.getTopicPartition("testTopic0").getPartition(), 1);
  for (int i = 1; i < 10; ++i) {
    String topicName = "testTopic" + i;
    // Create Kafka topic
    KafkaStarterUtils.createTopic(topicName, KafkaStarterUtils.DEFAULT_ZK_STR);
    try {
      // Give the observer's ZK listener time to see the new topic.
      Thread.sleep(5000);
    } catch (Exception e) {
    }
    // Observer should now track the pre-existing topic plus the i created ones.
    Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1 + i);
    for (int j = 0; j <= i; ++j) {
      Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j));
      Assert.assertEquals(
          kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1);
    }
  }
}
// Closes the enclosing test class (this chunk ends the file/class).
}
/**
 * Stops all components in dependency order: per-cluster topic observers first,
 * then the REST API component, then the Helix resource manager.
 * Observer failures are logged and do not abort the rest of the shutdown.
 */
public void stop() {
  try {
    LOGGER.info("stopping broker topic observers");
    for (String key : _kafkaBrokerTopicObserverMap.keySet()) {
      try {
        KafkaBrokerTopicObserver observer = _kafkaBrokerTopicObserverMap.get(key);
        observer.stop();
      } catch (Exception e) {
        // Bug fix: previously the exception was dropped, hiding why the
        // observer failed to stop. SLF4J treats the trailing throwable as the
        // stack trace to log.
        LOGGER.error("Failed to stop KafkaBrokerTopicObserver: {}!", key, e);
      }
    }
    LOGGER.info("stopping api component");
    _component.stop();
    LOGGER.info("stopping resource manager");
    _helixMirrorMakerManager.stop();
  } catch (final Exception e) {
    LOGGER.error("Caught exception", e);
  }
}
@Override
public void handleChildChange(String parentPath, List<String> currentChilds) throws Exception {
  // ZK child-change callback for the topics path. If a full cache refresh just
  // ran (tryToRefreshCache() returned true), the incremental update is skipped.
  if (!tryToRefreshCache()) {
    synchronized (_lock) {
      // Topics present in ZK but not yet in our cache.
      Set<String> newAddedTopics = new HashSet<String>(currentChilds);
      Set<String> currentServingTopics = getAllTopics();
      newAddedTopics.removeAll(currentServingTopics);
      // Evict cached topics that no longer exist in ZK.
      for (String existedTopic : currentServingTopics) {
        if (!currentChilds.contains(existedTopic)) {
          _topicPartitionInfoMap.remove(existedTopic);
        }
      }
      // Batch-fetch partition assignments for all newly added topics in one ZK call.
      scala.collection.mutable.Map<String, scala.collection.Map<Object, Seq<Object>>> partitionAssignmentForTopics =
          _zkUtils.getPartitionAssignmentForTopics(
              JavaConversions.asScalaBuffer(ImmutableList.copyOf(newAddedTopics)));
      for (String topic : newAddedTopics) {
        try {
          scala.collection.Map<Object, Seq<Object>> partitionsMap =
              partitionAssignmentForTopics.get(topic).get();
          // The assignment map has one entry per partition, so its size is the
          // topic's partition count.
          TopicPartition tp = new TopicPartition(topic, partitionsMap.size());
          _topicPartitionInfoMap.put(topic, tp);
        } catch (Exception e) {
          // Scala Option.get() throws if the topic vanished between the ZK
          // notification and the assignment fetch; skip it and continue.
          LOGGER.warn("Failed to get topicPartition info for {} from kafka zk: {}", topic, e);
        }
      }
      // Gauge-style update: adjust the counter delta so it equals the cache size.
      _kafkaTopicsCounter.inc(_topicPartitionInfoMap.size() - _kafkaTopicsCounter.getCount());
    }
  }
}
/**
 * Returns topics whose partition count in the Helix ideal state differs from
 * the count currently observed in the source Kafka cluster. Topics unknown to
 * the source observer are skipped.
 */
private Set<String> getPartitionMismatchedTopics() {
  Set<String> partitionsMismatchedTopics = new HashSet<String>();
  for (String topicName : _helixMirrorMakerManager.getTopicLists()) {
    int numPartitionsInHelix =
        _helixMirrorMakerManager.getIdealStateForTopic(topicName).getNumPartitions();
    // Fetch once: the original looked the topic up twice, which both wasted a
    // lookup and raced (the topic could disappear between null check and use).
    TopicPartition tp = _srcKafkaTopicObserver.getTopicPartition(topicName);
    if (tp != null) {
      int numPartitionsInSrcBroker = tp.getPartition();
      if (numPartitionsInHelix != numPartitionsInSrcBroker) {
        partitionsMismatchedTopics.add(topicName);
      }
    }
  }
  return partitionsMismatchedTopics;
}
/**
 * Computes the set of topics eligible for auto-whitelisting: topics present in
 * both source and destination clusters that are not yet mirrored (plus
 * already-mirrored topics whose partition counts diverged), minus anything
 * blacklisted or matching the exclusion regex.
 */
private Set<String> getCandidateTopicsToWhitelist() {
  // Intersect source and destination topics, drop the already-mirrored ones,
  // then re-add mirrored topics whose partition counts changed upstream.
  Set<String> candidates = new HashSet<String>(_srcKafkaTopicObserver.getAllTopics());
  candidates.retainAll(_destKafkaTopicObserver.getAllTopics());
  candidates.removeAll(_helixMirrorMakerManager.getTopicLists());
  candidates.addAll(getPartitionMismatchedTopics());
  loadBlacklistedTopics();
  LOGGER.info("BlacklistedTopics={} and ExcludingPattern={}", _blacklistedTopics,
      _patternToExcludeTopics);
  // Filter out blacklisted topics and topics matching the exclusion pattern.
  for (Iterator<String> it = candidates.iterator(); it.hasNext(); ) {
    String candidate = it.next();
    if (_blacklistedTopics.contains(candidate)) {
      LOGGER.info("Exclude topic={} by blacklist", candidate);
      it.remove();
    } else if (candidate.matches(_patternToExcludeTopics)) {
      LOGGER.info("Exclude topic={} by pattern", candidate);
      it.remove();
    }
  }
  return candidates;
}
/**
 * Looks up partition info for {@code topic}; when unknown, attempts to add the
 * topic to the local cache via {@code tryAddTopic} and retries the lookup once.
 *
 * @param topic topic name to look up
 * @return the cached TopicPartition, or null if still unknown after the retry
 */
public TopicPartition getTopicPartitionWithRefresh(String topic) {
  TopicPartition topicPartition = getTopicPartition(topic);
  if (topicPartition == null) {
    LOGGER.info("couldn't find topic {}, going to add topic and retry", topic);
    tryAddTopic(topic);
    // Bug fix: the original message had no {} placeholder, so the topic
    // argument was silently dropped from the log line.
    LOGGER.info("refreshed and tried to fetch topic info again for topic {}", topic);
    topicPartition = getTopicPartition(topic);
  }
  return topicPartition;
}
/**
 * Runs a full cache refresh when the refresh interval has elapsed since the
 * last refresh; otherwise does nothing.
 *
 * @return true if a refresh was performed, false if one is not yet due
 */
private synchronized boolean tryToRefreshCache() {
  long nextRefreshDueAt = _lastRefreshTime.get() + _refreshTimeIntervalInMillis;
  if (System.currentTimeMillis() <= nextRefreshDueAt) {
    LOGGER.debug("Not hitting next refresh interval, wait for the next run!");
    return false;
  }
  refreshCache();
  return true;
}
/**
 * Creates an observer that tracks the set of Kafka topics (and partition
 * counts) of one broker cluster by watching the cluster's ZK topics path.
 *
 * NOTE(review): "this" is registered as a ZK listener and the periodic refresh
 * is scheduled from inside the constructor, so callbacks can fire before
 * construction completes — confirm they only touch fields initialized above.
 *
 * @param brokerClusterName logical name of the observed cluster
 * @param zkString ZK connect string of the cluster
 */
public KafkaBrokerTopicObserver(String brokerClusterName, String zkString) {
  LOGGER.info("Trying to init KafkaBrokerTopicObserver {} with ZK: {}", brokerClusterName,
      zkString);
  _kakfaClusterName = brokerClusterName;
  // 30s session and connection timeouts; not a secure (ACL'd) connection.
  _zkUtils = ZkUtils.apply(zkString, 30000, 30000, false);
  _zkClient = ZkUtils.createZkClient(zkString, 30000, 30000);
  _zkClient.subscribeChildChanges(KAFKA_TOPICS_PATH, this);
  registerMetric();
  // Periodic full refresh every 600s as a safety net for missed ZK
  // notifications; initial delay 0 triggers an immediate first refresh.
  executorService.scheduleAtFixedRate(new Runnable() {
    @Override
    public void run() {
      tryToRefreshCache();
    }
  }, 0, 600, TimeUnit.SECONDS);
}
/**
 * Whitelists each candidate topic in the mirror maker, or expands its
 * partition count if it is already whitelisted. (The "Candiate" typo in the
 * method name is preserved for compatibility with existing callers.)
 *
 * @param candidateTopicsToWhitelist topics selected for whitelisting/expansion
 */
private void whitelistCandiateTopics(Set<String> candidateTopicsToWhitelist) {
  for (String topic : candidateTopicsToWhitelist) {
    TopicPartition tp = _srcKafkaTopicObserver.getTopicPartition(topic);
    // Candidates were derived from the source observer, so a miss is unexpected.
    if (tp == null) {
      LOGGER.error("Shouldn't hit here, don't know why topic {} is not in src Kafka cluster",
          topic);
      _numErrorTopics.inc();
      continue;
    }
    if (_helixMirrorMakerManager.isTopicExisted(topic)) {
      LOGGER.info("Trying to expand topic: {} with {} partitions", tp.getTopic(),
          tp.getPartition());
      _helixMirrorMakerManager.expandTopicInMirrorMaker(tp);
      _numAutoExpandedTopics.inc();
    } else {
      LOGGER.info("Trying to whitelist topic: {} with {} partitions", tp.getTopic(),
          tp.getPartition());
      _helixMirrorMakerManager.addTopicToMirrorMaker(tp);
      _numWhitelistedTopics.inc();
    }
  }
}
return; for (String existedTopic : getAllTopics()) { if (!servingTopics.contains(existedTopic)) { _topicPartitionInfoMap.remove(existedTopic);
@Override public void handleChildChange(String parentPath, List<String> currentChilds) throws Exception { if (!tryToRefreshCache()) { synchronized (_lock) { LOGGER.info("starting to refresh topic list due to zk child change"); Set<String> newAddedTopics = new HashSet<>(currentChilds); Set<String> currentServingTopics = getAllTopics(); newAddedTopics.removeAll(currentServingTopics); for (String existedTopic : currentServingTopics) {
/**
 * Runs a full cache refresh when the refresh interval has elapsed since the
 * last refresh.
 *
 * Made synchronized: this method is invoked both from ZK child-change
 * callbacks and from the scheduled refresher, and the unsynchronized
 * check-then-act allowed two threads to pass the interval check together and
 * run overlapping refreshes. This matches the synchronized variant of this
 * method used elsewhere in the codebase.
 *
 * @return true if a refresh was performed, false if one is not yet due
 */
private synchronized boolean tryToRefreshCache() {
  if (_refreshTimeIntervalInMillis + _lastRefreshTime.get() < System.currentTimeMillis()) {
    refreshCache();
    return true;
  } else {
    LOGGER.debug("Not hitting next refresh interval, wait for the next run!");
    return false;
  }
}
public KafkaBrokerTopicObserver(String brokerClusterName, String zkString, long refreshTimeIntervalInMillis) { LOGGER.info("Trying to init KafkaBrokerTopicObserver {} with ZK: {}", brokerClusterName, zkString); _kakfaClusterName = brokerClusterName; _refreshTimeIntervalInMillis = refreshTimeIntervalInMillis; _zkClient = new ZkClient(zkString, 30000, 30000, ZKStringSerializer$.MODULE$); _zkClient.subscribeChildChanges(KAFKA_TOPICS_PATH, this); _zkUtils = ZkUtils.apply(_zkClient, false); registerMetric(); executorService.scheduleAtFixedRate(new Runnable() { @Override public void run() { tryToRefreshCache(); } }, 0, _refreshTimeIntervalInMillis, TimeUnit.SECONDS); }
// Verifies that newly created Kafka topics are auto-whitelisted: after each
// topic creation the Helix topic list grows and the observer reports the new
// topic with 1 partition.
// NOTE(review): the empty catch swallows InterruptedException without
// re-interrupting — acceptable in a test, but consider restoring the flag.
@Test public void testAutoTopic() {
  for (int i = 0; i < 10; ++i) {
    // Before creating topic i, exactly i topics should already be whitelisted.
    Assert.assertEquals(helixMirrorMakerManager.getTopicLists().size(), i);
    String topicName = "testTopic" + i;
    // Create Kafka topic
    KafkaStarterUtils.createTopic(topicName, KafkaStarterUtils.DEFAULT_ZK_STR);
    try {
      // Give the observer and whitelisting manager time to react.
      Thread.sleep(5000);
    } catch (Exception e) {
    }
    Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1 + i);
    for (int j = 0; j <= i; ++j) {
      Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j));
      Assert.assertEquals(
          kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1);
    }
  }
}
// Closes the enclosing test class (this chunk ends the file/class).
}
/**
 * Builds the AutoTopicWhitelistingManager when auto-whitelisting is enabled,
 * lazily creating and caching topic observers for both the source and
 * destination clusters so other managers can share them.
 *
 * @return a new AutoTopicWhitelistingManager, or null when the feature is off
 */
private AutoTopicWhitelistingManager getAutoTopicWhitelistingManager() {
  if (_config.getEnableAutoWhitelist()) {
    if (!_kafkaBrokerTopicObserverMap.containsKey(SRC_KAFKA_CLUSTER)) {
      _kafkaBrokerTopicObserverMap.put(SRC_KAFKA_CLUSTER,
          new KafkaBrokerTopicObserver(SRC_KAFKA_CLUSTER, _config.getSrcKafkaZkPath()));
    }
    if (!_kafkaBrokerTopicObserverMap.containsKey(DEST_KAFKA_CLUSTER)) {
      _kafkaBrokerTopicObserverMap.put(DEST_KAFKA_CLUSTER,
          new KafkaBrokerTopicObserver(DEST_KAFKA_CLUSTER, _config.getDestKafkaZkPath()));
    }
    // Normalize the exclusion pattern: trimmed, empty string when unset
    // (trim() of a blank string is "", so this matches the original logic).
    String patternToExcludeTopics = _config.getPatternToExcludeTopics();
    patternToExcludeTopics =
        (patternToExcludeTopics == null) ? "" : patternToExcludeTopics.trim();
    // Parameterized logging instead of string concatenation.
    LOGGER.info("Pattern to exclude topics is {}", patternToExcludeTopics);
    return new AutoTopicWhitelistingManager(_kafkaBrokerTopicObserverMap.get(SRC_KAFKA_CLUSTER),
        _kafkaBrokerTopicObserverMap.get(DEST_KAFKA_CLUSTER), _helixMirrorMakerManager,
        patternToExcludeTopics, _config.getRefreshTimeInSeconds(),
        _config.getInitWaitTimeInSeconds());
  } else {
    LOGGER.info("Not init AutoTopicWhitelistingManager!");
    return null;
  }
}
// Test teardown. Order matters: stop the observer before its backing Kafka
// broker, and the broker before the local ZK server, to avoid connection
// errors from components whose dependencies vanished first.
@AfterTest public void shutdown() {
  LOGGER.info("Trying to shutdown");
  kafkaBrokerTopicObserver.stop();
  KafkaStarterUtils.stopServer(kafkaStarter);
  ZkStarter.stopLocalZkServer();
}
int numMismatchedTopicPartitions = 0; for (String topic : _helixMirrorMakerManager.getTopicLists()) { TopicPartition tp = _sourceKafkaTopicObserver.getTopicPartition(topic); if (tp == null) { LOGGER.warn("Topic {} is not in source kafka broker!", topic);
// Computes the set of topics eligible for auto-whitelisting: present in BOTH
// source and destination clusters, not already mirrored (except mirrored
// topics whose partition counts diverged, which are re-added), minus anything
// blacklisted or matching the exclusion regex.
// NOTE(review): this duplicates an identical getCandidateTopicsToWhitelist
// elsewhere in this source — consider consolidating.
private Set<String> getCandidateTopicsToWhitelist() {
  Set<String> candidateTopics = new HashSet<String>(_srcKafkaTopicObserver.getAllTopics());
  candidateTopics.retainAll(_destKafkaTopicObserver.getAllTopics());
  candidateTopics.removeAll(_helixMirrorMakerManager.getTopicLists());
  // Re-add already-mirrored topics whose partition counts changed upstream.
  candidateTopics.addAll(getPartitionMismatchedTopics());
  loadBlacklistedTopics();
  LOGGER.info("BlacklistedTopics={} and ExcludingPattern={}", _blacklistedTopics,
      _patternToExcludeTopics);
  Iterator<String> itr = candidateTopics.iterator();
  while (itr.hasNext()) {
    String topic = itr.next();
    if (_blacklistedTopics.contains(topic)) {
      LOGGER.info("Exclude topic={} by blacklist", topic);
      itr.remove();
    } else if (topic.matches(_patternToExcludeTopics)) {
      // String.matches requires a full-string regex match; an empty pattern
      // matches no non-empty topic name, so it excludes nothing.
      LOGGER.info("Exclude topic={} by pattern", topic);
      itr.remove();
    }
  }
  return candidateTopics;
}
} catch (Exception e) { Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1 + i); for (int j = 0; j <= i; ++j) { Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); } catch (Exception e) { Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1 + i); for (int j = 0; j <= i; ++j) { Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1); } catch (Exception e) { Assert.assertEquals(kafkaBrokerTopicObserver.getNumTopics(), 1 + i); for (int j = 0; j <= i; ++j) { Assert.assertTrue(kafkaBrokerTopicObserver.getAllTopics().contains("testTopic" + j)); Assert.assertEquals( kafkaBrokerTopicObserver.getTopicPartition("testTopic" + j).getPartition(), 1);