@Override
public void run() {
  // Only the current Helix leader performs the source-cluster validation;
  // every other controller instance just clears its (now stale) metrics.
  if (!_helixMirrorMakerManager.isLeader()) {
    cleanupMetrics();
    LOGGER.debug("Not leader, skip validation for source kafka cluster!");
    return;
  }
  LOGGER.info("Trying to run the source kafka cluster info validation job");
  validateSourceKafkaCluster();
}
@Override
public void run() {
  // Non-leaders reset their metrics so they stop reporting stale values.
  if (!_helixMirrorMakerManager.isLeader()) {
    cleanupMetrics();
    LOGGER.debug("Not leader, skip validation!");
    return;
  }
  // The counter only supports inc/dec, so pin it to exactly 1 via a delta.
  _isLeaderCounter.inc(1 - _isLeaderCounter.getCount());
  LOGGER.info("Trying to run the validation job");
  validateExternalView();
}
/**
 * Registers metrics and schedules the recurring source-kafka-cluster
 * validation job on the executor service.
 */
public void start() {
  registerMetrics();
  // Recurrence is configurable via _timeValue/_timeUnit; the initial delay
  // is a hard-coded 120 (interpreted in _timeUnit units).
  LOGGER.info("Trying to schedule a source kafka cluster validation job at rate {} {} !",
      _timeValue, _timeUnit.toString());
  _executorService.scheduleAtFixedRate(new Runnable() {
    @Override
    public void run() {
      // Only the Helix leader validates; followers clear their metrics so
      // they do not keep reporting stale values.
      if (_helixMirrorMakerManager.isLeader()) {
        LOGGER.info("Trying to run the source kafka cluster info validation job");
        validateSourceKafkaCluster();
      } else {
        cleanupMetrics();
        LOGGER.debug("Not leader, skip validation for source kafka cluster!");
      }
    }

    // Zeroes every validation counter (Counter has no reset, so dec by the
    // current count).
    private void cleanupMetrics() {
      _numMissingTopics.dec(_numMissingTopics.getCount());
      _numMismatchedTopics.dec(_numMismatchedTopics.getCount());
      _numMismatchedTopicPartitions.dec(_numMismatchedTopicPartitions.getCount());
      for (String topic : _mismatchedTopicPartitionsCounter.keySet()) {
        Counter counter = _mismatchedTopicPartitionsCounter.get(topic);
        counter.dec(counter.getCount());
      }
    }
  }, 120, _timeValue, _timeUnit);
}
updateIdealstateInfo(topicPartitionMapForIdealState, idealStateForTopic); updateMetrics(numOnlineTopicPartitions, numOfflineTopicPartitions, numErrorTopicPartitions, numTopicPartitions, numServingTopics, numErrorTopics); updatePerWorkerISMetrics(topicPartitionMapForExternalView); updatePerWorkerEVMetrics(topicPartitionMapForExternalView); constructPerWorkerISCounterJson(topicPartitionMapForIdealState); JSONObject perWorkerEVCounterJson = constructPerWorkerEVCounterJson(topicPartitionMapForExternalView); JSONObject validationResultJson = constructValidationResultJson(numOnlineTopicPartitions, numOfflineTopicPartitions, numErrorTopicPartitions, numTopicPartitions, numServingTopics, numErrorTopics, perWorkerISCounterJson, perWorkerEVCounterJson);
/**
 * REST endpoint that triggers validation. The "option" request attribute
 * selects the target: "srcKafka" validates the source Kafka cluster,
 * anything else validates the current (Helix) cluster's external view.
 *
 * @return a string representation of the validation result (JSON text).
 */
@Override
@Get
public Representation get() {
  final String option = (String) getRequest().getAttributes().get("option");
  if ("srcKafka".equals(option)) {
    if (_srcKafkaValidationManager == null) {
      // Source-cluster validation is optional; the manager is null when it
      // was never enabled/initialized.
      LOGGER.warn("SourceKafkaClusterValidationManager is null!");
      // Fixed ungrammatical user-facing message ("is not been initialized").
      return new StringRepresentation("SrcKafkaValidationManager has not been initialized!");
    }
    LOGGER.info("Trying to call validation on source kafka cluster!");
    return new StringRepresentation(_srcKafkaValidationManager.validateSourceKafkaCluster());
  } else {
    LOGGER.info("Trying to call validation on current cluster!");
    return new StringRepresentation(_validationManager.validateExternalView());
  }
}
@BeforeTest public void setup() { LOGGER.info("Trying to setup"); ZkStarter.startLocalZkServer(); kafkaStarter = KafkaStarterUtils.startServer(KafkaStarterUtils.DEFAULT_KAFKA_PORT, KafkaStarterUtils.DEFAULT_BROKER_ID, KafkaStarterUtils.DEFAULT_ZK_STR, KafkaStarterUtils.getDefaultKafkaConfiguration()); // Create Kafka topic KafkaStarterUtils.createTopic("testTopic0", KafkaStarterUtils.DEFAULT_ZK_STR); try { Thread.sleep(2000); } catch (Exception e) { } kafkaBrokerTopicObserver = new KafkaBrokerTopicObserver("broker0", KafkaStarterUtils.DEFAULT_ZK_STR); ControllerConf controllerConf = new ControllerConf(); controllerConf.setControllerPort("9090"); controllerConf.setHelixClusterName("TestAutoTopicWhitelistingManager"); controllerConf.setInstanceId("controller-0"); controllerConf.setControllerMode("customized"); controllerConf.setZkStr(ZkStarter.DEFAULT_ZK_STR); controllerConf.setBackUpToGit("false"); controllerConf.setAutoRebalanceDelayInSeconds("1"); helixMirrorMakerManager = new HelixMirrorMakerManager(controllerConf); helixMirrorMakerManager.start(); sourceKafkaClusterValidationManager = new SourceKafkaClusterValidationManager(kafkaBrokerTopicObserver, helixMirrorMakerManager); sourceKafkaClusterValidationManager.start(); }
validationManager = new ValidationManager(helixMirrorMakerManager); validationManager.start();
constructMismatchedTopicPartitionsJson(misMatchedPartitionNumberTopics); JSONObject validationResultJson = constructValidationResultJson(notExistedTopics.size(), misMatchedPartitionNumberTopics.size(), numMismatchedTopicPartitions, mismatchedTopicPartitionsJson); if (_helixMirrorMakerManager.isLeader()) { updateMetrics(notExistedTopics.size(), misMatchedPartitionNumberTopics.size(), numMismatchedTopicPartitions, misMatchedPartitionNumberTopics);
/**
 * Builds the source-cluster validation manager when enabled in the config,
 * reusing (or lazily creating) the cached topic observer for the source
 * cluster. Returns {@code null} when source kafka validation is disabled.
 */
private SourceKafkaClusterValidationManager getSourceKafkaClusterValidationManager() {
  if (!_config.getEnableSrcKafkaValidation()) {
    LOGGER.info("Not init SourceKafkaClusterValidationManager!");
    return null;
  }
  // Lazily create and cache the observer so repeated calls share one instance.
  KafkaBrokerTopicObserver observer = _kafkaBrokerTopicObserverMap.get(SRC_KAFKA_CLUSTER);
  if (observer == null) {
    observer = new KafkaBrokerTopicObserver(SRC_KAFKA_CLUSTER, _config.getSrcKafkaZkPath());
    _kafkaBrokerTopicObserverMap.put(SRC_KAFKA_CLUSTER, observer);
  }
  return new SourceKafkaClusterValidationManager(observer, _helixMirrorMakerManager,
      _config.getEnableAutoTopicExpansion());
}
JSONObject.parseObject(validationManager.validateExternalView()); Assert.assertEquals(validationResultJson.getIntValue("numErrorTopicPartitions"), 0); Assert.assertEquals(validationResultJson.getIntValue("numErrorTopics"), 0); } catch (Exception e) { validationResultJson = JSONObject.parseObject(validationManager.validateExternalView()); Assert.assertEquals(validationResultJson.getIntValue("numErrorTopicPartitions"), 0); Assert.assertEquals(validationResultJson.getIntValue("numErrorTopics"), 0);
@Test public void testValidation() { String validationResult = sourceKafkaClusterValidationManager.validateSourceKafkaCluster(); System.out.println(validationResult); Assert.assertEquals(validationResult, validationResult = sourceKafkaClusterValidationManager.validateSourceKafkaCluster(); System.out.println(validationResult); Assert.assertEquals(validationResult, JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 10); JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 19 - i); JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 10); JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 29 - i); validationResult = sourceKafkaClusterValidationManager.validateSourceKafkaCluster(); System.out.println(validationResult); Assert.assertEquals(validationResult,
/**
 * Wires up all controller sub-components from the given configuration.
 * The metrics reporter is initialized before any manager is constructed,
 * so managers can register metrics safely.
 */
public ControllerStarter(ControllerConf conf) {
  LOGGER.info("Trying to init ControllerStarter with config: {}", conf);
  _config = conf;
  HelixKafkaMirrorMakerMetricsReporter.init(conf);
  _component = new Component();
  // NOTE(review): parent context is passed as null here — confirm intended.
  _controllerRestApp = new ControllerRestApplication(null);
  _helixMirrorMakerManager = new HelixMirrorMakerManager(_config);
  _validationManager = new ValidationManager(_helixMirrorMakerManager);
  // May be null when source kafka validation is disabled in the config.
  _srcKafkaValidationManager = getSourceKafkaClusterValidationManager();
  _autoTopicWhitelistingManager = getAutoTopicWhitelistingManager();
  if (_config.getBackUpToGit()) {
    // Back up cluster info to a remote git repository...
    _clusterInfoBackupManager = new ClusterInfoBackupManager(_helixMirrorMakerManager,
        new GitBackUpHandler(conf.getRemoteBackupRepo(), conf.getLocalGitRepoPath()), _config);
  } else {
    // ...or to a local file.
    _clusterInfoBackupManager = new ClusterInfoBackupManager(_helixMirrorMakerManager,
        new FileBackUpHandler(conf.getLocalBackupFilePath()), _config);
  }
}
public void start() { registerMetrics();
/**
 * Syncs the per-worker ideal-state topic-partition counters with the given
 * snapshot: registers a counter for each new worker, sets each present
 * worker's counter to its current count, and zeroes counters for workers
 * that disappeared from the snapshot.
 */
private synchronized void updatePerWorkerISMetrics(
    Map<String, Integer> topicPartitionMapForIdealState) {
  for (Map.Entry<String, Integer> entry : topicPartitionMapForIdealState.entrySet()) {
    String worker = entry.getKey();
    Counter workerCounter = _idealStatePerWorkerTopicPartitionCounter.get(worker);
    if (workerCounter == null) {
      workerCounter = new Counter();
      try {
        HelixKafkaMirrorMakerMetricsReporter.get().getRegistry().register(
            getIdealStatePerWorkMetricName(worker), workerCounter);
      } catch (Exception e) {
        LOGGER.error("Error registering metrics!", e);
      }
      _idealStatePerWorkerTopicPartitionCounter.put(worker, workerCounter);
    }
    // Counter only supports inc/dec, so set-to-value is done via a delta.
    workerCounter.inc(entry.getValue() - workerCounter.getCount());
  }
  // Zero the counters of workers no longer present in the snapshot.
  for (Map.Entry<String, Counter> entry
      : _idealStatePerWorkerTopicPartitionCounter.entrySet()) {
    if (!topicPartitionMapForIdealState.containsKey(entry.getKey())) {
      Counter stale = entry.getValue();
      stale.dec(stale.getCount());
    }
  }
}
/**
 * Syncs the validation counters with the latest validation results:
 * absolute values are applied as deltas (Counter has no set), per-topic
 * counters are registered for newly mismatched topics, and counters for
 * recovered topics are zeroed.
 */
private synchronized void updateMetrics(int numMissingTopics, int numMismatchedTopics,
    int numMismatchedTopicPartitions, Map<String, Integer> misMatchedPartitionNumberTopics) {
  _numMissingTopics.inc(numMissingTopics - _numMissingTopics.getCount());
  _numMismatchedTopics.inc(numMismatchedTopics - _numMismatchedTopics.getCount());
  _numMismatchedTopicPartitions
      .inc(numMismatchedTopicPartitions - _numMismatchedTopicPartitions.getCount());
  // Register a per-topic counter for any topic we have not seen mismatch before.
  for (String topic : misMatchedPartitionNumberTopics.keySet()) {
    if (_mismatchedTopicPartitionsCounter.containsKey(topic)) {
      continue;
    }
    Counter topicPartitionCounter = new Counter();
    try {
      HelixKafkaMirrorMakerMetricsReporter.get().getRegistry().register(
          getMismatchedTopicMetricName(topic), topicPartitionCounter);
    } catch (Exception e) {
      LOGGER.error("Error registering metrics!", e);
    }
    _mismatchedTopicPartitionsCounter.put(topic, topicPartitionCounter);
  }
  // Sync every known per-topic counter: zero it when the topic recovered,
  // otherwise set it to the current mismatch count.
  for (Map.Entry<String, Counter> entry : _mismatchedTopicPartitionsCounter.entrySet()) {
    Counter counter = entry.getValue();
    if (!misMatchedPartitionNumberTopics.containsKey(entry.getKey())) {
      counter.dec(counter.getCount());
    } else {
      counter.inc(misMatchedPartitionNumberTopics.get(entry.getKey()) - counter.getCount());
    }
  }
}
/**
 * Syncs the per-worker external-view topic-partition counters with the
 * given snapshot: registers a counter for each new worker, sets each
 * present worker's counter to its current count, and zeroes counters for
 * workers absent from the snapshot.
 */
private synchronized void updatePerWorkerEVMetrics(
    Map<String, Integer> topicPartitionMapForExternalView) {
  for (Map.Entry<String, Integer> entry : topicPartitionMapForExternalView.entrySet()) {
    String worker = entry.getKey();
    Counter workerCounter = _externalViewPerWorkerTopicPartitionCounter.get(worker);
    if (workerCounter == null) {
      workerCounter = new Counter();
      try {
        HelixKafkaMirrorMakerMetricsReporter.get().getRegistry().register(
            getExternalViewPerWorkMetricName(worker), workerCounter);
      } catch (Exception e) {
        LOGGER.error("Error registering metrics!", e);
      }
      _externalViewPerWorkerTopicPartitionCounter.put(worker, workerCounter);
    }
    // Counter only supports inc/dec, so set-to-value is done via a delta.
    workerCounter.inc(entry.getValue() - workerCounter.getCount());
  }
  // Zero the counters of workers no longer present in the snapshot.
  for (Map.Entry<String, Counter> entry
      : _externalViewPerWorkerTopicPartitionCounter.entrySet()) {
    if (!topicPartitionMapForExternalView.containsKey(entry.getKey())) {
      Counter stale = entry.getValue();
      stale.dec(stale.getCount());
    }
  }
}
/**
 * Shuts down the validation executor: orderly shutdown first, a bounded
 * grace period for in-flight tasks, then a forced shutdown, and finally
 * metric unregistration.
 */
public void stop() {
  _executorService.shutdown();
  try {
    _executorService.awaitTermination(STOP_TIMEOUT_SEC, TimeUnit.SECONDS);
  } catch (InterruptedException e) {
    LOGGER.info("Stop ValidationManager got interrupted");
    // Bug fix: restore the interrupt flag instead of swallowing it, so the
    // caller can still observe the interruption.
    Thread.currentThread().interrupt();
  }
  // Force-cancel anything still running after the grace period.
  _executorService.shutdownNow();
  unregisterMetrics();
}
@Override
public void run() {
  // Guard clause: anything other than the Helix leader clears its metrics
  // and skips the validation work entirely.
  if (!_helixMirrorMakerManager.isLeader()) {
    cleanupMetrics();
    LOGGER.debug("Not leader, skip validation for source kafka cluster!");
    return;
  }
  LOGGER.info("Trying to run the source kafka cluster info validation job");
  validateSourceKafkaCluster();
}
@Override
public void run() {
  // Guard clause: non-leaders reset their metrics and skip validation.
  if (!_helixMirrorMakerManager.isLeader()) {
    cleanupMetrics();
    LOGGER.debug("Not leader, skip validation!");
    return;
  }
  // Set the leader indicator counter to exactly 1 via a delta.
  _isLeaderCounter.inc(1 - _isLeaderCounter.getCount());
  LOGGER.info("Trying to run the validation job");
  validateExternalView();
}