/**
 * Lazily creates the source Kafka cluster validation manager.
 *
 * <p>Returns {@code null} when source-cluster validation is disabled in the
 * controller config. Otherwise reuses (or creates on first use) the topic
 * observer for the source cluster and wraps it in a new validation manager.
 */
private SourceKafkaClusterValidationManager getSourceKafkaClusterValidationManager() {
  if (!_config.getEnableSrcKafkaValidation()) {
    LOGGER.info("Not init SourceKafkaClusterValidationManager!");
    return null;
  }
  // Create the source-cluster observer only once; later calls reuse the cached instance.
  if (!_kafkaBrokerTopicObserverMap.containsKey(SRC_KAFKA_CLUSTER)) {
    KafkaBrokerTopicObserver srcObserver =
        new KafkaBrokerTopicObserver(SRC_KAFKA_CLUSTER, _config.getSrcKafkaZkPath());
    _kafkaBrokerTopicObserverMap.put(SRC_KAFKA_CLUSTER, srcObserver);
  }
  return new SourceKafkaClusterValidationManager(
      _kafkaBrokerTopicObserverMap.get(SRC_KAFKA_CLUSTER),
      _helixMirrorMakerManager,
      _config.getEnableAutoTopicExpansion());
}
// Start the source Kafka cluster validation manager (begins its periodic validation work).
_srcKafkaValidationManager.start();
/**
 * Periodic validation task: only the Helix leader runs the source-cluster
 * validation; non-leaders reset their metrics so stale values are not reported.
 */
@Override
public void run() {
  if (!_helixMirrorMakerManager.isLeader()) {
    // Zero out counters on non-leader instances.
    cleanupMetrics();
    LOGGER.debug("Not leader, skip validation for source kafka cluster!");
    return;
  }
  LOGGER.info("Trying to run the source kafka cluster info validation job");
  validateSourceKafkaCluster();
}
constructMismatchedTopicPartitionsJson(misMatchedPartitionNumberTopics); JSONObject validationResultJson = constructValidationResultJson(notExistedTopics.size(), misMatchedPartitionNumberTopics.size(), numMismatchedTopicPartitions, mismatchedTopicPartitionsJson); if (_helixMirrorMakerManager.isLeader()) { updateMetrics(notExistedTopics.size(), misMatchedPartitionNumberTopics.size(), numMismatchedTopicPartitions, misMatchedPartitionNumberTopics);
@BeforeTest public void setup() { LOGGER.info("Trying to setup"); ZkStarter.startLocalZkServer(); kafkaStarter = KafkaStarterUtils.startServer(KafkaStarterUtils.DEFAULT_KAFKA_PORT, KafkaStarterUtils.DEFAULT_BROKER_ID, KafkaStarterUtils.DEFAULT_ZK_STR, KafkaStarterUtils.getDefaultKafkaConfiguration()); // Create Kafka topic KafkaStarterUtils.createTopic("testTopic0", KafkaStarterUtils.DEFAULT_ZK_STR); try { Thread.sleep(2000); } catch (Exception e) { } kafkaBrokerTopicObserver = new KafkaBrokerTopicObserver("broker0", KafkaStarterUtils.DEFAULT_ZK_STR); ControllerConf controllerConf = new ControllerConf(); controllerConf.setControllerPort("9090"); controllerConf.setHelixClusterName("TestAutoTopicWhitelistingManager"); controllerConf.setInstanceId("controller-0"); controllerConf.setControllerMode("customized"); controllerConf.setZkStr(ZkStarter.DEFAULT_ZK_STR); controllerConf.setBackUpToGit("false"); controllerConf.setAutoRebalanceDelayInSeconds("1"); helixMirrorMakerManager = new HelixMirrorMakerManager(controllerConf); helixMirrorMakerManager.start(); sourceKafkaClusterValidationManager = new SourceKafkaClusterValidationManager(kafkaBrokerTopicObserver, helixMirrorMakerManager); sourceKafkaClusterValidationManager.start(); }
/**
 * Registers validation metrics and schedules the recurring source-cluster
 * validation job on {@code _executorService}.
 *
 * <p>The job runs every {@code _timeValue} {@code _timeUnit} after an initial
 * delay of 120 {@code _timeUnit}. Only the Helix leader performs validation;
 * non-leaders reset their counters so stale values are not reported.
 */
public void start() {
  registerMetrics();
  // Schedule the periodic status report at the configured rate.
  LOGGER.info("Trying to schedule a source kafka cluster validation job at rate {} {} !",
      _timeValue, _timeUnit.toString());
  _executorService.scheduleAtFixedRate(new Runnable() {
    @Override
    public void run() {
      if (_helixMirrorMakerManager.isLeader()) {
        LOGGER.info("Trying to run the source kafka cluster info validation job");
        validateSourceKafkaCluster();
      } else {
        // Non-leaders zero their metrics instead of validating.
        cleanupMetrics();
        LOGGER.debug("Not leader, skip validation for source kafka cluster!");
      }
    }

    // Zeroes every validation counter by decrementing it by its current count.
    private void cleanupMetrics() {
      _numMissingTopics.dec(_numMissingTopics.getCount());
      _numMismatchedTopics.dec(_numMismatchedTopics.getCount());
      _numMismatchedTopicPartitions.dec(_numMismatchedTopicPartitions.getCount());
      for (String topic : _mismatchedTopicPartitionsCounter.keySet()) {
        Counter counter = _mismatchedTopicPartitionsCounter.get(topic);
        counter.dec(counter.getCount());
      }
    }
  }, 120, _timeValue, _timeUnit);
}
/**
 * Sets the validation counters to the latest observed absolute values.
 *
 * <p>Counters are gauge-like: each is adjusted by the delta between the new
 * value and its current count. Topics newly seen with mismatched partition
 * counts get a dedicated counter registered on first sight; topics no longer
 * mismatched have their counter zeroed.
 */
private synchronized void updateMetrics(int numMissingTopics, int numMismatchedTopics,
    int numMismatchedTopicPartitions, Map<String, Integer> misMatchedPartitionNumberTopics) {
  _numMissingTopics.inc(numMissingTopics - _numMissingTopics.getCount());
  _numMismatchedTopics.inc(numMismatchedTopics - _numMismatchedTopics.getCount());
  _numMismatchedTopicPartitions
      .inc(numMismatchedTopicPartitions - _numMismatchedTopicPartitions.getCount());
  // Register a per-topic counter the first time a topic shows a partition mismatch.
  for (String mismatchedTopic : misMatchedPartitionNumberTopics.keySet()) {
    if (_mismatchedTopicPartitionsCounter.containsKey(mismatchedTopic)) {
      continue;
    }
    Counter newCounter = new Counter();
    try {
      HelixKafkaMirrorMakerMetricsReporter.get().getRegistry().register(
          getMismatchedTopicMetricName(mismatchedTopic), newCounter);
    } catch (Exception e) {
      LOGGER.error("Error registering metrics!", e);
    }
    _mismatchedTopicPartitionsCounter.put(mismatchedTopic, newCounter);
  }
  // Bring every known per-topic counter in line with the latest snapshot.
  for (Map.Entry<String, Counter> entry : _mismatchedTopicPartitionsCounter.entrySet()) {
    Counter counter = entry.getValue();
    if (misMatchedPartitionNumberTopics.containsKey(entry.getKey())) {
      counter.inc(misMatchedPartitionNumberTopics.get(entry.getKey()) - counter.getCount());
    } else {
      counter.dec(counter.getCount());
    }
  }
}
/**
 * Test fixture: starts a local ZooKeeper and Kafka broker, then boots a Helix
 * mirror-maker manager and the validation manager under test.
 */
@BeforeTest
public void setup() {
  LOGGER.info("Trying to setup");
  ZkStarter.startLocalZkServer();
  kafkaStarter = KafkaStarterUtils.startServer(KafkaStarterUtils.DEFAULT_KAFKA_PORT,
      KafkaStarterUtils.DEFAULT_BROKER_ID, KafkaStarterUtils.DEFAULT_ZK_STR,
      KafkaStarterUtils.getDefaultKafkaConfiguration());
  // Give the broker time to finish startup before the observer connects.
  try {
    Thread.sleep(2000);
  } catch (InterruptedException e) {
    // Was an empty catch on Exception; preserve the interrupt status instead of swallowing it.
    Thread.currentThread().interrupt();
  }
  kafkaBrokerTopicObserver =
      new KafkaBrokerTopicObserver("broker0", KafkaStarterUtils.DEFAULT_ZK_STR, 1);
  ControllerConf controllerConf = new ControllerConf();
  controllerConf.setControllerPort("9090");
  controllerConf.setHelixClusterName("TestAutoTopicWhitelistingManager");
  controllerConf.setDeploymentName("TestAutoTopicWhitelistingManagerDeployment");
  controllerConf.setInstanceId("controller-0");
  controllerConf.setControllerMode("customized");
  controllerConf.setZkStr(ZkStarter.DEFAULT_ZK_STR);
  controllerConf.setBackUpToGit("false");
  controllerConf.setAutoRebalanceDelayInSeconds("1");
  helixMirrorMakerManager = new HelixMirrorMakerManager(controllerConf);
  helixMirrorMakerManager.start();
  sourceKafkaClusterValidationManager =
      new SourceKafkaClusterValidationManager(kafkaBrokerTopicObserver, helixMirrorMakerManager);
  sourceKafkaClusterValidationManager.start();
}
constructMismatchedTopicPartitionsJson(misMatchedPartitionNumberTopics); JSONObject validationResultJson = constructValidationResultJson(notExistedTopics.size(), misMatchedPartitionNumberTopics.size(), numMismatchedTopicPartitions, mismatchedTopicPartitionsJson); if (_helixMirrorMakerManager.isLeader()) { updateMetrics(notExistedTopics.size(), misMatchedPartitionNumberTopics.size(), numMismatchedTopicPartitions, misMatchedPartitionNumberTopics);
/**
 * Registers validation metrics and schedules the recurring source-cluster
 * validation job on {@code _executorService}.
 *
 * <p>The job runs every {@code _timeValue} {@code _timeUnit} after an initial
 * delay of 120 {@code _timeUnit}. Only the Helix leader performs validation;
 * non-leaders reset their counters so stale values are not reported.
 */
public void start() {
  registerMetrics();
  // Schedule the periodic status report at the configured rate.
  LOGGER.info("Trying to schedule a source kafka cluster validation job at rate {} {} !",
      _timeValue, _timeUnit.toString());
  _executorService.scheduleAtFixedRate(new Runnable() {
    @Override
    public void run() {
      if (_helixMirrorMakerManager.isLeader()) {
        LOGGER.info("Trying to run the source kafka cluster info validation job");
        validateSourceKafkaCluster();
      } else {
        // Non-leaders zero their metrics instead of validating.
        cleanupMetrics();
        LOGGER.debug("Not leader, skip validation for source kafka cluster!");
      }
    }

    // Zeroes every validation counter by decrementing it by its current count.
    private void cleanupMetrics() {
      _numMissingTopics.dec(_numMissingTopics.getCount());
      _numMismatchedTopics.dec(_numMismatchedTopics.getCount());
      _numMismatchedTopicPartitions.dec(_numMismatchedTopicPartitions.getCount());
      for (String topic : _mismatchedTopicPartitionsCounter.keySet()) {
        Counter counter = _mismatchedTopicPartitionsCounter.get(topic);
        counter.dec(counter.getCount());
      }
    }
  }, 120, _timeValue, _timeUnit);
}
/**
 * Sets the validation counters to the latest observed absolute values.
 *
 * <p>Counters are gauge-like: each is adjusted by the delta between the new
 * value and its current count. Topics newly seen with mismatched partition
 * counts get a dedicated counter registered on first sight; topics no longer
 * mismatched have their counter zeroed.
 */
private synchronized void updateMetrics(int numMissingTopics, int numMismatchedTopics,
    int numMismatchedTopicPartitions, Map<String, Integer> misMatchedPartitionNumberTopics) {
  // inc(new - current) sets the counter to the new absolute value.
  _numMissingTopics.inc(numMissingTopics - _numMissingTopics.getCount());
  _numMismatchedTopics.inc(numMismatchedTopics - _numMismatchedTopics.getCount());
  _numMismatchedTopicPartitions
      .inc(numMismatchedTopicPartitions - _numMismatchedTopicPartitions.getCount());
  // Register a per-topic counter the first time a topic shows a partition mismatch.
  for (String topic : misMatchedPartitionNumberTopics.keySet()) {
    if (!_mismatchedTopicPartitionsCounter.containsKey(topic)) {
      Counter topicPartitionCounter = new Counter();
      try {
        HelixKafkaMirrorMakerMetricsReporter.get().getRegistry().register(
            getMismatchedTopicMetricName(topic), topicPartitionCounter);
      } catch (Exception e) {
        // Registration failure is logged but the counter is still tracked locally.
        LOGGER.error("Error registering metrics!", e);
      }
      _mismatchedTopicPartitionsCounter.put(topic, topicPartitionCounter);
    }
  }
  // Bring every known per-topic counter in line with the latest snapshot.
  for (String topic : _mismatchedTopicPartitionsCounter.keySet()) {
    Counter counter = _mismatchedTopicPartitionsCounter.get(topic);
    if (!misMatchedPartitionNumberTopics.containsKey(topic)) {
      counter.dec(counter.getCount());
    } else {
      counter.inc(misMatchedPartitionNumberTopics.get(topic) - counter.getCount());
    }
  }
}
/**
 * Validation REST endpoint.
 *
 * <p>With the {@code option} attribute set to {@code "srcKafka"}, validates the
 * source Kafka cluster (returning an explanatory message if that manager was
 * never initialized); otherwise validates the current cluster's external view.
 *
 * @return a string representation of the validation result
 */
@Override
@Get
public Representation get() {
  final String option = (String) getRequest().getAttributes().get("option");
  if ("srcKafka".equals(option)) {
    if (_srcKafkaValidationManager == null) {
      LOGGER.warn("SourceKafkaClusterValidationManager is null!");
      // Fixed broken grammar in the user-facing message ("is not been" -> "has not been").
      return new StringRepresentation("SrcKafkaValidationManager has not been initialized!");
    }
    LOGGER.info("Trying to call validation on source kafka cluster!");
    return new StringRepresentation(_srcKafkaValidationManager.validateSourceKafkaCluster());
  } else {
    LOGGER.info("Trying to call validation on current cluster!");
    return new StringRepresentation(_validationManager.validateExternalView());
  }
}
/**
 * Lazily creates the source Kafka cluster validation manager.
 *
 * <p>Returns {@code null} when source-cluster validation is disabled in the
 * controller config. Otherwise reuses (or creates on first use, with a
 * 5-minute refresh interval) the topic observer for the source cluster and
 * wraps it in a new validation manager.
 */
private SourceKafkaClusterValidationManager getSourceKafkaClusterValidationManager() {
  if (!_config.getEnableSrcKafkaValidation()) {
    LOGGER.info("Not init SourceKafkaClusterValidationManager!");
    return null;
  }
  LOGGER.info("Try to init SourceKafkaClusterValidationManager!");
  // Create the source-cluster observer only once; later calls reuse the cached instance.
  if (!_kafkaBrokerTopicObserverMap.containsKey(SRC_KAFKA_CLUSTER)) {
    KafkaBrokerTopicObserver srcObserver = new KafkaBrokerTopicObserver(
        SRC_KAFKA_CLUSTER, _config.getSrcKafkaZkPath(), TimeUnit.MINUTES.toMillis(5));
    _kafkaBrokerTopicObserverMap.put(SRC_KAFKA_CLUSTER, srcObserver);
  }
  return new SourceKafkaClusterValidationManager(
      _kafkaBrokerTopicObserverMap.get(SRC_KAFKA_CLUSTER),
      _helixMirrorMakerManager,
      _config.getEnableAutoTopicExpansion());
}
// Start the source Kafka cluster validation manager (begins its periodic validation work).
_srcKafkaValidationManager.start();
@Test public void testValidation() { String validationResult = sourceKafkaClusterValidationManager.validateSourceKafkaCluster(); System.out.println(validationResult); Assert.assertEquals(validationResult, validationResult = sourceKafkaClusterValidationManager.validateSourceKafkaCluster(); System.out.println(validationResult); Assert.assertEquals(validationResult, JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 10); JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 19 - i); JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 10); JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 29 - i); validationResult = sourceKafkaClusterValidationManager.validateSourceKafkaCluster(); System.out.println(validationResult); Assert.assertEquals(validationResult,
/**
 * Periodic validation task: only the Helix leader runs the source-cluster
 * validation; non-leaders reset their metrics so stale values are not reported.
 */
@Override
public void run() {
  if (_helixMirrorMakerManager.isLeader()) {
    LOGGER.info("Trying to run the source kafka cluster info validation job");
    validateSourceKafkaCluster();
  } else {
    // Zero out counters on non-leader instances.
    cleanupMetrics();
    LOGGER.debug("Not leader, skip validation for source kafka cluster!");
  }
}
/**
 * Validation REST endpoint.
 *
 * <p>With the {@code option} attribute set to {@code "srcKafka"}, validates the
 * source Kafka cluster (returning an explanatory message if that manager was
 * never initialized); otherwise validates the current cluster's external view.
 *
 * @return a string representation of the validation result
 */
@Override
@Get
public Representation get() {
  final String option = (String) getRequest().getAttributes().get("option");
  if ("srcKafka".equals(option)) {
    if (_srcKafkaValidationManager == null) {
      // Source-cluster validation was disabled at startup, so the manager is null.
      LOGGER.warn("SourceKafkaClusterValidationManager is null!");
      return new StringRepresentation("SrcKafkaValidationManager is not been initialized!");
    }
    LOGGER.info("Trying to call validation on source kafka cluster!");
    return new StringRepresentation(_srcKafkaValidationManager.validateSourceKafkaCluster());
  } else {
    LOGGER.info("Trying to call validation on current cluster!");
    return new StringRepresentation(_validationManager.validateExternalView());
  }
}
@Test public void testValidation() { String validationResult = sourceKafkaClusterValidationManager.validateSourceKafkaCluster(); System.out.println(validationResult); Assert.assertEquals(validationResult, validationResult = sourceKafkaClusterValidationManager.validateSourceKafkaCluster(); System.out.println(validationResult); Assert.assertEquals(validationResult, JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 10); JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 19 - i); JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 10); JSONObject.parseObject(sourceKafkaClusterValidationManager.validateSourceKafkaCluster()); System.out.println(validationResultJson); Assert.assertEquals(validationResultJson.get("numMissingTopics"), 29 - i); validationResult = sourceKafkaClusterValidationManager.validateSourceKafkaCluster(); System.out.println(validationResult); Assert.assertEquals(validationResult,