/**
 * Registers a gauge that reports the current value of the supplied variable.
 * The variable is polled lazily each time the gauge is read, not at
 * registration time.
 */
@Override
public void addGauge(String name, final MetricsVariable variable) {
  // Adapt the MetricsVariable to the codahale Gauge interface inline.
  addGaugeInternal(name, new Gauge() {
    @Override
    public Object getValue() {
      return variable.getValue();
    }
  });
}
/**
 * Decrements the named counter by one.
 *
 * @param name counter name
 * @return the counter's value after the decrement
 */
@Override
public Long decrementCounter(String name) {
  // Delegate to the general-purpose overload with a unit decrement.
  return decrementCounter(name, 1L);
}
/**
 * Increments the named counter by one.
 *
 * @param name counter name
 * @return the counter's value after the increment
 */
@Override
public Long incrementCounter(String name) {
  // Delegate to the general-purpose overload with a unit increment.
  return incrementCounter(name, 1L);
}
/** * Initializes reporters from HIVE_CODAHALE_METRICS_REPORTER_CLASSES or HIVE_METRICS_REPORTER if the former is not defined. * Note: if both confs are defined, only HIVE_CODAHALE_METRICS_REPORTER_CLASSES will be used. */ private void initReporting() { if (!(initCodahaleMetricsReporterClasses() || initMetricsReporter())) { LOGGER.warn("Unable to initialize metrics reporting"); } if (reporters.isEmpty()) { // log a warning incase no reporters were successfully added LOGGER.warn("No reporters configured for codahale metrics!"); } }
// Register the built-in JVM metric sets (garbage collection, NIO buffer pools,
// memory usage, thread states, class loading) under dot-separated prefixes,
// then start the configured reporters.
registerAll("gc", new GarbageCollectorMetricSet()); registerAll("buffers", new BufferPoolMetricSet(ManagementFactory.getPlatformMBeanServer())); registerAll("memory", new MemoryUsageGaugeSet()); registerAll("threads", new ThreadStatesGaugeSet()); registerAll("classLoading", new ClassLoadingGaugeSet()); initReporting();
/** Verifies that finishing a running operation records one submitted query. */
@Test
public void testSubmittedQueryCount() throws Exception {
  // Before any state transition the submitted-queries timer reports 0 events.
  String jsonBefore = ((CodahaleMetrics) metrics).dumpJson();
  MetricsTestUtils.verifyMetricsJson(jsonBefore, MetricsTestUtils.TIMER,
      MetricsConstant.HS2_SUBMITTED_QURIES, "0");

  // Transition RUNNING -> FINISHED; the timer should now show one event.
  operation.onNewState(OperationState.FINISHED, OperationState.RUNNING);
  String jsonAfter = ((CodahaleMetrics) metrics).dumpJson();
  MetricsTestUtils.verifyMetricsJson(jsonAfter, MetricsTestUtils.TIMER,
      MetricsConstant.HS2_SUBMITTED_QURIES, "1");
}
/**
 * Looks up a codahale Counter by name from the active metrics registry.
 * Returns null when no counter with that name has been registered.
 */
private Counter getCounter(String counter) {
  CodahaleMetrics codahaleMetrics = (CodahaleMetrics) MetricsFactory.getInstance();
  SortedMap<String, Counter> registered =
      codahaleMetrics.getMetricRegistry().getCounters();
  assertNotNull(registered);
  return registered.get(counter);
}
/**
 * Recursively registers every metric in the given set under dot-separated,
 * prefix-qualified names (e.g. "gc.count"); nested MetricSets are flattened.
 */
private void registerAll(String prefix, MetricSet metricSet) {
  for (Map.Entry<String, Metric> entry : metricSet.getMetrics().entrySet()) {
    String qualifiedName = prefix + "." + entry.getKey();
    Metric metric = entry.getValue();
    if (metric instanceof MetricSet) {
      // Recurse, extending the prefix with this set's key.
      registerAll(qualifiedName, (MetricSet) metric);
    } else {
      metricRegistry.register(qualifiedName, metric);
    }
  }
}
// Verifies the HS2 average-open-session-time gauge: it reports "NaN" while no
// session has ever been opened, then tracks the mean open time of the
// currently-open sessions as sessions are opened and closed. Comparisons use a
// 100ms tolerance because wall-clock time elapses between openSession() and
// dumpJson(). Statement order matters here: every timestamp is taken
// immediately before the call it brackets.
@Test public void testOpenSessionTimeMetrics() throws Exception { String json = metrics.dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, "NaN"); long firstSessionOpen = System.currentTimeMillis(); SessionHandle handle = sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw", "127.0.0.1", new HashMap<String, String>()); json = metrics.dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, (double)(System.currentTimeMillis() - firstSessionOpen), 100d); long secondSessionOpen = System.currentTimeMillis(); sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9, "user", "passw", "127.0.0.1", new HashMap<String, String>()); json = metrics.dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, (double)(System.currentTimeMillis() - firstSessionOpen + System.currentTimeMillis() - secondSessionOpen) / 2d, 100d); sm.closeSession(handle); json = metrics.dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, MetricsConstant.HS2_AVG_OPEN_SESSION_TIME, (double)(System.currentTimeMillis() - secondSessionOpen), 100d); }
// One-time test setup: configures CodahaleMetrics with both the JSON-file and
// JMX reporters, writing a JSON snapshot of the registry to a temp file every
// REPORT_INTERVAL_MS milliseconds.
// NOTE(review): tmpDir is created here (rwxr-xr-x) but jsonReportFile is
// created via File.createTempFile in the default temp directory, not under
// tmpDir — confirm whether the report file was meant to live in tmpDir.
@BeforeClass public static void setUp() throws Exception { if (!tmpDir.toFile().exists()) { System.out.println("Creating directory " + tmpDir); Files.createDirectories(tmpDir, PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxr-xr-x"))); } HiveConf conf = new HiveConf(); jsonReportFile = File.createTempFile("TestCodahaleMetrics", ".json"); System.out.println("Json metrics saved in " + jsonReportFile.getAbsolutePath()); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName()); conf.setVar(HiveConf.ConfVars.HIVE_CODAHALE_METRICS_REPORTER_CLASSES, "org.apache.hadoop.hive.common.metrics.metrics2.JsonFileMetricsReporter, " + "org.apache.hadoop.hive.common.metrics.metrics2.JmxMetricsReporter"); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_LOCATION, jsonReportFile.getAbsolutePath()); conf.setTimeVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, REPORT_INTERVAL_MS, TimeUnit.MILLISECONDS); MetricsFactory.init(conf); metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance()).getMetricRegistry(); }
/** * Initializes reporters from HIVE_CODAHALE_METRICS_REPORTER_CLASSES or HIVE_METRICS_REPORTER if the former is not defined. * Note: if both confs are defined, only HIVE_CODAHALE_METRICS_REPORTER_CLASSES will be used. */ private void initReporting() { if (!(initCodahaleMetricsReporterClasses() || initMetricsReporter())) { LOGGER.warn("Unable to initialize metrics reporting"); } if (reporters.isEmpty()) { // log a warning incase no reporters were successfully added LOGGER.warn("No reporters configured for codahale metrics!"); } }
// Register the built-in JVM metric sets (garbage collection, NIO buffer pools,
// memory usage, thread states, class loading) under dot-separated prefixes,
// then start the configured reporters.
registerAll("gc", new GarbageCollectorMetricSet()); registerAll("buffers", new BufferPoolMetricSet(ManagementFactory.getPlatformMBeanServer())); registerAll("memory", new MemoryUsageGaugeSet()); registerAll("threads", new ThreadStatesGaugeSet()); registerAll("classLoading", new ClassLoadingGaugeSet()); initReporting();
/**
 * Recursively registers every metric in the given set under dot-separated,
 * prefix-qualified names (e.g. "gc.count"); nested MetricSets are flattened.
 */
private void registerAll(String prefix, MetricSet metricSet) {
  for (Map.Entry<String, Metric> entry : metricSet.getMetrics().entrySet()) {
    String qualifiedName = prefix + "." + entry.getKey();
    Metric metric = entry.getValue();
    if (metric instanceof MetricSet) {
      // Recurse, extending the prefix with this set's key.
      registerAll(qualifiedName, (MetricSet) metric);
    } else {
      metricRegistry.register(qualifiedName, metric);
    }
  }
}
/** Verifies the HS2 open-sessions gauge as sessions are opened and closed. */
@Test
public void testOpenSessionMetrics() throws Exception {
  // No sessions yet: gauge reports 0.
  String snapshot = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(snapshot, MetricsTestUtils.GAUGE,
      MetricsConstant.HS2_OPEN_SESSIONS, 0);

  // Open the first session: gauge rises to 1.
  SessionHandle firstSession = sm.openSession(
      TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9,
      "user", "passw", "127.0.0.1", new HashMap<String, String>());
  snapshot = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(snapshot, MetricsTestUtils.GAUGE,
      MetricsConstant.HS2_OPEN_SESSIONS, 1);

  // Open a second session: gauge rises to 2.
  sm.openSession(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V9,
      "user", "passw", "127.0.0.1", new HashMap<String, String>());
  snapshot = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(snapshot, MetricsTestUtils.GAUGE,
      MetricsConstant.HS2_OPEN_SESSIONS, 2);

  // Close the first session: gauge drops back to 1.
  sm.closeSession(firstSession);
  snapshot = metrics.dumpJson();
  MetricsTestUtils.verifyMetricsJson(snapshot, MetricsTestUtils.GAUGE,
      MetricsConstant.HS2_OPEN_SESSIONS, 1);
}
@Before public void before() throws Exception { HiveConf conf = new HiveConf(); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName()); // disable json file writing conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, "60000m"); MetricsFactory.init(conf); metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance()).getMetricRegistry(); }
/**
 * Registers a gauge that reports the ratio numerator/denominator, with both
 * operands polled each time the gauge is read.
 *
 * @throws IllegalArgumentException if either operand is null (callers may rely
 *     on this exception type, so Preconditions is kept rather than
 *     Objects.requireNonNull)
 */
@Override
public void addRatio(String name, MetricsVariable<Integer> numerator,
    MetricsVariable<Integer> denominator) {
  Preconditions.checkArgument(numerator != null, "Numerator must not be null");
  Preconditions.checkArgument(denominator != null, "Denominator must not be null");
  addGaugeInternal(name, new MetricVariableRatioGauge(numerator, denominator));
}
/**
 * Opens the scope: notes the start time via the timer and increments the
 * active-calls counter for this scope's name. Logs a warning and does nothing
 * if the scope is already open.
 */
public void open() {
  // Guard clause: re-opening an open scope is a caller error.
  if (isOpen) {
    LOGGER.warn("Scope named " + name + " is not closed, cannot be opened.");
    return;
  }
  isOpen = true;
  this.timerContext = timer.time();
  CodahaleMetrics.this.incrementCounter(MetricsConstant.ACTIVE_CALLS + name);
}
/**
 * Closes the scope: records the elapsed time and decrements the active-calls
 * counter. Logs a warning if the scope was not open; the scope is marked
 * closed either way.
 */
public void close() {
  if (!isOpen) {
    LOGGER.warn("Scope named " + name + " is not open, cannot be closed.");
  } else {
    timerContext.close();
    CodahaleMetrics.this.decrementCounter(MetricsConstant.ACTIVE_CALLS + name);
  }
  // Unconditionally mark closed, matching the original semantics.
  isOpen = false;
}
}
// Verifies the per-user active-queries counter around operation state changes:
// the counter is absent (verified with an empty expected value) before any
// query runs, incremented on INITIALIZED -> RUNNING, left unchanged on a
// RUNNING -> RUNNING transition, and decremented when the query reaches
// FINISHED.
@Test public void testActiveUserQueriesCount() throws Exception { String name = MetricsConstant.SQL_OPERATION_PREFIX + "active_user"; String json = ((CodahaleMetrics) metrics).dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, name, ""); operation.onNewState(OperationState.RUNNING, OperationState.INITIALIZED); json = ((CodahaleMetrics) metrics).dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, name, "1"); operation.onNewState(OperationState.RUNNING, OperationState.RUNNING); json = ((CodahaleMetrics) metrics).dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, name, "1"); operation.onNewState(OperationState.FINISHED, OperationState.RUNNING); json = ((CodahaleMetrics) metrics).dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, name, "0"); }
/**
 * Sets up a fresh CodahaleMetrics instance plus the query lifetime hook and
 * context before each test. JSON-file reporting is configured but effectively
 * disabled by the very long report interval.
 */
@Before
public void before() throws Exception {
  HiveConf hiveConf = new HiveConf();
  hiveConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "local");
  hiveConf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS,
      CodahaleMetrics.class.getCanonicalName());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER,
      MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, "100000s");
  MetricsFactory.init(hiveConf);
  metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance()).getMetricRegistry();
  hook = new MetricsQueryLifeTimeHook();
  ctx = new QueryLifeTimeHookContextImpl();
}