/**
 * Reads the value of the given counter from the task attempt context.
 *
 * <p>The lookup is done reflectively on purpose: {@code getCounter(Enum)} is
 * declared on different types across Hadoop versions, so a direct call would
 * not link against all of them. NOTE(review): assumed compat shim — confirm
 * against the Hadoop versions this build supports.
 *
 * @param context the task attempt context holding the counters
 * @param counterName enum key identifying the counter to read
 * @return the counter's current value
 * @throws RuntimeException wrapping any reflective lookup/invocation failure
 */
private static long getRecordCountFromCounter(TaskAttemptContext context, Enum<?> counterName) {
    try {
        final Method counterAccessor = context.getClass().getMethod("getCounter", Enum.class);
        final Counter counter = (Counter) counterAccessor.invoke(context, counterName);
        return counter.getValue();
    } catch (Exception e) {
        throw new RuntimeException("Error reading record count counter", e);
    }
}
}
/**
 * {@inheritDoc}
 *
 * <p>Returns the current value of the counter identified by {@code group} and
 * {@code name} in the supplied task context.
 */
@Override
public long getContextCounter(TaskInputOutputContext context, String group, String name) {
    return context
        .getCounter(group, name)
        .getValue();
}
/**
 * {@inheritDoc}
 *
 * <p>Looks up the counter for the given group/name pair on the context and
 * reads its current value.
 */
@Override
public long getContextCounter(TaskInputOutputContext context, String group, String name) {
    return context
        .getCounter(group, name)
        .getValue();
}
/**
 * {@inheritDoc}
 *
 * <p>Custom metrics live in their own dedicated counter group; the counter is
 * created on first access by {@code findCounter}.
 */
@Override
public long getCustom(String metric) {
    return counters
        .getGroup(HadoopContextScanMetrics.CUSTOM_COUNTER_GROUP)
        .findCounter(metric)
        .getValue();
}
/**
 * {@inheritDoc}
 *
 * <p>Reads the named custom metric from the dedicated custom-counter group.
 */
@Override
public long getCustom(String metric) {
    return counters
        .getGroup(HadoopContextScanMetrics.CUSTOM_COUNTER_GROUP)
        .findCounter(metric)
        .getValue();
}
/**
 * Builds the ingestion row metrics from the group-by job's counters.
 *
 * @return the row metrics map, or {@code null} when the job has not started
 *         or its counters are not (yet) available
 */
@Override
public Map<String, Object> getStats() {
    // Counters are only available once the job actually exists.
    if (groupByJob == null) {
        return null;
    }
    try {
        final Counters jobCounters = groupByJob.getCounters();
        return TaskMetricsUtils.makeIngestionRowMetrics(
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_PROCESSED_COUNTER).getValue(),
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_PROCESSED_WITH_ERRORS_COUNTER).getValue(),
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_UNPARSEABLE_COUNTER).getValue(),
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_THROWN_AWAY_COUNTER).getValue()
        );
    } catch (IllegalStateException ise) {
        // Fix: previously the exception itself was dropped from the log,
        // hiding which job state caused the failure; log it like the
        // generic catch below does.
        log.debug(ise, "Couldn't get counters due to job state");
        return null;
    } catch (Exception e) {
        log.debug(e, "Encountered exception in getStats().");
        return null;
    }
}
/**
 * Builds the ingestion row metrics from the index job's counters.
 *
 * @return the row metrics map, or {@code null} when the job has not started
 *         or its counters are not (yet) available
 */
@Override
public Map<String, Object> getStats() {
    // Counters are only available once the job actually exists.
    if (job == null) {
        return null;
    }
    try {
        final Counters jobCounters = job.getCounters();
        return TaskMetricsUtils.makeIngestionRowMetrics(
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_PROCESSED_COUNTER).getValue(),
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_PROCESSED_WITH_ERRORS_COUNTER).getValue(),
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_UNPARSEABLE_COUNTER).getValue(),
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_THROWN_AWAY_COUNTER).getValue()
        );
    } catch (IllegalStateException ise) {
        // Fix: previously the exception itself was dropped from the log,
        // hiding which job state caused the failure; log it like the
        // generic catch below does.
        log.debug(ise, "Couldn't get counters due to job state");
        return null;
    } catch (Exception e) {
        log.debug(e, "Encountered exception in getStats().");
        return null;
    }
}
/**
 * Builds the ingestion row metrics from the group-by job's counters.
 *
 * @return the row metrics map, or {@code null} when the job has not started
 *         or its counters are not (yet) available
 */
@Override
public Map<String, Object> getStats() {
    // Counters are only available once the job actually exists.
    if (groupByJob == null) {
        return null;
    }
    try {
        final Counters jobCounters = groupByJob.getCounters();
        return TaskMetricsUtils.makeIngestionRowMetrics(
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_PROCESSED_COUNTER).getValue(),
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_PROCESSED_WITH_ERRORS_COUNTER).getValue(),
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_UNPARSEABLE_COUNTER).getValue(),
            jobCounters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_THROWN_AWAY_COUNTER).getValue()
        );
    } catch (IllegalStateException ise) {
        // Fix: previously the exception itself was dropped from the log,
        // hiding which job state caused the failure; log it like the
        // generic catch below does.
        log.debug(ise, "Couldn't get counters due to job state");
        return null;
    } catch (Exception e) {
        log.debug(e, "Encountered exception in getStats().");
        return null;
    }
}
/**
 * {@inheritDoc}
 *
 * <p>Standard metrics are keyed by the enum constant's name inside the
 * standard counter group.
 */
@Override
public long get(Metric metric) {
    return counters
        .getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
        .findCounter(metric.name())
        .getValue();
}
/**
 * {@inheritDoc}
 *
 * <p>Reads the standard-group counter whose name matches the enum constant.
 */
@Override
public long get(Metric metric) {
    return counters
        .getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
        .findCounter(metric.name())
        .getValue();
}
/**
 * {@inheritDoc}
 *
 * <p>Adds each counter of {@code rightGroup} into the same-named counter of
 * this group; {@code findCounter} creates the target counter on first use.
 */
@Override
public void incrAllCounters(CounterGroupBase<Counter> rightGroup) {
    for (final Counter rightCounter : rightGroup) {
        cntrs.findCounter(name, rightCounter.getName()).increment(rightCounter.getValue());
    }
}
/**
 * {@inheritDoc}
 *
 * <p>Merges every counter from {@code other} into this instance, matching by
 * group name and counter name. Synchronized, as in the original, to keep the
 * merge atomic with respect to other synchronized operations on this object.
 */
@Override
public synchronized void incrAllCounters(AbstractCounters<Counter, CounterGroup> other) {
    for (final CounterGroup sourceGroup : other) {
        for (final Counter sourceCounter : sourceGroup) {
            findCounter(sourceGroup.getName(), sourceCounter.getName())
                .increment(sourceCounter.getValue());
        }
    }
}
/**
 * Records a parse failure in the job counters and terminates the task once
 * the configured parse-exception budget is exceeded.
 *
 * @param pe the parse failure being handled
 * @param context task context providing the counters
 * @throws RuntimeException when total parse failures exceed
 *         {@code config.getMaxParseExceptions()}
 */
private void handleParseException(ParseException pe, Context context) {
    // Every parse failure bumps the legacy invalid-row counter.
    context.getCounter(HadoopDruidIndexerConfig.IndexJobCounters.INVALID_ROW_COUNTER).increment(1);
    final Counter unparseable =
        context.getCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_UNPARSEABLE_COUNTER);
    final Counter processedWithErrors =
        context.getCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_PROCESSED_WITH_ERRORS_COUNTER);

    // A partially valid row still produced some data, so it is tracked
    // separately from rows that could not be parsed at all.
    if (pe.isFromPartiallyValidRow()) {
        processedWithErrors.increment(1);
    } else {
        unparseable.increment(1);
    }

    if (config.isLogParseExceptions()) {
        log.error(pe, "Encountered parse exception: ");
    }

    // Abort the task once the combined failure count crosses the budget.
    if (unparseable.getValue() + processedWithErrors.getValue() > config.getMaxParseExceptions()) {
        log.error("Max parse exceptions exceeded, terminating task...");
        throw new RuntimeException("Max parse exceptions exceeded, terminating task...", pe);
    }
}
/**
 * Runs the superclass load phase, then captures how many rows were written
 * with each of the four cell-visibility expressions as reported by the job
 * counters, printing each total to stdout.
 *
 * <p>NOTE(review): the field names are inconsistent ({@code numRowsLoadedWith…}
 * vs {@code numRowsLoadWith…}) but are part of the class's state and left as-is.
 *
 * @param conf job configuration
 * @param htd descriptor of the table being loaded
 * @return the completed load job
 * @throws Exception if the load fails or counters cannot be read
 */
@Override
protected Job doLoad(Configuration conf, HTableDescriptor htd) throws Exception {
    final Job loadJob = super.doLoad(conf, htd);
    this.numRowsLoadedWithExp1 = loadJob.getCounters().findCounter(Counters.ROWS_VIS_EXP_1).getValue();
    this.numRowsLoadedWithExp2 = loadJob.getCounters().findCounter(Counters.ROWS_VIS_EXP_2).getValue();
    this.numRowsLoadWithExp3 = loadJob.getCounters().findCounter(Counters.ROWS_VIS_EXP_3).getValue();
    this.numRowsLoadWithExp4 = loadJob.getCounters().findCounter(Counters.ROWS_VIS_EXP_4).getValue();
    System.out.println(String.format("Rows loaded with cell visibility %s : %d",
        VISIBILITY_EXPS[0], this.numRowsLoadedWithExp1));
    System.out.println(String.format("Rows loaded with cell visibility %s : %d",
        VISIBILITY_EXPS[1], this.numRowsLoadedWithExp2));
    System.out.println(String.format("Rows loaded with cell visibility %s : %d",
        VISIBILITY_EXPS[2], this.numRowsLoadWithExp3));
    System.out.println(String.format("Rows loaded with cell visibility %s : %d",
        VISIBILITY_EXPS[3], this.numRowsLoadWithExp4));
    return loadJob;
}
/**
 * Runs the RowCounter map reduce job and verifies the resulting row count.
 *
 * @param args command line arguments for the rowcounter job
 * @param expectedCount expected row count reported by the job's ROWS counter
 * @throws Exception if the job cannot be created or run
 */
private void runRowCount(String[] args, int expectedCount) throws Exception {
    final Job job = RowCounter.createSubmittableJob(TEST_UTIL.getConfiguration(), args);
    final long startMillis = System.currentTimeMillis();
    job.waitForCompletion(true);
    final long durationMillis = System.currentTimeMillis() - startMillis;
    LOG.debug("row count duration (ms): " + durationMillis);
    assertTrue(job.isSuccessful());
    final Counter rowsCounter =
        job.getCounters().findCounter(RowCounter.RowCounterMapper.Counters.ROWS);
    assertEquals(expectedCount, rowsCounter.getValue());
}
/**
 * Runs a VerifyReplication job and checks the good/bad row counters it emits.
 *
 * @param args command line arguments for the verification job
 * @param expectedGoodRows expected value of the GOODROWS counter
 * @param expectedBadRows expected value of the BADROWS counter
 */
private void runVerifyReplication(String[] args, int expectedGoodRows, int expectedBadRows)
    throws IOException, InterruptedException, ClassNotFoundException {
    final Job job = new VerifyReplication().createSubmittableJob(new Configuration(conf1), args);
    if (job == null) {
        fail("Job wasn't created, see the log");
    }
    if (!job.waitForCompletion(true)) {
        fail("Job failed, see the log");
    }
    assertEquals(expectedGoodRows,
        job.getCounters().findCounter(VerifyReplication.Verifier.Counters.GOODROWS).getValue());
    assertEquals(expectedBadRows,
        job.getCounters().findCounter(VerifyReplication.Verifier.Counters.BADROWS).getValue());
}
/**
 * Asserts that HBase scan metrics were published as job counters: the
 * RPC_CALLS counter must exist and hold a positive value.
 *
 * @param job the completed job whose counters are inspected
 * @throws IOException if the counters cannot be retrieved
 */
private void verifyJobCountersAreEmitted(Job job) throws IOException {
    final Counters counters = job.getCounters();
    final Counter rpcCalls =
        counters.findCounter(TableRecordReaderImpl.HBASE_COUNTER_GROUP_NAME, "RPC_CALLS");
    assertNotNull("Unable to find Job counter for HBase scan metrics, RPC_CALLS", rpcCalls);
    assertTrue("Counter value for RPC_CALLS should be larger than 0", rpcCalls.getValue() > 0);
}
/**
 * End-to-end sync with default options: the target must converge to the
 * source, and the per-category diff counters must match the seeded test data.
 */
@Test
public void testSyncTable() throws Exception {
    final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source");
    final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target");
    final Path testDir = TEST_UTIL.getDataTestDirOnTestFS("testSyncTable");

    writeTestData(sourceTableName, targetTableName);
    hashSourceTable(sourceTableName, testDir);
    final Counters syncCounters = syncTables(sourceTableName, targetTableName, testDir);
    assertEqualTables(90, sourceTableName, targetTableName);

    // Expected per-counter totals produced by the sync job, asserted in the
    // same order as before.
    final Counter[] counterKeys = {
        Counter.ROWSWITHDIFFS, Counter.SOURCEMISSINGROWS, Counter.TARGETMISSINGROWS,
        Counter.SOURCEMISSINGCELLS, Counter.TARGETMISSINGCELLS, Counter.DIFFERENTCELLVALUES
    };
    final long[] expectedValues = { 60, 10, 10, 50, 50, 20 };
    for (int i = 0; i < counterKeys.length; i++) {
        assertEquals(expectedValues[i], syncCounters.findCounter(counterKeys[i]).getValue());
    }

    TEST_UTIL.deleteTable(sourceTableName);
    TEST_UTIL.deleteTable(targetTableName);
}
/**
 * Sync with {@code --doPuts=false}: diff counters are unchanged, but the
 * target must show that no puts were applied.
 */
@Test
public void testSyncTableDoPutsFalse() throws Exception {
    final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source");
    final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target");
    final Path testDir = TEST_UTIL.getDataTestDirOnTestFS("testSyncTableDoPutsFalse");

    writeTestData(sourceTableName, targetTableName);
    hashSourceTable(sourceTableName, testDir);
    final Counters syncCounters =
        syncTables(sourceTableName, targetTableName, testDir, "--doPuts=false");
    assertTargetDoPutsFalse(70, sourceTableName, targetTableName);

    // Expected per-counter totals, asserted in the same order as before.
    final Counter[] counterKeys = {
        Counter.ROWSWITHDIFFS, Counter.SOURCEMISSINGROWS, Counter.TARGETMISSINGROWS,
        Counter.SOURCEMISSINGCELLS, Counter.TARGETMISSINGCELLS, Counter.DIFFERENTCELLVALUES
    };
    final long[] expectedValues = { 60, 10, 10, 50, 50, 20 };
    for (int i = 0; i < counterKeys.length; i++) {
        assertEquals(expectedValues[i], syncCounters.findCounter(counterKeys[i]).getValue());
    }

    TEST_UTIL.deleteTable(sourceTableName);
    TEST_UTIL.deleteTable(targetTableName);
}
/**
 * Sync with {@code --doDeletes=false}: diff counters are unchanged, but the
 * target must show that no deletes were applied.
 */
@Test
public void testSyncTableDoDeletesFalse() throws Exception {
    final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source");
    final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target");
    final Path testDir = TEST_UTIL.getDataTestDirOnTestFS("testSyncTableDoDeletesFalse");

    writeTestData(sourceTableName, targetTableName);
    hashSourceTable(sourceTableName, testDir);
    final Counters syncCounters =
        syncTables(sourceTableName, targetTableName, testDir, "--doDeletes=false");
    assertTargetDoDeletesFalse(100, sourceTableName, targetTableName);

    // Expected per-counter totals, asserted in the same order as before.
    final Counter[] counterKeys = {
        Counter.ROWSWITHDIFFS, Counter.SOURCEMISSINGROWS, Counter.TARGETMISSINGROWS,
        Counter.SOURCEMISSINGCELLS, Counter.TARGETMISSINGCELLS, Counter.DIFFERENTCELLVALUES
    };
    final long[] expectedValues = { 60, 10, 10, 50, 50, 20 };
    for (int i = 0; i < counterKeys.length; i++) {
        assertEquals(expectedValues[i], syncCounters.findCounter(counterKeys[i]).getValue());
    }

    TEST_UTIL.deleteTable(sourceTableName);
    TEST_UTIL.deleteTable(targetTableName);
}