private static void oldApiCounter(Reporter reporter, Enum<?> counter, long value) { try { org.apache.hadoop.mapred.Counters.Counter c = reporter.getCounter(counter); if (c != null) { c.increment(value); } } catch (Exception ex) { // counter unavailable } }
/**
 * Increments the counter identified by {@code group}/{@code name} by {@code amount}.
 *
 * @param group  counter group name; must not be null
 * @param name   counter name within the group; must not be null
 * @param amount value to add to the counter
 * @throws NullPointerException if {@code group} or {@code name} is null
 */
public void count(String group, String name, long amount) {
    // The original called toString() on parameters that are already Strings.
    // These explicit checks preserve the NPE-on-null behavior that x.toString()
    // used to provide, without the redundant conversion.
    if (group == null) {
        throw new NullPointerException("group");
    }
    if (name == null) {
        throw new NullPointerException("name");
    }
    reporter.getCounter(group, name).increment(amount);
}
/**
 * Adds {@code incremental} to the Hadoop counter keyed by the metric
 * context's name (group) and the metric {@code name}.
 */
@Override
protected void reportIncremental(MetricContext context, String name, long incremental) {
    final String group = context.getName();
    this.reporter.getCounter(group, name).increment(incremental);
}
/**
 * Sets the Hadoop counter keyed by the metric context's name (group) and the
 * metric {@code name} to the absolute {@code value}.
 */
@Override
protected void reportValue(MetricContext context, String name, long value) {
    final String group = context.getName();
    this.reporter.getCounter(group, name).setValue(value);
}
/**
 * Wires a mocked {@link Reporter} so each expected metric name resolves to its
 * own mocked counter, then builds the reporter under test against a uniquely
 * named metric context (so lookups never collide across runs).
 */
@BeforeClass
public void setUp() throws Exception {
    String contextName = CONTEXT_NAME + "_" + UUID.randomUUID().toString();
    Reporter mockedReporter = Mockito.mock(Reporter.class);

    // The four COUNT metrics and the queue-size gauge each get a dedicated mock.
    this.recordsProcessedCount = mockCounter(mockedReporter, contextName,
        MetricRegistry.name(RECORDS_PROCESSED, Measurements.COUNT.getName()));
    this.recordProcessRateCount = mockCounter(mockedReporter, contextName,
        MetricRegistry.name(RECORD_PROCESS_RATE, Measurements.COUNT.getName()));
    this.recordSizeDistributionCount = mockCounter(mockedReporter, contextName,
        MetricRegistry.name(RECORD_SIZE_DISTRIBUTION, Measurements.COUNT.getName()));
    this.totalDurationCount = mockCounter(mockedReporter, contextName,
        MetricRegistry.name(TOTAL_DURATION, Measurements.COUNT.getName()));
    this.queueSize = mockCounter(mockedReporter, contextName, QUEUE_SIZE);

    this.hadoopCounterReporter = HadoopCounterReporter.builder(mockedReporter)
        .convertRatesTo(TimeUnit.SECONDS)
        .convertDurationsTo(TimeUnit.SECONDS)
        .filter(MetricFilter.ALL)
        .build(MetricContext.builder(contextName).buildStrict());
}

/**
 * Creates a mocked counter and stubs {@code reporter.getCounter(group, name)}
 * to return it. Extracted to remove four copies of the same stanza.
 */
private static Counters.Counter mockCounter(Reporter reporter, String group, String name) {
    Counters.Counter counter = Mockito.mock(Counters.Counter.class);
    Mockito.when(reporter.getCounter(group, name)).thenReturn(counter);
    return counter;
}
/**
 * Records one counter tick per cell for its row, family, and value strings,
 * each in a group namespaced by this test class. Emits no map output.
 *
 * @throws IOException declared by the mapper contract; not thrown here
 */
@Override
public void map(ImmutableBytesWritable key, Result value,
    OutputCollector<NullWritable, NullWritable> output, Reporter reporter) throws IOException {
  // Hoist the invariant group prefix; use 1L (uppercase) — lowercase 'l' is
  // easily misread as the digit 1.
  final String prefix = TestTableInputFormat.class.getName();
  for (Cell cell : value.listCells()) {
    reporter.getCounter(prefix + ":row",
        Bytes.toString(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()))
        .increment(1L);
    reporter.getCounter(prefix + ":family",
        Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()))
        .increment(1L);
    reporter.getCounter(prefix + ":value",
        Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()))
        .increment(1L);
  }
}
// NOTE(review): fragment — the enclosing method is not visible in this view.
// The returned Counter is discarded, so this call presumably only ensures the
// MAP_INPUT_RECORDS counter exists for the task — TODO confirm against the
// enclosing method; if the value is truly unused, consider removing the call.
reporter.getCounter("org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS"); TetherData data = new TetherData();
// NOTE(review): fragment — the enclosing method and the closing brace of the
// if-block are outside this view. This reads the current MAX_COLLISIONS value
// and, when numTuples exceeds it, adds the difference — i.e. a max-style
// update expressed through increments, since the old counter API used here
// exposes no direct setter. Not atomic: concurrent updaters could overshoot
// — presumably single-threaded per task; verify.
long numCollisions = reporter.getCounter(CollisionCounter.MAX_COLLISIONS).getCounter(); if(numTuples > numCollisions) { reporter.incrCounter(CollisionCounter.MAX_COLLISIONS, numTuples - numCollisions);
@Before @SuppressWarnings("unchecked") // mocked generics public void setup() { LOG.info(">>>> " + name.getMethodName()); job = new JobConf(); job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false); jobWithRetry = new JobConf(); jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true); id = TaskAttemptID.forName("attempt_0_1_r_1_1"); ss = mock(ShuffleSchedulerImpl.class); mm = mock(MergeManagerImpl.class); r = mock(Reporter.class); metrics = mock(ShuffleClientMetrics.class); except = mock(ExceptionReporter.class); key = JobTokenSecretManager.createSecretKey(new byte[]{0,0,0,0}); connection = mock(HttpURLConnection.class); allErrs = mock(Counters.Counter.class); when(r.getCounter(anyString(), anyString())).thenReturn(allErrs); ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1); maps.add(map1ID); maps.add(map2ID); when(ss.getMapsForHost(host)).thenReturn(maps); }
/** Delegates enum-keyed counter lookup to the wrapped reporter. */
@Override
public Counters.Counter getCounter(Enum<?> counterKey) {
    return wrappedReporter.getCounter(counterKey);
}
/** Looks up the counter for the given enum key on the wrapped reporter. */
@Override
public Counters.Counter getCounter(Enum<?> key) {
    return wrappedReporter.getCounter(key);
}
/**
 * Delegates (group, name) counter lookup to the wrapped reporter.
 * Parameters renamed from the meaningless {@code s}/{@code s1}.
 */
@Override
public Counters.Counter getCounter(String group, String name) {
    return wrappedReporter.getCounter(group, name);
}
/**
 * Looks up the counter identified by {@code group} and {@code name} on the
 * wrapped reporter. Parameters renamed from the meaningless {@code s}/{@code s1}.
 */
@Override
public Counters.Counter getCounter(String group, String name) {
    return wrappedReporter.getCounter(group, name);
}
/** Forwards enum-keyed counter lookup to the underlying reporter. */
@Override
public Counter getCounter(Enum<?> counterKey) {
    return reporter.getCounter(counterKey);
}
/**
 * Tests {@code Reporter.NULL}, the no-op reporter: counter lookups return
 * null, {@code getInputSplit()} is unsupported, and progress is 0.
 */
@Test (timeout=5000)
public void testReporter(){
  Reporter nullReporter=Reporter.NULL;
  // The NULL reporter has no counters — both lookup flavors return null.
  assertNull(nullReporter.getCounter(null));
  assertNull(nullReporter.getCounter("group", "name"));
  // getInputSplit() is expected to throw UnsupportedOperationException for the
  // NULL reporter. NOTE(review): there is no fail() after the call, so the test
  // would also pass if no exception were thrown and null were returned — the
  // original comment ("getInputSplit method removed") did not match the code.
  try{
    assertNull(nullReporter.getInputSplit());
  }catch(UnsupportedOperationException e){
    assertEquals( "NULL reporter has no input",e.getMessage());
  }
  assertEquals(0,nullReporter.getProgress(),0.01);
}
}
/**
 * Resolves the counter for the given group/name through the reporter and
 * remembers it under {@code id} for later lookups.
 *
 * @throws IOException declared for API compatibility
 */
public void registerCounter(int id, String group, String name) throws IOException {
    registeredCounters.put(id, reporter.getCounter(group, name));
}
private static void oldApiCounter(Reporter reporter, Enum<?> counter, long value) { try { org.apache.hadoop.mapred.Counters.Counter c = reporter.getCounter(counter); if (c != null) { c.increment(value); } } catch (Exception ex) { // counter unavailable } }
/** Returns the current value of the named counter in the given group. */
@Override
public long getCounterValue( String counterGroup, String counterName ) {
    return getReporter().getCounter( counterGroup, counterName ).getValue();
}
/** Reads the value of the counter identified by group and name. */
@Override
public long getCounterValue( String groupName, String name ) {
    return getReporter().getCounter( groupName, name ).getValue();
}
/**
 * Returns the current value of the counter identified by the given enum.
 * NOTE(review): the parameter uses the raw {@code Enum} type; it should likely
 * be {@code Enum<?>}, but since this is an {@code @Override} the fix must be
 * made in the declaring supertype first — confirm its signature before changing.
 */
@Override public long getCounterValue( Enum counter ) { return getReporter().getCounter( counter ).getValue(); }