@Override protected void handleFailure(Counters counters) throws IOException { try (Connection conn = ConnectionFactory.createConnection(job.getConfiguration())) { TableName tableName = TableName.valueOf(COMMON_TABLE_NAME); CounterGroup g = counters.getGroup("undef"); Iterator<Counter> it = g.iterator(); while (it.hasNext()) { String keyString = it.next().getName(); byte[] key = Bytes.toBytes(keyString); HRegionLocation loc = conn.getRegionLocator(tableName).getRegionLocation(key, true); LOG.error("undefined row " + keyString + ", " + loc); } g = counters.getGroup("unref"); it = g.iterator(); while (it.hasNext()) { String keyString = it.next().getName(); byte[] key = Bytes.toBytes(keyString); HRegionLocation loc = conn.getRegionLocator(tableName).getRegionLocation(key, true); LOG.error("unreferred row " + keyString + ", " + loc); } } } }
protected void handleFailure(Counters counters) throws IOException { Configuration conf = job.getConfiguration(); TableName tableName = getTableName(conf); try (Connection conn = ConnectionFactory.createConnection(conf)) { try (RegionLocator rl = conn.getRegionLocator(tableName)) { CounterGroup g = counters.getGroup("undef"); Iterator<Counter> it = g.iterator(); while (it.hasNext()) { String keyString = it.next().getName(); byte[] key = Bytes.toBytes(keyString); HRegionLocation loc = rl.getRegionLocation(key, true); LOG.error("undefined row " + keyString + ", " + loc); } g = counters.getGroup("unref"); it = g.iterator(); while (it.hasNext()) { String keyString = it.next().getName(); byte[] key = Bytes.toBytes(keyString); HRegionLocation loc = rl.getRegionLocation(key, true); LOG.error("unreferred row " + keyString + ", " + loc); } } } } }
/**
 * Renders every remaining entry of {@code source} as a heading followed by one
 * "group<TAB>counter=value" line per counter.
 *
 * <p>NOTE(review): this drains the {@code source} iterator, so a second call
 * returns an empty string — confirm callers invoke it at most once.
 *
 * @return the formatted counter dump
 */
public String toString() {
  StringBuilder out = new StringBuilder();
  while (source.hasNext()) {
    Entry<String, Counters> entry = source.next();
    out.append("\n").append(entry.getKey()).append("\n----------------------\n");
    Counters counters = entry.getValue();
    for (String groupName : counters.getGroupNames()) {
      // CounterGroup is Iterable<Counter>, so use an enhanced for loop.
      for (Counter counter : counters.getGroup(groupName)) {
        out.append(groupName)
            .append("\t")
            .append(counter.getDisplayName())
            .append("=")
            .append(counter.getValue())
            .append("\n");
      }
    }
  }
  return out.toString();
}
// NOTE(review): the hasNext() result is discarded, so it is not acting as a
// guard — iter.next() will throw NoSuchElementException if the group is empty.
// Confirm the group is guaranteed non-empty here, or use the boolean.
Iterator<Counter> iter = group.iterator(); iter.hasNext(); String path = iter.next().getName();
/**
 * Looks up an ingest label override recorded in the job counters. The override
 * value is stored as the name of the first counter in the
 * METRICS_LABEL_OVERRIDE group.
 *
 * @param ingestJobCounters counters produced by the ingest job
 * @return the override label, or null when no override was recorded
 */
private static String checkForIngestLabelOverride(Counters ingestJobCounters) {
  CounterGroup overrideGroup =
      ingestJobCounters.getGroup(IngestProcess.METRICS_LABEL_OVERRIDE.name());
  // Guard clause: an empty group means no override was set.
  if (overrideGroup.size() == 0) {
    return null;
  }
  return overrideGroup.iterator().next().getName();
}
// (fragment) Closes the parenthesized section of the audit line, then walks the
// group's counters; counterCount presumably tallies how many counters get
// appended — TODO confirm against the surrounding (unseen) code.
auditBuf.append(") "); Iterator<Counter> counterIt = group.iterator(); int counterCount = 0; while (counterIt.hasNext()) {
/** * Wait for a counter to appear in a group and then return the name of that * counter. If job finishes before counter appears, return null. * * @param job Job * @param group Name of the counter group * @return Name of the counter inside of the group, or null if job finishes * before counter appears */ public static String waitAndGetCounterNameFromGroup(Job job, String group) { try { while (job.getCounters().getGroup(group).size() == 0) { if (job.isComplete()) { return null; } Thread.sleep(SLEEP_MSECS); } return job.getCounters().getGroup(group).iterator().next().getName(); } catch (IOException | InterruptedException e) { throw new IllegalStateException( "waitAndGetCounterNameFromGroup: Exception occurred", e); } } }
// NOTE(review): the return value of totalGroup.iterator() is discarded; the
// loop iterates ctrItr, which must be assigned elsewhere. This call looks like
// dead code or a missed "ctrItr = totalGroup.iterator()" — TODO confirm.
totalGroup.iterator(); while(ctrItr.hasNext()) { org.apache.hadoop.mapreduce.Counter counter = ctrItr.next();
// NOTE(review): the return value of totalGroup.iterator() is discarded; the
// loop iterates ctrItr, which must be assigned elsewhere. This call looks like
// dead code or a missed "ctrItr = totalGroup.iterator()" — TODO confirm.
totalGroup.iterator(); while(ctrItr.hasNext()) { org.apache.hadoop.mapreduce.Counter counter = ctrItr.next();
// NOTE(review): the return value of totalGroup.iterator() is discarded; the
// loop iterates ctrItr, which must be assigned elsewhere. This call looks like
// dead code or a missed "ctrItr = totalGroup.iterator()" — TODO confirm.
totalGroup.iterator(); while(ctrItr.hasNext()) { org.apache.hadoop.mapreduce.Counter counter = ctrItr.next();
// NOTE(review): the return value of totalGroup.iterator() is discarded; the
// loop iterates ctrItr, which must be assigned elsewhere. This call looks like
// dead code or a missed "ctrItr = totalGroup.iterator()" — TODO confirm.
totalGroup.iterator(); while(ctrItr.hasNext()) { org.apache.hadoop.mapreduce.Counter counter = ctrItr.next();
// NOTE(review): the return value of totalGroup.iterator() is discarded; the
// loop iterates ctrItr, which must be assigned elsewhere. This call looks like
// dead code or a missed "ctrItr = totalGroup.iterator()" — TODO confirm.
totalGroup.iterator(); while(ctrItr.hasNext()) { org.apache.hadoop.mapreduce.Counter counter = ctrItr.next();
/**
 * Logs every counter group and counter in {@code counters}, then asserts that
 * exactly one counter exists in total.
 */
private void validateCounters(org.apache.hadoop.mapreduce.Counters counters) {
  // Counters and CounterGroup are both Iterable, so enhanced for loops
  // replace the explicit iterators.
  for (org.apache.hadoop.mapreduce.CounterGroup group : counters) {
    LOG.info("Group " + group.getDisplayName());
    for (org.apache.hadoop.mapreduce.Counter counter : group) {
      LOG.info("Counter is " + counter.getDisplayName());
    }
  }
  Assert.assertEquals(1, counters.countCounters());
}
// (fragment) The ingest job's output directory is encoded as the name of the
// first counter in the OUTPUT_DIRECTORY group. NOTE(review): next() throws
// NoSuchElementException if the group is empty — presumably guaranteed
// non-empty upstream; TODO confirm.
outputDirectory = counters.getGroup(IngestProcess.OUTPUT_DIRECTORY.name()).iterator().next().getName(); Text outDir = new Text(outputDirectory); colQ.append(nulArray, 0, nulArray.length);
/**
 * Collects the (group name, counter name) pair of every counter, reading from
 * the old mapred API when available and falling back to the mapreduce API.
 *
 * @return the names of all counters as (group, counter) pairs
 */
public Collection<Pair<String, String>> findCounterValues() {
  // Diamond operator: the file already uses Java 7+ features, so the
  // explicit type arguments on the constructor are redundant.
  final Collection<Pair<String, String>> counters = new LinkedList<>();
  final Iterable<String> groupNames = getGroupNames();
  if (mapred != null) {
    for (String groupName : groupNames) {
      final Group group = mapred.getGroup(groupName);
      collectCounters(counters, groupName, group.iterator());
    }
  } else {
    for (String groupName : groupNames) {
      final CounterGroup group = mapreduce.getGroup(groupName);
      collectCounters(counters, groupName, group.iterator());
    }
  }
  return counters;
}
@Override protected void handleFailure(Counters counters) throws IOException { Configuration conf = job.getConfiguration(); ClusterConnection conn = (ClusterConnection) ConnectionFactory.createConnection(conf); TableName tableName = TableName.valueOf(COMMON_TABLE_NAME); CounterGroup g = counters.getGroup("undef"); Iterator<Counter> it = g.iterator(); while (it.hasNext()) { String keyString = it.next().getName(); byte[] key = Bytes.toBytes(keyString); HRegionLocation loc = conn.relocateRegion(tableName, key); LOG.error("undefined row " + keyString + ", " + loc); } g = counters.getGroup("unref"); it = g.iterator(); while (it.hasNext()) { String keyString = it.next().getName(); byte[] key = Bytes.toBytes(keyString); HRegionLocation loc = conn.relocateRegion(tableName, key); LOG.error("unreferred row " + keyString + ", " + loc); } } }
/**
 * Copies the job's status fields and all counter values into the step's
 * execution context so they appear in Spring Batch job metadata.
 * Counter collection is deliberately best-effort: any failure is swallowed so
 * that stats reporting can never fail the step itself.
 *
 * @param job Hadoop job to read stats from
 * @param stepExecution step whose execution context receives the stats;
 *     the method is a no-op when this is null
 */
private static void saveJobStats(Job job, StepExecution stepExecution) {
  if (stepExecution == null) {
    return;
  }
  ExecutionContext executionContext = stepExecution.getExecutionContext();
  String statusPrefix = "Job Status::";
  executionContext.put(statusPrefix + "ID", JobUtils.getJobId(job).toString());
  executionContext.put(statusPrefix + "Name", job.getJobName());
  executionContext.put(statusPrefix + "Tracking URL", job.getTrackingURL());
  executionContext.put(statusPrefix + "State", JobUtils.getStatus(job).toString());
  try {
    // Iterate the Counters object directly (it is Iterable<CounterGroup>):
    // the original re-called job.getCounters() — potentially an RPC — for
    // every single group.
    for (CounterGroup group : job.getCounters()) {
      Iterator<Counter> ci = group.iterator();
      while (ci.hasNext()) {
        Counter c = ci.next();
        executionContext.put(
            group.getDisplayName().trim() + "::" + c.getDisplayName().trim(), c.getValue());
      }
    }
  } catch (Exception ignored) {
    // Best-effort by design: stats must never fail the step.
  }
}
protected void handleFailure(Counters counters) throws IOException { Configuration conf = job.getConfiguration(); TableName tableName = getTableName(conf); try (Connection conn = ConnectionFactory.createConnection(conf)) { try (RegionLocator rl = conn.getRegionLocator(tableName)) { CounterGroup g = counters.getGroup("undef"); Iterator<Counter> it = g.iterator(); while (it.hasNext()) { String keyString = it.next().getName(); byte[] key = Bytes.toBytes(keyString); HRegionLocation loc = rl.getRegionLocation(key, true); LOG.error("undefined row " + keyString + ", " + loc); } g = counters.getGroup("unref"); it = g.iterator(); while (it.hasNext()) { String keyString = it.next().getName(); byte[] key = Bytes.toBytes(keyString); HRegionLocation loc = rl.getRegionLocation(key, true); LOG.error("unreferred row " + keyString + ", " + loc); } } } } }
// (fragment, looks JSP-generated) Closes the HTML table row opened above, then
// iterates the group's counters — presumably to emit one row per counter;
// TODO confirm against the surrounding (unseen) markup.
out.write("</b></td>\n </tr>\n"); Iterator<Counter> ctrItr = group.iterator(); while(ctrItr.hasNext()) { Counter counter = ctrItr.next();