Refine search
/**
 * Adds {@code incr} to the named Hadoop counter in the supplied task context.
 *
 * @param context the task context that owns the counter
 * @param group   counter group name
 * @param name    counter name within the group
 * @param incr    amount to add
 */
@Override
public void incrementContextCounter(TaskInputOutputContext context, String group, String name,
    long incr) {
  context.getCounter(group, name).increment(incr);
}
@SuppressWarnings("deprecation") private TaskAttemptContext getContext(String nameOutput) throws IOException { TaskAttemptContext taskContext = taskContexts.get(nameOutput); if (taskContext != null) { return taskContext; } // The following trick leverages the instantiation of a record writer via // the job thus supporting arbitrary output formats. Job job = new Job(context.getConfiguration()); job.setOutputFormatClass(getNamedOutputFormatClass(context, nameOutput)); Schema keySchema=null,valSchema=null; if (job.getConfiguration().get(MO_PREFIX + nameOutput + ".keyschema",null) != null) keySchema = Schema.parse(job.getConfiguration().get( MO_PREFIX + nameOutput + ".keyschema")); if (job.getConfiguration().get(MO_PREFIX + nameOutput + ".valueschema", null) != null) valSchema = Schema.parse(job.getConfiguration().get( MO_PREFIX + nameOutput + ".valueschema")); setSchema(job, keySchema, valSchema); taskContext = createTaskAttemptContext( job.getConfiguration(), context.getTaskAttemptID()); taskContexts.put(nameOutput, taskContext); return taskContext; }
/**
 * Write the output key and value using the OutputFormat defined by the
 * alias.
 *
 * @param alias the name given to the OutputFormat configuration
 * @param key the output key to be written
 * @param value the output value to be written
 * @param context the Mapper or Reducer Context
 * @throws IOException
 * @throws InterruptedException
 */
public static <K, V> void write(String alias, K key, V value, TaskInputOutputContext context)
    throws IOException, InterruptedException {
  // Wrap key/value in a single KeyValue and route it under the alias.
  context.write(new Text(alias), new KeyValue<K, V>(key, value));
}
/**
 * Reads the current value of the named counter from the task context.
 *
 * @param context the task context that owns the counter
 * @param group   counter group name
 * @param name    counter name within the group
 * @return the counter's current value
 */
@Override
public long getContextCounter(TaskInputOutputContext context, String group, String name) {
  return context.getCounter(group, name).getValue();
}
final StateHolder holder = new StateHolder(); Mockito.when(context.getCounter(Mockito.any(Enum.class))).then(new Answer<Counter>() { @Override public Counter answer(InvocationOnMock invocation) throws Throwable { Mockito.when(context.getCounter(Mockito.anyString(), Mockito.anyString())).then(new Answer<Counter>() { @Override public Counter answer(InvocationOnMock invocation) throws Throwable { Mockito.when(context.getConfiguration()).thenReturn(config); Mockito.when(context.getTaskAttemptID()).thenReturn(new TaskAttemptID()); Mockito.when(context.getStatus()).then(new Answer<String>() { @Override public String answer(InvocationOnMock invocation) throws Throwable { return null; }).when(context).setStatus(Mockito.anyString());
/**
 * Publishes {@code value} to the Hadoop counter whose group is the metric
 * context's name.
 *
 * @param context metric context supplying the counter group
 * @param name    counter name within that group
 * @param value   absolute value to set on the counter
 */
@Override
protected void reportValue(MetricContext context, String name, long value) {
  String group = context.getName();
  this.hadoopContext.getCounter(group, name).setValue(value);
}
/**
 * Returns the active configuration: the task context's configuration when a
 * context is present, otherwise the test configuration (which may be null).
 */
protected Configuration getConfiguration() {
  if (context != null) {
    return context.getConfiguration();
  }
  // testConf already yields null when unset, covering the original's final branch.
  return testConf;
}
/** Forwards the status string to the wrapped context; no-op without one. */
@Override
public void setStatus(String status) {
  if (context == null) {
    return;
  }
  context.setStatus(status);
}
/** Reports progress via the task context when present, else the attempt context. */
@Override
public void progress() {
  if (context == null) {
    taskAttemptContext.progress();
  } else {
    context.progress();
  }
}
}
/** Returns the task attempt id from the context, or {@code null} when there is none. */
protected TaskAttemptID getTaskAttemptID() {
  return (context == null) ? null : context.getTaskAttemptID();
}
/** Returns the counter for {@code name}, or {@code null} when no context is set. */
@Override
public Counters.Counter getCounter(Enum<?> name) {
  if (context == null) {
    return null;
  }
  return (Counters.Counter) context.getCounter(name);
}
/** Compute sigma, reporting status before and after and writing the timed result. */
static void compute(Summation sigma,
    TaskInputOutputContext<?, ?, NullWritable, TaskResult> context)
    throws IOException, InterruptedException {
  final String startMsg = "sigma=" + sigma;
  LOG.info(startMsg);
  context.setStatus(startMsg);

  // Time the computation itself, not the logging around it.
  final long start = Time.monotonicNow();
  sigma.compute();
  final long duration = Time.monotonicNow() - start;
  final TaskResult result = new TaskResult(sigma, duration);

  final String doneMsg = "result=" + result;
  LOG.info(doneMsg);
  context.setStatus(doneMsg);
  context.write(NullWritable.get(), result);
}
/**
 * <p>
 * This method adds header to the first log file for each of the tasks.
 * </p>
 *
 * @param context
 *          Context
 * @param className
 *          calling class
 */
@SuppressWarnings(RAW_TYPES)
public static void addLogHeader(TaskInputOutputContext context, String className) {
  String jobName = context.getJobName();
  getLogMsg(className, jobName, INFO, context.getJobID(), context.getTaskAttemptID());
}
/** {@inheritDoc} */
@Override
public String getStatus() {
  // Pure delegation to the wrapped Hadoop context.
  return mHadoopContext.getStatus();
}
// NOTE(review): fragment of an enclosing method not visible here — presumably
// `context` is a Hadoop task context and `allFields` maps field names to
// normalized values; confirm against the surrounding code.
// Records the job name, and the JobTracker identifier portion of the job id.
allFields.put(JOB_NAME_FIELD, new NormalizedFieldAndValue(JOB_NAME_FIELD, context.getJobName()));
allFields.put(JOB_ID_FIELD, new NormalizedFieldAndValue(JOB_ID_FIELD, context.getJobID().getJtIdentifier()));
/** Delegates to the wrapped base context for the job id. */
@Override
public JobID getJobID() {
  return base.getJobID();
}
/**
 * Looks up the counter identified by {@code group}/{@code name} in the task
 * context and returns its current value.
 *
 * @param context the task context that owns the counter
 * @param group   counter group name
 * @param name    counter name within the group
 * @return the counter's current value
 */
@Override
public long getContextCounter(TaskInputOutputContext context, String group, String name) {
  return context.getCounter(group, name).getValue();
}
/**
 * Creates and initializes multiple outputs support,
 * it should be instantiated in the Mapper/Reducer setup method.
 *
 * @param context the TaskInputOutputContext object
 */
public CrunchOutputs(TaskInputOutputContext<?, ?, K, V> context) {
  // Delegate configuration handling to the Configuration-based constructor,
  // then keep the context for later writes.
  this(context.getConfiguration());
  this.baseContext = context;
}
/** Sets the task status on the context; silently ignored when no context exists. */
protected void setStatus(String status) {
  if (context == null) {
    return;
  }
  context.setStatus(status);
}
/** Reports progress to the context when one is present; otherwise a no-op. */
public void progress() {
  if (context != null) {
    context.progress();
  }
}