/**
 * Returns the {@link BasicMapReduceTaskContext} for the given task.
 */
public final <K, V> BasicMapReduceTaskContext<K, V> get(TaskAttemptContext taskAttemptContext) {
  // Delegate to the key-based lookup; the cache key wraps the task attempt.
  ContextCacheKey cacheKey = new ContextCacheKey(taskAttemptContext);
  return get(cacheKey);
}
taskContext = taskContextProvider.get(taskAttemptContext); this.outputs = Outputs.transform(contextConfig.getOutputs(), taskContext);
/**
 * Returns the {@link BasicMapReduceTaskContext} for the given configuration. Since
 * TaskAttemptContext is not provided, the returned MapReduceTaskContext will not have
 * Metrics available.
 */
public final <K, V> BasicMapReduceTaskContext<K, V> get(Configuration configuration) {
  // No TaskAttemptContext for this lookup, hence the null key component.
  ContextCacheKey cacheKey = new ContextCacheKey(null, configuration);
  return get(cacheKey);
}
taskContext = taskContextProvider.get(taskAttemptContext); this.outputs = Outputs.transform(contextConfig.getOutputs(), taskContext);
/**
 * Returns the {@link BasicMapReduceTaskContext} for the given task.
 */
public final <K, V> BasicMapReduceTaskContext<K, V> get(TaskAttemptContext taskAttemptContext) {
  // Delegate to the key-based lookup; the cache key wraps the task attempt.
  ContextCacheKey cacheKey = new ContextCacheKey(taskAttemptContext);
  return get(cacheKey);
}
/**
 * Returns the {@link BasicMapReduceTaskContext} for the given configuration. Since
 * TaskAttemptContext is not provided, the returned MapReduceTaskContext will not have
 * Metrics available.
 */
public final <K, V> BasicMapReduceTaskContext<K, V> get(Configuration configuration) {
  // No TaskAttemptContext for this lookup, hence the null key component.
  ContextCacheKey cacheKey = new ContextCacheKey(null, configuration);
  return get(cacheKey);
}
/**
 * Resolves the {@link BasicMapReduceTaskContext} for the given task attempt by going through
 * the {@link MapReduceClassLoader} stored in the job configuration.
 */
private BasicMapReduceTaskContext getMapReduceTaskContext(TaskAttemptContext context) {
  Configuration conf = context.getConfiguration();
  MapReduceClassLoader loader = MapReduceClassLoader.getFromConfiguration(conf);
  return loader.getTaskContextProvider().get(context);
}
}
/**
 * Creates the {@link BatchReadable} for the named dataset, using the task context obtained
 * from the {@link MapReduceClassLoader} carried in the job configuration.
 */
@Override
protected BatchReadable<KEY, VALUE> createBatchReadable(TaskAttemptContext context,
                                                        @Nullable String datasetNamespace,
                                                        String datasetName,
                                                        Map<String, String> datasetArgs) {
  MapReduceClassLoader loader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  BasicMapReduceTaskContext<?, ?> mrTaskContext = loader.getTaskContextProvider().get(context);
  return mrTaskContext.getBatchReadable(datasetNamespace, datasetName, datasetArgs);
}
}
/**
 * Creates the {@link BatchReadable} for the named dataset, using the task context obtained
 * from the {@link MapReduceClassLoader} carried in the job configuration.
 */
@Override
protected BatchReadable<KEY, VALUE> createBatchReadable(TaskAttemptContext context,
                                                        @Nullable String datasetNamespace,
                                                        String datasetName,
                                                        Map<String, String> datasetArgs) {
  MapReduceClassLoader loader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  BasicMapReduceTaskContext<?, ?> mrTaskContext = loader.getTaskContextProvider().get(context);
  return mrTaskContext.getBatchReadable(datasetNamespace, datasetName, datasetArgs);
}
}
/**
 * Creates the {@link CloseableBatchWritable} for the named dataset, using the task context
 * obtained from the {@link MapReduceClassLoader} carried in the job configuration.
 */
@Override
protected CloseableBatchWritable<KEY, VALUE> createBatchWritable(TaskAttemptContext context,
                                                                 String namespace,
                                                                 String datasetName,
                                                                 Map<String, String> datasetArgs) {
  MapReduceClassLoader loader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  BasicMapReduceTaskContext<?, ?> mrTaskContext = loader.getTaskContextProvider().get(context);
  return mrTaskContext.getBatchWritable(namespace, datasetName, datasetArgs);
}
}
/**
 * Creates the {@link CloseableBatchWritable} for the named dataset, using the task context
 * obtained from the {@link MapReduceClassLoader} carried in the job configuration.
 */
@Override
protected CloseableBatchWritable<KEY, VALUE> createBatchWritable(TaskAttemptContext context,
                                                                 String namespace,
                                                                 String datasetName,
                                                                 Map<String, String> datasetArgs) {
  MapReduceClassLoader loader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  BasicMapReduceTaskContext<?, ?> mrTaskContext = loader.getTaskContextProvider().get(context);
  return mrTaskContext.getBatchWritable(namespace, datasetName, datasetArgs);
}
}
/**
 * Wires up the dynamic-partitioning writer state from the task attempt's configuration:
 * instantiates the configured {@link DynamicPartitioner}, resolves the task context and the
 * partitioned output dataset, and determines the base output file name.
 *
 * NOTE(review): initialization order is significant — the task context is needed before the
 * run-id suffix and dataset lookup, and the partitioner is initialized only after the output
 * dataset and partitioning have been resolved.
 */
DynamicPartitionerWriterWrapper(TaskAttemptContext job) {
  this.job = job;
  Configuration configuration = job.getConfiguration();
  // The concrete DynamicPartitioner implementation is named in the job configuration.
  Class<? extends DynamicPartitioner> partitionerClass = configuration
    .getClass(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_CLASS_NAME, null, DynamicPartitioner.class);
  this.dynamicPartitioner = new InstantiatorFactory(false).get(TypeToken.of(partitionerClass)).create();
  // NOTE(review): assumes DYNAMIC_PARTITIONER_WRITE_OPTION is always set by the job submitter;
  // valueOf(null) would throw here — confirm against the planner/launcher side.
  this.partitionWriteOption = DynamicPartitioner.PartitionWriteOption.valueOf(
    configuration.get(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_WRITE_OPTION));
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration);
  this.taskContext = classLoader.getTaskContextProvider().get(job);
  // name the output file 'part-<RunId>-m-00000' instead of 'part-m-00000'
  String outputName = DynamicPartitioningOutputFormat.getOutputName(job);
  if (partitionWriteOption == DynamicPartitioner.PartitionWriteOption.CREATE_OR_APPEND) {
    // Suffix with the run id so appended runs do not collide on output file names.
    outputName = outputName + "-" + taskContext.getProgramRunId().getRun();
  }
  this.outputName = outputName;
  String outputDatasetName = configuration.get(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_DATASET);
  this.outputDataset = taskContext.getDataset(outputDatasetName);
  this.partitioning = outputDataset.getPartitioning();
  this.dynamicPartitioner.initialize(taskContext);
  // Record the delegate FileOutputFormat class name; presumably used later to create
  // the actual per-partition record writers — not visible from this block.
  this.fileOutputFormatName = job.getConfiguration()
    .getClass(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_FORMAT_CLASS_NAME, null, FileOutputFormat.class)
    .getName();
}
/**
 * Wires up the dynamic-partitioning writer state from the task attempt's configuration:
 * instantiates the configured {@link DynamicPartitioner}, resolves the task context and the
 * partitioned output dataset, and determines the base output file name.
 *
 * NOTE(review): initialization order is significant — the task context is needed before the
 * run-id suffix and dataset lookup, and the partitioner is initialized only after the output
 * dataset and partitioning have been resolved.
 */
DynamicPartitionerWriterWrapper(TaskAttemptContext job) {
  this.job = job;
  Configuration configuration = job.getConfiguration();
  // The concrete DynamicPartitioner implementation is named in the job configuration.
  Class<? extends DynamicPartitioner> partitionerClass = configuration
    .getClass(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_CLASS_NAME, null, DynamicPartitioner.class);
  this.dynamicPartitioner = new InstantiatorFactory(false).get(TypeToken.of(partitionerClass)).create();
  // NOTE(review): assumes DYNAMIC_PARTITIONER_WRITE_OPTION is always set by the job submitter;
  // valueOf(null) would throw here — confirm against the planner/launcher side.
  this.partitionWriteOption = DynamicPartitioner.PartitionWriteOption.valueOf(
    configuration.get(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_WRITE_OPTION));
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration);
  this.taskContext = classLoader.getTaskContextProvider().get(job);
  // name the output file 'part-<RunId>-m-00000' instead of 'part-m-00000'
  String outputName = DynamicPartitioningOutputFormat.getOutputName(job);
  if (partitionWriteOption == DynamicPartitioner.PartitionWriteOption.CREATE_OR_APPEND) {
    // Suffix with the run id so appended runs do not collide on output file names.
    outputName = outputName + "-" + taskContext.getProgramRunId().getRun();
  }
  this.outputName = outputName;
  String outputDatasetName = configuration.get(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_DATASET);
  this.outputDataset = taskContext.getDataset(outputDatasetName);
  this.partitioning = outputDataset.getPartitioning();
  this.dynamicPartitioner.initialize(taskContext);
  // Record the delegate FileOutputFormat class name; presumably used later to create
  // the actual per-partition record writers — not visible from this block.
  this.fileOutputFormatName = job.getConfiguration()
    .getClass(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_FORMAT_CLASS_NAME, null, FileOutputFormat.class)
    .getName();
}
/**
 * Instantiates the delegate class whose name is stored under the given configuration
 * attribute. If the delegate implements {@link ProgramLifecycle}, it is initialized with the
 * MapReduce task context (with the program classloader installed as the thread context
 * classloader) and registered so its destroy method is invoked when the
 * {@link BasicMapReduceTaskContext} is closed.
 *
 * @param conf job configuration to read the delegate class name from
 * @param attrClass configuration key whose value names the delegate class
 * @return the delegate instance, initialized if it is a {@link ProgramLifecycle}
 */
static <T> T createDelegate(Configuration conf, String attrClass) {
  String delegateClassName = conf.get(attrClass);
  Class<?> delegateClass = conf.getClassByNameOrNull(delegateClassName);
  Preconditions.checkNotNull(delegateClass, "Class could not be found: %s", delegateClassName);
  // Unchecked cast: callers are responsible for requesting a compatible T.
  T delegate = (T) ReflectionUtils.newInstance(delegateClass, conf);
  if (!(delegate instanceof ProgramLifecycle)) {
    // Non-lifecycle delegates need no further setup.
    return delegate;
  }
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(conf);
  BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(conf);
  ClassLoader programClassLoader = classLoader.getProgramClassLoader();
  // Initialize under the program classloader; the finally block restores the previous one.
  ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(programClassLoader);
  try {
    ProgramLifecycle programLifecycle = (ProgramLifecycle) delegate;
    programLifecycle.initialize(new MapReduceLifecycleContext(basicMapReduceContext));
    // register it so that its destroy method can get called when the BasicMapReduceTaskContext is closed
    basicMapReduceContext.registerProgramLifecycle(programLifecycle);
    return delegate;
  } catch (Exception e) {
    LOG.error("Failed to initialize delegate with {}", basicMapReduceContext, e);
    // Rethrows RuntimeExceptions as-is; wraps checked exceptions (Guava Throwables.propagate).
    throw Throwables.propagate(e);
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }
}
/**
 * Instantiates the delegate class whose name is stored under the given configuration
 * attribute. If the delegate implements {@link ProgramLifecycle}, it is initialized with the
 * MapReduce task context (with the program classloader installed as the thread context
 * classloader) and registered so its destroy method is invoked when the
 * {@link BasicMapReduceTaskContext} is closed.
 *
 * @param conf job configuration to read the delegate class name from
 * @param attrClass configuration key whose value names the delegate class
 * @return the delegate instance, initialized if it is a {@link ProgramLifecycle}
 */
static <T> T createDelegate(Configuration conf, String attrClass) {
  String delegateClassName = conf.get(attrClass);
  Class<?> delegateClass = conf.getClassByNameOrNull(delegateClassName);
  Preconditions.checkNotNull(delegateClass, "Class could not be found: %s", delegateClassName);
  // Unchecked cast: callers are responsible for requesting a compatible T.
  T delegate = (T) ReflectionUtils.newInstance(delegateClass, conf);
  if (!(delegate instanceof ProgramLifecycle)) {
    // Non-lifecycle delegates need no further setup.
    return delegate;
  }
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(conf);
  BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(conf);
  ClassLoader programClassLoader = classLoader.getProgramClassLoader();
  // Initialize under the program classloader; the finally block restores the previous one.
  ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(programClassLoader);
  try {
    ProgramLifecycle programLifecycle = (ProgramLifecycle) delegate;
    programLifecycle.initialize(new MapReduceLifecycleContext(basicMapReduceContext));
    // register it so that its destroy method can get called when the BasicMapReduceTaskContext is closed
    basicMapReduceContext.registerProgramLifecycle(programLifecycle);
    return delegate;
  } catch (Exception e) {
    LOG.error("Failed to initialize delegate with {}", basicMapReduceContext, e);
    // Rethrows RuntimeExceptions as-is; wraps checked exceptions (Guava Throwables.propagate).
    throw Throwables.propagate(e);
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }
}
Configuration configuration = context.getConfiguration(); MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration); BasicMapReduceTaskContext taskContext = classLoader.getTaskContextProvider().get(this.taskContext);
ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader); BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context); String program = basicMapReduceContext.getProgramName();
ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader); BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context); String program = basicMapReduceContext.getProgramName();
ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader); BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context); long metricsReportInterval = basicMapReduceContext.getMetricsReportIntervalMillis(); final ReduceTaskMetricsWriter reduceTaskMetricsWriter = new ReduceTaskMetricsWriter(
ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader); BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context); long metricsReportInterval = basicMapReduceContext.getMetricsReportIntervalMillis(); final ReduceTaskMetricsWriter reduceTaskMetricsWriter = new ReduceTaskMetricsWriter(