/**
 * Constructs a ClassLoader based on the given {@link Parameters} and also uses the given
 * {@link TaskContextProviderFactory} to create {@link MapReduceTaskContextProvider} on demand.
 */
private MapReduceClassLoader(final Parameters parameters,
                             final TaskContextProviderFactory contextProviderFactory) {
  super(null, createDelegates(parameters));
  this.parameters = parameters;
  this.taskContextProviderSupplier = new Supplier<MapReduceTaskContextProvider>() {
    @Override
    public MapReduceTaskContextProvider get() {
      return contextProviderFactory.create(parameters.getCConf(), parameters.getHConf(),
                                           MapReduceClassLoader.this);
    }
  };
}
/**
 * Returns the {@link MapReduceTaskContextProvider} associated with this ClassLoader.
 */
public MapReduceTaskContextProvider getTaskContextProvider() {
  // Logging context needs to be set in the main thread.
  LoggingContext loggingContext = createMapReduceLoggingContext();
  LoggingContextAccessor.setLoggingContext(loggingContext);

  synchronized (this) {
    taskContextProvider = Optional.fromNullable(taskContextProvider).or(taskContextProviderSupplier);
  }
  taskContextProvider.startAndWait();
  return taskContextProvider;
}
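// For reference, a minimal, self-contained sketch of the lazy-initialization idiom that
// getTaskContextProvider() uses above, assuming only Guava's Optional and Supplier; the
// CDAP-specific types are elided. The synchronized block guarantees at most one instance is
// ever created, and subsequent calls reuse it.
import com.google.common.base.Optional;
import com.google.common.base.Supplier;

final class LazySingleton<T> {
  private final Supplier<T> supplier;
  private T instance;

  LazySingleton(Supplier<T> supplier) {
    this.supplier = supplier;
  }

  synchronized T get() {
    // Optional.fromNullable(x).or(supplier) returns x if non-null, otherwise supplier.get().
    instance = Optional.fromNullable(instance).or(supplier);
    return instance;
  }
}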
private BasicMapReduceTaskContext getMapReduceTaskContext(TaskAttemptContext context) {
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  return classLoader.getTaskContextProvider().get(context);
}
static <T> T createDelegate(Configuration conf, String attrClass) {
  String delegateClassName = conf.get(attrClass);
  Class<?> delegateClass = conf.getClassByNameOrNull(delegateClassName);
  Preconditions.checkNotNull(delegateClass, "Class could not be found: %s", delegateClassName);
  T delegate = (T) ReflectionUtils.newInstance(delegateClass, conf);
  if (!(delegate instanceof ProgramLifecycle)) {
    return delegate;
  }

  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(conf);
  BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(conf);
  ClassLoader programClassLoader = classLoader.getProgramClassLoader();
  ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(programClassLoader);
  try {
    ProgramLifecycle programLifecycle = (ProgramLifecycle) delegate;
    programLifecycle.initialize(new MapReduceLifecycleContext(basicMapReduceContext));
    // Register it so that its destroy method can get called when the BasicMapReduceTaskContext is closed.
    basicMapReduceContext.registerProgramLifecycle(programLifecycle);
    return delegate;
  } catch (Exception e) {
    LOG.error("Failed to initialize delegate with {}", basicMapReduceContext, e);
    throw Throwables.propagate(e);
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }
}
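// Hedged usage sketch for createDelegate(): the caller stores a delegate's class name under a
// Configuration attribute and later re-creates the instance by name. The attribute key
// "example.delegate.class" and the MyMapper class are hypothetical; the real attribute names
// are defined by the wrapper classes that call createDelegate().
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Mapper;

Configuration conf = new Configuration();
conf.set("example.delegate.class", MyMapper.class.getName());
Mapper<?, ?, ?, ?> delegate = createDelegate(conf, "example.delegate.class");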
MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(key.getConfiguration());
programRef.compareAndSet(null, createProgram(contextConfig, classLoader.getProgramClassLoader()));
program = programRef.get();
// Excerpt: the source cuts to the argument list of a task-context constructor invoked later in
// this method; the arguments are preserved below as they appear.
//   program, options, cConf, taskType, taskId, spec, workflowInfo, discoveryServiceClient,
//   metricsCollectionService, txClient, transaction, programDatasetFramework,
//   classLoader.getPluginInstantiator(), contextConfig.getLocalizedResources(),
//   secureStore, secureStoreManager, authorizationEnforcer, authenticationContext,
//   messagingService, mapReduceClassLoader, metadataReader,
Configuration mapredConf = job.getConfiguration();
MapReduceClassLoader classLoader = new MapReduceClassLoader(injector, cConf, mapredConf,
    context.getProgram().getClassLoader(),
    context.getApplicationSpecification().getPlugins(),
    // ... (remaining constructor arguments truncated in the source)
@SuppressWarnings("unchecked") @Override public void run(Context context) throws IOException, InterruptedException { MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration()); ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader); BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context); String program = basicMapReduceContext.getProgramName(); ClassLoader programClassLoader = classLoader.getProgramClassLoader(); Mapper delegate = createMapperInstance(programClassLoader, getWrappedMapper(context.getConfiguration()), context, program);
@Override
protected BatchReadable<KEY, VALUE> createBatchReadable(TaskAttemptContext context,
                                                        @Nullable String datasetNamespace,
                                                        String datasetName,
                                                        Map<String, String> datasetArgs) {
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  BasicMapReduceTaskContext<?, ?> taskContext = classLoader.getTaskContextProvider().get(context);
  return taskContext.getBatchReadable(datasetNamespace, datasetName, datasetArgs);
}
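// Hedged sketch of consuming the BatchReadable returned above, written as a fragment inside the
// same class so KEY and VALUE are in scope. It assumes the shape of CDAP's batch API
// (getSplits(), createSplitReader(), and SplitReader's initialize/nextKeyValue accessors);
// exact signatures may differ across CDAP versions. The dataset name "events" and the process()
// helper are hypothetical.
BatchReadable<KEY, VALUE> readable = createBatchReadable(context, null, "events", Collections.emptyMap());
for (Split split : readable.getSplits()) {
  SplitReader<KEY, VALUE> reader = readable.createSplitReader(split);
  try {
    reader.initialize(split);
    while (reader.nextKeyValue()) {
      process(reader.getCurrentKey(), reader.getCurrentValue());
    }
  } finally {
    reader.close();
  }
}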
@Override
protected CloseableBatchWritable<KEY, VALUE> createBatchWritable(TaskAttemptContext context,
                                                                 String namespace,
                                                                 String datasetName,
                                                                 Map<String, String> datasetArgs) {
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  BasicMapReduceTaskContext<?, ?> taskContext = classLoader.getTaskContextProvider().get(context);
  return taskContext.getBatchWritable(namespace, datasetName, datasetArgs);
}
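// Hedged sketch of driving the CloseableBatchWritable returned above from a Hadoop RecordWriter.
// Only BatchWritable.write(KEY, VALUE) and Closeable.close() are assumed from the CDAP API; the
// import path for CloseableBatchWritable is an assumption based on CDAP's package layout.
import java.io.IOException;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import co.cask.cdap.api.data.batch.CloseableBatchWritable;

class BatchWritableRecordWriter<KEY, VALUE> extends RecordWriter<KEY, VALUE> {
  private final CloseableBatchWritable<KEY, VALUE> writable;

  BatchWritableRecordWriter(CloseableBatchWritable<KEY, VALUE> writable) {
    this.writable = writable;
  }

  @Override
  public void write(KEY key, VALUE value) {
    writable.write(key, value);  // delegate each record straight to the dataset
  }

  @Override
  public void close(TaskAttemptContext context) throws IOException {
    writable.close();  // flush and release the underlying dataset writer
  }
}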
@SuppressWarnings("unchecked") @Override public void run(Context context) throws IOException, InterruptedException { MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration()); ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader); BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context); long metricsReportInterval = basicMapReduceContext.getMetricsReportIntervalMillis(); final ReduceTaskMetricsWriter reduceTaskMetricsWriter = new ReduceTaskMetricsWriter( ClassLoader programClassLoader = classLoader.getProgramClassLoader(); Reducer delegate = createReducerInstance(programClassLoader, userReducer);
DynamicPartitionerWriterWrapper(TaskAttemptContext job) {
  this.job = job;

  Configuration configuration = job.getConfiguration();
  Class<? extends DynamicPartitioner> partitionerClass = configuration
    .getClass(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_CLASS_NAME, null, DynamicPartitioner.class);
  this.dynamicPartitioner = new InstantiatorFactory(false).get(TypeToken.of(partitionerClass)).create();
  this.partitionWriteOption = DynamicPartitioner.PartitionWriteOption.valueOf(
    configuration.get(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_WRITE_OPTION));

  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration);
  this.taskContext = classLoader.getTaskContextProvider().get(job);

  // Name the output file 'part-<RunId>-m-00000' instead of 'part-m-00000'.
  String outputName = DynamicPartitioningOutputFormat.getOutputName(job);
  if (partitionWriteOption == DynamicPartitioner.PartitionWriteOption.CREATE_OR_APPEND) {
    outputName = outputName + "-" + taskContext.getProgramRunId().getRun();
  }
  this.outputName = outputName;

  String outputDatasetName = configuration.get(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_DATASET);
  this.outputDataset = taskContext.getDataset(outputDatasetName);
  this.partitioning = outputDataset.getPartitioning();

  this.dynamicPartitioner.initialize(taskContext);
  this.fileOutputFormatName = job.getConfiguration()
    .getClass(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_FORMAT_CLASS_NAME, null, FileOutputFormat.class)
    .getName();
}
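// Hedged sketch of a user-supplied DynamicPartitioner that the wrapper above would load via
// DYNAMIC_PARTITIONER_CLASS_NAME, instantiate, and initialize. The PartitionKey builder calls
// and the "time" field name are assumptions for illustration; the field must match the output
// PartitionedFileSet's partitioning schema.
import co.cask.cdap.api.dataset.lib.DynamicPartitioner;
import co.cask.cdap.api.dataset.lib.PartitionKey;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

public class TimeDynamicPartitioner extends DynamicPartitioner<LongWritable, Text> {
  @Override
  public PartitionKey getPartitionKey(LongWritable key, Text value) {
    // Partition each record by the hour of its timestamp key.
    long hour = key.get() / (60 * 60 * 1000);
    return PartitionKey.builder().addLongField("time", hour).build();
  }
}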