// Supplies a fresh Handler instance on every call.
// NOTE(review): the trailing extra '}' closes an enclosing (anonymous) class that is
// outside this view; it must be kept.
@Override
public Handler get() {
  // we don't instantiate the handler class via injection, to avoid giving it access to
  // objects bound in guice, such as SConfiguration
  return new InstantiatorFactory(false).get(TypeToken.of(handlerClass)).create();
}
}
/**
 * Returns a new instance of the given type, lazily building and caching an
 * {@code Instantiator} keyed by the type's raw class.
 */
protected Object create(TypeToken<?> type) {
  Class<?> clz = type.getRawType();
  Instantiator<?> instantiator = creators.get(clz);
  if (instantiator != null) {
    return instantiator.create();
  }
  // first request for this raw class: build the creator and remember it
  instantiator = creatorFactory.get(type);
  creators.put(clz, instantiator);
  return instantiator.create();
}
/**
 * Constructs an AuthorizerInstantiator, caching the security flags read from the
 * supplied configuration.
 *
 * @param cConf CDAP configuration used to look up security settings
 * @param authorizationContextFactory factory for creating authorization contexts
 */
@Inject
@VisibleForTesting
public AuthorizerInstantiator(CConfiguration cConf,
                              AuthorizationContextFactory authorizationContextFactory) {
  this.cConf = cConf;
  this.authorizationContextFactory = authorizationContextFactory;
  this.instantiatorFactory = new InstantiatorFactory(false);
  // cache the flags so callers don't repeatedly consult the configuration
  this.authenticationEnabled = cConf.getBoolean(Constants.Security.ENABLED);
  this.authorizationEnabled = cConf.getBoolean(Constants.Security.Authorization.ENABLED);
}
// Creates a new instance of the given type, caching the Instantiator per raw class.
// NOTE(review): the cache is keyed by the raw type, so two TypeTokens that share a raw
// class but differ in type arguments share one creator — presumably intended; confirm.
protected Object create(TypeToken<?> type) {
  Class<?> rawType = type.getRawType();
  Instantiator<?> creator = creators.get(rawType);
  if (creator == null) {
    creator = creatorFactory.get(type);
    creators.put(rawType, creator);
  }
  return creator.create();
}
/**
 * Creates a reader that materializes objects of the given type from data described by
 * the given schema, using reflection.
 */
protected ReflectionReader(Schema schema, TypeToken<TO> type) {
  this.schema = schema;
  this.type = type;
  this.creatorFactory = new InstantiatorFactory(true);
  this.fieldAccessorFactory = new ReflectionFieldAccessorFactory();
  // identity-keyed: creators are looked up per Class object
  this.creators = Maps.newIdentityHashMap();
}
// Captures everything needed to write this task's output into dynamic partitions:
// the user's DynamicPartitioner, the output dataset, and the delegate output format.
DynamicPartitionerWriterWrapper(TaskAttemptContext job) {
  this.job = job;
  Configuration configuration = job.getConfiguration();
  // instantiate the user's DynamicPartitioner from the class name carried in the job conf
  Class<? extends DynamicPartitioner> partitionerClass = configuration
    .getClass(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_CLASS_NAME, null, DynamicPartitioner.class);
  this.dynamicPartitioner = new InstantiatorFactory(false).get(TypeToken.of(partitionerClass)).create();
  this.partitionWriteOption = DynamicPartitioner.PartitionWriteOption.valueOf(
    configuration.get(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_WRITE_OPTION));
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration);
  this.taskContext = classLoader.getTaskContextProvider().get(job);
  // name the output file 'part-<RunId>-m-00000' instead of 'part-m-00000'
  String outputName = DynamicPartitioningOutputFormat.getOutputName(job);
  if (partitionWriteOption == DynamicPartitioner.PartitionWriteOption.CREATE_OR_APPEND) {
    // suffix with the run id so appends from different runs don't collide on file names
    outputName = outputName + "-" + taskContext.getProgramRunId().getRun();
  }
  this.outputName = outputName;
  String outputDatasetName = configuration.get(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_DATASET);
  this.outputDataset = taskContext.getDataset(outputDatasetName);
  this.partitioning = outputDataset.getPartitioning();
  // let the partitioner set itself up before any record is written
  this.dynamicPartitioner.initialize(taskContext);
  this.fileOutputFormatName = job.getConfiguration()
    .getClass(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_FORMAT_CLASS_NAME, null, FileOutputFormat.class)
    .getName();
}
// Fragment (starts/ends mid-method): instantiate the plugin and inject its config value.
// NOTE(review): instantiatorFactory, pluginType, configField and config are declared
// outside this span.
T plugin = (T) instantiatorFactory.get(pluginType).create();
// the config field may be private in the plugin class, so bypass access checks
configField.setAccessible(true);
configField.set(plugin, config);
// Sets up per-class instance creators (identity-keyed by Class) and field accessors used
// to populate objects of {@code type} according to {@code schema}.
protected ReflectionReader(Schema schema, TypeToken<TO> type) {
  // true flag presumably enables unsafe (constructor-bypassing) instantiation — TODO confirm
  this.creatorFactory = new InstantiatorFactory(true);
  this.creators = Maps.newIdentityHashMap();
  this.fieldAccessorFactory = new ReflectionFieldAccessorFactory();
  this.schema = schema;
  this.type = type;
}
/**
 * Verifies that unsafe instantiation (constructor bypassed) leaves every non-static
 * field of {@code Record} at its Java default value.
 */
@Test
public void testUnsafe() {
  Record instance = new InstantiatorFactory(false).get(TypeToken.of(Record.class)).create();
  Reflections.visit(instance, Record.class, new FieldVisitor() {
    @Override
    public void visit(Object obj, Type inspectType, Type declareType, Field field) throws Exception {
      if (Modifier.isStatic(field.getModifiers())) {
        return;
      }
      // the constructor never ran, so each instance field must hold its default value
      Assert.assertEquals(Defaults.defaultValue(field.getType()), field.get(obj));
    }
  });
}
/**
 * Creates a new instance of the plugin class without any config; the config field of the
 * instantiated plugin will be left {@code null}.
 *
 * @param plugin information about the plugin
 * @param <T> type of plugin
 * @return a new plugin instance
 * @throws IOException if the plugin class loader cannot be obtained
 * @throws ClassNotFoundException if the plugin class cannot be loaded
 */
public <T> T newInstanceWithoutConfig(Plugin plugin) throws IOException, ClassNotFoundException {
  String className = plugin.getPluginClass().getClassName();
  ClassLoader classLoader = getPluginClassLoader(plugin);
  @SuppressWarnings("unchecked")
  Class<T> loadedClass = (Class<T>) classLoader.loadClass(className);
  return instantiatorFactory.get(TypeToken.of(loadedClass)).create();
}
/**
 * Creates a PluginInstantiator.
 *
 * @param cConf CDAP configuration, used to locate the local scratch directory
 * @param parentClassLoader parent for all plugin class loaders
 * @param pluginDir directory containing the plugin artifacts
 * @param filterClassloader whether to wrap the parent in a filtering class loader
 */
public PluginInstantiator(CConfiguration cConf, ClassLoader parentClassLoader, File pluginDir,
                          boolean filterClassloader) {
  this.instantiatorFactory = new InstantiatorFactory(false);
  this.pluginDir = pluginDir;
  File baseTmpDir = new File(cConf.get(Constants.CFG_LOCAL_DATA_DIR),
                             cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsoluteFile();
  this.tmpDir = DirUtils.createTempDir(baseTmpDir);
  this.classLoaders = CacheBuilder.newBuilder()
    .removalListener(new ClassLoaderRemovalListener())
    .build(new ClassLoaderCacheLoader());
  // when filtering, we create (and therefore own) the wrapping parent class loader
  this.ownedParentClassLoader = filterClassloader;
  this.parentClassLoader = filterClassloader
    ? PluginClassLoader.createParent(parentClassLoader)
    : parentClassLoader;
}
// Captures everything needed to write this task's output into dynamic partitions:
// the user's DynamicPartitioner, the output dataset, and the delegate output format.
DynamicPartitionerWriterWrapper(TaskAttemptContext job) {
  this.job = job;
  Configuration configuration = job.getConfiguration();
  // instantiate the user's DynamicPartitioner from the class name carried in the job conf
  Class<? extends DynamicPartitioner> partitionerClass = configuration
    .getClass(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_CLASS_NAME, null, DynamicPartitioner.class);
  this.dynamicPartitioner = new InstantiatorFactory(false).get(TypeToken.of(partitionerClass)).create();
  this.partitionWriteOption = DynamicPartitioner.PartitionWriteOption.valueOf(
    configuration.get(PartitionedFileSetArguments.DYNAMIC_PARTITIONER_WRITE_OPTION));
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration);
  this.taskContext = classLoader.getTaskContextProvider().get(job);
  // name the output file 'part-<RunId>-m-00000' instead of 'part-m-00000'
  String outputName = DynamicPartitioningOutputFormat.getOutputName(job);
  if (partitionWriteOption == DynamicPartitioner.PartitionWriteOption.CREATE_OR_APPEND) {
    // suffix with the run id so appends from different runs don't collide on file names
    outputName = outputName + "-" + taskContext.getProgramRunId().getRun();
  }
  this.outputName = outputName;
  String outputDatasetName = configuration.get(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_DATASET);
  this.outputDataset = taskContext.getDataset(outputDatasetName);
  this.partitioning = outputDataset.getPartitioning();
  // let the partitioner set itself up before any record is written
  this.dynamicPartitioner.initialize(taskContext);
  this.fileOutputFormatName = job.getConfiguration()
    .getClass(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_FORMAT_CLASS_NAME, null, FileOutputFormat.class)
    .getName();
}
/**
 * Create a new instance of plugin class without any config, config will be null in the
 * instantiated plugin.
 *
 * @param plugin information about the plugin
 * @param <T> Type of plugin
 * @return a new plugin instance
 * @throws IOException if the plugin class loader cannot be obtained
 * @throws ClassNotFoundException if the plugin class cannot be loaded
 */
public <T> T newInstanceWithoutConfig(Plugin plugin) throws IOException, ClassNotFoundException {
  ClassLoader pluginClassLoader = getPluginClassLoader(plugin);
  @SuppressWarnings("unchecked")
  Class<T> pluginClassLoaded = (Class<T>) pluginClassLoader.loadClass(plugin.getPluginClass().getClassName());
  return instantiatorFactory.get(TypeToken.of(pluginClassLoaded)).create();
}
// Builds the instantiator: sets up a scratch temp dir under the local data dir, a cache of
// per-plugin class loaders, and (optionally) a filtering parent class loader.
public PluginInstantiator(CConfiguration cConf, ClassLoader parentClassLoader, File pluginDir,
                          boolean filterClassloader) {
  this.instantiatorFactory = new InstantiatorFactory(false);
  File tmpDir = new File(cConf.get(Constants.CFG_LOCAL_DATA_DIR),
                         cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsoluteFile();
  this.pluginDir = pluginDir;
  this.tmpDir = DirUtils.createTempDir(tmpDir);
  this.classLoaders = CacheBuilder.newBuilder()
    .removalListener(new ClassLoaderRemovalListener())
    .build(new ClassLoaderCacheLoader());
  // when filtering, we create (and therefore own) the wrapping parent class loader
  this.parentClassLoader = filterClassloader
    ? PluginClassLoader.createParent(parentClassLoader)
    : parentClassLoader;
  this.ownedParentClassLoader = filterClassloader;
}
@SuppressWarnings("unchecked") private Workflow initializeWorkflow() throws Exception { Class<?> clz = Class.forName(workflowSpec.getClassName(), true, program.getClassLoader()); if (!Workflow.class.isAssignableFrom(clz)) { throw new IllegalStateException(String.format("%s is not Workflow.", clz)); } Class<? extends Workflow> workflowClass = (Class<? extends Workflow>) clz; final Workflow workflow = new InstantiatorFactory(false).get(TypeToken.of(workflowClass)).create(); // set metrics Reflections.visit(workflow, workflow.getClass(), new MetricsFieldSetter(workflowContext.getMetrics())); if (!(workflow instanceof ProgramLifecycle)) { return workflow; } final TransactionControl txControl = Transactions.getTransactionControl(workflowContext.getDefaultTxControl(), Workflow.class, workflow, "initialize", WorkflowContext.class); basicWorkflowToken.setCurrentNode(workflowSpec.getName()); workflowContext.setState(new ProgramState(ProgramStatus.INITIALIZING, null)); workflowContext.initializeProgram((ProgramLifecycle) workflow, txControl, false); workflowStateWriter.setWorkflowToken(workflowRunId, basicWorkflowToken); return workflow; }
// Fragment (starts/ends mid-method): instantiate the plugin and inject its config value.
// NOTE(review): instantiatorFactory, pluginType, configField and config are declared
// outside this span.
T plugin = (T) instantiatorFactory.get(pluginType).create();
// the config field may be private in the plugin class, so bypass access checks
configField.setAccessible(true);
configField.set(plugin, config);
/**
 * Builds one delegator context per HTTP service handler declared in the Spark
 * specification.
 */
@Override
protected List<SparkHandlerDelegatorContext> createDelegatorContexts() throws Exception {
  InstantiatorFactory factory = new InstantiatorFactory(false);
  List<SparkHandlerDelegatorContext> result = new ArrayList<>();
  for (SparkHttpServiceHandlerSpecification handlerSpec : context.getSpecification().getHandlers()) {
    Class<?> loadedClass = getProgram().getClassLoader().loadClass(handlerSpec.getClassName());
    @SuppressWarnings("unchecked")
    TypeToken<SparkHttpServiceHandler> handlerType =
      TypeToken.of((Class<SparkHttpServiceHandler>) loadedClass);
    // per-handler metrics context, tagged with the handler's simple class name
    MetricsContext handlerMetrics = runtimeContext.getProgramMetrics()
      .childContext(Constants.Metrics.Tag.HANDLER, loadedClass.getSimpleName());
    result.add(new SparkHandlerDelegatorContext(handlerType, factory, handlerSpec,
                                                runtimeContext.getProgramMetrics(), handlerMetrics));
  }
  return result;
}
// Loads, instantiates and (for ProgramLifecycle workflows) initializes the Workflow named
// in the spec; plain workflows are returned right after metrics injection.
@SuppressWarnings("unchecked")
private Workflow initializeWorkflow() throws Exception {
  Class<?> clz = Class.forName(workflowSpec.getClassName(), true, program.getClassLoader());
  if (!Workflow.class.isAssignableFrom(clz)) {
    throw new IllegalStateException(String.format("%s is not Workflow.", clz));
  }
  Class<? extends Workflow> workflowClass = (Class<? extends Workflow>) clz;
  // instantiate reflectively rather than via injection
  final Workflow workflow = new InstantiatorFactory(false).get(TypeToken.of(workflowClass)).create();
  // set metrics
  Reflections.visit(workflow, workflow.getClass(), new MetricsFieldSetter(workflowContext.getMetrics()));
  if (!(workflow instanceof ProgramLifecycle)) {
    return workflow;
  }
  // lifecycle-aware workflows run their initialize phase and persist the workflow token
  final TransactionControl txControl =
    Transactions.getTransactionControl(workflowContext.getDefaultTxControl(), Workflow.class,
                                       workflow, "initialize", WorkflowContext.class);
  basicWorkflowToken.setCurrentNode(workflowSpec.getName());
  workflowContext.setState(new ProgramState(ProgramStatus.INITIALIZING, null));
  workflowContext.initializeProgram((ProgramLifecycle) workflow, txControl, false);
  workflowStateWriter.setWorkflowToken(workflowRunId, basicWorkflowToken);
  return workflow;
}
/**
 * Loads and instantiates the CustomAction named in the context's specification, then
 * injects properties, datasets and metrics into it.
 */
@SuppressWarnings("unchecked")
private CustomAction createCustomAction(BasicCustomActionContext context, InstantiatorFactory instantiator,
                                        ClassLoader classLoader) throws Exception {
  Class<?> actionClass = Class.forName(context.getSpecification().getClassName(), true, classLoader);
  Preconditions.checkArgument(CustomAction.class.isAssignableFrom(actionClass),
                              "%s is not a CustomAction.", actionClass);
  CustomAction customAction =
    instantiator.get(TypeToken.of((Class<? extends CustomAction>) actionClass)).create();
  // wire up properties, datasets and metrics declared via field annotations
  Reflections.visit(customAction, customAction.getClass(),
                    new PropertyFieldSetter(context.getSpecification().getProperties()),
                    new DataSetFieldSetter(context),
                    new MetricsFieldSetter(context.getMetrics()));
  return customAction;
}
// Builds one delegator context per HTTP service handler declared in the Spark spec.
@Override
protected List<SparkHandlerDelegatorContext> createDelegatorContexts() throws Exception {
  List<SparkHandlerDelegatorContext> contexts = new ArrayList<>();
  InstantiatorFactory instantiatorFactory = new InstantiatorFactory(false);
  for (SparkHttpServiceHandlerSpecification spec : context.getSpecification().getHandlers()) {
    Class<?> handlerClass = getProgram().getClassLoader().loadClass(spec.getClassName());
    @SuppressWarnings("unchecked")
    TypeToken<SparkHttpServiceHandler> type = TypeToken.of((Class<SparkHttpServiceHandler>) handlerClass);
    // per-handler metrics context, tagged with the handler's simple class name
    MetricsContext handlerMetricsContext = runtimeContext.getProgramMetrics().childContext(
      Constants.Metrics.Tag.HANDLER, handlerClass.getSimpleName());
    contexts.add(new SparkHandlerDelegatorContext(type, instantiatorFactory, spec,
                                                  runtimeContext.getProgramMetrics(), handlerMetricsContext));
  }
  return contexts;
}