static ErrorReporter getErrorReporter(final Configuration conf) throws ClassNotFoundException {
  Class<? extends ErrorReporter> reporter =
      conf.getClass("hbasefsck.errorreporter", PrintingErrorReporter.class, ErrorReporter.class);
  return ReflectionUtils.newInstance(reporter, conf);
}
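Every snippet in these results follows the same configuration-driven plugin pattern: a string key names an implementation class, Configuration.getClass resolves it against a default and a required supertype, and ReflectionUtils.newInstance constructs the instance (Hadoop's version uses the no-arg constructor and then calls setConf on Configurable instances; HBase ships its own variant that matches a constructor to the supplied arguments). A minimal sketch of the caller side, reusing the key from the snippet above and assuming a hypothetical MyErrorReporter that implements ErrorReporter:

// Sketch only: MyErrorReporter is illustrative, not part of HBase.
Configuration conf = new Configuration();
conf.setClass("hbasefsck.errorreporter", MyErrorReporter.class, ErrorReporter.class);
ErrorReporter reporter = getErrorReporter(conf); // reflectively instantiates MyErrorReporter

setClass checks at set time that the class is assignable to the given interface, so a misconfigured key fails early rather than at instantiation.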
/**
 * Instantiate the {@link UserProvider} specified in the configuration and set the passed
 * configuration via {@link UserProvider#setConf(Configuration)}.
 * @param conf to read and set on the created {@link UserProvider}
 * @return a {@link UserProvider} ready for use.
 */
public static UserProvider instantiate(Configuration conf) {
  Class<? extends UserProvider> clazz =
      conf.getClass(USER_PROVIDER_CONF_KEY, UserProvider.class, UserProvider.class);
  return ReflectionUtils.newInstance(clazz, conf);
}
/**
 * Create a region normalizer from the given conf.
 * @param conf configuration
 * @return {@link RegionNormalizer} implementation
 */
public static RegionNormalizer getRegionNormalizer(Configuration conf) {
  // Create instance of Region Normalizer
  Class<? extends RegionNormalizer> balancerKlass =
      conf.getClass(HConstants.HBASE_MASTER_NORMALIZER_CLASS,
          SimpleRegionNormalizer.class, RegionNormalizer.class);
  return ReflectionUtils.newInstance(balancerKlass, conf);
}
/**
 * Returns an instance of ImpersonationProvider.
 * Looks up the configuration to see if there is a custom class specified.
 * @param conf configuration
 * @return ImpersonationProvider
 */
private static ImpersonationProvider getInstance(Configuration conf) {
  Class<? extends ImpersonationProvider> clazz = conf.getClass(
      CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
      DefaultImpersonationProvider.class, ImpersonationProvider.class);
  return ReflectionUtils.newInstance(clazz, conf);
}
/**
 * Returns an instance of SaslPropertiesResolver.
 * Looks up the configuration to see if there is a custom class specified.
 * Constructs the instance by passing the configuration directly to the
 * constructor, achieving thread safety through final fields.
 * @param conf configuration
 * @return SaslPropertiesResolver
 */
public static SaslPropertiesResolver getInstance(Configuration conf) {
  Class<? extends SaslPropertiesResolver> clazz = conf.getClass(
      CommonConfigurationKeysPublic.HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
      SaslPropertiesResolver.class, SaslPropertiesResolver.class);
  return ReflectionUtils.newInstance(clazz, conf);
}
public static FSUtils getInstance(FileSystem fs, Configuration conf) {
  String scheme = fs.getUri().getScheme();
  if (scheme == null) {
    LOG.warn("Could not find scheme for uri " + fs.getUri() + ", default to hdfs");
    scheme = "hdfs";
  }
  Class<?> fsUtilsClass =
      conf.getClass("hbase.fsutil." + scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl
  FSUtils fsUtils = (FSUtils) ReflectionUtils.newInstance(fsUtilsClass, conf);
  return fsUtils;
}
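Unlike the fixed keys above, this lookup builds the key from the filesystem scheme, so each scheme can carry its own FSUtils implementation. A sketch under that assumption, with S3FSUtils as a hypothetical subclass:

// Hypothetical: S3FSUtils extends FSUtils; the key shape comes from the snippet above.
conf.setClass("hbase.fsutil.s3.impl", S3FSUtils.class, FSUtils.class);
FSUtils utils = FSUtils.getInstance(s3FileSystem, conf); // resolves S3FSUtils for scheme "s3"

Any scheme without an override falls back to FSHDFSUtils, and a URI with no scheme at all is treated as hdfs.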
static synchronized RpcEngine getProtocolEngine(Class<?> protocol, Configuration conf) {
  RpcEngine engine = PROTOCOL_ENGINES.get(protocol);
  if (engine == null) {
    Class<?> impl = conf.getClass(ENGINE_PROP + "." + protocol.getName(), WritableRpcEngine.class);
    engine = (RpcEngine) ReflectionUtils.newInstance(impl, conf);
    PROTOCOL_ENGINES.put(protocol, engine);
  }
  return engine;
}
/**
 * Looks up and instantiates the Serialization object.
 *
 * Important to note here that we are not relying on the Hadoop
 * SerializationFactory part of the Serialization framework. This is because
 * in the case of non-Writable objects, we cannot make any assumptions about
 * the uniformity of the serialization class APIs - i.e., there may not be a
 * "write" method call and a subclass may need to implement its own
 * Serialization classes. The SerializationFactory currently returns the
 * first (de)serializer that is compatible with the class to be
 * deserialized; in this context, that assumption isn't necessarily true.
 *
 * @return the serialization object for this context
 * @throws IOException declared by the interface; not currently thrown
 */
@Override
public Serialization<S> getSerialization() throws IOException {
  Class<Serialization<S>> tClass =
      (Class<Serialization<S>>) conf.getClass(SerializationImplKey, null, Serialization.class);
  return tClass == null ? null
      : (Serialization<S>) ReflectionUtils.newInstance(tClass, conf);
}
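This lookup differs from the rest by passing null as the default class, so an unset key produces a null Serialization instead of a fallback instance, and callers have to be prepared for that. A hedged sketch of the registration and the null guard; MySerialization and the surrounding context object are illustrative:

// SerializationImplKey is the String config key referenced above;
// MySerialization is a hypothetical Serialization<S> implementation.
conf.setClass(SerializationImplKey, MySerialization.class, Serialization.class);
Serialization<S> ser = context.getSerialization();
if (ser == null) {
  throw new IOException("no Serialization configured under " + SerializationImplKey);
}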
/**
 * Gets backup restore job
 * @param conf configuration
 * @return backup restore job instance
 */
public static RestoreJob getRestoreJob(Configuration conf) {
  Class<? extends RestoreJob> cls =
      conf.getClass(HBASE_INCR_RESTORE_IMPL_CLASS, MapReduceRestoreJob.class, RestoreJob.class);
  RestoreJob service = ReflectionUtils.newInstance(cls, conf);
  service.setConf(conf);
  return service;
}
public CellCreator(Configuration conf) {
  Class<? extends VisibilityExpressionResolver> clazz =
      conf.getClass(VISIBILITY_EXP_RESOLVER_CLASS,
          DefaultVisibilityExpressionResolver.class, VisibilityExpressionResolver.class);
  this.visExpResolver = ReflectionUtils.newInstance(clazz, conf);
  this.visExpResolver.init();
}
/**
 * Gets backup copy job
 * @param conf configuration
 * @return backup copy job instance
 */
public static BackupCopyJob getBackupCopyJob(Configuration conf) {
  Class<? extends BackupCopyJob> cls =
      conf.getClass(HBASE_BACKUP_COPY_IMPL_CLASS, MapReduceBackupCopyJob.class, BackupCopyJob.class);
  BackupCopyJob service = ReflectionUtils.newInstance(cls, conf);
  service.setConf(conf);
  return service;
}
/**
 * Create a loadbalancer from the given conf.
 * @param conf configuration
 * @return a {@link LoadBalancer}
 */
public static LoadBalancer getLoadBalancer(Configuration conf) {
  // Create the balancer
  Class<? extends LoadBalancer> balancerKlass =
      conf.getClass(HConstants.HBASE_MASTER_LOADBALANCER_CLASS,
          getDefaultLoadBalancerClass(), LoadBalancer.class);
  return ReflectionUtils.newInstance(balancerKlass, conf);
}
/**
 * Gets backup merge job
 * @param conf configuration
 * @return backup merge job instance
 */
public static BackupMergeJob getBackupMergeJob(Configuration conf) {
  Class<? extends BackupMergeJob> cls =
      conf.getClass(HBASE_BACKUP_MERGE_IMPL_CLASS, MapReduceBackupMergeJob.class, BackupMergeJob.class);
  BackupMergeJob service = ReflectionUtils.newInstance(cls, conf);
  service.setConf(conf);
  return service;
}
public static NetworkTopology getInstance(Configuration conf, InnerNode.Factory factory) {
  NetworkTopology nt = ReflectionUtils.newInstance(
      conf.getClass(CommonConfigurationKeysPublic.NET_TOPOLOGY_IMPL_KEY,
          NetworkTopology.class, NetworkTopology.class), conf);
  return nt.init(factory);
}
@Override
public void setConf(Configuration conf) {
  this.conf = conf;
  final Class<? extends Random> klass = conf.getClass(
      HADOOP_SECURITY_SECURE_RANDOM_IMPL_KEY, OsSecureRandom.class, Random.class);
  try {
    random = ReflectionUtils.newInstance(klass, conf);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Using " + klass.getName() + " as random number generator.");
    }
  } catch (Exception e) {
    LOG.info("Unable to use " + klass.getName() + ". Falling back to "
        + "Java SecureRandom.", e);
    this.random = new SecureRandom();
  }
}
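This setter is the defensive variant of the pattern: instead of letting a bad class name propagate an exception, it logs and falls back to java.security.SecureRandom. Overriding the generator is the usual one-liner; MyRandom here is hypothetical:

// Hypothetical: MyRandom extends java.util.Random.
conf.setClass(HADOOP_SECURITY_SECURE_RANDOM_IMPL_KEY, MyRandom.class, Random.class);
// setConf(conf) then uses MyRandom, or silently falls back to SecureRandom on failure.

The fallback keeps the component usable under misconfiguration, at the cost of hiding the problem behind an info-level log line.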
/**
 * Get an instance of the configured TrashPolicy based on the value
 * of the configuration parameter fs.trash.classname.
 *
 * @param conf the configuration to be used
 * @param fs the file system to be used
 * @return an instance of TrashPolicy
 */
public static TrashPolicy getInstance(Configuration conf, FileSystem fs) {
  Class<? extends TrashPolicy> trashClass = conf.getClass(
      "fs.trash.classname", TrashPolicyDefault.class, TrashPolicy.class);
  TrashPolicy trash = ReflectionUtils.newInstance(trashClass, conf);
  trash.initialize(conf, fs); // initialize TrashPolicy
  return trash;
}
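Usage mirrors the other factories: point fs.trash.classname at a TrashPolicy subclass before requesting the instance. A minimal sketch, with MyTrashPolicy as a hypothetical implementation:

// Hypothetical: MyTrashPolicy extends TrashPolicy.
Configuration conf = new Configuration();
conf.setClass("fs.trash.classname", MyTrashPolicy.class, TrashPolicy.class);
FileSystem fs = FileSystem.get(conf);
TrashPolicy trash = TrashPolicy.getInstance(conf, fs); // already initialized with conf and fs

Note that the factory calls initialize(conf, fs) before returning, so callers get a ready-to-use policy.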
/**
 * Return HiveAuthorizerFactory used by new authorization plugin interface.
 * @param conf configuration
 * @param authorizationProviderConfKey config variable that names the factory class
 * @return the HiveAuthorizerFactory instance
 * @throws HiveException if the HiveAuthorizerFactory specified in the configuration could not be created
 */
public static HiveAuthorizerFactory getAuthorizerFactory(
    Configuration conf, HiveConf.ConfVars authorizationProviderConfKey) throws HiveException {
  Class<? extends HiveAuthorizerFactory> cls =
      conf.getClass(authorizationProviderConfKey.varname,
          SQLStdHiveAuthorizerFactory.class, HiveAuthorizerFactory.class);
  if (cls == null) {
    // should not happen, as a default value is set
    throw new HiveException("Configuration value " + authorizationProviderConfKey.varname
        + " is not set to a valid HiveAuthorizerFactory subclass");
  }
  HiveAuthorizerFactory authFactory = ReflectionUtils.newInstance(cls, conf);
  return authFactory;
}
/**
 * Get an instance of the configured TrashPolicy based on the value
 * of the configuration parameter fs.trash.classname.
 *
 * @param conf the configuration to be used
 * @param fs the file system to be used
 * @param home the home directory
 * @return an instance of TrashPolicy
 * @deprecated Use {@link #getInstance(Configuration, FileSystem)} instead.
 */
@Deprecated
public static TrashPolicy getInstance(Configuration conf, FileSystem fs, Path home) {
  Class<? extends TrashPolicy> trashClass = conf.getClass(
      "fs.trash.classname", TrashPolicyDefault.class, TrashPolicy.class);
  TrashPolicy trash = ReflectionUtils.newInstance(trashClass, conf);
  trash.initialize(conf, fs, home); // initialize TrashPolicy
  return trash;
}