/**
 * Injects values from a {@link org.apache.hadoop.conf.Configurable} instance's
 * {@link org.apache.hadoop.conf.Configuration} into its annotated instance variables.
 *
 * <p>Annotations declared in ancestor classes are honored as well.</p>
 *
 * @param instance The object whose fields should be populated.
 * @throws HadoopConfigurationException If a declaration is invalid or a field
 *     cannot be assigned.
 */
public static void configure(Configurable instance) {
  // Delegate to the two-argument overload; parent classes are included by default.
  final boolean includeParentClasses = true;
  configure(instance, includeParentClasses);
}
extractDeclaredVariables(instance.getClass(), includeParentClasses); for (ConfigurationVariable variable : variables) { try { extractDeclaredMethods(instance.getClass(), includeParentClasses); for (ConfigurationMethod method : methods) { try {
/**
 * Initializes this object's state using the settings from the configuration.
 * Throws a RuntimeException if there is an error.
 *
 * @param conf The configuration to read settings from.
 */
@Override
public void setConf(Configuration conf) {
  // Record the configuration in the superclass first, then bind the
  // annotated fields of this instance from it.
  super.setConf(conf);
  HadoopConfigurator.configure(this);
}
/**
 * Injects values from a {@link org.apache.hadoop.conf.Configurable} instance's
 * {@link org.apache.hadoop.conf.Configuration} into its annotated instance variables.
 *
 * @param instance The object whose fields should be populated.
 * @param includeParentClasses Whether variables declared in super classes are
 *     populated as well.
 * @throws HadoopConfigurationException If a declaration is invalid or a field
 *     cannot be assigned.
 */
public static void configure(Configurable instance, boolean includeParentClasses) {
  // The instance's own Configuration is the source of the values; hand off to
  // the three-argument overload that does the actual work.
  configure(instance, instance.getConf(), includeParentClasses);
}
/**
 * Initializes internal state from the Configuration.
 * Sets the delimiter to write between columns, the family to write to, and
 * the max versions to read from each column.
 *
 * @param conf The Configuration to initialize from.
 */
@Override
public void setConf(Configuration conf) {
  // Store the configuration via the superclass, then let the configurator
  // populate this object's annotated fields from it.
  super.setConf(conf);
  HadoopConfigurator.configure(this);
}
/**
 * Stores the configuration and populates this object's annotated fields
 * from it via {@link HadoopConfigurator}.
 *
 * @param conf The configuration to read settings from.
 */
@Override
public void setConf(Configuration conf) {
  super.setConf(conf);
  // Bind annotated instance variables from the freshly stored configuration.
  HadoopConfigurator.configure(this);
}
/** {@inheritDoc} */ @Override public void setConf(Configuration conf) { super.setConf(conf); HadoopConfigurator.configure(this); // Validate that they are either both families or both columns. if (mInputColumn.isFullyQualified() != mOutputColumn.isFullyQualified()) { throw new RuntimeException( "Input and output must both be a specific column, or both be a family"); } }
HadoopConfigurator.configure(this); final Configuration conf = getConf(); Preconditions.checkNotNull(mTableImportDescriptor);