
How to use com.moz.fiji.hadoop.configurator

Best Java code snippets using com.moz.fiji.hadoop.configurator (Showing top 20 results out of 315)

origin: com.moz.fiji.hadoop/hadoop-configurator

/**
 * Gets the configuration key that should be used to populate the field.
 *
 * @return The key that was specified in the HadoopConf annotation.
 */
public String getKey() {
 return mAnnotation.key();
}
origin: com.moz.fiji.hadoop/hadoop-configurator

 /**
  * Gets the default value specified by the annotation.
  *
  * @return The default value.
  */
 private String getDefault() {
  return mAnnotation.defaultValue();
 }
}
origin: com.moz.fiji.hadoop/hadoop-configurator

/**
 * Populates the instance variables of a {@link org.apache.hadoop.conf.Configurable}
 * instance with values from its {@link org.apache.hadoop.conf.Configuration}.
 *
 * <p>This includes annotations declared in parent classes.</p>
 *
 * @param instance The instance to configure.
 * @throws HadoopConfigurationException If there is an error with the declaration or
 *     assigning the field.
 */
public static void configure(Configurable instance) {
 configure(instance, true);
}
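
The configure(Configurable) entry point above, combined with the setConf overrides shown below, yields a simple usage pattern. The following is a minimal sketch of that pattern; the class, field, and key names are invented for illustration and are not part of these libraries.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;

import com.moz.fiji.hadoop.configurator.HadoopConf;
import com.moz.fiji.hadoop.configurator.HadoopConfigurator;

/** Hypothetical Configurable whose fields are filled from its Configuration. */
public class WordLengthOptions extends Configured {
 /** Populated from the made-up key "example.min.word.length"; falls back to 3. */
 @HadoopConf(key="example.min.word.length", defaultValue="3")
 private int mMinWordLength;

 /** {@inheritDoc} */
 @Override
 public void setConf(Configuration conf) {
  super.setConf(conf);
  // Assigns every @HadoopConf-annotated field from getConf(),
  // including fields declared in parent classes.
  HadoopConfigurator.configure(this);
 }

 /** @return the configured minimum word length. */
 public int getMinWordLength() {
  return mMinWordLength;
 }
}
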
origin: com.moz.fiji.hadoop/hadoop-configurator

// Body of HadoopConfigurator.configure(instance, conf, includeParentClasses):
// first assigns the annotated fields, then invokes the annotated setter methods.
// (The declared collection types are not visible in this extract; List is assumed.)
List<ConfigurationVariable> variables =
  extractDeclaredVariables(instance.getClass(), includeParentClasses);
for (ConfigurationVariable variable : variables) {
 try {
  variable.setValue(instance, conf);
 } catch (IllegalAccessException e) {
  throw new HadoopConfigurationException(e);
 }
}

List<ConfigurationMethod> methods =
  extractDeclaredMethods(instance.getClass(), includeParentClasses);
for (ConfigurationMethod method : methods) {
 try {
  method.call(instance, conf);
 } catch (IllegalAccessException e) {
  throw new HadoopConfigurationException(e);
 }
}
origin: com.moz.fiji.mapreduce.lib/fiji-mapreduce-lib

/**
 * Sets the delimiter.
 *
 * @param delimiter The delimiter.
 */
@HadoopConf(key=CONF_FIELD_DELIMITER, defaultValue=DEFAULT_FIELD_DELIMITER)
protected void setFieldDelimiter(String delimiter) {
 if (delimiter.length() != 1) {
  throw new RuntimeException("Delimiter must be exactly one character long."
    + "  Received: \"" + delimiter + "\".");
 }
 mFieldDelimiter = delimiter;
}
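
As this snippet shows, @HadoopConf may annotate a setter method rather than a field; when configure() runs, the method is invoked with the value read from the Configuration. Supplying that value looks roughly like the sketch below. The literal key string is a placeholder, since only the CONF_FIELD_DELIMITER constant (not its value) is visible here.

// Placeholder key string: the real key is whatever CONF_FIELD_DELIMITER names.
Configuration conf = new Configuration();
conf.set("placeholder.field.delimiter", "|");
// When the exporter's setConf(conf) runs, HadoopConfigurator.configure(this)
// calls setFieldDelimiter("|") because of the @HadoopConf annotation on that method.
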
origin: com.moz.fiji.mapreduce.lib/fiji-mapreduce-lib

/**
 * Initializes this object's state using the settings from the configuration.
 * Throws a RuntimeException if there is an error.
 *
 * @param conf The configuration to read settings from.
 */
@Override
public void setConf(Configuration conf) {
 super.setConf(conf);
 HadoopConfigurator.configure(this);
}
origin: com.moz.fiji.hadoop/hadoop-configurator

 /**
  * Gets the default value as a string.
  *
  * @param instance The object instance.
  * @return The default string value.
  * @throws IllegalAccessException If the field cannot be read.
  */
 private String getDefaultString(Object instance) throws IllegalAccessException {
  String defaultValue = mAnnotation.defaultValue();
  if (defaultValue.isEmpty()) {
   return (String) mField.get(instance);
  }
  return defaultValue;
 }
}
origin: com.moz.fiji.mapreduce.lib/fiji-mapreduce-lib

/**
 * Sets the log rate - the number of lines between log statements for incomplete/rejected lines.
 *
 * @param logRateString The logging rate as a string.
 */
@HadoopConf(key=CONF_LOG_RATE, usage="The number of lines to skip between log statements")
protected final void setLogRate(String logRateString) {
 if (logRateString != null) {
  try {
   Long logRate = Long.parseLong(logRateString);
   mLogRate = logRate;
  } catch (NumberFormatException ne) {
   LOG.warn("Unable to parse log rate: " + logRateString);
  }
 }
}
origin: com.moz.fiji.hadoop/hadoop-configurator

/**
 * Populates the instance variables of a {@link org.apache.hadoop.conf.Configurable}
 * instance with values from its {@link org.apache.hadoop.conf.Configuration}.
 *
 * @param instance The instance to configure.
 * @param includeParentClasses Whether to include declared variables in super classes.
 * @throws HadoopConfigurationException If there is an error with the declaration or
 *     assigning the field.
 */
public static void configure(Configurable instance, boolean includeParentClasses) {
 configure(instance, instance.getConf(), includeParentClasses);
}
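
A rough sketch of what the includeParentClasses flag controls, using invented classes and keys (imports as in the earlier sketch):

/** Hypothetical base class declaring its own annotated field. */
class BaseOptions extends Configured {
 @HadoopConf(key="example.job.name", defaultValue="unnamed")
 protected String mJobName;
}

/** Hypothetical subclass adding another annotated field. */
class ChildOptions extends BaseOptions {
 @HadoopConf(key="example.job.retries", defaultValue="3")
 private int mRetries;

 /** {@inheritDoc} */
 @Override
 public void setConf(Configuration conf) {
  super.setConf(conf);
  // true: also assigns mJobName declared in BaseOptions.
  // false: only fields declared directly on ChildOptions would be assigned.
  HadoopConfigurator.configure(this, true);
 }
}
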
origin: com.moz.fiji.hadoop/hadoop-configurator

/**
 * Gets the default value as a long.
 *
 * @param instance The object instance.
 * @return The default long value.
 * @throws IllegalAccessException If the field cannot be read.
 */
private long getDefaultLong(Object instance) throws IllegalAccessException {
 String defaultValue = mAnnotation.defaultValue();
 if (defaultValue.isEmpty()) {
  return mField.getLong(instance);
 }
 return Long.parseLong(defaultValue);
}
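
The getDefault* helpers suggest how fallback values are chosen: an empty defaultValue in the annotation means the field's current (initialized) value is used as the default, while a non-empty defaultValue string is parsed instead. A hedged illustration with invented keys (imports as in the earlier sketch):

/** Hypothetical options class showing both default-value styles. */
public class TimeoutOptions extends Configured {
 // No defaultValue in the annotation: the field initializer (30000L) is the fallback.
 @HadoopConf(key="example.timeout.ms")
 private long mTimeoutMs = 30000L;

 // Explicit defaultValue: the string "1000" is parsed with Long.parseLong.
 @HadoopConf(key="example.max.records", defaultValue="1000")
 private long mMaxRecords;

 /** {@inheritDoc} */
 @Override
 public void setConf(Configuration conf) {
  super.setConf(conf);
  HadoopConfigurator.configure(this);
 }
}
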
origin: com.moz.fiji.mapreduce.lib/fiji-mapreduce-lib

/**
 * Sets the output column name.
 *
 * @param column The output column.
 */
@HadoopConf(key=CONF_OUTPUT, usage="The output column name.")
protected void setOutputColumn(String column) {
 if (null == column || column.isEmpty()) {
  throw new RuntimeException("Must specify " + CONF_OUTPUT);
 }
 mOutputColumn = new FijiColumnName(column);
}
origin: com.moz.fiji.mapreduce.lib/fiji-mapreduce-lib

/**
 * Initializes internal state from the Configuration.
 * Sets the delimiter to write between columns, the family to write to, and
 * the max versions to read from each column.
 *
 * @param conf The Configuration to initialize from.
 */
@Override
public void setConf(Configuration conf) {
 super.setConf(conf);
 HadoopConfigurator.configure(this);
}
origin: com.moz.fiji.hadoop/hadoop-configurator

/**
 * Gets the default value as an int.
 *
 * @param instance The object instance.
 * @return The default int value.
 * @throws IllegalAccessException If the field cannot be read.
 */
private int getDefaultInt(Object instance) throws IllegalAccessException {
 String defaultValue = mAnnotation.defaultValue();
 if (defaultValue.isEmpty()) {
  return mField.getInt(instance);
 }
 return Integer.parseInt(defaultValue);
}
origin: com.moz.fiji.mapreduce.lib/fiji-mapreduce-lib

/**
 * Sets the input column name.
 *
 * @param column The input column.
 */
@HadoopConf(key=CONF_INPUT, usage="The input column name.")
protected void setInputColumn(String column) {
 if (null == column || column.isEmpty()) {
  throw new RuntimeException("Must specify " + CONF_INPUT);
 }
 mInputColumn = new FijiColumnName(column);
}
origin: com.moz.fiji.mapreduce.lib/fiji-mapreduce-lib

@Override
public void setConf(Configuration conf) {
 super.setConf(conf);
 HadoopConfigurator.configure(this);
}
origin: com.moz.fiji.hadoop/hadoop-configurator

/**
 * Gets the default value as a double.
 *
 * @param instance The object instance.
 * @return The default double value.
 * @throws IllegalAccessException If the field cannot be read.
 */
private double getDefaultDouble(Object instance) throws IllegalAccessException {
 String defaultValue = mAnnotation.defaultValue();
 if (defaultValue.isEmpty()) {
  return mField.getDouble(instance);
 }
 return Double.parseDouble(defaultValue);
}
origin: com.moz.fiji.mapreduce.lib/fiji-mapreduce-lib

/**
 * Sets the family.
 *
 * @param family The family.
 */
@HadoopConf(key=CONF_EXPORT_FAMILY)
protected void setFamily(String family) {
 FijiColumnName name = new FijiColumnName(family);
 if (name.isFullyQualified()) {
  throw new RuntimeException("Expected an unqualified map type family. "
    + "Requested family was: " + name.getName());
 }
 mFamily = family;
}
origin: com.moz.fiji.mapreduce.lib/fiji-mapreduce-lib

/** {@inheritDoc} */
@Override
public void setConf(Configuration conf) {
 super.setConf(conf);
 HadoopConfigurator.configure(this);
 // Validate that they are either both families or both columns.
 if (mInputColumn.isFullyQualified() != mOutputColumn.isFullyQualified()) {
  throw new RuntimeException(
    "Input and output must both be a specific column, or both be a family");
 }
}
origin: com.moz.fiji.hadoop/hadoop-configurator

/**
 * Gets the default value as a float.
 *
 * @param instance The object instance.
 * @return The default float value.
 * @throws IllegalAccessException If the field cannot be read.
 */
private float getDefaultFloat(Object instance) throws IllegalAccessException {
 String defaultValue = mAnnotation.defaultValue();
 if (defaultValue.isEmpty()) {
  return mField.getFloat(instance);
 }
 return Float.parseFloat(defaultValue);
}
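
Taken together, the getDefault* helpers above cover String, int, long, float, and double fields. A compact sketch mixing those types (class and key names are invented; imports as in the earlier sketch):

/** Hypothetical options class exercising each supported field type shown above. */
public class MixedOptions extends Configured {
 @HadoopConf(key="example.name", defaultValue="anonymous")
 private String mName;

 @HadoopConf(key="example.count", defaultValue="10")
 private int mCount;

 @HadoopConf(key="example.limit", defaultValue="100000")
 private long mLimit;

 @HadoopConf(key="example.ratio", defaultValue="0.5")
 private float mRatio;

 @HadoopConf(key="example.threshold", defaultValue="0.99")
 private double mThreshold;

 /** {@inheritDoc} */
 @Override
 public void setConf(Configuration conf) {
  super.setConf(conf);
  HadoopConfigurator.configure(this);
 }
}
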
com.moz.fiji.hadoop.configurator

Most used classes

  • HadoopConf
  • HadoopConfigurator
    The entry point for the HadoopConfigurator system. Clients should call HadoopConfigurator#configure(Configurable) to populate the annotated fields and setter methods of a Configurable instance.
  • ConfigurationMethod
    This class encapsulates everything there is to know about a Hadoop configuration method declaration.
  • ConfigurationVariable
    This class encapsulates everything there is to know about a Hadoop configuration variable declaration.
  • HadoopConfigurationException
    An exception thrown when there is an error populating the member variables of a Configurable instance.