Configuration

How to use Configuration in org.pentaho.hadoop.shim.api

Best Java code snippets using org.pentaho.hadoop.shim.api.Configuration (Showing top 16 results out of 315)

Common ways to obtain Configuration:

private void myMethod () {
 HadoopShim hadoopShim; // obtained elsewhere
 Configuration c = hadoopShim.createConfiguration();
}
origin: pentaho/pentaho-hadoop-shims

@Override
public String[] getJobtrackerConnectionInfo( Configuration c ) {
 String[] result = new String[ 2 ];
 if ( !"local".equals( c.get( "mapred.job.tracker", "local" ) ) ) {
  InetSocketAddress jobtracker = getJobTrackerAddress( c );
  result[ 0 ] = jobtracker.getHostName();
  result[ 1 ] = String.valueOf( jobtracker.getPort() );
 }
 return result;
}
origin: pentaho/pentaho-hadoop-shims

@SuppressWarnings( "deprecation" )
public static org.apache.hadoop.mapred.JobConf asConfiguration( Configuration c ) {
 return c.getAsDelegateConf( org.apache.hadoop.mapred.JobConf.class );
}
origin: pentaho/pentaho-hadoop-shims

@Override
public RunningJob submitJob( Configuration c ) throws IOException {
 ClassLoader cl = Thread.currentThread().getContextClassLoader();
 Thread.currentThread().setContextClassLoader( getClass().getClassLoader() );
 try {
  return c.submit();
 } catch ( InterruptedException | ClassNotFoundException e ) {
  throw new RuntimeException( e );
 } finally {
  Thread.currentThread().setContextClassLoader( cl );
 }
}
origin: pentaho/big-data-plugin

URL[] urls = new URL[] { resolvedJarUrl };
URLClassLoader loader = new URLClassLoader( urls, hadoopShim.getClass().getClassLoader() );
conf.setJobName( hadoopJobName );
conf.setOutputKeyClass( keyClass );
conf.setOutputValueClass( valueClass );
conf.setMapOutputKeyClass( keyClass );
conf.setMapOutputValueClass( valueClass );
conf.setMapRunnerClass( runnerClass );
conf.setMapperClass( mapper );
conf.setCombinerClass( combiner );
conf.setReducerClass( reducer );
conf.setInputFormat( inputFormat );
conf.setOutputFormat( outputFormat );
conf.setInputPaths( finalPaths );
conf.setOutputPath( getOutputPath( conf, fs ) );
// copy user-defined properties onto the configuration
// (the surrounding entry loop is elided in this excerpt)
String key = stringStringEntry.getKey();
String value = stringStringEntry.getValue();
if ( key != null && !"".equals( key ) && value != null && !"".equals( value ) ) {
 conf.set( key, value );
}
origin: pentaho/big-data-plugin

try {
 String mapreduceClasspath =
  conf.get( MAPREDUCE_APPLICATION_CLASSPATH, DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH );
 conf.set( MAPREDUCE_APPLICATION_CLASSPATH, CLASSES + mapreduceClasspath );
 log.logBasic( MAPREDUCE_APPLICATION_CLASSPATH + ": " + conf.get( MAPREDUCE_APPLICATION_CLASSPATH ) );
} catch ( Exception ex ) {
 throw new IOException( ex ); // original exception message elided in this excerpt
}
JobConf jobConf = conf.getAsDelegateConf( JobConf.class );
jobConf.getCredentials().addAll( UserGroupInformation.getCurrentUser().getCredentials() );
return super.submit( conf );
origin: pentaho/pentaho-hadoop-shims

@Override
public void configureConnectionInformation( String namenodeHost, String namenodePort, String jobtrackerHost,
                      String jobtrackerPort, Configuration conf, List<String> logMessages )
 throws Exception {
 if ( namenodeHost == null || namenodeHost.trim().length() == 0 ) {
  throw new Exception( "No hdfs host specified!" );
 }
 if ( jobtrackerHost == null || jobtrackerHost.trim().length() == 0 ) {
  throw new Exception( "No job tracker host specified!" );
 }
 if ( namenodePort != null
  && namenodePort.trim().length() != 0
  && !"-1".equals( namenodePort.trim() ) ) {
  namenodePort = ":" + namenodePort;
 } else {
  // a NameNode URL without a port is fine, e.g. in HA mode (BAD-358)
  namenodePort = "";
  logMessages.add( "No hdfs port specified - HA? " );
 }
 if ( jobtrackerPort == null || jobtrackerPort.trim().length() == 0 ) {
  jobtrackerPort = getDefaultJobtrackerPort();
  logMessages.add( "No job tracker port specified - using default: " + jobtrackerPort );
 }
 String fsDefaultName = "hdfs://" + namenodeHost + namenodePort;
 String jobTracker = jobtrackerHost + ":" + jobtrackerPort;
 conf.set( "fs.default.name", fsDefaultName );
 conf.set( "mapred.job.tracker", jobTracker );
}
origin: pentaho/pentaho-hadoop-shims

// excerpt from a configureConnectionInformation variant; the logic between
// the null check and the final set() calls is elided
String runtimeFsDefaultName = conf.get( "pentaho.runtime.fs.default.name" );
String runtimeFsDefaultScheme = conf.get( "pentaho.runtime.fs.default.scheme", "hdfs" );
String runtimeJobTracker = conf.get( "pentaho.runtime.job.tracker" );
if ( runtimeFsDefaultName == null ) {
 if ( namenodeHost == null || namenodeHost.trim().length() == 0 ) {
  // ... (host validation and default construction elided)
 }
}
conf.set( "fs.default.name", runtimeFsDefaultName );
conf.set( "mapred.job.tracker", runtimeJobTracker );
origin: pentaho/big-data-plugin

@Override
protected void configure( Configuration conf ) throws Exception {
 callVisitors();
 setMapRunnerClass( hadoopShim.getPentahoMapReduceMapRunnerClass().getCanonicalName() );
 conf.set( TRANSFORMATION_MAP_XML, mapperTransformationXml );
 conf.set( TRANSFORMATION_MAP_INPUT_STEPNAME, mapperInputStep );
 conf.set( TRANSFORMATION_MAP_OUTPUT_STEPNAME, mapperOutputStep );
 if ( combinerTransformationXml != null ) {
  conf.set( TRANSFORMATION_COMBINER_XML, combinerTransformationXml );
  conf.set( TRANSFORMATION_COMBINER_INPUT_STEPNAME, combinerInputStep );
  conf.set( TRANSFORMATION_COMBINER_OUTPUT_STEPNAME, combinerOutputStep );
  setCombinerClass( hadoopShim.getPentahoMapReduceCombinerClass().getCanonicalName() );
 }
 if ( reducerTransformationXml != null ) {
  conf.set( TRANSFORMATION_REDUCE_XML, reducerTransformationXml );
  conf.set( TRANSFORMATION_REDUCE_INPUT_STEPNAME, reducerInputStep );
  conf.set( TRANSFORMATION_REDUCE_OUTPUT_STEPNAME, reducerOutputStep );
  setReducerClass( hadoopShim.getPentahoMapReduceReducerClass().getCanonicalName() );
 }
 conf.setJarByClass( hadoopShim.getPentahoMapReduceMapRunnerClass() );
 conf.set( LOG_LEVEL, logLevel.toString() );
 configureVariableSpace( conf );
 super.configure( conf );
}
origin: pentaho/big-data-plugin

private Path getPath( Configuration conf, FileSystem fs, String outputPath ) {
 return fs.asPath( conf.getDefaultFileSystemURL(), outputPath );
}
origin: pentaho/pentaho-hadoop-shims

conf.set( "fs.maprfs.impl", MapRFileProvider.FS_MAPR_IMPL );
origin: pentaho/pentaho-hadoop-shims

public static InetSocketAddress getJobTrackerAddress( Configuration conf ) {
 String jobTrackerStr = conf.get( "mapred.job.tracker", "localhost:8012" );
 return NetUtils.createSocketAddr( jobTrackerStr );
}
origin: pentaho/pentaho-hadoop-shims

@Override
public RunningJob submitJob( org.pentaho.hadoop.shim.api.Configuration c ) throws IOException {
 ClassLoader cl = Thread.currentThread().getContextClassLoader();
 Thread.currentThread().setContextClassLoader( getClass().getClassLoader() );
 try {
  return c.submit();
 } catch ( InterruptedException | ClassNotFoundException e ) {
  throw new RuntimeException( e );
 } finally {
  Thread.currentThread().setContextClassLoader( cl );
 }
}
origin: pentaho/big-data-plugin

@Override public int runTool( List<String> args, Properties properties ) {
 Configuration configuration = hadoopShim.createConfiguration();
 for ( Map.Entry<String, String> entry : Maps.fromProperties( properties ).entrySet() ) {
  configuration.set( entry.getKey(), entry.getValue() );
 }
 try {
  // Make sure Sqoop throws exceptions instead of returning a status of 1
  System.setProperty( SQOOP_THROW_ON_ERROR, Boolean.toString( true ) );
  configureShim( configuration );
  return sqoopShim.runTool( args.toArray( new String[args.size()] ), configuration );
 } catch ( Exception e ) {
  LOGGER.error( "Error executing sqoop", e );
  return 1;
 }
}
origin: pentaho/big-data-plugin

String fromConf = conf.get( propertyName );
if ( Utils.isEmpty( fromConf ) ) {
 Object objectValue = properties.getOrDefault( propertyName, null );
 // ... (handling of objectValue elided in this excerpt)
}
origin: pentaho/big-data-plugin

activeConfigurationId = hadoopConfiguration.getIdentifier();
Configuration config = hadoopConfiguration.getHadoopShim().createConfiguration();
String defaultFS = config.get( HadoopFileSystem.FS_DEFAULT_NAME );
org.pentaho.hadoop.shim.api.Configuration

Javadoc

A thin abstraction for org.apache.hadoop.mapred.JobConf (and, consequently, org.apache.hadoop.conf.Configuration). Most of the methods here directly wrap methods found in org.apache.hadoop.mapred.JobConf, and the documentation there should be consulted when providing an implementation.
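To make the wrapper concrete, here is a minimal sketch of the flow the snippets above follow: obtain a Configuration from a HadoopShim, set properties, and submit. The import locations for HadoopShim and RunningJob are assumptions (they vary across shim versions), and the job name and tracker address are illustrative only.

import java.io.IOException;

import org.pentaho.hadoop.shim.api.Configuration;
import org.pentaho.hadoop.shim.api.mapred.RunningJob; // assumed package; varies across shim versions
import org.pentaho.hadoop.shim.spi.HadoopShim;        // assumed package; varies across shim versions

public class SubmitSketch {

 // Mirrors the submitJob( Configuration ) implementations shown above.
 public RunningJob submitJob( HadoopShim hadoopShim ) throws IOException {
  Configuration conf = hadoopShim.createConfiguration();

  // set/get wrap JobConf directly; get() can take a default value.
  conf.setJobName( "example-job" );                   // illustrative name
  conf.set( "mapred.job.tracker", "localhost:8012" ); // illustrative address

  try {
   // submit() runs the job described by this configuration.
   return conf.submit();
  } catch ( InterruptedException | ClassNotFoundException e ) {
   throw new RuntimeException( e );
  }
 }
}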

Most used methods

  • set
    Sets the property name's value to value.
  • get
    Look up the value of a property, optionally returning a default value if the property is not set.
  • getAsDelegateConf
    Hack: return this configuration as the requested delegate class, if possible.
  • submit
    Submit job for the current configuration provided by this implementation.
  • getDefaultFileSystemURL
    Get the default file system URL as stored in this configuration.
  • setCombinerClass
  • setInputFormat
  • setInputPaths
  • setJar
  • setJarByClass
  • setJobName
    Sets the MapReduce job name.
  • setMapOutputKeyClass
    Set the key class for the map output data.
  • setMapOutputValueClass
  • setMapRunnerClass
  • setMapperClass
  • setNumMapTasks
  • setNumReduceTasks
  • setOutputFormat
  • setOutputKeyClass
  • setOutputPath
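
The lookup methods above combine naturally. The sketch below is illustrative only: the property name and default come from the snippets above, and inspect is a hypothetical helper.

import org.apache.hadoop.mapred.JobConf;
import org.pentaho.hadoop.shim.api.Configuration;

public class LookupSketch {

 // Hypothetical helper demonstrating the most-used read methods.
 static void inspect( Configuration conf ) {
  // get() with a default, as in getJobTrackerAddress() above
  String jobTracker = conf.get( "mapred.job.tracker", "localhost:8012" );

  // default file system URL stored in this configuration
  String fsUrl = conf.getDefaultFileSystemURL();

  // unwrap the underlying JobConf when the shim-level methods are not enough
  JobConf jobConf = conf.getAsDelegateConf( JobConf.class );

  System.out.println( jobTracker + " / " + fsUrl + " / " + jobConf.getJobName() );
 }
}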
