@Override
public String[] getJobtrackerConnectionInfo( Configuration c ) {
  // Returns a two-element array { host, port-as-string } for the configured
  // job tracker. When "mapred.job.tracker" is unset or explicitly "local",
  // both entries are left null (there is no remote tracker to connect to).
  String[] connectionInfo = new String[ 2 ];
  String tracker = c.get( "mapred.job.tracker", "local" );
  if ( !"local".equals( tracker ) ) {
    InetSocketAddress address = getJobTrackerAddress( c );
    connectionInfo[ 0 ] = address.getHostName();
    connectionInfo[ 1 ] = String.valueOf( address.getPort() );
  }
  return connectionInfo;
}
conf.get( MAPREDUCE_APPLICATION_CLASSPATH, DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH ); conf.set( MAPREDUCE_APPLICATION_CLASSPATH, CLASSES + mapreduceClasspath ); log.logBasic( MAPREDUCE_APPLICATION_CLASSPATH + ": " + conf.get( MAPREDUCE_APPLICATION_CLASSPATH ) ); } catch ( Exception ex ) { throw new IOException(
public static InetSocketAddress getJobTrackerAddress( Configuration conf ) {
  // Resolve the job tracker endpoint from the "mapred.job.tracker" property,
  // falling back to "localhost:8012" when the property is not set, and let
  // NetUtils parse the host:port string into a socket address.
  final String trackerHostAndPort = conf.get( "mapred.job.tracker", "localhost:8012" );
  return NetUtils.createSocketAddr( trackerHostAndPort );
}
List<String> logMessages ) throws Exception { String runtimeFsDefaultName = conf.get( "pentaho.runtime.fs.default.name" ); String runtimeFsDefaultScheme = conf.get( "pentaho.runtime.fs.default.scheme", "hdfs" ); String runtimeJobTracker = conf.get( "pentaho.runtime.job.tracker" ); if ( runtimeFsDefaultName == null ) { if ( namenodeHost == null || namenodeHost.trim().length() == 0 ) {
String fromConf = conf.get( propertyName ); if ( Utils.isEmpty( fromConf ) ) { Object objectValue = properties.getOrDefault( propertyName, null );
activeConfigurationId = hadoopConfiguration.getIdentifier(); Configuration config = hadoopConfiguration.getHadoopShim().createConfiguration(); String defaultFS = config.get( HadoopFileSystem.FS_DEFAULT_NAME );