@Override
public String[] getJobtrackerConnectionInfo( Configuration c ) {
  String[] result = new String[ 2 ];
  if ( !"local".equals( c.get( "mapred.job.tracker", "local" ) ) ) {
    InetSocketAddress jobtracker = getJobTrackerAddress( c );
    result[ 0 ] = jobtracker.getHostName();
    result[ 1 ] = String.valueOf( jobtracker.getPort() );
  }
  return result;
}
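// Usage sketch, not from the original source: "shim" stands for an instance of the
// class above, and createConfiguration() is the factory method seen later in this code.
// With a remote tracker configured, both array slots are populated; for "local" they
// remain null.
Configuration conf = shim.createConfiguration();
conf.set( "mapred.job.tracker", "tracker.example.com:8012" );
String[] info = shim.getJobtrackerConnectionInfo( conf );
// info[ 0 ] -> "tracker.example.com", info[ 1 ] -> "8012"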
@SuppressWarnings( "deprecation" )
public static org.apache.hadoop.mapred.JobConf asConfiguration( Configuration c ) {
  return c.getAsDelegateConf( org.apache.hadoop.mapred.JobConf.class );
}
@Override
public RunningJob submitJob( Configuration c ) throws IOException {
  // Swap in this class's classloader so Hadoop resolves shim classes during submission.
  ClassLoader cl = Thread.currentThread().getContextClassLoader();
  Thread.currentThread().setContextClassLoader( getClass().getClassLoader() );
  try {
    return c.submit();
  } catch ( InterruptedException | ClassNotFoundException e ) {
    throw new RuntimeException( e );
  } finally {
    Thread.currentThread().setContextClassLoader( cl );
  }
}
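// The method above uses the context-classloader swap idiom. A minimal generic helper
// sketching the same pattern; the name runWithClassLoader is illustrative, not from
// the original code.
static <T> T runWithClassLoader( ClassLoader target, java.util.concurrent.Callable<T> work )
  throws Exception {
  ClassLoader previous = Thread.currentThread().getContextClassLoader();
  Thread.currentThread().setContextClassLoader( target );
  try {
    // classes resolved via the context classloader now come from "target"
    return work.call();
  } finally {
    // restore unconditionally so callers never observe a foreign classloader
    Thread.currentThread().setContextClassLoader( previous );
  }
}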
URL[] urls = new URL[] { resolvedJarUrl };
URLClassLoader loader = new URLClassLoader( urls, hadoopShim.getClass().getClassLoader() );

conf.setJobName( hadoopJobName );
conf.setOutputKeyClass( keyClass );
conf.setOutputValueClass( valueClass );
conf.setMapOutputKeyClass( keyClass );
conf.setMapOutputValueClass( valueClass );
conf.setMapRunnerClass( runnerClass );
conf.setMapperClass( mapper );
conf.setCombinerClass( combiner );
conf.setReducerClass( reducer );
conf.setInputFormat( inputFormat );
conf.setOutputFormat( outputFormat );
conf.setInputPaths( finalPaths );
conf.setOutputPath( getOutputPath( conf, fs ) );

String value = stringStringEntry.getValue();
if ( key != null && !"".equals( key ) && value != null && !"".equals( value ) ) {
  // assumed body: the fragment is truncated here; presumably the user-supplied
  // property is copied into the job configuration
  conf.set( key, value );
}
// assumed assignment: the fragment dropped the left-hand side of this conf.get call
String mapreduceClasspath = conf.get( MAPREDUCE_APPLICATION_CLASSPATH, DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH );
conf.set( MAPREDUCE_APPLICATION_CLASSPATH, CLASSES + mapreduceClasspath );
log.logBasic( MAPREDUCE_APPLICATION_CLASSPATH + ": " + conf.get( MAPREDUCE_APPLICATION_CLASSPATH ) );
} catch ( Exception ex ) {
  throw new IOException( ex ); // the original exception message is elided in this fragment
}

JobConf jobConf = conf.getAsDelegateConf( JobConf.class );
jobConf.getCredentials().addAll( UserGroupInformation.getCurrentUser().getCredentials() );
return super.submit( conf );
@Override
public void configureConnectionInformation( String namenodeHost, String namenodePort,
    String jobtrackerHost, String jobtrackerPort, Configuration conf, List<String> logMessages )
  throws Exception {
  if ( namenodeHost == null || namenodeHost.trim().length() == 0 ) {
    throw new Exception( "No hdfs host specified!" );
  }
  if ( jobtrackerHost == null || jobtrackerHost.trim().length() == 0 ) {
    throw new Exception( "No job tracker host specified!" );
  }
  if ( namenodePort != null && namenodePort.trim().length() != 0 && !"-1".equals( namenodePort.trim() ) ) {
    namenodePort = ":" + namenodePort;
  } else {
    // A NameNode URL without a port is valid, e.g. in HA mode (BAD-358)
    namenodePort = "";
    logMessages.add( "No hdfs port specified - HA?" );
  }
  if ( jobtrackerPort == null || jobtrackerPort.trim().length() == 0 ) {
    jobtrackerPort = getDefaultJobtrackerPort();
    logMessages.add( "No job tracker port specified - using default: " + jobtrackerPort );
  }
  String fsDefaultName = "hdfs://" + namenodeHost + namenodePort;
  String jobTracker = jobtrackerHost + ":" + jobtrackerPort;
  conf.set( "fs.default.name", fsDefaultName );
  conf.set( "mapred.job.tracker", jobTracker );
}
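// Usage sketch with hypothetical values: an explicit NameNode port and a missing job
// tracker port, so the method falls back to getDefaultJobtrackerPort(). "shim" and
// "conf" are placeholders for an instance of the class above and its configuration.
List<String> messages = new ArrayList<>();
shim.configureConnectionInformation( "namenode.example.com", "8020",
  "jobtracker.example.com", null, conf, messages );
// conf now contains fs.default.name = hdfs://namenode.example.com:8020
// and mapred.job.tracker = jobtracker.example.com:<default port>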
List<String> logMessages ) throws Exception {
  String runtimeFsDefaultName = conf.get( "pentaho.runtime.fs.default.name" );
  String runtimeFsDefaultScheme = conf.get( "pentaho.runtime.fs.default.scheme", "hdfs" );
  String runtimeJobTracker = conf.get( "pentaho.runtime.job.tracker" );
  if ( runtimeFsDefaultName == null ) {
    if ( namenodeHost == null || namenodeHost.trim().length() == 0 ) {
      // ... host validation and construction of the runtime defaults are elided
      // in this fragment ...
  conf.set( "fs.default.name", runtimeFsDefaultName );
  conf.set( "mapred.job.tracker", runtimeJobTracker );
@Override
protected void configure( Configuration conf ) throws Exception {
  callVisitors();
  setMapRunnerClass( hadoopShim.getPentahoMapReduceMapRunnerClass().getCanonicalName() );
  conf.set( TRANSFORMATION_MAP_XML, mapperTransformationXml );
  conf.set( TRANSFORMATION_MAP_INPUT_STEPNAME, mapperInputStep );
  conf.set( TRANSFORMATION_MAP_OUTPUT_STEPNAME, mapperOutputStep );
  if ( combinerTransformationXml != null ) {
    conf.set( TRANSFORMATION_COMBINER_XML, combinerTransformationXml );
    conf.set( TRANSFORMATION_COMBINER_INPUT_STEPNAME, combinerInputStep );
    conf.set( TRANSFORMATION_COMBINER_OUTPUT_STEPNAME, combinerOutputStep );
    setCombinerClass( hadoopShim.getPentahoMapReduceCombinerClass().getCanonicalName() );
  }
  if ( reducerTransformationXml != null ) {
    conf.set( TRANSFORMATION_REDUCE_XML, reducerTransformationXml );
    conf.set( TRANSFORMATION_REDUCE_INPUT_STEPNAME, reducerInputStep );
    conf.set( TRANSFORMATION_REDUCE_OUTPUT_STEPNAME, reducerOutputStep );
    setReducerClass( hadoopShim.getPentahoMapReduceReducerClass().getCanonicalName() );
  }
  conf.setJarByClass( hadoopShim.getPentahoMapReduceMapRunnerClass() );
  conf.set( LOG_LEVEL, logLevel.toString() );
  configureVariableSpace( conf );
  super.configure( conf );
}
private Path getPath( Configuration conf, FileSystem fs, String outputPath ) {
  return fs.asPath( conf.getDefaultFileSystemURL(), outputPath );
}
conf.set( "fs.maprfs.impl", MapRFileProvider.FS_MAPR_IMPL );
conf.set( "fs.maprfs.impl", MapRFileProvider.FS_MAPR_IMPL );
public static InetSocketAddress getJobTrackerAddress( Configuration conf ) {
  String jobTrackerStr = conf.get( "mapred.job.tracker", "localhost:8012" );
  return NetUtils.createSocketAddr( jobTrackerStr );
}
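// Usage sketch: when "mapred.job.tracker" is unset, the hard-coded fallback applies;
// "shim" is a placeholder for an object exposing createConfiguration().
InetSocketAddress tracker = getJobTrackerAddress( shim.createConfiguration() );
// tracker -> localhost:8012 unless the configuration overrides it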
@Override
public RunningJob submitJob( org.pentaho.hadoop.shim.api.Configuration c ) throws IOException {
  ClassLoader cl = Thread.currentThread().getContextClassLoader();
  Thread.currentThread().setContextClassLoader( getClass().getClassLoader() );
  try {
    return c.submit();
  } catch ( InterruptedException | ClassNotFoundException e ) {
    throw new RuntimeException( e );
  } finally {
    Thread.currentThread().setContextClassLoader( cl );
  }
}
@Override
public int runTool( List<String> args, Properties properties ) {
  Configuration configuration = hadoopShim.createConfiguration();
  for ( Map.Entry<String, String> entry : Maps.fromProperties( properties ).entrySet() ) {
    configuration.set( entry.getKey(), entry.getValue() );
  }
  try {
    // Make sure Sqoop throws exceptions instead of returning a status of 1
    System.setProperty( SQOOP_THROW_ON_ERROR, Boolean.toString( true ) );
    configureShim( configuration );
    return sqoopShim.runTool( args.toArray( new String[ args.size() ] ), configuration );
  } catch ( Exception e ) {
    LOGGER.error( "Error executing sqoop", e );
    return 1;
  }
}
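// Usage sketch with placeholder arguments: "sqoopService" stands for an instance of
// the class above, and the import command and connect string are illustrative only.
Properties props = new Properties();
props.setProperty( "mapreduce.framework.name", "yarn" );
int status = sqoopService.runTool(
  Arrays.asList( "import", "--connect", "jdbc:mysql://db.example.com/sales" ), props );
// A non-zero status means the Sqoop invocation failed and was logged above.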
String fromConf = conf.get( propertyName );
if ( Utils.isEmpty( fromConf ) ) {
  Object objectValue = properties.getOrDefault( propertyName, null );
activeConfigurationId = hadoopConfiguration.getIdentifier();
Configuration config = hadoopConfiguration.getHadoopShim().createConfiguration();
String defaultFS = config.get( HadoopFileSystem.FS_DEFAULT_NAME );