@Override
public PlatformInfo getPlatformInfo()
  {
  // Resolve Tez platform details from the jar that provides the DAG class.
  PlatformInfo info = HadoopUtil.getPlatformInfo( DAG.class, null, "Tez" );

  return info;
  }
@Override
public PlatformInfo getPlatformInfo()
  {
  // Platform identity is derived from the jar supplying the Tez DAG class.
  return HadoopUtil.getPlatformInfo(
    DAG.class,
    null,
    "Tez"
  );
  }
@Override
public PlatformInfo getPlatformInfo()
  {
  // Derive Hadoop MR platform details from the jar providing JobConf.
  PlatformInfo info = HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" );

  return info;
  }
/**
 * Returns the version string of the underlying Tez platform, or
 * {@code "unknown"} when it cannot be determined.
 *
 * @return the Tez version, never null
 */
public static String getPlatformVersion()
  {
  PlatformInfo info = HadoopUtil.getPlatformInfo( DAG.class, null, "Tez" );

  // fall back to a sentinel when platform info or its version is unavailable
  return info != null && info.version != null ? info.version : "unknown";
  }
@Override
public PlatformInfo getPlatformInfo()
  {
  // Hadoop MR platform identity comes from the jar supplying JobConf.
  return HadoopUtil.getPlatformInfo(
    JobConf.class,
    "org/apache/hadoop",
    "Hadoop MR"
  );
  }
/**
 * Returns the version of the underlying Tez platform.
 *
 * @return the Tez version string, or {@code "unknown"} if it cannot be resolved
 */
public static String getPlatformVersion()
  {
  PlatformInfo tezInfo = HadoopUtil.getPlatformInfo( DAG.class, null, "Tez" );

  if( tezInfo == null )
    return "unknown";

  if( tezInfo.version == null )
    return "unknown";

  return tezInfo.version;
  }
/**
 * Constructor MapReduceFlow creates a new MapReduceFlow instance.
 *
 * @param properties       of type Properties
 * @param name             of type String
 * @param jobConf          of type JobConf
 * @param flowDescriptor   of type Map
 * @param deleteSinkOnInit of type boolean
 * @param stopJobsOnExit   of type boolean
 */
@ConstructorProperties({"properties", "name", "jobConf", "flowDescriptor", "deleteSinkOnInit", "stopJobsOnExit"})
public MapReduceFlow( Properties properties, String name, JobConf jobConf, Map<String, String> flowDescriptor, boolean deleteSinkOnInit, boolean stopJobsOnExit )
  {
  // platform info is resolved from the jar that provides JobConf
  super( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), properties, jobConf, name, flowDescriptor, deleteSinkOnInit );
  this.stopJobsOnExit = stopJobsOnExit;
  initializeFrom( jobConf ); // push off initialization allowing for overrides
  }
/**
 * Constructor MapReduceFlow creates a new MapReduceFlow instance.
 *
 * @param properties       of type Properties
 * @param name             of type String
 * @param jobConf          of type JobConf
 * @param flowDescriptor   of type Map
 * @param deleteSinkOnInit of type boolean
 * @param stopJobsOnExit   of type boolean
 */
@ConstructorProperties({"properties", "name", "jobConf", "flowDescriptor", "deleteSinkOnInit", "stopJobsOnExit"})
public MapReduceFlow( Properties properties, String name, JobConf jobConf, Map<String, String> flowDescriptor, boolean deleteSinkOnInit, boolean stopJobsOnExit )
  {
  // Hadoop MR platform info is discovered from the jar supplying JobConf
  super( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), properties, jobConf, name, flowDescriptor, deleteSinkOnInit );
  this.stopJobsOnExit = stopJobsOnExit;
  initializeFrom( jobConf ); // push off initialization allowing for overrides
  }
/**
 * Constructor MultiMapReduceFlow creates a new MultiMapReduceFlow instance.
 *
 * @param name     of String
 * @param jobConf  of JobConf
 * @param jobConfs of JobConf...
 */
public MultiMapReduceFlow( String name, JobConf jobConf, JobConf... jobConfs )
  {
  // delegate with discovered Hadoop MR platform info and empty properties
  this( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), new Properties(), name );
  initializeFrom( asList( jobConf, jobConfs ) );
  }
/**
 * Constructor MultiMapReduceFlow creates a new MultiMapReduceFlow instance.
 *
 * @param name     of String
 * @param jobConf  of JobConf
 * @param jobConfs of JobConf...
 */
public MultiMapReduceFlow( String name, JobConf jobConf, JobConf... jobConfs )
  {
  // delegate with discovered Hadoop MR platform info and empty properties
  this( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), new Properties(), name );
  initializeFrom( asList( jobConf, jobConfs ) );
  }
/**
 * Constructor MultiMapReduceFlow creates a new MultiMapReduceFlow instance.
 *
 * @param properties of Map
 * @param name       of String
 * @param jobConf    of JobConf
 * @param jobConfs   of JobConf...
 */
public MultiMapReduceFlow( Map<Object, Object> properties, String name, JobConf jobConf, JobConf... jobConfs )
  {
  // delegate with discovered platform info; no flowDescriptor (null)
  this( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), properties, name, null );
  initializeFrom( asList( jobConf, jobConfs ) );
  }
/**
 * Constructor MultiMapReduceFlow creates a new MultiMapReduceFlow instance.
 *
 * @param properties of Map
 * @param name       of String
 * @param jobConf    of JobConf
 * @param jobConfs   of JobConf...
 */
public MultiMapReduceFlow( Map<Object, Object> properties, String name, JobConf jobConf, JobConf... jobConfs )
  {
  // delegate with discovered platform info; no flowDescriptor (null)
  this( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), properties, name, null );
  initializeFrom( asList( jobConf, jobConfs ) );
  }
/**
 * Constructor MultiMapReduceFlow creates a new MultiMapReduceFlow instance.
 *
 * @param properties     of Map
 * @param name           of String
 * @param flowDescriptor of Map
 * @param jobConf        of JobConf
 * @param jobConfs       of JobConf...
 */
public MultiMapReduceFlow( Map<Object, Object> properties, String name, Map<String, String> flowDescriptor, JobConf jobConf, JobConf... jobConfs )
  {
  // delegate with discovered Hadoop MR platform info
  this( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), properties, name, flowDescriptor );
  initializeFrom( asList( jobConf, jobConfs ) );
  }
/**
 * Constructor MultiMapReduceFlow creates a new MultiMapReduceFlow instance.
 *
 * @param properties     of Map
 * @param name           of String
 * @param flowDescriptor of Map
 * @param stopJobsOnExit of boolean
 * @param jobConf        of JobConf
 * @param jobConfs       of JobConf...
 */
public MultiMapReduceFlow( Map<Object, Object> properties, String name, Map<String, String> flowDescriptor, boolean stopJobsOnExit, JobConf jobConf, JobConf... jobConfs )
  {
  // delegate with discovered Hadoop MR platform info
  this( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), properties, name, flowDescriptor );
  this.stopJobsOnExit = stopJobsOnExit;
  initializeFrom( asList( jobConf, jobConfs ) );
  }
/**
 * Constructor MultiMapReduceFlow creates a new MultiMapReduceFlow instance.
 *
 * @param properties     of Map
 * @param name           of String
 * @param flowDescriptor of Map
 * @param stopJobsOnExit of boolean
 * @param jobConf        of JobConf
 * @param jobConfs       of JobConf...
 */
public MultiMapReduceFlow( Map<Object, Object> properties, String name, Map<String, String> flowDescriptor, boolean stopJobsOnExit, JobConf jobConf, JobConf... jobConfs )
  {
  // delegate with discovered Hadoop MR platform info
  this( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), properties, name, flowDescriptor );
  this.stopJobsOnExit = stopJobsOnExit;
  initializeFrom( asList( jobConf, jobConfs ) );
  }
/**
 * Constructor MultiMapReduceFlow creates a new MultiMapReduceFlow instance.
 *
 * @param properties     of Map
 * @param name           of String
 * @param flowDescriptor of Map
 * @param jobConf        of JobConf
 * @param jobConfs       of JobConf...
 */
public MultiMapReduceFlow( Map<Object, Object> properties, String name, Map<String, String> flowDescriptor, JobConf jobConf, JobConf... jobConfs )
  {
  // delegate with discovered Hadoop MR platform info
  this( HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" ), properties, name, flowDescriptor );
  initializeFrom( asList( jobConf, jobConfs ) );
  }
/**
 * Creates the platform specific {@link FlowStepJob} for the given initialized step configuration.
 * <p>
 * When the platform specific job class cannot be loaded, the error is logged with the resolved
 * platform info to aid diagnosing Hadoop version / dependency mismatches, then rethrown.
 *
 * @param clientState           of type ClientState
 * @param flowProcess           of type FlowProcess
 * @param initializedStepConfig of type JobConf
 * @return a new HadoopFlowStepJob instance
 * @throws NoClassDefFoundError if the Hadoop platform classes cannot be loaded
 */
protected FlowStepJob<JobConf> createFlowStepJob( ClientState clientState, FlowProcess<JobConf> flowProcess, JobConf initializedStepConfig )
  {
  try
    {
    return new HadoopFlowStepJob( clientState, this, initializedStepConfig );
    }
  catch( NoClassDefFoundError error )
    {
    PlatformInfo platformInfo = HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" );
    String message = "unable to load platform specific class, please verify Hadoop cluster version: '%s', matches the Hadoop platform build dependency and associated FlowConnector, cascading-hadoop or cascading-hadoop2-mr1";

    // String.valueOf avoids a secondary NPE masking the original error when
    // getPlatformInfo returns null (callers elsewhere null-check its result)
    logError( String.format( message, String.valueOf( platformInfo ) ), error );

    throw error;
    }
  }
/**
 * Creates the platform specific {@link FlowStepJob} for the given initialized step configuration.
 * <p>
 * When the platform specific job class cannot be loaded, the error is logged with the resolved
 * platform info to aid diagnosing Hadoop version / dependency mismatches, then rethrown.
 *
 * @param clientState           of type ClientState
 * @param flowProcess           of type FlowProcess
 * @param initializedStepConfig of type JobConf
 * @return a new HadoopFlowStepJob instance
 * @throws NoClassDefFoundError if the Hadoop platform classes cannot be loaded
 */
protected FlowStepJob<JobConf> createFlowStepJob( ClientState clientState, FlowProcess<JobConf> flowProcess, JobConf initializedStepConfig )
  {
  try
    {
    return new HadoopFlowStepJob( clientState, this, initializedStepConfig );
    }
  catch( NoClassDefFoundError error )
    {
    PlatformInfo platformInfo = HadoopUtil.getPlatformInfo( JobConf.class, "org/apache/hadoop", "Hadoop MR" );
    String message = "unable to load platform specific class, please verify Hadoop cluster version: '%s', matches the Hadoop platform build dependency and associated FlowConnector, cascading-hadoop or cascading-hadoop2-mr1";

    // String.valueOf avoids a secondary NPE masking the original error when
    // getPlatformInfo returns null (callers elsewhere null-check its result)
    logError( String.format( message, String.valueOf( platformInfo ) ), error );

    throw error;
    }
  }