/**
 * Returns an independent copy of the given configuration object.
 *
 * @param config the configuration to duplicate
 * @param <C>    the concrete configuration type
 * @return a copy of {@code config} safe to mutate without affecting the original
 */
@Override
public <C> C copyConfig( C config )
  {
  return HadoopUtil.copyJobConf( config );
  }
/**
 * Produces a defensive copy of {@code config} via {@link HadoopUtil}.
 *
 * @param config the configuration instance to clone
 * @param <C>    the configuration type
 * @return a new configuration equal to, but independent of, {@code config}
 */
@Override
public <C> C copyConfig( C config )
  {
  return HadoopUtil.copyJobConf( config );
  }
/**
 * Delegates to {@link HadoopUtil#copyJobConf} to duplicate the supplied config.
 *
 * @param config the configuration to copy
 * @param <C>    the configuration type
 * @return an independent copy of {@code config}
 */
@Override
public <C> C copyConfig( C config )
  {
  return HadoopUtil.copyJobConf( config );
  }
/**
 * Clones the given configuration so callers can mutate the result freely.
 *
 * @param config the source configuration
 * @param <C>    the configuration type
 * @return a copy of {@code config}
 */
@Override
public <C> C copyConfig( C config )
  {
  return HadoopUtil.copyJobConf( config );
  }
/**
 * Returns a defensive copy of this instance's {@code jobConf} so callers
 * cannot mutate the shared configuration.
 *
 * @return a new {@link JobConf} mirroring the current one
 */
@Override
public JobConf getConfigCopy()
  {
  return HadoopUtil.copyJobConf( jobConf );
  }
/**
 * Hands out a private copy of the backing {@code jobConf}.
 *
 * @return a {@link JobConf} copy that is safe to modify
 */
@Override
public JobConf getConfigCopy()
  {
  return HadoopUtil.copyJobConf( jobConf );
  }
/**
 * Returns a copy of this instance's {@code conf} field, preventing callers
 * from mutating shared configuration state.
 *
 * @return an independent {@link Configuration} copy
 */
@Override
public Configuration getConfigCopy()
  {
  return HadoopUtil.copyJobConf( this.conf );
  }
/**
 * Creates a fresh {@link JobConf}, seeded from {@code defaultConfig} when one
 * is supplied.
 *
 * @param defaultConfig the configuration to copy, or {@code null} for an empty one
 * @return a new {@link JobConf} instance
 */
@Override
protected JobConf newConfig( JobConf defaultConfig )
  {
  if( defaultConfig == null )
    return new JobConf();

  return HadoopUtil.copyJobConf( defaultConfig );
  }
/**
 * Builds a new {@link JobConf}; when {@code defaultConfig} is non-null the
 * result is a copy of it, otherwise an empty configuration is returned.
 *
 * @param defaultConfig the template configuration, may be {@code null}
 * @return a new {@link JobConf}
 */
@Override
protected JobConf newConfig( JobConf defaultConfig )
  {
  if( defaultConfig != null )
    return HadoopUtil.copyJobConf( defaultConfig );

  return new JobConf();
  }
/**
 * Copies {@code defaultConfig} and overlays every entry of {@code map} onto
 * the copy; the original configuration is left untouched.
 *
 * @param defaultConfig the base configuration to copy
 * @param map           key/value pairs to set on the copy
 * @return the merged configuration copy
 */
@Override
public Configuration mergeMapIntoConfig( Configuration defaultConfig, Map<String, String> map )
  {
  Configuration mergedConf = HadoopUtil.copyJobConf( defaultConfig );

  // iterate entrySet() so each key is resolved once, instead of keySet() + get()
  for( Map.Entry<String, String> entry : map.entrySet() )
    mergedConf.set( entry.getKey(), entry.getValue() );

  return mergedConf;
  }
/**
 * Merges the given key/value pairs into a configuration.
 * <p>
 * When {@code directly} is true the entries are written straight into
 * {@code job}; otherwise they are applied to a copy ({@link JobConf} instances
 * are copied via {@code copyJobConf}, anything else via the
 * {@link Configuration} copy constructor) and the copy is returned.
 *
 * @param job      the configuration to merge into (or to copy first)
 * @param config   the key/value pairs to apply
 * @param directly when true, mutate {@code job} in place
 * @param <J>      the configuration type
 * @return the configuration holding the merged entries
 */
@SuppressWarnings( "unchecked" ) // cast is safe by construction: result is job itself or a copy of its runtime type
public static <J extends Configuration> J mergeConf( J job, Map<String, String> config, boolean directly )
  {
  Configuration currentConf;

  if( directly )
    currentConf = job;
  else if( job instanceof JobConf )
    currentConf = copyJobConf( (JobConf) job );
  else
    currentConf = new Configuration( job );

  // iterate entrySet() so each key is resolved once, instead of keySet() + get()
  for( Map.Entry<String, String> entry : config.entrySet() )
    {
    if( LOG.isDebugEnabled() )
      LOG.debug( "merging key: {} value: {}", entry.getKey(), entry.getValue() );

    currentConf.set( entry.getKey(), entry.getValue() );
    }

  return (J) currentConf;
  }
/**
 * Returns a copy of the current configuration obtained from {@code getConfig()},
 * keeping the internal instance safe from caller mutation.
 *
 * @return an independent {@link JobConf} copy
 */
@Override
public JobConf getConfigCopy()
  {
  return HadoopUtil.copyJobConf( getConfig() );
  }
/**
 * Lets every trap {@link Tap} initialize its sink settings against a private
 * copy of the given {@link JobConf}; a no-op when no traps are configured.
 *
 * @param flowProcess the current flow process
 * @param conf        the configuration to copy for trap initialization
 * @param traps       the traps to initialize, keyed by name
 */
private void initFromTraps( FlowProcess<JobConf> flowProcess, JobConf conf, Map<String, Tap> traps )
  {
  if( traps.isEmpty() )
    return;

  // initialize against a copy so sinkConfInit calls cannot pollute the step conf
  JobConf trapConf = HadoopUtil.copyJobConf( conf );

  for( Tap tap : traps.values() )
    tap.sinkConfInit( flowProcess, trapConf );
  }
/**
 * Creates a {@link JobConf} seeded from {@code defaultJobconf} (or empty when
 * null) and overlays the given properties onto it.
 *
 * @param properties    properties to copy into the configuration, may be {@code null}
 * @param defaultJobconf the base configuration to copy, may be {@code null}
 * @return the resulting {@link JobConf}
 */
public static JobConf createJobConf( Map<Object, Object> properties, JobConf defaultJobconf )
  {
  JobConf jobConf;

  if( defaultJobconf == null )
    jobConf = new JobConf();
  else
    jobConf = copyJobConf( defaultJobconf );

  if( properties == null )
    return jobConf;

  return copyConfiguration( properties, jobConf );
  }
/**
 * Hands out a copy of the configuration returned by {@code getConfig()} so the
 * internal state cannot be modified by callers.
 *
 * @return a new {@link JobConf} copy
 */
@Override
public JobConf getConfigCopy()
  {
  return HadoopUtil.copyJobConf( getConfig() );
  }
/**
 * Builds a {@link JobConf} from an optional template and an optional property map.
 * A null {@code defaultJobconf} yields an empty configuration; a null
 * {@code properties} map returns the (copied) configuration unchanged.
 *
 * @param properties     properties to overlay, may be {@code null}
 * @param defaultJobconf the configuration to copy from, may be {@code null}
 * @return the resulting {@link JobConf}
 */
public static JobConf createJobConf( Map<Object, Object> properties, JobConf defaultJobconf )
  {
  JobConf jobConf = defaultJobconf != null ? copyJobConf( defaultJobconf ) : new JobConf();

  if( properties == null )
    return jobConf;

  return copyConfiguration( properties, jobConf );
  }
protected void initConfig( Map<Object, Object> properties, JobConf parentConfig ) { if( properties != null ) parentConfig = createConfig( properties, parentConfig ); if( parentConfig == null ) // this is ok, getJobConf will pass a default parent in return; jobConf = HadoopUtil.copyJobConf( parentConfig ); // prevent local values from being shared jobConf.set( "fs.http.impl", HttpFileSystem.class.getName() ); jobConf.set( "fs.https.impl", HttpFileSystem.class.getName() ); syncPaths = HadoopMRUtil.addToClassPath( jobConf, getClassPath() ); }
protected void initConfig( Map<Object, Object> properties, JobConf parentConfig ) { if( properties != null ) parentConfig = createConfig( properties, parentConfig ); if( parentConfig == null ) // this is ok, getJobConf will pass a default parent in return; jobConf = HadoopUtil.copyJobConf( parentConfig ); // prevent local values from being shared jobConf.set( "fs.http.impl", HttpFileSystem.class.getName() ); jobConf.set( "fs.https.impl", HttpFileSystem.class.getName() ); syncPaths = HadoopMRUtil.addToClassPath( jobConf, getClassPath() ); }
/**
 * Configures the Flink program for this step: registers the Cascading
 * {@code Tuple} type with Flink's Kryo serializer, derives a step-local
 * configuration from the parent, stamps the step ordinal onto it, and stores
 * it on this instance.
 *
 * @param flowProcess  the current flow process
 * @param parentConfig the parent configuration to copy; a new {@link JobConf}
 *                     is used when {@code null}
 * @return the initialized step configuration
 */
public Configuration createInitializedConfig( FlowProcess<Configuration> flowProcess, Configuration parentConfig )
  {
  this.env.getConfig().registerKryoType( Tuple.class );

  Configuration config;

  if( parentConfig == null )
    config = new JobConf();
  else
    config = HadoopUtil.copyJobConf( parentConfig );

  config.set( "cascading.flow.step.num", Integer.toString( getOrdinal() ) );
  HadoopUtil.setIsInflow( config );

  this.setConfig( config );

  return config;
  }
@Override public TupleEntryCollector openTrapForWrite( Tap trap ) throws IOException { JobConf jobConf = HadoopUtil.copyJobConf( getJobConf() ); int stepNum = jobConf.getInt( "cascading.flow.step.num", 0 ); String partname; if( jobConf.getBoolean( "mapred.task.is.map", true ) ) partname = String.format( "-m-%05d-", stepNum ); else partname = String.format( "-r-%05d-", stepNum ); jobConf.set( "cascading.tapcollector.partname", "%s%spart" + partname + "%05d" ); return trap.openForWrite( new HadoopFlowProcess( this, jobConf ), null ); // do not honor sinkmode as this may be opened across tasks }