/**
 * Method getProperty returns the value associated with the given key from the underlying properties system.
 *
 * @param key of type String
 * @return String
 */
public String getProperty( String key )
  {
  return getConfig().get( key );
  }
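// Illustrative usage sketch, not part of the original source: it shows how a property
// supplied to the connector becomes visible through getProperty(). The key "example.key",
// its value, and the passed-in taps and pipe are hypothetical placeholders.
public void examplePropertyLookup( Tap source, Tap sink, Pipe pipe )
  {
  Properties properties = new Properties();
  properties.setProperty( "example.key", "example.value" );

  HadoopFlow flow = (HadoopFlow) new HadoopFlowConnector( properties ).connect( source, sink, pipe );

  String value = flow.getProperty( "example.key" ); // expected: "example.value"
  }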
protected void internalClean( boolean stop )
  {
  if( !isPreserveTemporaryFiles() )
    cleanTemporaryFiles( stop );
  }
@Override
protected void internalStart()
  {
  try
    {
    copyToDistributedCache();
    deleteSinksIfReplace();
    deleteTrapsIfReplace();
    deleteCheckpointsIfReplace();
    }
  catch( IOException exception )
    {
    throw new FlowException( "unable to delete sinks", exception );
    }

  registerHadoopShutdownHook();
  }
protected int getMaxNumParallelSteps()
  {
  return stepsAreLocal() ? 1 : getMaxConcurrentSteps( getConfig() );
  }
@Test
public void testNestedProperties() throws IOException
  {
  Tap source = new Hfs( new TextLine( new Fields( "line" ) ), "/input" );

  Pipe pipe = new Pipe( "test" );
  pipe = new Each( pipe, new RegexSplitter( new Fields( "first", "second", "third" ), "\\s" ), Fields.ALL );

  Tap sink = new Hfs( new TextLine(), "output", SinkMode.REPLACE );

  Properties defaultProperties = new Properties();
  defaultProperties.setProperty( "test.key", "test.value" );

  HadoopFlow flow = (HadoopFlow) getPlatform().getFlowConnector( new Properties( defaultProperties ) ).connect( source, sink, pipe );

  assertEquals( "test flow", "test.value", flow.getProperty( "test.key" ) );
  assertEquals( "test step", "test.value",
    ( (HadoopFlowStep) flow.getFlowSteps().get( 0 ) ).createInitializedConfig( flow.getFlowProcess(), flow.getConfig() ).get( "test.key" ) );
  }
@Override
public FlowProcess<JobConf> getFlowProcess()
  {
  return new HadoopFlowProcess( getFlowSession(), getConfig() );
  }
private void cleanTemporaryFiles( boolean stop )
  {
  if( stop ) // unstable to call fs operations during shutdown
    return;

  // use step config so cascading.flow.step.path property is properly used
  for( FlowStep<JobConf> step : getFlowSteps() )
    ( (BaseFlowStep<JobConf>) step ).clean();
  }
@Override
protected HadoopFlow createFlow( FlowDef flowDef )
  {
  return new HadoopFlow( getPlatformInfo(), getDefaultProperties(), getDefaultConfig(), flowDef );
  }
protected void initConfig( Map<Object, Object> properties, JobConf parentConfig )
  {
  if( properties != null )
    parentConfig = createConfig( properties, parentConfig );

  if( parentConfig == null ) // this is ok, getJobConf will pass a default parent in
    return;

  jobConf = HadoopUtil.copyJobConf( parentConfig ); // prevent local values from being shared

  jobConf.set( "fs.http.impl", HttpFileSystem.class.getName() );
  jobConf.set( "fs.https.impl", HttpFileSystem.class.getName() );

  syncPaths = HadoopMRUtil.addToClassPath( jobConf, getClassPath() );
  }
@Override
public JobConf getConfigCopy()
  {
  return HadoopUtil.copyJobConf( getConfig() );
  }

@Override
public Map<Object, Object> getConfigAsProperties()
  {
  return HadoopUtil.createProperties( getConfig() );
  }
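// Illustrative sketch, not part of the original source: getConfigCopy() returns an
// independent JobConf, so mutating the copy does not change what the flow itself
// reports. The property key below is a hypothetical placeholder.
public void exampleConfigCopyIsIndependent( HadoopFlow flow )
  {
  JobConf copy = flow.getConfigCopy();
  copy.set( "example.key", "changed" );

  // the flow's own view of the property is unchanged
  assert !"changed".equals( flow.getProperty( "example.key" ) );
  }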