/** * The onStarting event is fired when a Flow instance receives the start() message. A Flow is cut * down into executing units called stepFlow. A stepFlow contains a stepFlowJob which represents * the mapreduce job to be submitted to Hadoop. The ambrose graph is constructed from the step * graph found in flow object. * * @param flow the flow. */ @Override @SuppressWarnings("unchecked") public void onStarting(Flow flow) { // init flow List<FlowStep> steps = flow.getFlowSteps(); totalNumberOfJobs = steps.size(); currentFlowId = flow.getID(); Properties props = new Properties(); props.putAll(flow.getConfigAsProperties()); try { statsWriteService.initWriteService(props); } catch (IOException e) { LOG.error("Failed to initialize statsWriteService", e); } // convert graph from cascading to ambrose AmbroseCascadingGraphConverter converter = new AmbroseCascadingGraphConverter(Flows.getStepGraphFrom(flow), nodesByName); converter.convert(); AmbroseUtils.sendDagNodeNameMap(statsWriteService, currentFlowId, nodesByName); }
// NOTE(review): fixed 1s pause — presumably gives the flow time to submit its
// step jobs before the job map is inspected; TODO confirm, a condition-based
// wait would be more robust. Flows.getJobsMap appears to expose the
// FlowStepJob instances of the flow — verify key semantics against Flows.
Thread.sleep( 1000 ); Map<String, FlowStepJob> map = Flows.getJobsMap( first );
// Resolve the source Tap whose id matches the "cascading.step.source" entry
// read from the job configuration; lookup is delegated to Flows.getTapForID
// over the node's source taps.
Tap source = Flows.getTapForID( flowNode.getSourceTaps(), jobConf.get( "cascading.step.source" ) );
// Select the flow element identified by the first key of inputMultiMap as the
// streamed source. NOTE(review): assumes the first key returned by
// Util.getFirst is the intended streamed input — confirm key ordering.
streamedSource = Flows.getFlowElementForID( node.getSourceElements(), Util.getFirst( inputMultiMap.getKeys() ) );
/**
 * Creates a trace writer bound to the given flow. A null flow produces an
 * uninitialized writer whose fields keep their defaults.
 *
 * @param flow the flow to trace, may be null.
 */
public TraceWriter( Flow flow )
  {
  if( flow != null )
    {
    this.properties = flow.getConfigAsProperties();
    this.flowName = Flows.getNameOrID( flow );
    // NOTE(review): assumes the concrete Flow implementation also implements
    // ProcessLogger — TODO confirm against BaseFlow
    this.processLogger = (ProcessLogger) flow;
    }
  }
/**
 * Returns a copy of the given FlowDef in which any Tap appearing in more than
 * one role (source, sink, trap, checkpoint) is normalized to a single shared
 * instance. Each role map is scanned in order, accumulating every tap seen so
 * far; a role whose map contained a duplicate is rewritten against that
 * accumulated set before the FlowDef is copied.
 *
 * @param flowDef the flow definition to normalize.
 * @return a FlowDef copy built from the (possibly rewritten) role maps.
 */
private FlowDef normalizeTaps( FlowDef flowDef )
  {
  Set<Tap> allTaps = new HashSet<>();

  Map<String, Tap> sourceTaps = flowDef.getSourcesCopy();
  Map<String, Tap> sinkTaps = flowDef.getSinksCopy();
  Map<String, Tap> trapTaps = flowDef.getTrapsCopy();
  Map<String, Tap> checkpointTaps = flowDef.getCheckpointsCopy();

  // accumulate every tap into allTaps, noting which role maps held duplicates
  boolean dupeInSources = addTaps( sourceTaps, allTaps );
  boolean dupeInSinks = addTaps( sinkTaps, allTaps );
  boolean dupeInTraps = addTaps( trapTaps, allTaps );
  boolean dupeInCheckpoints = addTaps( checkpointTaps, allTaps );

  if( dupeInSources )
    normalize( allTaps, sourceTaps );

  if( dupeInSinks )
    normalize( allTaps, sinkTaps );

  if( dupeInTraps )
    normalize( allTaps, trapTaps );

  if( dupeInCheckpoints )
    normalize( allTaps, checkpointTaps );

  return Flows.copy( flowDef, sourceTaps, sinkTaps, trapTaps, checkpointTaps );
  }
// Dispatch the onCompleted notification for the flow through the Flows helper.
Flows.fireOnCompleted( flow );
// NOTE(review): hard-coded 1s sleep — looks like it waits for step jobs to be
// registered before reading the job map; TODO confirm and consider replacing
// with an explicit readiness check instead of a timed wait.
Thread.sleep( 1000 ); Map<String, FlowStepJob> map = Flows.getJobsMap( first );
// Look up the source Tap by the id stored under "cascading.step.source" in the
// job configuration, searching the node's source taps via Flows.getTapForID.
Tap source = Flows.getTapForID( flowNode.getSourceTaps(), jobConf.get( "cascading.step.source" ) );
// Resolve the streamed source element by id, using the first key of
// inputMultiMap. NOTE(review): presumably that first key identifies the
// streamed (as opposed to accumulated) input — verify against caller.
streamedSource = Flows.getFlowElementForID( node.getSourceElements(), Util.getFirst( inputMultiMap.getKeys() ) );
// NOTE(review): fixed 1s delay before fetching the flow's step-job map —
// presumably allows job submission to happen first; TODO confirm and prefer a
// condition-based wait over a timed sleep.
Thread.sleep( 1000 ); Map<String, FlowStepJob> map = Flows.getJobsMap( flow );
// Fetch the Cascading step graph for the flow, then begin constructing a
// JGraphT directed graph over BaseFlowStep vertices with FlowGraphEdge edges
// (anonymous EdgeFactory continues beyond this view).
// NOTE(review): 'graph' is declared with the raw DirectedGraph type — consider
// DirectedGraph<BaseFlowStep, FlowGraphEdge> for type safety.
FlowStepGraph flowStepGraph = Flows.getStepGraphFrom(flow); DirectedGraph graph = new DefaultDirectedGraph<BaseFlowStep, FlowGraphEdge>( new EdgeFactory<BaseFlowStep, FlowGraphEdge>() {
// NOTE(review): 1s timed wait before reading the step-job map for the flow —
// appears to give the jobs time to be created; TODO confirm necessity.
Thread.sleep( 1000 ); Map<String, FlowStepJob> map = Flows.getJobsMap( flow );
/** * The onStarting event is fired when a Flow instance receives the start() message. A Flow is cut * down into executing units called stepFlow. A stepFlow contains a stepFlowJob which represents * the mapreduce job to be submitted to Hadoop. The ambrose graph is constructed from the step * graph found in flow object. * * @param flow the flow. */ @Override @SuppressWarnings("unchecked") public void onStarting(Flow flow) { // init flow List<FlowStep> steps = flow.getFlowSteps(); totalNumberOfJobs = steps.size(); currentFlowId = flow.getID(); Properties props = new Properties(); props.putAll(flow.getConfigAsProperties()); try { statsWriteService.initWriteService(props); } catch (IOException e) { LOG.error("Failed to initialize statsWriteService", e); } // convert graph from cascading to ambrose AmbroseCascadingGraphConverter converter = new AmbroseCascadingGraphConverter(Flows.getStepGraphFrom(flow), nodesByName); converter.convert(); AmbroseUtils.sendDagNodeNameMap(statsWriteService, currentFlowId, nodesByName); }
// NOTE(review): hard-coded 1s pause before pulling the flow's step-job map —
// presumably waits for job creation; TODO confirm, a readiness check would be
// more reliable than sleeping.
Thread.sleep( 1000 ); Map<String, FlowStepJob> map = Flows.getJobsMap( flow );
// Obtain the flow's Cascading step graph, then start building a JGraphT
// directed graph with BaseFlowStep vertices and FlowGraphEdge edges (the
// anonymous EdgeFactory body continues past this line).
// NOTE(review): raw DirectedGraph declaration loses the type parameters —
// DirectedGraph<BaseFlowStep, FlowGraphEdge> would be safer.
FlowStepGraph flowStepGraph = Flows.getStepGraphFrom(flow); DirectedGraph graph = new DefaultDirectedGraph<BaseFlowStep, FlowGraphEdge>( new EdgeFactory<BaseFlowStep, FlowGraphEdge>() {
// NOTE(review): timed 1s wait before accessing the flow's step-job map —
// looks like a submission-latency workaround; TODO confirm whether a
// condition-based wait could replace the sleep.
Thread.sleep( 1000 ); Map<String, FlowStepJob> map = Flows.getJobsMap( flow );