/**
 * Opens this tap for writing.
 *
 * @param flowProcess the current flow process, forwarded to the collector
 * @param output      the Hadoop {@code OutputCollector} to write through
 * @return a {@code TupleEntryCollector} backed by a new {@code HadoopTupleEntrySchemeCollector}
 * @throws IOException if the underlying collector cannot be created
 */
@Override
public TupleEntryCollector openForWrite( FlowProcess<JobConf> flowProcess, OutputCollector output ) throws IOException
  {
  HadoopTupleEntrySchemeCollector schemeCollector = new HadoopTupleEntrySchemeCollector( flowProcess, this, output );

  return schemeCollector;
  }
/**
 * Creates a collector for the given tap that writes through the supplied Hadoop
 * {@code OutputCollector}.
 *
 * @param flowProcess     the current flow process
 * @param tap             the tap being written to; supplies the scheme and identifier
 * @param outputCollector the Hadoop collector handed to {@code makeCollector}
 * @throws IOException if the underlying collector cannot be created
 */
public HadoopTupleEntrySchemeCollector( FlowProcess<? extends Configuration> flowProcess, Tap<Configuration, RecordReader, OutputCollector> tap, OutputCollector outputCollector ) throws IOException
  {
  // super(...) must be the first statement; makeCollector resolves the actual
  // collector instance the scheme will write through
  super( flowProcess, tap, tap.getScheme(), makeCollector( flowProcess, tap, outputCollector ), tap.getIdentifier() );
  }
/**
 * Wraps the given collector in a {@code MeasuredOutputCollector} so time spent
 * writing is recorded against {@code SliceCounters.Write_Duration}.
 *
 * @param outputCollector the collector to wrap
 * @return the shared measuring wrapper, re-pointed at the newly wrapped collector
 */
@Override
protected OutputCollector<?, ?> wrapOutput( OutputCollector outputCollector )
  {
  // create the measuring wrapper once; subsequent calls reuse the same instance
  if( measuredOutputCollector == null )
    measuredOutputCollector = new MeasuredOutputCollector( getFlowProcess(), SliceCounters.Write_Duration );

  // let the superclass apply its own wrapping first, then measure around it
  OutputCollector wrapped = super.wrapOutput( outputCollector );

  measuredOutputCollector.setOutputCollector( wrapped );

  return measuredOutputCollector;
  }
}
/**
 * Builds a collector over {@code tap}, routing writes through the given Hadoop
 * {@code OutputCollector}.
 *
 * @param flowProcess     the current flow process
 * @param tap             source of the scheme and identifier passed to the superclass
 * @param outputCollector forwarded to {@code makeCollector} to obtain the write target
 * @throws IOException if collector creation fails
 */
public HadoopTupleEntrySchemeCollector( FlowProcess<? extends Configuration> flowProcess, Tap<Configuration, RecordReader, OutputCollector> tap, OutputCollector outputCollector ) throws IOException
  {
  // the superclass call is mandatory and must come first
  super( flowProcess, tap, tap.getScheme(), makeCollector( flowProcess, tap, outputCollector ), tap.getIdentifier() );
  }
/**
 * Decorates the collector returned by the superclass with a
 * {@code MeasuredOutputCollector} that accumulates write time into
 * {@code SliceCounters.Write_Duration}.
 *
 * @param outputCollector the collector to wrap
 * @return the lazily created measuring wrapper
 */
@Override
protected OutputCollector<?, ?> wrapOutput( OutputCollector outputCollector )
  {
  // lazily instantiate the measuring wrapper on first use
  if( measuredOutputCollector == null )
    measuredOutputCollector = new MeasuredOutputCollector( getFlowProcess(), SliceCounters.Write_Duration );

  // superclass wrapping is applied first; the measured wrapper delegates to it
  OutputCollector inner = super.wrapOutput( outputCollector );

  measuredOutputCollector.setOutputCollector( inner );

  return measuredOutputCollector;
  }
}
/**
 * Opens this tap for writing tuple entries.
 *
 * @param flowProcess the current flow process
 * @param output      the Hadoop {@code OutputCollector} that receives the writes
 * @return a new {@code HadoopTupleEntrySchemeCollector} over this tap
 * @throws IOException if the collector cannot be opened
 */
@Override
public TupleEntryCollector openForWrite( FlowProcess<JobConf> flowProcess, OutputCollector output ) throws IOException
  {
  return new HadoopTupleEntrySchemeCollector( flowProcess, this, output );
  }
/**
 * Creates a collector for the given tap with no pre-built Hadoop
 * {@code OutputCollector}; {@code makeCollector} receives {@code null} and is
 * responsible for producing the write target.
 *
 * @param flowProcess the current flow process
 * @param tap         the tap being written to; supplies the scheme and identifier
 * @throws IOException if the underlying collector cannot be created
 */
public HadoopTupleEntrySchemeCollector( FlowProcess<? extends Configuration> flowProcess, Tap<Configuration, RecordReader, OutputCollector> tap ) throws IOException
  {
  // no collector supplied here — makeCollector is called with null
  super( flowProcess, tap, tap.getScheme(), makeCollector( flowProcess, tap, null ), tap.getIdentifier() );
  }
@Override public TupleEntryCollector openForWrite( FlowProcess<? extends Configuration> flowProcess, OutputCollector output ) throws IOException { resetFileStatuses(); // output may be null when this method is called on the client side or cluster side when creating // side files with the PartitionTap return new HadoopTupleEntrySchemeCollector( flowProcess, this, output ); }
/**
 * Builds a collector over {@code tap} without a caller-supplied Hadoop
 * {@code OutputCollector}.
 *
 * @param flowProcess the current flow process
 * @param tap         source of the scheme and identifier passed to the superclass
 * @throws IOException if collector creation fails
 */
public HadoopTupleEntrySchemeCollector( FlowProcess<? extends Configuration> flowProcess, Tap<Configuration, RecordReader, OutputCollector> tap ) throws IOException
  {
  // makeCollector receives null and must produce the actual write target
  super( flowProcess, tap, tap.getScheme(), makeCollector( flowProcess, tap, null ), tap.getIdentifier() );
  }
@Override public TupleEntryCollector openForWrite( FlowProcess<? extends Configuration> flowProcess, OutputCollector output ) throws IOException { resetFileStatuses(); // output may be null when this method is called on the client side or cluster side when creating // side files with the PartitionTap return new HadoopTupleEntrySchemeCollector( flowProcess, this, output ); }
/**
 * Opens this tap for writing, provided the tap is configured as a sink.
 *
 * @param flowProcess the current flow process
 * @param output      the Hadoop {@code OutputCollector} to write through
 * @return a new {@code HadoopTupleEntrySchemeCollector} over this tap
 * @throws TapException if this tap is not a sink (no TableDesc defined)
 * @throws IOException  if the collector cannot be opened
 */
@Override
public TupleEntryCollector openForWrite( FlowProcess<? extends Configuration> flowProcess, OutputCollector output ) throws IOException
  {
  if( isSink() )
    return new HadoopTupleEntrySchemeCollector( flowProcess, this, output );

  throw new TapException( "this tap may not be used as a sink, no TableDesc defined" );
  }