/**
 * Captures a defensive copy of the current entry's tuple as the sole context
 * element, overwriting whatever was captured on a previous call.
 */
protected void performOperation( Tuple[] context, TupleEntry entry )
  {
  Tuple captured = entry.getTupleCopy();

  context[ 0 ] = captured;
  }
}
/**
 * Returns the data captured by this stub as a {@link Data} instance which enables
 * further sorting, filtering, and transformation.
 * <p>
 * Each collected entry's tuple is copied so the returned view is detached from
 * internal state, and the backing list is wrapped as unmodifiable.
 */
public Data result()
  {
  List<Tuple> copies = new ArrayList<Tuple>();

  for( TupleEntry entry : collected )
    copies.add( entry.getTupleCopy() );

  return new Data( declaredFields, Collections.unmodifiableList( copies ) );
  }
@Override
protected void collect( TupleEntry tupleEntry ) throws IOException
  {
  // notify the callback before capturing, preserving the original notification order
  callback.modified();

  Tuple captured = tupleEntry.getTupleCopy();
  output.add( captured );
  }
@Override public void receive( Duct previous, int ordinal, TupleEntry incomingEntry ) { Tuple valuesTuple = incomingEntry.getTupleCopy(); Tuple groupTuple = keyBuilder[ ordinal ].makeResult( valuesTuple, null ); // view on valuesTuple groupTuple = getDelegatedTuple( groupTuple ); // wrap so hasher/comparator is honored keys.add( groupTuple ); keyValues[ ordinal ].get( groupTuple ).add( valuesTuple ); }
@Override public void receive( Duct previous, int ordinal, TupleEntry incomingEntry ) { Tuple valuesTuple = incomingEntry.getTupleCopy(); Tuple groupTuple = keyBuilder[ 0 ].makeResult( valuesTuple, null ); // view on valuesTuple groupTuple = getDelegatedTuple( groupTuple ); // wrap so hasher/comparator is honored keys.add( groupTuple ); valueMap.put( groupTuple, valuesTuple ); }
/** {@inheritDoc} */
@Override
public void sink( FlowProcess<? extends Properties> flowProcess, SinkCall<Void, List<Tuple>> sinkCall ) throws IOException
  {
  // copy so the sunk tuple is independent of the outgoing entry's buffer
  Tuple captured = sinkCall.getOutgoingEntry().getTupleCopy();

  sinkCall.getOutput().add( captured );
  }
/**
 * Constructor TupleEntry creates a new TupleEntry instance that is a safe copy of the
 * given tupleEntry.
 * <p>
 * The new instance is safe to cache and will be modifiable regardless of the given
 * tupleEntry state, since the underlying tuple is copied rather than shared.
 *
 * @param tupleEntry of type TupleEntry
 */
@ConstructorProperties({"tupleEntry"})
public TupleEntry( TupleEntry tupleEntry )
  {
  if( tupleEntry == null )
    throw new IllegalArgumentException( "tupleEntry may not be null" );

  this.tuple = tupleEntry.getTupleCopy();
  this.fields = tupleEntry.getFields();

  setCoercions();
  }
/**
 * Reads every tuple available from the given tap and returns them as a list.
 * <p>
 * Each entry's tuple is copied so the returned list does not alias iterator-owned
 * state. The iterator is always closed, even when reading fails.
 *
 * @param t    the tap to read from
 * @param conf the flow process used to open the tap
 * @return a list containing a copy of every tuple read
 * @throws IOException if opening, reading, or closing the tap fails
 */
public static List<Tuple> getAllTuples(Tap<JobConf, ?, ?> t, FlowProcess<JobConf> conf) throws IOException {
    TupleEntryIterator iter = t.openForRead(conf);
    try {
        List<Tuple> tuples = new ArrayList<Tuple>();
        while (iter.hasNext()) {
            tuples.add(iter.next().getTupleCopy());
        }
        return tuples;
    } finally {
        iter.close(); // was never closed before, leaking the underlying reader
    }
} }
@Override
public void receive( Duct previous, int ordinal, TupleEntry incomingEntry )
  {
  // only the accumulated side (ordinal != 0) is copied, since its tuples are stored;
  // the streamed side's tuple is used in place because it is consumed immediately below
  Tuple incomingTuple = ordinal != 0 ? incomingEntry.getTupleCopy() : incomingEntry.getTuple();
  Tuple keyTuple = keyBuilder[ ordinal ].makeResult( incomingTuple, null ); // view in incomingTuple
  // wrap so any custom hasher/comparator is honored during map/set lookups
  keyTuple = getDelegatedTuple( keyTuple );
  if( ordinal != 0 )
    {
    // accumulated side: remember the key and stash the copied values under it
    keys.add( keyTuple );
    keyValues[ ordinal ].get( keyTuple ).add( incomingTuple ); // always a copy
    return;
    }
  // streamed side: block until the accumulated side has finished arriving
  waitOnLatch();
  // this key is now being joined; remove it so it is not re-emitted later
  keys.remove( keyTuple );
  streamedCollection.set( 0, incomingTuple ); // no need to copy, temp setting
  performJoinWith( keyTuple );
  }
protected void collect( TupleEntry tupleEntry )
  {
  // capture a copy when requested, otherwise hold the live tuple reference
  tuples.add( copyTupleOnCollect ? tupleEntry.getTupleCopy() : tupleEntry.getTuple() );
  }
/**
 * Appends a copy of the current entry's tuple to the context, lazily creating the
 * context tuple and capping the number of captured entries at {@code firstN}.
 */
protected void performOperation( Tuple[] context, TupleEntry entry )
  {
  if( context[ 0 ] == null )
    context[ 0 ] = new Tuple();

  Tuple collected = context[ 0 ];

  // stop capturing once the first N entries have been kept
  if( collected.size() < firstN )
    collected.add( entry.getTupleCopy() );
  }
@Override
public Tuple aggregate( FlowProcess flowProcess, TupleEntry args, Tuple context )
  {
  // first value seen seeds the running result with a safe copy
  if( context == null )
    return args.getTupleCopy();

  // null arguments never displace the current value
  if( args.getObject( 0 ) == null )
    return context;

  Comparable current = (Comparable) context.getObject( 0 );
  Comparable candidate = (Comparable) args.getObject( 0 );

  // keep the larger of the two (a null current value is always replaced)
  if( current == null || current.compareTo( candidate ) < 0 )
    context.set( 0, candidate );

  return context;
  }
@Override
public Tuple aggregate( FlowProcess flowProcess, TupleEntry args, Tuple context )
  {
  // first value seen seeds the running result with a safe copy
  if( context == null )
    return args.getTupleCopy();

  // null arguments never displace the current value
  if( args.getObject( 0 ) == null )
    return context;

  Comparable current = (Comparable) context.getObject( 0 );
  Comparable candidate = (Comparable) args.getObject( 0 );

  // keep the smaller of the two (a null current value is always replaced)
  if( current == null || current.compareTo( candidate ) > 0 )
    context.set( 0, candidate );

  return context;
  }
@Override
public Tuple aggregate( FlowProcess flowProcess, TupleEntry args, Tuple context )
  {
  // first value seen seeds the running sum with a safe copy
  if( context == null )
    return args.getTupleCopy();

  // null arguments contribute nothing to the sum
  if( args.getObject( 0 ) == null )
    return context;

  double sum = context.getDouble( 0 ) + args.getDouble( 0 );
  context.set( 0, sum );

  return context;
  }
/**
 * Drains the given iterator into the supplied collection, copying each tuple so the
 * collection owns its elements, and returns that same collection for chaining.
 */
public static <C extends Collection<Tuple>> C asCollection( TupleEntryIterator iterator, C result )
  {
  while( iterator.hasNext() )
    {
    TupleEntry entry = iterator.next();
    result.add( entry.getTupleCopy() );
    }

  return result;
  }
@Override public Tuple aggregate( FlowProcess flowProcess, TupleEntry args, Tuple context ) { if( doComparison == null ) doComparison = args.getFields().hasComparators(); // ensure we use resolved fields if( context == null || ( doComparison && args.getFields().compare( context, args.getTuple() ) > 0 ) ) return args.getTupleCopy(); return context; }
/**
 * Reads back everything written to this tap at commit time, capturing a copy of every
 * tuple into {@code results} and recording the fields of the first entry seen.
 * <p>
 * The iterator is always closed, even when iteration throws.
 *
 * @param conf the job configuration used to open the tap
 * @return always {@code true}
 * @throws java.io.IOException if opening, reading, or closing the tap fails
 */
@Override
public boolean commitResource(JobConf conf) throws java.io.IOException {
    TupleEntryIterator it = new HadoopFlowProcess(conf).openTapForRead(this);
    try {
        boolean firstTime = true;
        while (it.hasNext()) {
            TupleEntry entry = it.next();
            results.add(entry.getTupleCopy());
            if (firstTime) {
                fields = entry.getFields();
                firstTime = false;
            }
        }
    } finally {
        it.close(); // was only closed on success, leaking the reader on exceptions
    }
    return true;
} }
@Override public boolean isRemove( FlowProcess flowProcess, FilterCall<CascadingCache<Tuple, Object>> filterCall ) { // we assume its more painful to create lots of tuple copies vs comparisons Tuple args = TupleHasher.wrapTuple( tupleHasher, filterCall.getArguments().getTuple() ); switch( include ) { case ALL: break; case NO_NULLS: if( Tuples.frequency( args, null ) == args.size() ) return true; break; } if( filterCall.getContext().containsKey( args ) ) { flowProcess.increment( Cache.Num_Keys_Hit, 1 ); return true; } // only do the copy here filterCall.getContext().put( TupleHasher.wrapTuple( tupleHasher, filterCall.getArguments().getTupleCopy() ), NULL_VALUE ); flowProcess.increment( Cache.Num_Keys_Missed, 1 ); return false; }