@Test public void testUnsubscribeScan() throws Exception { ObservableEventStream.getEventStream("HTTP-ClusterB", 20) .scan(new HashMap<String, String>(), new BiFunction<HashMap<String, String>, Event, HashMap<String, String>>() { @Override public HashMap<String, String> apply(HashMap<String, String> accum, Event perInstanceEvent) { accum.put("instance", perInstanceEvent.instanceId); return accum; } }) .take(10) .blockingForEach(new Consumer<HashMap<String, String>>() { @Override public void accept(HashMap<String, String> pv) { System.out.println(pv); } }); Thread.sleep(200); // make sure the event streams receive their interrupt } }
@Override public void accept(Emitter<Event> s) { s.onNext(randomEvent(type, numInstances)); try { // slow it down somewhat Thread.sleep(50); } catch (InterruptedException e) { Thread.currentThread().interrupt(); s.onError(e); } } }
/**
 * Builds a synthetic {@code Event} of the given type for a randomly chosen
 * instance id, carrying random HTTP status-code counters.
 *
 * @param type         event/cluster type label
 * @param numInstances upper bound (exclusive) for the random instance number
 * @return a new Event with count200/count4xx/count5xx metrics
 */
public static Event randomEvent(String type, int numInstances) {
    Map<String, Object> metrics = new LinkedHashMap<String, Object>();
    metrics.put("count200", randomIntFrom0to(4000));
    metrics.put("count4xx", randomIntFrom0to(300));
    metrics.put("count5xx", randomIntFrom0to(500));
    return new Event(type, "instance_" + randomIntFrom0to(numInstances), metrics);
}
@Test public void testTakeUnsubscribesOnGroupBy() throws Exception { Observable.merge( ObservableEventStream.getEventStream("HTTP-ClusterA", 50), ObservableEventStream.getEventStream("HTTP-ClusterB", 20) ) // group by type (2 clusters) .groupBy(new Function<Event, String>() { @Override public String apply(Event event) { return event.type; } }) .take(1) .blockingForEach(new Consumer<GroupedObservable<String, Event>>() { @Override public void accept(GroupedObservable<String, Event> v) { System.out.println(v); v.take(1).subscribe(); // FIXME groups need consumption to a certain degree to cancel upstream } }); System.out.println("**** finished"); Thread.sleep(200); // make sure the event streams receive their interrupt }
@Override public void accept(Emitter<Event> s) { s.onNext(randomEvent(type, numInstances)); try { // slow it down somewhat Thread.sleep(50); } catch (InterruptedException e) { Thread.currentThread().interrupt(); s.onError(e); } } }
/**
 * Builds a synthetic {@code Event} of the given type for a randomly chosen
 * instance id, carrying random HTTP status-code counters.
 *
 * @param type         event/cluster type label
 * @param numInstances upper bound (exclusive) for the random instance number
 * @return a new Event with count200/count4xx/count5xx metrics
 */
public static Event randomEvent(String type, int numInstances) {
    Map<String, Object> metrics = new LinkedHashMap<String, Object>();
    metrics.put("count200", randomIntFrom0to(4000));
    metrics.put("count4xx", randomIntFrom0to(300));
    metrics.put("count5xx", randomIntFrom0to(500));
    return new Event(type, "instance_" + randomIntFrom0to(numInstances), metrics);
}
// NOTE(review): this test method is truncated in this view — the merge(...) chain
// continues beyond the visible lines, so the code is left byte-identical here.
@Test public void testTakeUnsubscribesOnFlatMapOfGroupBy() throws Exception { Observable.merge( ObservableEventStream.getEventStream("HTTP-ClusterA", 50), ObservableEventStream.getEventStream("HTTP-ClusterB", 20)
// NOTE(review): this test method is truncated in this view — the groupBy(...) chain
// continues beyond the visible lines, so the code is left byte-identical here.
@Test public void testZipObservableOfObservables() throws Exception { ObservableEventStream.getEventStream("HTTP-ClusterB", 20) .groupBy(new Function<Event, String>() { @Override
@Test public void testTakeUnsubscribesOnGroupBy() throws Exception { Observable.merge( ObservableEventStream.getEventStream("HTTP-ClusterA", 50), ObservableEventStream.getEventStream("HTTP-ClusterB", 20) ) // group by type (2 clusters) .groupBy(new Function<Event, String>() { @Override public String apply(Event event) { return event.type; } }) .take(1) .blockingForEach(new Consumer<GroupedObservable<String, Event>>() { @Override public void accept(GroupedObservable<String, Event> v) { System.out.println(v); v.take(1).subscribe(); // FIXME groups need consumption to a certain degree to cancel upstream } }); System.out.println("**** finished"); Thread.sleep(200); // make sure the event streams receive their interrupt }
@Test public void testUnsubscribeScan() throws Exception { ObservableEventStream.getEventStream("HTTP-ClusterB", 20) .scan(new HashMap<String, String>(), new BiFunction<HashMap<String, String>, Event, HashMap<String, String>>() { @Override public HashMap<String, String> apply(HashMap<String, String> accum, Event perInstanceEvent) { accum.put("instance", perInstanceEvent.instanceId); return accum; } }) .take(10) .blockingForEach(new Consumer<HashMap<String, String>>() { @Override public void accept(HashMap<String, String> pv) { System.out.println(pv); } }); Thread.sleep(200); // make sure the event streams receive their interrupt } }
// NOTE(review): this test method is truncated in this view — the merge(...) chain
// continues beyond the visible lines, so the code is left byte-identical here.
@Test public void testTakeUnsubscribesOnFlatMapOfGroupBy() throws Exception { Observable.merge( ObservableEventStream.getEventStream("HTTP-ClusterA", 50), ObservableEventStream.getEventStream("HTTP-ClusterB", 20)
// NOTE(review): this test method is truncated in this view — the groupBy(...) chain
// continues beyond the visible lines, so the code is left byte-identical here.
@Test public void testZipObservableOfObservables() throws Exception { ObservableEventStream.getEventStream("HTTP-ClusterB", 20) .groupBy(new Function<Event, String>() { @Override