@Override
public void destroyDistributedObject(String objectName) {
    // Unregister the tracker for this name; tear it down only if one was registered.
    final JobTracker removed = jobTrackers.remove(objectName);
    if (removed == null) {
        return;
    }
    removed.destroy();
}
public static void main(String[] args) throws ExecutionException, InterruptedException { try { HazelcastInstance hz1 = Hazelcast.newHazelcastInstance(); Hazelcast.newHazelcastInstance(); Hazelcast.newHazelcastInstance(); // create a default map IMap<Integer, Integer> m1 = hz1.getMap("default"); for (int i = 0; i < 10000; i++) { m1.put(i, i); } // create a job tracker with default config JobTracker tracker = hz1.getJobTracker("myJobTracker"); // using a built-in source from our IMap. This supplies key value pairs KeyValueSource<Integer, Integer> kvs = KeyValueSource.fromMap(m1); // create a new Job with our source Job<Integer, Integer> job = tracker.newJob(kvs); // configure the job ICompletableFuture<Map<String, Integer>> myMapReduceFuture = job.mapper(new MyMapper()).reducer(new MyReducerFactory()).submit(); Map<String, Integer> result = myMapReduceFuture.get(); System.out.println("The sum of the numbers 1 to 10000 is: " + result.get("all_values")); } finally { Hazelcast.shutdownAll(); } }
@Override public void execute(HazelcastInstance hazelcastInstance) throws Exception { JobTracker jobTracker = hazelcastInstance.getJobTracker("default"); IList<Person> list = hazelcastInstance.getList("persons"); KeyValueSource<String, Person> source = KeyValueSource.fromList(list); Job<String, Person> job = jobTracker.newJob(source); // find all people named James ICompletableFuture future = job.mapper(new PersonMapper("James")).submit(); System.out.println(ToStringPrettyfier.toString(future.get())); } }
@Override
public void execute(HazelcastInstance hazelcastInstance) throws Exception {
    JobTracker tracker = hazelcastInstance.getJobTracker("default");
    IMap<String, SalaryYear> salaries = hazelcastInstance.getMap("salaries");

    KeyValueSource<String, SalaryYear> source = KeyValueSource.fromMap(salaries);

    // Full pipeline: map -> combine -> reduce, then collate partial sums into one total.
    JobCompletableFuture<Integer> future = tracker.newJob(source)
            .mapper(new SalarySumMapper())
            .combiner(new SalarySumCombinerFactory())
            .reducer(new SalarySumReducerFactory())
            .submit(new SalarySumCollator());

    System.out.println("Salary sum: " + future.get());
}
@Override
public void destroyDistributedObject(String objectName) {
    // Drop the tracker registered under this name and destroy it when present.
    final JobTracker tracker = jobTrackers.remove(objectName);
    if (tracker != null) {
        tracker.destroy();
    }
}
@Override public void execute(HazelcastInstance hazelcastInstance) throws Exception { JobTracker jobTracker = hazelcastInstance.getJobTracker("default"); IList<Person> list = hazelcastInstance.getList("persons"); KeyValueSource<String, Person> source = KeyValueSource.fromList(list); Job<String, Person> job = jobTracker.newJob(source); // find all people grouped by state // ICompletableFuture future = job.mapper(new StateBasedMapper()).submit(); // find all people for the given state ICompletableFuture future = job.mapper(new StateBasedMapper("CA")).submit(); System.out.println(ToStringPrettyfier.toString(future.get())); } }
@Override
public void shutdown(boolean terminate) {
    // Tear down every registered tracker, then clear the registry.
    for (JobTracker tracker : jobTrackers.values()) {
        tracker.destroy();
    }
    jobTrackers.clear();
}
@Override public void execute(HazelcastInstance hazelcastInstance) throws Exception { JobTracker jobTracker = hazelcastInstance.getJobTracker("default"); IList<Person> list = hazelcastInstance.getList("persons"); KeyValueSource<String, Person> source = KeyValueSource.fromList(list); Job<String, Person> job = jobTracker.newJob(source); // collect all people by state ICompletableFuture future = job.mapper(new StateBasedCountMapper()).submit(); // count people by state // ICompletableFuture future = job.mapper(new StateBasedCountMapper()).reducer(new CountReducerFactory()).submit(); // same as above but with precalculation per node // ICompletableFuture future = job.mapper(new StateBasedCountMapper()).combiner(new CountCombinerFactory()) // .reducer(new CountReducerFactory()).submit(); System.out.println(ToStringPrettyfier.toString(future.get())); } }
@Override
public void shutdown(boolean terminate) {
    // Destroy each live tracker before forgetting them all.
    for (JobTracker eachTracker : jobTrackers.values()) {
        eachTracker.destroy();
    }
    jobTrackers.clear();
}
@Override
public void execute(HazelcastInstance hazelcastInstance) throws Exception {
    JobTracker tracker = hazelcastInstance.getJobTracker("default");
    IList<Person> persons = hazelcastInstance.getList("persons");

    KeyValueSource<String, Person> source = KeyValueSource.fromList(persons);

    // Full salary pipeline: map, node-local combine, then reduce.
    ICompletableFuture future = tracker.newJob(source)
            .mapper(new SalaryMapper())
            .combiner(new SalaryCombinerFactory())
            .reducer(new SalaryReducerFactory())
            .submit();

    System.out.println(ToStringPrettyfier.toString(future.get()));
}
}
public static void main(String[] args) throws Exception { // prepare Hazelcast cluster HazelcastInstance hazelcastInstance = buildCluster(3); try { // read data fillMapWithData(hazelcastInstance); JobTracker tracker = hazelcastInstance.getJobTracker("default"); IMap<String, String> map = hazelcastInstance.getMap(MAP_NAME); KeyValueSource<String, String> source = KeyValueSource.fromMap(map); Job<String, String> job = tracker.newJob(source); ICompletableFuture<Map<String, Integer>> future = job .mapper(new TokenizerMapper()) // activate Combiner to add combining phase! // .combiner(new WordcountCombinerFactory()) .reducer(new WordcountReducerFactory()) .submit(); System.out.println(ToStringPrettyfier.toString(future.get())); } finally { // shutdown cluster Hazelcast.shutdownAll(); } }
@TimeStep(prob = 0.5)
public void mapReduce(ThreadState state) throws Exception {
    // Per-thread tracker name, so concurrent timestep threads don't share a tracker.
    String trackerName = Thread.currentThread().getName() + name;
    JobTracker tracker = targetInstance.getJobTracker(trackerName);

    KeyValueSource<Integer, Employee> source = KeyValueSource.fromMap(map);
    Job<Integer, Employee> job = tracker.newJob(source);

    // map: keep ids divisible by 2; combine: keep ids in [10, 30]; reduce: drop ids 10, 20, 30.
    ICompletableFuture<Map<Integer, Set<Employee>>> future = job
            .mapper(new ModIdMapper(2))
            .combiner(new RangeIdCombinerFactory(10, 30))
            .reducer(new IdReducerFactory(10, 20, 30))
            .submit();

    // Verify every surviving employee satisfies the full pipeline contract.
    Map<Integer, Set<Employee>> resultsById = future.get();
    for (Set<Employee> employees : resultsById.values()) {
        for (Employee employee : employees) {
            assertTrue(employee.getId() % 2 == 0);
            assertTrue(employee.getId() >= 10 && employee.getId() <= 30);
            assertTrue(employee.getId() != 10);
            assertTrue(employee.getId() != 20);
            assertTrue(employee.getId() != 30);
        }
    }
    state.operationCounter.mapReduce++;
}
@Override
public <SuppliedValue, Result> Result aggregate(Supplier<K, V, SuppliedValue> supplier,
                                                Aggregation<K, SuppliedValue, Result> aggregation,
                                                JobTracker jobTracker) {
    try {
        Preconditions.isNotNull(jobTracker, "jobTracker");

        // Run the aggregation as a map-reduce job sourced from this MultiMap.
        KeyValueSource<K, V> source = KeyValueSource.fromMultiMap(this);
        Job<K, V> job = jobTracker.newJob(source);

        Mapper mapper = aggregation.getMapper(supplier);
        CombinerFactory combinerFactory = aggregation.getCombinerFactory();
        ReducerFactory reducerFactory = aggregation.getReducerFactory();
        Collator collator = aggregation.getCollator();

        MappingJob mappingJob = job.mapper(mapper);

        // The combining phase is optional; wire it in only when the aggregation supplies one.
        ReducingSubmittableJob reducingJob = (combinerFactory == null)
                ? mappingJob.reducer(reducerFactory)
                : mappingJob.combiner(combinerFactory).reducer(reducerFactory);

        ICompletableFuture<Result> future = reducingJob.submit(collator);
        return future.get();
    } catch (Exception e) {
        throw new HazelcastException(e);
    }
}
@Override
@SuppressWarnings({"deprecation", "unchecked"})
public <SuppliedValue, Result> Result aggregate(Supplier<K, V, SuppliedValue> supplier,
                                                Aggregation<K, SuppliedValue, Result> aggregation,
                                                JobTracker jobTracker) {
    try {
        Preconditions.isNotNull(jobTracker, "jobTracker");

        // Build a map-reduce job over this map's entries.
        KeyValueSource<K, V> source = KeyValueSource.fromMap(this);
        Job<K, V> mapReduceJob = jobTracker.newJob(source);

        Mapper mapper = aggregation.getMapper(supplier);
        CombinerFactory combinerFactory = aggregation.getCombinerFactory();
        ReducerFactory reducerFactory = aggregation.getReducerFactory();
        Collator collator = aggregation.getCollator();

        MappingJob mappingJob = mapReduceJob.mapper(mapper);

        ReducingSubmittableJob reducingJob;
        if (combinerFactory == null) {
            // No combiner supplied — go straight from map to reduce.
            reducingJob = mappingJob.reducer(reducerFactory);
        } else {
            reducingJob = mappingJob.combiner(combinerFactory).reducer(reducerFactory);
        }

        // Submit and block until the collated result is available.
        ICompletableFuture<Result> future = reducingJob.submit(collator);
        return future.get();
    } catch (Exception e) {
        throw new HazelcastException(e);
    }
}
@Override
public <SuppliedValue, Result> Result aggregate(Supplier<K, V, SuppliedValue> supplier,
                                                Aggregation<K, SuppliedValue, Result> aggregation,
                                                JobTracker jobTracker) {
    try {
        isNotNull(jobTracker, "jobTracker");

        // Source the job from this MultiMap's entries.
        KeyValueSource<K, V> source = KeyValueSource.fromMultiMap(this);
        Job<K, V> job = jobTracker.newJob(source);

        Mapper mapper = aggregation.getMapper(supplier);
        CombinerFactory combinerFactory = aggregation.getCombinerFactory();
        ReducerFactory reducerFactory = aggregation.getReducerFactory();
        Collator collator = aggregation.getCollator();

        MappingJob mappingJob = job.mapper(mapper);

        // Insert the combining phase only when the aggregation provides one.
        ReducingSubmittableJob reducingJob = (combinerFactory == null)
                ? mappingJob.reducer(reducerFactory)
                : mappingJob.combiner(combinerFactory).reducer(reducerFactory);

        // Submit and block for the collated result.
        ICompletableFuture<Result> future = reducingJob.submit(collator);
        return future.get();
    } catch (Exception e) {
        throw new HazelcastException(e);
    }
}
@Override
public <SuppliedValue, Result> Result aggregate(Supplier<K, V, SuppliedValue> supplier,
                                                Aggregation<K, SuppliedValue, Result> aggregation,
                                                JobTracker jobTracker) {
    try {
        isNotNull(jobTracker, "jobTracker");

        // The aggregation runs as a map-reduce job over this MultiMap.
        KeyValueSource<K, V> multiMapSource = KeyValueSource.fromMultiMap(this);
        Job<K, V> job = jobTracker.newJob(multiMapSource);

        Mapper mapper = aggregation.getMapper(supplier);
        CombinerFactory combinerFactory = aggregation.getCombinerFactory();
        ReducerFactory reducerFactory = aggregation.getReducerFactory();
        Collator collator = aggregation.getCollator();

        MappingJob mappingJob = job.mapper(mapper);

        ReducingSubmittableJob reducingJob;
        if (combinerFactory == null) {
            // No combining phase requested by this aggregation.
            reducingJob = mappingJob.reducer(reducerFactory);
        } else {
            reducingJob = mappingJob.combiner(combinerFactory).reducer(reducerFactory);
        }

        ICompletableFuture<Result> future = reducingJob.submit(collator);
        return future.get();
    } catch (Exception e) {
        throw new HazelcastException(e);
    }
}
@Override
public <SuppliedValue, Result> Result aggregate(Supplier<K, V, SuppliedValue> supplier,
                                                Aggregation<K, SuppliedValue, Result> aggregation,
                                                JobTracker jobTracker) {
    try {
        Preconditions.isNotNull(jobTracker, "jobTracker");

        // Drive the aggregation through a map-reduce job over this MultiMap.
        KeyValueSource<K, V> source = KeyValueSource.fromMultiMap(this);
        Job<K, V> aggregationJob = jobTracker.newJob(source);

        Mapper mapper = aggregation.getMapper(supplier);
        CombinerFactory combinerFactory = aggregation.getCombinerFactory();
        ReducerFactory reducerFactory = aggregation.getReducerFactory();
        Collator collator = aggregation.getCollator();

        MappingJob mappingJob = aggregationJob.mapper(mapper);

        ReducingSubmittableJob reducingJob;
        if (combinerFactory == null) {
            reducingJob = mappingJob.reducer(reducerFactory);
        } else {
            // Combiner present — pre-aggregate on each node before reducing.
            reducingJob = mappingJob.combiner(combinerFactory).reducer(reducerFactory);
        }

        ICompletableFuture<Result> future = reducingJob.submit(collator);
        return future.get();
    } catch (Exception e) {
        throw new HazelcastException(e);
    }
}
@Override
@SuppressWarnings({"deprecation", "unchecked"})
public <SuppliedValue, Result> Result aggregate(Supplier<K, V, SuppliedValue> supplier,
                                                Aggregation<K, SuppliedValue, Result> aggregation,
                                                JobTracker jobTracker) {
    try {
        Preconditions.isNotNull(jobTracker, "jobTracker");

        // This map backs the key-value source for the aggregation job.
        KeyValueSource<K, V> mapSource = KeyValueSource.fromMap(this);
        Job<K, V> job = jobTracker.newJob(mapSource);

        Mapper mapper = aggregation.getMapper(supplier);
        CombinerFactory combinerFactory = aggregation.getCombinerFactory();
        ReducerFactory reducerFactory = aggregation.getReducerFactory();
        Collator collator = aggregation.getCollator();

        MappingJob mappingJob = job.mapper(mapper);

        // Only add a combining phase when the aggregation defines one.
        ReducingSubmittableJob reducingJob = (combinerFactory == null)
                ? mappingJob.reducer(reducerFactory)
                : mappingJob.combiner(combinerFactory).reducer(reducerFactory);

        ICompletableFuture<Result> future = reducingJob.submit(collator);
        return future.get();
    } catch (Exception e) {
        throw new HazelcastException(e);
    }
}
@Override public <SuppliedValue, Result> Result aggregate(Supplier<K, V, SuppliedValue> supplier, Aggregation<K, SuppliedValue, Result> aggregation, JobTracker jobTracker) { checkTrue(NATIVE != mapConfig.getInMemoryFormat(), "NATIVE storage format is not supported for MapReduce"); try { isNotNull(jobTracker, "jobTracker"); KeyValueSource<K, V> keyValueSource = KeyValueSource.fromMap(this); Job<K, V> job = jobTracker.newJob(keyValueSource); Mapper mapper = aggregation.getMapper(supplier); CombinerFactory combinerFactory = aggregation.getCombinerFactory(); ReducerFactory reducerFactory = aggregation.getReducerFactory(); Collator collator = aggregation.getCollator(); MappingJob mappingJob = job.mapper(mapper); ReducingSubmittableJob reducingJob; if (combinerFactory == null) { reducingJob = mappingJob.reducer(reducerFactory); } else { reducingJob = mappingJob.combiner(combinerFactory).reducer(reducerFactory); } ICompletableFuture<Result> future = reducingJob.submit(collator); return future.get(); } catch (Exception e) { // TODO: not what we want, because it can lead to wrapping of HazelcastException throw new HazelcastException(e); } }