/** Delegates to the wrapped {@code hzInstance} to resolve the named distributed atomic reference. */
@Override
public <E> IAtomicReference<E> getAtomicReference(String name) {
    return hzInstance.getAtomicReference(name);
}
/** Forwards the lookup to {@code delegatedInstance}; no local state is touched. */
@Override
public <E> IAtomicReference<E> getAtomicReference(String name) {
    return delegatedInstance.getAtomicReference(name);
}
/** Pure pass-through: the named atomic reference is served by {@code delegatedInstance}. */
@Override
public <E> IAtomicReference<E> getAtomicReference(String name) {
    return delegatedInstance.getAtomicReference(name);
}
/** Thin wrapper — resolution of the named reference is handled entirely by {@code delegate}. */
@Override
public <E> IAtomicReference<E> getAtomicReference(String name) {
    return delegate.getAtomicReference(name);
}
/**
 * Looks up the leader record published for the given database name.
 *
 * @param hazelcastInstance cluster handle used to resolve the shared reference
 * @param dbName database whose leader is requested
 * @return the stored {@link LeaderInfo}, or empty when no leader has been published
 */
private static Optional<LeaderInfo> getLeaderForDBName( HazelcastInstance hazelcastInstance, String dbName )
{
    IAtomicReference<LeaderInfo> ref = hazelcastInstance.getAtomicReference( DB_NAME_LEADER_TERM_PREFIX + dbName );
    return Optional.ofNullable( ref.get() );
}
/** Resolves the named atomic reference from whatever instance {@code getHazelcastInstance()} currently returns. */
@Override
public <E> IAtomicReference<E> getAtomicReference(String name) {
    return getHazelcastInstance().getAtomicReference(name);
}
/**
 * CDI producer for {@link IAtomicReference} injection points: resolves the reference
 * named by the injection context and announces its creation.
 * NOTE(review): the raw {@code IAtomicReference} type is kept as-is — presumably
 * required so the producer matches parameterized injection points; confirm before
 * adding generics.
 */
@Produces
public IAtomicReference produceIAtomicReferences( final ProviderContext context )
{
    final String referenceName = retrieveSourceNameFrom( context );
    final IAtomicReference reference = hazelcast.getAtomicReference( referenceName );
    notifyDataWasProduced( reference, IAtomicReference.class );
    return reference;
}
/** Minimal demo: store a value in a distributed reference, print it back, then shut the cluster down. */
public static void main(String[] args) {
    HazelcastInstance instance = Hazelcast.newHazelcastInstance();
    IAtomicReference<String> reference = instance.getAtomicReference("reference");
    reference.set("foo");
    System.out.println(reference.get());
    Hazelcast.shutdownAll();
}
}
/**
 * Reads the cluster id published under the shared {@code CLUSTER_UUID} reference.
 *
 * @return the stored {@link ClusterId}, or {@code null} when none has been set yet
 */
private static ClusterId getClusterId( HazelcastInstance hazelcastInstance )
{
    UUID storedUuid = hazelcastInstance.<UUID>getAtomicReference( CLUSTER_UUID ).get();
    return storedUuid == null ? null : new ClusterId( storedUuid );
}
/**
 * Assuming a job already exists, updates the job.
 *
 * @param j the job to update
 */
@Override
public void updateJob(Job j) {
    // Overwrite whatever is stored for this worker's job slot.
    h.<Job>getAtomicReference("job-" + j.workerId()).set(j);
}
/**
 * Publishes {@code leaderInfo} as the leader for {@code dbName} via compare-and-set,
 * but only when doing so would not regress the stored state: the same member is
 * already leader, a higher term is stored, or the same term is stored and the
 * incoming info is not a step-down.
 */
static void casLeaders( HazelcastInstance hazelcastInstance, LeaderInfo leaderInfo, String dbName )
{
    IAtomicReference<LeaderInfo> leaderRef = hazelcastInstance.getAtomicReference( DB_NAME_LEADER_TERM_PREFIX + dbName );
    LeaderInfo current = leaderRef.get();
    Optional<LeaderInfo> currentOpt = Optional.ofNullable( current );
    // Same member already recorded as leader (Optional.equals handles the "nothing stored" case).
    boolean sameLeader = currentOpt.map( LeaderInfo::memberId ).equals( Optional.ofNullable( leaderInfo.memberId() ) );
    // Compare stored term against the incoming one; -1 when nothing is stored yet,
    // so an absent entry never blocks the update.
    int termComparison = currentOpt.map( l -> Long.compare( l.term(), leaderInfo.term() ) ).orElse( -1 );
    boolean greaterTermExists = termComparison > 0;
    // Equal terms only permit an update when the incoming leader is stepping down.
    boolean sameTermButNoStepdown = termComparison == 0 && !leaderInfo.isSteppingDown();
    if ( sameLeader || greaterTermExists || sameTermButNoStepdown )
    {
        return;
    }
    // CAS against the value we read, so a concurrent writer who changed it since wins.
    leaderRef.compareAndSet( current, leaderInfo );
}
/**
 * Atomically claims the cluster id: succeeds when the shared reference was unset
 * (and now holds {@code clusterId}'s uuid) or already holds the same uuid.
 *
 * @return {@code true} when the stored cluster id is (now) this one, {@code false} on mismatch
 */
static boolean casClusterId( HazelcastInstance hazelcastInstance, ClusterId clusterId )
{
    IAtomicReference<UUID> uuidReference = hazelcastInstance.getAtomicReference( CLUSTER_UUID );
    // Fix: compare with the local uuid as receiver. The original called
    // uuidReference.get().equals(...), which throws NPE if the reference is
    // cleared between the failed CAS and the read; this returns false instead.
    // (Assumes clusterId.uuid() is non-null — TODO confirm ClusterId's contract.)
    return uuidReference.compareAndSet( null, clusterId.uuid() )
            || clusterId.uuid().equals( uuidReference.get() );
}
/**
 * Destroys the distributed job reference for the given worker id and drops the
 * matching entry from the local {@code jobs} collection. A {@code null} id is
 * logged and ignored.
 */
@Override
public void clearJob(String id) throws Exception {
    if (id == null) {
        log.warn("No job to clear; was null, returning");
        return;
    }
    recentlyClearedJobs.add(id);
    IAtomicReference<Job> reference = h.getAtomicReference("job-" + id);
    if (reference.isNull()) {
        return;
    }
    reference.clear();
    log.info("Destroyed job ref " + id);
    // Remove only the first job whose worker id matches.
    Job toRemove = null;
    for (Job candidate : jobs) {
        if (candidate.workerId().equals(id)) {
            toRemove = candidate;
            break;
        }
    }
    if (toRemove != null) {
        jobs.remove(toRemove);
    }
}
/**
 * Looks up the pending job for the given worker id. Returns {@code null} when the
 * run is done, no job is stored, or the worker is already executing one.
 */
@Override
public Job jobFor(String id) {
    if (done.get()) {
        return null;
    }
    IAtomicReference<Job> reference = h.getAtomicReference("job-" + id);
    boolean unavailable = reference.isNull() || isCurrentlyJob(id);
    return unavailable ? null : reference.get();
}
/**
 * Prepares the benchmark fixtures: builds {@code valueCount} candidate values
 * (strings or byte arrays of {@code valueLength}), then seeds each counter's
 * atomic reference with a randomly chosen value.
 */
@Setup
public void setup() {
    Random random = new Random();
    values = new Object[valueCount];
    for (int i = 0; i < valueCount; i++) {
        values[i] = useStringValue
                ? generateString(valueLength)
                : generateByteArray(random, valueLength);
    }
    counters = getCounters();
    String[] referenceNames = generateStringKeys(name, countersLength, keyLocality, targetInstance);
    for (int i = 0; i < counters.length; i++) {
        IAtomicReference<Object> reference = targetInstance.getAtomicReference(referenceNames[i]);
        reference.set(values[random.nextInt(values.length)]);
        counters[i] = reference;
    }
}
// Wire up the shared cluster state by well-known names: atomic references for
// single-value flags/counters, maps and lists for collections.
// NOTE(review): the semantics of each key come from where the constants are
// declared, which is outside this view — confirm against those definitions.
begunTraining = h.getAtomicReference(BEGUN);
miniBatchSize = h.getAtomicReference(INPUT_SPLIT);
workerEnabled = h.getMap(WORKER_ENABLED);
replicate = h.getList(REPLICATE_WEIGHTS);
updates = h.getList(UPDATES);
heartbeat = h.getMap(HEART_BEAT);
master = h.getAtomicReference(RESULT);
isPretrain = h.getAtomicReference(IS_PRETRAIN);
numTimesPretrain = h.getAtomicReference(NUM_TIMES_RUN_PRETRAIN);
numTimesPretrainRan = h.getAtomicReference(NUM_TIMES_PRETRAIN_RAN);
done = h.getAtomicReference(DONE);
bestLoss = h.getAtomicReference(BEST_LOSS);
earlyStop = h.getAtomicReference(EARLY_STOP);
patience = h.getAtomicReference(PATIENCE);
numBatches = h.getAtomicReference(NUM_BATCHES_SO_FAR_RAN);
references = h.getMap(GLOBAL_REFERENCE);
/**
 * Registers {@code j} as the current job for its worker; when that worker already
 * has a job allocated, redirects {@code j} to the first worker found with no job.
 * NOTE(review): {@code r.get() != null || !r.isNull()} looks redundant — both
 * sides test "a job is present"; confirm before simplifying.
 * NOTE(review): the while loop spins until some worker reports a null job and can
 * busy-wait indefinitely when none frees up — confirm whether that is intended.
 */
@Override
public boolean addJobToCurrent(Job j) throws Exception {
    IAtomicReference<Job> r = h.getAtomicReference("job-" + j.workerId());
    if(r.get() != null || !r.isNull()) {
        boolean sent = false;
        while(!sent) {
            //always update
            for(String s : workers()) {
                if(jobFor(s) == null) {
                    log.info("Redirecting worker " + j.workerId() + " to " + s + " due to work already being allocated");
                    // Re-point r at the free worker's slot and retarget the job.
                    r = h.getAtomicReference("job-" + s);
                    j.setWorkerId(s);
                    sent = true;
                }
            }
        }
    }
    r.set(j);
    jobs.add(j);
    return true;
}
// Resolve (lazily creating, per Hazelcast's named-object semantics) the
// distributed atomic reference named "myAtomicReference"; result is discarded here.
hazelcastInstance.getAtomicReference("myAtomicReference");
// Resolve the named distributed structures (created on first access); results are discarded.
hazelcastInstance.getAtomicReference("myAtomicReference");
hazelcastInstance.getScheduledExecutorService("myScheduledExecutor");
// End of the enclosing try (opened outside this view); cleanup continues below.
} finally {