@Override
public V get(K key) {
    // Straight delegation to the backing map; returns null when the key is absent.
    final V value = map.get(key);
    return value;
}
@Override
public V get(K key) {
    // Pass-through read: the wrapped map is the single source of truth.
    return map.get(key);
}
@Override
public Properties getClientProperties(String clientId) {
    // Look up the per-client properties; null when the client is not cached.
    final Properties props = clientsCache.get(clientId);
    return props;
}
public String getClientUser(String clientId) {
    // Resolve the schema user for a client id; null for an unknown/null client.
    if (clientId == null) {
        return null;
    }
    final Properties props = clientsCache.get(clientId);
    return props == null ? null : props.getProperty(pn_schema_user);
}
private void get(Object oid, Exchange exchange) {
    // Fetch the cached value for the given id and place it on the outgoing message.
    final Object cached = this.cache.get(oid);
    exchange.getOut().setBody(cached);
}
private static void executeReplicatedMap() {
    System.out.println("### ReplicatedMap Execution Started... ###");
    // Generate one random pair to demonstrate a replicated put/get round trip.
    final int randomKey = RANDOM.nextInt(100);
    final int randomValue = RANDOM.nextInt(100);
    @SuppressWarnings("unchecked")
    final ReplicatedMap<Integer, Integer> replicatedMap =
            (ReplicatedMap<Integer, Integer>) context.getBean("replicatedMap", ReplicatedMap.class);
    replicatedMap.put(randomKey, randomValue);
    System.out.println("A random pair is added to replicatedMap.");
    System.out.println("Added value: " + replicatedMap.get(randomKey) + "\n");
}
@Override
public Map<K, V> getAll(Set<K> keys) {
    // Bulk lookup: resolve each requested key individually against the backing map.
    // Note: absent keys are mapped to null, mirroring a single get().
    final Map<K, V> found = createHashMap(keys.size());
    for (final K k : keys) {
        found.put(k, map.get(k));
    }
    return found;
}
@Override
public Map<K, V> getAll(Set<K> keys) {
    // Build the result by querying the underlying map once per requested key.
    final Map<K, V> snapshot = createHashMap(keys.size());
    for (final K requested : keys) {
        final V value = map.get(requested);
        snapshot.put(requested, value);
    }
    return snapshot;
}
public static void main(String[] args) {
    // Demonstrates identity semantics of values read back from replicated maps
    // configured with different in-memory formats.
    final HazelcastInstance hz = Hazelcast.newHazelcastInstance();
    final ReplicatedMap<String, Person> binaryMap = hz.getReplicatedMap("binaryMap");
    final ReplicatedMap<String, Person> objectMap = hz.getReplicatedMap("objectMap");

    final Person person = new Person();
    binaryMap.put("peter", person);
    objectMap.put("peter", person);

    // Each line prints whether get() hands back the very same object instance.
    System.out.println(person == binaryMap.get("peter"));
    System.out.println(binaryMap.get("peter") == binaryMap.get("peter"));
    System.out.println(person == objectMap.get("peter"));
    System.out.println(objectMap.get("peter") == objectMap.get("peter"));

    hz.shutdown();
}
public boolean isIndexEnabled(int pathId) {
    // An unknown path has no index and is therefore treated as disabled.
    final Index idx = idxDict.get(pathId);
    return idx != null && idx.isEnabled();
}
private Collection<Index> getPathIndices(int pathId, String path) { Set<Index> result = new HashSet<>(1); Index idx = idxDict.get(pathId); if (idx != null) { result.add(idx); } else { if (!patterns.isEmpty()) { for (Map.Entry<Index, Pattern> e: patterns.entrySet()) { Matcher m = e.getValue().matcher(path); boolean match = m.matches(); if (match) { result.add(e.getKey()); // TODO: do we put multiple indexes for the same path?! think about it.. idxDict.put(pathId, e.getKey()); result.add(e.getKey()); } logger.trace("getPathIndexes; pattern {} {}matched for path {}", e.getValue().pattern(), match ? "" : "not ", path); } } } return result; }
@Override public Query getQuery(String query) { Integer qKey = getQueryKey(query); Query result = xqCache.get(qKey); // as I see we use BINARY format for this cache, no need for clone! //if (result != null) { // TODO: are you sure we have to perform clone here? // we got it by ref from replicated cache? // result = result.clone(); //} updateStats(query, result != null, 1); logger.trace("getQuery.exit; returning {}", result); return result; }
@TimeStep(prob = 0.45)
public void get(ThreadState state) {
    // Read a random key to produce get() load against the map under test.
    map.get(state.randomInt(keyCount));
}
@TimeStep(prob = 0.3)
public void get(ThreadState state) {
    try {
        final int randomKey = state.randomInt(keyCount);
        map.get(randomKey);
        state.count.getCount.incrementAndGet();
    } catch (DistributedObjectDestroyedException e) {
        // The map may be destroyed concurrently by another timestep; that is expected.
        EmptyStatement.ignore(e);
    }
}
@Override public Collection<String> prepareQuery(String query) { //throws BagriException { logger.trace("prepareQuery.enter; query: {}", query); Collection<String> result = null; Query xq = xqCache.get(getQueryKey(query)); if (xq != null) { result = xq.getXdmQuery().getParamNames(); } logger.trace("prepareQuery.exit; returning: {}", result); return result; }
public static void main(String[] args) { // Start the Embedded Hazelcast Cluster Member. HazelcastInstance hz = Hazelcast.newHazelcastInstance(); // Get a Replicated Map called "my-replicated-map" ReplicatedMap<String, String> map = hz.getReplicatedMap("my-replicated-map"); // Put and Get a value from the Replicated Map // key/value replicated to all members map.put("key", "value"); // the value retrieved from local member map.get("key"); // Shutdown the Hazelcast Cluster Member hz.shutdown(); } }
public static void main(String[] args) { // Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1 HazelcastInstance hz = HazelcastClient.newHazelcastClient(); // Get a Replicated Map called "my-replicated-map" ReplicatedMap<String, String> map = hz.getReplicatedMap("my-replicated-map"); // Put and Get a value from the Replicated Map String replacedValue = map.put("key", "value"); // key/value replicated to all members System.out.println("replacedValue = " + replacedValue); // Will be null as its first update String value = map.get("key"); // the value is retrieved from a random member in the cluster System.out.println("value for key = " + value); // Shutdown this Hazelcast Client hz.shutdown(); } }
/**
 * Removes the given URL from this node's registered set.
 * <p>
 * Fixes two defects in the original: the Set was fetched <em>before</em> the
 * lock was taken (racing with concurrent register/unregister), and the mutated
 * Set was never written back via {@code replicatedMap.put(...)} — mutating the
 * locally returned Set does not propagate through a ReplicatedMap, so the
 * removal was never replicated (contrast doRegister, which does put back).
 * Also guards against a missing entry instead of throwing NPE.
 *
 * @param url the URL to unregister
 */
@Override
protected void doUnregister(URL url) {
    ILock lock = hazelcastInstance.getLock(nodeId);
    lock.lock();
    try {
        Set<String> urls = replicatedMap.get(this.nodeId);
        if (urls != null && urls.remove(url.toFullString())) {
            // Re-put so the removal is visible to all cluster members.
            replicatedMap.put(this.nodeId, urls);
        }
    } finally {
        lock.unlock();
    }
}
/**
 * Adds the given URL to this node's registered set and replicates the update.
 * <p>
 * Fixes two defects in the original: {@code urls.add(...)} ran <em>outside</em>
 * the lock (racing with doUnregister on the same Set), and the code threw NPE
 * on the first registration, when no Set exists yet under {@code nodeId}.
 *
 * @param url the URL to register
 */
@Override
protected void doRegister(URL url) {
    if (logger.isInfoEnabled()) {
        logger.info("Register: " + url);
    }
    ILock lock = hazelcastInstance.getLock(nodeId);
    lock.lock();
    try {
        Set<String> urls = replicatedMap.get(nodeId);
        if (urls == null) {
            // First registration for this node: start a fresh set.
            urls = new java.util.HashSet<>();
        }
        urls.add(url.toFullString());
        // Put back so the addition is replicated to all cluster members.
        replicatedMap.put(nodeId, urls);
    } finally {
        lock.unlock();
    }
}
public static void main(String[] args) {
    System.setProperty("hazelcast.logging.type", "log4j");

    // Start a two-member Jet cluster; jobs run on the first instance.
    JetInstance instance = Jet.newJetInstance();
    Jet.newJetInstance();

    try {
        // Pre-load the reference data the enrichment stage will join against.
        TickerInfo.populateMap(instance.getHazelcastInstance().getReplicatedMap("tickersInfo"));

        DAG graph = new DAG();

        Vertex source = graph.newVertex("tradesSource", GenerateTradesP::new);
        // Enrich each trade with its ticker info from the replicated map.
        Vertex enrich = graph.newVertex("enrichment",
                mapUsingContextP(replicatedMapContext("tickersInfo"),
                        (ReplicatedMap<String, TickerInfo> map, Trade item) -> tuple2(item, map.get(item.getTicker()))));
        Vertex logSink = graph.newVertex("sink", DiagnosticProcessors.writeLoggerP());

        source.localParallelism(1);

        graph
                .edge(between(source, enrich))
                .edge(between(enrich, logSink));

        instance.newJob(graph).join();
    } finally {
        Jet.shutdownAll();
    }
}
}