/**
 * @see org.springframework.cache.Cache#put(java.lang.Object, java.lang.Object)
 */
@Override
public void put(final Object key, final Object value) {
   this.nativeCache.put(key, value != null ? value : NullValue.NULL);
}
/**
 * @see org.springframework.cache.Cache#put(Object, Object)
 */
@Override
public void put(final Object key, final Object value) {
   try {
      if (writeTimeout > 0)
         this.nativeCache.putAsync(key, value != null ? value : NullValue.NULL)
               .get(writeTimeout, TimeUnit.MILLISECONDS);
      else
         this.nativeCache.put(key, value != null ? value : NullValue.NULL);
   } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new CacheException(e);
   } catch (ExecutionException | TimeoutException e) {
      throw new CacheException(e);
   }
}
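Since put() stores NullValue.NULL as a placeholder for null values, the read path has to translate that placeholder back. A minimal hedged sketch of the matching get(), assuming the same nativeCache field and Spring's SimpleValueWrapper; this is not the actual adapter source:

@Override
public ValueWrapper get(final Object key) {
   // A hedged sketch, not the real Infinispan Spring adapter implementation.
   final Object value = this.nativeCache.get(key);
   if (value == null) {
      return null; // no entry in the cache at all
   }
   // Unwrap the NullValue placeholder stored by put() back into a null payload.
   return new SimpleValueWrapper(value != NullValue.NULL ? value : null);
}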
private void populateCacheManager(BasicCache<String, Integer> cache) {
   for (int i = 0; i < 100; i++) {
      cache.put("key" + i, i);
   }
}
/**
 * @see org.infinispan.commons.api.BasicCache#put(Object, Object, long, TimeUnit)
 */
public void put(Object key, Object value, long lifespan, TimeUnit unit) {
   try {
      if (writeTimeout > 0)
         this.nativeCache.putAsync(key, value != null ? value : NullValue.NULL, lifespan, unit)
               .get(writeTimeout, TimeUnit.MILLISECONDS);
      else
         this.nativeCache.put(key, value != null ? value : NullValue.NULL, lifespan, unit);
   } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new CacheException(e);
   } catch (ExecutionException | TimeoutException e) {
      throw new CacheException(e);
   }
}
private void populateCacheManager(BasicCache<Integer, Integer> cache) {
   for (int i = 0; i < 100; i++) {
      cache.put(i, i);
   }
}
@InvokeOnHeader("PUT") @InvokeOnHeader(InfinispanConstants.PUT) void onPut(Message message) { final BasicCache<Object, Object> cache = manager.getCache(message, this.cacheName); final Object key = message.getHeader(InfinispanConstants.KEY); final Object value = message.getHeader(InfinispanConstants.VALUE); final Object result; if (hasLifespan(message)) { long lifespan = message.getHeader(InfinispanConstants.LIFESPAN_TIME, long.class); TimeUnit timeUnit = message.getHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.class); if (hasMaxIdleTime(message)) { long maxIdle = message.getHeader(InfinispanConstants.MAX_IDLE_TIME, long.class); TimeUnit maxIdleTimeUnit = message.getHeader(InfinispanConstants.MAX_IDLE_TIME_UNIT, TimeUnit.class); result = cache.put(key, value, lifespan, timeUnit, maxIdle, maxIdleTimeUnit); } else { result = cache.put(key, value, lifespan, timeUnit); } } else { result = cache.put(key, value); } setResult(message, result); }
public static void registerSchema(BasicCacheContainer container, Schema schema) {
   container.getCache(AVRO_METADATA_CACHE_NAME).put(schema.getFullName(), schema);
}
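Retrieval is symmetric: the schema's full name is the key. A hedged counterpart for looking a schema up again; the method name getSchema is illustrative, not from the source:

// A hedged counterpart sketch; getSchema is a hypothetical name.
public static Schema getSchema(BasicCacheContainer container, String fullName) {
   return (Schema) container.getCache(AVRO_METADATA_CACHE_NAME).get(fullName);
}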
@Override
public Exchange add(final CamelContext camelContext, final String key, final Exchange exchange) {
    LOG.trace("Adding an Exchange with ID {} for key {} in a thread-safe manner.", exchange.getExchangeId(), key);
    DefaultExchangeHolder newHolder = DefaultExchangeHolder.marshal(exchange, true, allowSerializedHeaders);
    DefaultExchangeHolder oldHolder = cache.put(key, newHolder);
    return unmarshallExchange(camelContext, oldHolder);
}
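The matching read path simply reverses the marshalling step. A hedged sketch of the repository's get(), reusing the cache field and the unmarshallExchange helper from add():

@Override
public Exchange get(final CamelContext camelContext, final String key) {
    // A hedged sketch mirroring add(): fetch the holder and rebuild the Exchange.
    return unmarshallExchange(camelContext, cache.get(key));
}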
private void addMissing( final ConcreteResource resource, final boolean withTimeout )
{
    final String key = getResourceKey( resource );
    if ( withTimeout )
    {
        final int timeoutInSeconds = getTimeoutInSeconds( resource );
        long timeout = Long.MAX_VALUE;
        if ( timeoutInSeconds > 0 )
        {
            // Use a long multiplier to avoid int overflow for large timeouts.
            timeout = System.currentTimeMillis() + ( timeoutInSeconds * 1000L );
        }

        logger.debug( "[NFC] {} will not be checked again until {}", resource,
                      new SimpleDateFormat( TIMEOUT_FORMAT ).format( new Date( timeout ) ) );

        final long f_timeout = timeout;
        nfcCache.execute( cache -> cache.put( key, new NfcConcreteResourceWrapper( resource, f_timeout ),
                                              timeoutInSeconds, TimeUnit.SECONDS ) );
    }
    else
    {
        logger.debug( "[NFC] {} will not be checked again", resource );
        nfcCache.execute( cache -> cache.put( key, new NfcConcreteResourceWrapper( resource, Long.MAX_VALUE ) ) );
    }
}
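The read side of this not-found cache would check both the entry's presence and its recorded deadline. A hedged sketch, assuming NfcConcreteResourceWrapper exposes its deadline via a getTimeout() accessor (an assumption, not confirmed by the source):

// A hedged sketch of the read side; getTimeout() is an assumed accessor.
public boolean isMissing( final ConcreteResource resource )
{
    NfcConcreteResourceWrapper wrapper = nfcCache.execute( cache -> cache.get( getResourceKey( resource ) ) );
    // An entry only counts as "missing" while its recorded deadline is in the future.
    return wrapper != null && wrapper.getTimeout() > System.currentTimeMillis();
}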
@Override
public void put(UUID codeId, int lifespanSeconds, Map<String, String> codeData) {
    ActionTokenValueEntity tokenValue = new ActionTokenValueEntity(codeData);

    try {
        BasicCache<UUID, ActionTokenValueEntity> cache = codeCache.get();
        cache.put(codeId, tokenValue, lifespanSeconds, TimeUnit.SECONDS);
    } catch (HotRodClientException re) {
        // No need to retry. The hotrod remoteCache retries internally when a random network error happens.
        if (logger.isDebugEnabled()) {
            logger.debugf(re, "Failed when adding code %s", codeId);
        }
        throw re;
    }
}
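Codes stored this way are meant to be single-use, so the consume path removes the entry atomically. A hedged sketch, assuming the same codeCache supplier and that ActionTokenValueEntity exposes its data via getNotes() (an assumption):

// A hedged sketch: cache.remove() is atomic, so only one caller can consume a code.
public Map<String, String> remove(UUID codeId) {
    BasicCache<UUID, ActionTokenValueEntity> cache = codeCache.get();
    ActionTokenValueEntity existing = cache.remove(codeId);
    // getNotes() is an assumed accessor for the stored code data.
    return existing == null ? null : existing.getNotes();
}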
public void scheduleForStore( final StoreKey key, final String jobType, final String jobName, final Object payload,
                              final int startSeconds )
        throws IndySchedulerException
{
    if ( !schedulerConfig.isEnabled() )
    {
        logger.debug( "Scheduler disabled." );
        return;
    }

    final Map<String, Object> dataMap = new HashMap<>( 3 );
    dataMap.put( JOB_TYPE, jobType );
    try
    {
        dataMap.put( PAYLOAD, objectMapper.writeValueAsString( payload ) );
    }
    catch ( final JsonProcessingException e )
    {
        throw new IndySchedulerException( "Failed to serialize JSON payload: " + payload, e );
    }

    dataMap.put( SCHEDULE_TIME, System.currentTimeMillis() );

    final ScheduleKey cacheKey = new ScheduleKey( key, jobType, jobName );
    scheduleCache.execute( cache -> cache.put( cacheKey, dataMap, startSeconds, TimeUnit.SECONDS ) );
    logger.debug( "Scheduled for the key {} with timeout: {} seconds", cacheKey, startSeconds );
}
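The entry's lifespan is what drives the trigger: when it expires, a cache listener can treat the job as due. A hedged sketch of that wiring using Infinispan's expiration events; the listener class and fireScheduledJob helper are hypothetical:

// A hedged sketch of expiry-driven triggering; fireScheduledJob is hypothetical.
@Listener
public class ScheduleExpirationListener
{
    @CacheEntryExpired
    public void onExpired( CacheEntryExpiredEvent<ScheduleKey, Map<String, Object>> event )
    {
        // When the timed entry above expires, the scheduled job becomes due.
        fireScheduledJob( event.getKey(), event.getValue() );
    }
}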
@Override
@ManagedOperation(description = "Adds the key to the store")
public boolean add(Object key) {
    // Check for the key first: put() would refresh the entry's lifespan,
    // so always writing would prevent an existing entry from ever expiring.
    if (getCache().containsKey(key)) {
        // there is already an entry so return false
        return false;
    }

    Boolean put = getCache().put(key, true);
    return put == null;
}
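The containsKey/put pair above is not atomic under concurrency. BasicCache extends ConcurrentMap, so putIfAbsent can do the check-and-insert in one call while still leaving an existing entry's lifespan untouched. A hedged alternative sketch, not the original code:

// A hedged alternative using an atomic check-and-insert.
@Override
public boolean add(Object key) {
    // putIfAbsent only writes (and only starts a lifespan) when no entry exists,
    // so it avoids the race between containsKey() and put().
    return getCache().putIfAbsent(key, true) == null;
}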
@Override
public void doStore(String id, SessionData data, long lastSaveTime) throws Exception
{
    // Put an idle timeout on the cache entry if the session is not immortal -
    // if no requests arrive at any node before this timeout occurs, or no node
    // scavenges the session before this timeout occurs, the session will be removed.
    // NOTE: no session listeners will be called for this.
    if (data.getMaxInactiveMs() > 0 && getInfinispanIdleTimeoutSec() > 0)
        _cache.put(getCacheKey(id), (InfinispanSessionData)data, -1, TimeUnit.MILLISECONDS,
                   getInfinispanIdleTimeoutSec(), TimeUnit.SECONDS);
    else
        _cache.put(getCacheKey(id), (InfinispanSessionData)data);

    if (LOG.isDebugEnabled())
        LOG.debug("Session {} saved to infinispan, expires {} ", id, data.getExpiry());
}
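Note the -1 lifespan: the entry itself never expires, and only the max-idle timeout can evict it. The read side is then a plain get, since Infinispan tracks idle time internally. A hedged sketch of the matching load, assuming the same _cache field and getCacheKey helper:

@Override
public SessionData doLoad(String id) throws Exception
{
    // A hedged sketch: fetching the entry is enough; Infinispan resets the idle clock.
    return _cache.get(getCacheKey(id));
}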
@Test
public void demonstrateCacheEvictionUponUpdateJsr107() {
   final Integer bookToUpdateId = Integer.valueOf(2);
   assert !booksCache().containsKey(bookToUpdateId) : "Cache should not initially contain the book with id " + bookToUpdateId;

   Book bookToUpdate = new Book("112-358-132", "Random Author", "Path to Infinispan Enlightenment");
   booksCache().put(bookToUpdate, bookToUpdate);
   assert booksCache().containsKey(bookToUpdate);

   bookToUpdate.setTitle("Work in Progress");
   Book result = bookDao.updateBook(bookToUpdate);
   this.log.infof("Book [%s] updated", bookToUpdate);
   assert !booksCache().containsKey(bookToUpdate) : "updateBook(" + bookToUpdate + ") should have removed updated book from cache";
}
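The eviction the test asserts typically comes from a JSR-107 annotation on the DAO method. A hedged sketch of what updateBook might look like; the cache name, entity manager, and annotation values are assumptions, not the project's actual source:

// A hedged sketch of the DAO side; cache name and persistence call are assumptions.
@CacheRemove(cacheName = "books")
public Book updateBook(@CacheKey Book bookToUpdate) {
   // Persist the change; the JSR-107 interceptor then removes the stale cache entry.
   return em.merge(bookToUpdate);
}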
getCacheForWrite().put("user_" + user1.getId(), user1); getCacheForWrite().put("user_" + user2.getId(), user2); getCacheForWrite().put("user_" + user3.getId(), user3); getCacheForWrite().put("account_" + account1.getId(), account1); getCacheForWrite().put("account_" + account2.getId(), account2); getCacheForWrite().put("account_" + account3.getId(), account3); getCacheForWrite().put("transaction_" + transaction0.getId(), transaction0); getCacheForWrite().put("transaction_" + transaction1.getId(), transaction1); getCacheForWrite().put("transaction_" + transaction2.getId(), transaction2); getCacheForWrite().put("transaction_" + transaction3.getId(), transaction3); getCacheForWrite().put("transaction_" + transaction4.getId(), transaction4); getCacheForWrite().put("transaction_" + transaction5.getId(), transaction5); transaction.setDebit(true); transaction.setValid(true); getCacheForWrite().put("transaction_" + transaction.getId(), transaction); getCacheForWrite().put("dummy", "a primitive value cannot be queried"); getCacheForWrite().put("notIndexed1", new NotIndexed("testing 123")); getCacheForWrite().put("notIndexed2", new NotIndexed("xyz"));
getCacheForWrite().put("user_" + user1.getId(), user1); getCacheForWrite().put("user_" + user2.getId(), user2); getCacheForWrite().put("user_" + user3.getId(), user3); getCacheForWrite().put("transaction_" + transaction0.getId(), transaction0); getCacheForWrite().put("transaction_" + transaction1.getId(), transaction1); getCacheForWrite().put("transaction_" + transaction2.getId(), transaction2); getCacheForWrite().put("transaction_" + transaction3.getId(), transaction3); getCacheForWrite().put("transaction_" + transaction4.getId(), transaction4); getCacheForWrite().put("transaction_" + transaction5.getId(), transaction5); transaction.setDebit(true); transaction.setValid(true); getCacheForWrite().put("transaction_" + transaction.getId(), transaction); getCacheForWrite().put("notIndexed1", new NotIndexed("testing 123")); getCacheForWrite().put("notIndexed2", new NotIndexed("xyz"));