// NOTE(review): fragment — braces are unbalanced and statements follow a
// `return;`, so this is an excerpt from the middle of a flush/rollback
// routine whose surrounding structure was lost. Line breaks below are
// reconstructed; only comments were added, no tokens changed.
if (itemsToAdd.getFirst() == null && itemsToDelete.getFirst() == null) {
    return; // nothing to do for this cluster member
    delList = itemsToDelete;
    // Swap in fresh pending lists so new work can accumulate while the
    // captured lists are flushed.
    itemsToAdd = new LinkedList<>();
    itemsToDelete = new LinkedList<>();
    log.error("Failed to flush pending items; initiating rollback", se);
    // Rollback path: re-queue every captured add via savePublishedItem,
    // unlinking each node as it is consumed.
    LinkedListNode<RetryWrapper> node = addList.getLast();
    while (node != null) {
        savePublishedItem(node.object);
        node.remove();
        node = addList.getLast();
/** * Creates and stores the published item in the database. * @param wrapper The published item, wrapped for retry */ private static void savePublishedItem(RetryWrapper wrapper) { boolean firstPass = (wrapper.getRetryCount() == 0); PublishedItem item = wrapper.get(); String itemKey = item.getItemKey(); itemCache.put(itemKey, item); log.debug("Added new (inbound) item to cache"); synchronized (itemsPending) { LinkedListNode<RetryWrapper> itemToReplace = itemsPending.remove(itemKey); if (itemToReplace != null) { itemToReplace.remove(); // remove duplicate from itemsToAdd linked list } LinkedListNode<RetryWrapper> listNode = firstPass ? itemsToAdd.addLast(wrapper) : itemsToAdd.addFirst(wrapper); itemsPending.put(itemKey, listNode); } // skip the flush step if this is a retry attempt if (firstPass && itemsPending.size() > MAX_ITEMS_FLUSH) { TaskEngine.getInstance().submit(new Runnable() { @Override public void run() { flushPendingItems(false); } }); } }
/**
 * Unsupported: this collection is a read-only snapshot of cache values.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public void clear() {
    throw new UnsupportedOperationException();
}
}
// NOTE(review): fragment — contains a `catch` with no matching `try` and an
// unterminated while loop; this is an excerpt from a larger flush routine.
// Line breaks below are reconstructed; only comments were added.
LinkedListNode<RetryWrapper> addItem = addList.getFirst();
LinkedListNode<PublishedItem> delItem = delList.getFirst();
// presumably first.previous acts as the end-of-iteration sentinel in this
// linked-list implementation — TODO confirm against LinkedListNode.
LinkedListNode<RetryWrapper> addHead = addItem.previous;
while (addItem != addHead) {
    // NOTE(review): appending items from the ADD list onto the DELETE list
    // looks suspicious — verify against the original implementation.
    delList.addLast(addItem.object.get());
    addItem = addItem.next;
    delItem = delList.getFirst();
    if (delItem != null) {
        PreparedStatement pstmt = null;
        writePendingItems(con, addList.getFirst(), true);
    } catch (SQLException ex) {
        // On failure, retry the pending writes in non-batch mode.
        writePendingItems(con, addList.getFirst(), false);
/**
 * Returns the cached value for {@code key}, or {@code null} when absent.
 * Expired entries are purged first; hit/miss statistics are updated and a
 * hit moves the entry to the front of the last-accessed (LRU) list.
 *
 * @param key the cache key; must not be {@code null}
 * @return the cached value, or {@code null} if not present
 */
@Override
public synchronized V get(Object key) {
    checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
    // First, clear all entries that have been in cache longer than the
    // maximum defined age.
    deleteExpiredEntries();
    DefaultCache.CacheObject<V> cacheObject = map.get(key);
    if (cacheObject == null) {
        // The object didn't exist in cache, so increment cache misses.
        cacheMisses++;
        return null;
    }
    // The object exists in cache, so increment cache hits. Also, increment
    // the object's read count.
    cacheHits++;
    cacheObject.readCount++;
    // Remove the object from its current place in the cache order list,
    // and re-insert it at the front of the list.
    cacheObject.lastAccessedListNode.remove();
    // NOTE(review): the unchecked cast suggests a generics mismatch between
    // the node's declared element type and LinkedListNode<K> — confirm.
    lastAccessedList.addFirst((LinkedListNode<K>) cacheObject.lastAccessedListNode);
    return cacheObject.object;
}
/**
 * Wraps a snapshot of the given cache entries.
 * A defensive copy is taken so this collection is unaffected by later
 * cache mutations.
 *
 * @param cachedObjects the cache entries to snapshot
 */
private CacheObjectCollection(Collection<DefaultCache.CacheObject<V>> cachedObjects) {
    this.cachedObjects = new ArrayList<>(cachedObjects);
}
/** * Removes the specified published item from the DB. * * @param item The published item to delete. */ public static void removePublishedItem(PublishedItem item) { String itemKey = item.getItemKey(); itemCache.remove(itemKey); synchronized (itemsPending) { itemsToDelete.addLast(item); LinkedListNode<RetryWrapper> itemToAdd = itemsPending.remove(itemKey); if (itemToAdd != null) itemToAdd.remove(); // drop from itemsToAdd linked list } }
// NOTE(review): fragment — contains a `catch` with no matching `try` and an
// unterminated while loop; this is an excerpt from a larger flush routine.
// Line breaks below are reconstructed; only comments were added.
LinkedListNode<RetryWrapper> addItem = addList.getFirst();
LinkedListNode<PublishedItem> delItem = delList.getFirst();
// presumably first.previous acts as the end-of-iteration sentinel in this
// linked-list implementation — TODO confirm against LinkedListNode.
LinkedListNode<RetryWrapper> addHead = addItem.previous;
while (addItem != addHead) {
    // NOTE(review): appending items from the ADD list onto the DELETE list
    // looks suspicious — verify against the original implementation.
    delList.addLast(addItem.object.get());
    addItem = addItem.next;
    delItem = delList.getFirst();
    if (delItem != null) {
        PreparedStatement pstmt = null;
        writePendingItems(con, addList.getFirst(), true);
    } catch (SQLException ex) {
        // On failure, retry the pending writes in non-batch mode.
        writePendingItems(con, addList.getFirst(), false);
/**
 * Wraps a snapshot of the given cache entries.
 * A defensive copy is taken so this collection is unaffected by later
 * cache mutations.
 *
 * @param cachedObjects the cache entries to snapshot
 */
private CacheObjectCollection(Collection<DefaultCache.CacheObject<V>> cachedObjects) {
    this.cachedObjects = new ArrayList<>(cachedObjects);
}
/** * Removes the specified published item from the DB. * * @param item The published item to delete. */ public static void removePublishedItem(PublishedItem item) { String itemKey = item.getItemKey(); itemCache.remove(itemKey); synchronized (itemsPending) { itemsToDelete.addLast(item); LinkedListNode<RetryWrapper> itemToAdd = itemsPending.remove(itemKey); if (itemToAdd != null) itemToAdd.remove(); // drop from itemsToAdd linked list } }
// NOTE(review): fragment — braces are unbalanced and statements follow a
// `return;`, so this is an excerpt from the middle of a flush/rollback
// routine whose surrounding structure was lost. Line breaks below are
// reconstructed; only comments were added, no tokens changed.
if (itemsToAdd.getFirst() == null && itemsToDelete.getFirst() == null) {
    return; // nothing to do for this cluster member
    delList = itemsToDelete;
    // Swap in fresh pending lists so new work can accumulate while the
    // captured lists are flushed.
    itemsToAdd = new LinkedList<>();
    itemsToDelete = new LinkedList<>();
    log.error("Failed to flush pending items; initiating rollback", se);
    // Rollback path: re-queue every captured add via savePublishedItem,
    // unlinking each node as it is consumed.
    LinkedListNode<RetryWrapper> node = addList.getLast();
    while (node != null) {
        savePublishedItem(node.object);
        node.remove();
        node = addList.getLast();
/**
 * Unsupported: this collection is a read-only snapshot of cache values.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public void clear() {
    throw new UnsupportedOperationException();
}
}
/** * Creates and stores the published item in the database. * @param wrapper The published item, wrapped for retry */ private static void savePublishedItem(RetryWrapper wrapper) { boolean firstPass = (wrapper.getRetryCount() == 0); PublishedItem item = wrapper.get(); String itemKey = item.getItemKey(); itemCache.put(itemKey, item); log.debug("Added new (inbound) item to cache"); synchronized (itemsPending) { LinkedListNode<RetryWrapper> itemToReplace = itemsPending.remove(itemKey); if (itemToReplace != null) { itemToReplace.remove(); // remove duplicate from itemsToAdd linked list } LinkedListNode<RetryWrapper> listNode = firstPass ? itemsToAdd.addLast(wrapper) : itemsToAdd.addFirst(wrapper); itemsPending.put(itemKey, listNode); } // skip the flush step if this is a retry attempt if (firstPass && itemsPending.size() > MAX_ITEMS_FLUSH) { TaskEngine.getInstance().submit(new Runnable() { @Override public void run() { flushPendingItems(false); } }); } }
// NOTE(review): fragment — excerpt from a cache-culling routine; the
// enclosing method (and the earlier assignment to `t`) is not visible here.
// Only comments added.
// Repeatedly remove the entry at the tail of the last-accessed list until
// the cache shrinks to the desired size.
do {
    remove(lastAccessedList.getLast().object);
} while (cacheSize > desiredSize);
// presumably `t` held System.currentTimeMillis() from before the loop, so
// this yields the elapsed cull time — TODO confirm in the enclosing method.
t = System.currentTimeMillis() - t;
/**
 * Returns the cached value for {@code key}, or {@code null} when absent.
 * Expired entries are purged first; hit/miss statistics are updated and a
 * hit moves the entry to the front of the last-accessed (LRU) list.
 *
 * @param key the cache key; must not be {@code null}
 * @return the cached value, or {@code null} if not present
 */
@Override
public synchronized V get(Object key) {
    checkNotNull(key, NULL_KEY_IS_NOT_ALLOWED);
    // First, clear all entries that have been in cache longer than the
    // maximum defined age.
    deleteExpiredEntries();
    DefaultCache.CacheObject<V> cacheObject = map.get(key);
    if (cacheObject == null) {
        // The object didn't exist in cache, so increment cache misses.
        cacheMisses++;
        return null;
    }
    // The object exists in cache, so increment cache hits. Also, increment
    // the object's read count.
    cacheHits++;
    cacheObject.readCount++;
    // Remove the object from its current place in the cache order list,
    // and re-insert it at the front of the list.
    cacheObject.lastAccessedListNode.remove();
    // NOTE(review): the unchecked cast suggests a generics mismatch between
    // the node's declared element type and LinkedListNode<K> — confirm.
    lastAccessedList.addFirst((LinkedListNode<K>) cacheObject.lastAccessedListNode);
    return cacheObject.object;
}