/**
 * Return the first available node that maps to the given id.
 * <p>
 * The backing {@code ReferenceMap} may report {@code null} values for
 * entries whose referents have been garbage-collected; those are skipped.
 *
 * @param id node id
 * @return first non-null cached node for {@code id}, or <code>null</code>
 *         if none is available
 */
public AbstractNodeData retrieveFirst(NodeId id) {
    ReferenceMap map = (ReferenceMap) cache.get(id);
    if (map != null) {
        // Raw ReferenceMap: cast each value explicitly instead of making
        // an unchecked Iterator<AbstractNodeData> assignment. The old
        // "finally { iter = null; }" was a no-op (nulling a local has no
        // effect) and has been removed.
        for (Object value : map.values()) {
            AbstractNodeData data = (AbstractNodeData) value;
            if (data != null) {
                return data;
            }
        }
    }
    return null;
}
/**
 * Return the first available node that maps to the given id.
 * <p>
 * Values in the {@code ReferenceMap} can be {@code null} once their
 * referents have been collected, so the first non-null entry wins.
 *
 * @param id node id
 * @return node or <code>null</code> if no live entry exists
 */
public AbstractNodeData retrieveFirst(NodeId id) {
    ReferenceMap map = (ReferenceMap) cache.get(id);
    if (map == null) {
        return null;
    }
    // NOTE(review): the previous version wrapped this loop in
    // try { ... } finally { iter = null; } — assigning null to a local
    // variable in a finally block does nothing, so it was dropped.
    for (Object value : map.values()) {
        AbstractNodeData data = (AbstractNodeData) value;
        if (data != null) {
            return data;
        }
    }
    return null;
}
/**
 * Destroys this working-directory entry: destroys every cached child entry
 * (best effort), force-deletes the backing directory, and finally removes
 * this entry from its parent.
 *
 * @throws IOException if this is the main job working directory
 *         ({@code parent == null}), which must never be destroyed, or if
 *         {@code forceDelete(dir)} fails
 */
@SuppressWarnings("unchecked")
@Override
public void destroy() throws IOException {
    if (parent == null) {
        throw new IOException("tried to destroy main job working directory");
    }
    // Snapshot the children while holding the lock, then destroy them
    // outside it so child destroy() calls cannot deadlock on localCache.
    Collection<RemoteDirectoryEntry> values;
    synchronized (localCache) {
        values = new ArrayList<>(localCache.values());
    }
    for (RemoteDirectoryEntry entry : values) {
        if (entry == null) {
            continue;
        }
        try {
            entry.destroy();
        } catch (IOException ignored) {
            // Best effort: a failing child must not stop the remaining
            // children (or this directory) from being cleaned up.
        }
    }
    forceDelete(dir);
    parent.forgetEntry(this);
}
/**
 * Recursively tears this entry down: children first (each failure is
 * tolerated), then the directory on disk, then the registration in the
 * parent entry.
 *
 * @throws IOException if called on the main job working directory
 *         ({@code parent == null}), or if deleting {@code dir} fails
 */
@SuppressWarnings("unchecked")
@Override
public void destroy() throws IOException {
    if (parent == null) {
        throw new IOException("tried to destroy main job working directory");
    }
    // Copy the child set under the lock; iteration and the (possibly slow)
    // child destroy() calls then run without holding localCache.
    Collection<RemoteDirectoryEntry> children;
    synchronized (localCache) {
        children = new ArrayList<>(localCache.values());
    }
    for (RemoteDirectoryEntry child : children) {
        if (child == null) {
            continue;
        }
        try {
            child.destroy();
        } catch (IOException ignored) {
            // Deliberately swallowed: cleanup is best effort and must
            // proceed past individual child failures.
        }
    }
    forceDelete(dir);
    parent.forgetEntry(this);
}
Iterator iter = idCache.values().iterator(); while (iter.hasNext()) { LRUEntry entry = (LRUEntry) iter.next();
Iterator iter = idCache.values().iterator(); while (iter.hasNext()) { LRUEntry entry = (LRUEntry) iter.next();