/**
 * Handles a timer event encountered during replay: forwards the event's timestamp to
 * {@code clusterTimeMs} and, when the timer cannot be cancelled, remembers the
 * correlation id in {@code missedTimersSet} for later reconciliation.
 *
 * @param correlationId identity of the timer event.
 * @param timestamp     time associated with the replayed event.
 */
void onReplayTimerEvent(final long correlationId, final long timestamp)
{
    clusterTimeMs(timestamp);

    final boolean cancelled = timerService.cancelTimer(correlationId);
    if (!cancelled)
    {
        missedTimersSet.add(correlationId);
    }
}
final LongHashSet recordingIds = new LongHashSet(); copiedRecordingLog.entries().stream().mapToLong(e -> e.recordingId).forEach(recordingIds::add); try (Stream<Path> segments = Files.list(archiveDataDir.toPath()) final long recording = Long.parseLong(fileName.split("-")[0]); return !recordingIds.contains(recording); }).map(Path::toFile).forEach(this::deleteFile);
/**
 * {@inheritDoc}
 */
@Override
public void onStatisticsUpdated(
    final InetAddress inetAddress,
    final int port,
    final long socketIdentifier,
    final long inode,
    final long receiveQueueDepth,
    final long transmitQueueDepth)
{
    // Fast path: inode already classified as belonging to another process.
    if (socketInodesNotOwnedByThisProcess.contains(inode))
    {
        return;
    }

    if (!socketInodesOwnedByThisProcess.contains(inode))
    {
        // Unknown inode: refresh the owned-inode cache and re-check before
        // classifying it as foreign.
        socketInodeRetriever.accept(socketInodesOwnedByThisProcess);
        if (!socketInodesOwnedByThisProcess.contains(inode))
        {
            clearNotOwnedInodeCacheIfTooLarge();
            socketInodesNotOwnedByThisProcess.add(inode);
        }
    }

    // Only forward statistics for sockets owned by this process.
    if (socketInodesOwnedByThisProcess.contains(inode))
    {
        delegate.onStatisticsUpdated(
            inetAddress, port, socketIdentifier, inode, receiveQueueDepth, transmitQueueDepth);
    }
}
/**
 * Fast Path set difference for comparison with another LongHashSet.
 * <p>
 * NB: garbage free in the identical case, allocates otherwise.
 *
 * @param other the other set to subtract
 * @return null if identical, otherwise the set of differences
 */
public LongHashSet difference(final LongHashSet other)
{
    LongHashSet result = null;
    final long missing = this.missingValue;

    for (final long candidate : values)
    {
        // Skip empty slots and values present in both sets.
        if (candidate == missing || other.contains(candidate))
        {
            continue;
        }

        if (null == result)
        {
            // Allocate lazily so the identical case stays garbage free.
            result = new LongHashSet(size, missing);
        }
        result.add(candidate);
    }

    return result;
}
/**
 * Queries whether the given session id is in the currently authenticated set.
 *
 * @param sessionId session id to look up.
 * @return true if the session id is currently authenticated.
 */
boolean isAuthenticated(final long sessionId)
{
    final boolean authenticated = currentlyAuthenticatedSessionIds.contains(sessionId);
    return authenticated;
}
@Test
public void shouldFindMultipleSocketInodeValues() throws Exception
{
    // Bind two UDP sockets so the retriever has at least two socket inodes to find.
    try (final DatagramChannel firstSocket = DatagramChannel.open().bind(new InetSocketAddress(55555));
        final DatagramChannel secondSocket = DatagramChannel.open().bind(new InetSocketAddress(44444)))
    {
        final LongHashSet retrievedInodes = new LongHashSet(4);
        retriever.accept(retrievedInodes);
        assertThat(retrievedInodes.size(), is(atLeast(2)));
    }
}
/**
 * Removes every monitored socket whose update count does not match the supplied
 * latest update count, notifying the lifecycle listener for each removal.
 *
 * @param latestUpdateCount update count identifying entries that are still live.
 */
public void purgeEntriesOlderThan(final long latestUpdateCount)
{
    keysForRemoval.clear();

    // Pass one: collect stale keys (cannot remove while iterating the map).
    final Long2ObjectHashMap<T>.KeyIterator candidates = monitoredSocketInstances.keySet().iterator();
    while (candidates.hasNext())
    {
        final long candidateKey = candidates.nextLong();
        if (monitoredSocketInstances.get(candidateKey).getUpdateCount() != latestUpdateCount)
        {
            keysForRemoval.add(candidateKey);
        }
    }

    // Pass two: remove each stale entry and report it via the descriptor.
    final LongIterator staleKeys = keysForRemoval.iterator();
    while (staleKeys.hasNext())
    {
        final long staleKey = staleKeys.nextValue();
        final T staleEntry = monitoredSocketInstances.remove(staleKey);
        staleEntry.describeTo(socketDescriptor);
        lifecycleListener.socketMonitoringStopped(
            socketDescriptor.getAddress(), socketDescriptor.getPort(), socketDescriptor.getInode());
    }
}
}
/**
 * {@inheritDoc}
 */
public Object[] toArray()
{
    // Size the destination first, then delegate the element copy.
    final Object[] snapshot = new Object[size()];
    copyValues(snapshot);
    return snapshot;
}
/**
 * {@inheritDoc}
 */
public boolean removeAll(final Collection<?> coll)
{
    boolean changed = false;
    for (final Object candidate : coll)
    {
        // remove(...) is attempted for every element; any success marks the set changed.
        if (remove(candidate))
        {
            changed = true;
        }
    }
    return changed;
}
/**
 * Wires up the column families and in-memory caches used to persist and look up
 * workflows. Three views over the workflow data are created: by workflow key,
 * by (id, version) composite key, and the latest version per workflow id.
 *
 * @param zeebeDb database used to create the backing column families
 */
public WorkflowPersistenceCache(ZeebeDb<ZbColumnFamilies> zeebeDb) {
  // Primary lookup: workflow key -> persisted workflow.
  workflowKey = new DbLong();
  persistedWorkflow = new PersistedWorkflow();
  workflowColumnFamily =
      zeebeDb.createColumnFamily(ZbColumnFamilies.WORKFLOW_CACHE, workflowKey, persistedWorkflow);

  // Secondary lookup: (workflow id, version) -> persisted workflow.
  workflowId = new DbString();
  workflowVersion = new DbLong();
  idAndVersionKey = new DbCompositeKey<>(workflowId, workflowVersion);
  workflowByIdAndVersionColumnFamily =
      zeebeDb.createColumnFamily(
          ZbColumnFamilies.WORKFLOW_CACHE_BY_ID_AND_VERSION, idAndVersionKey, persistedWorkflow);

  // Maps workflow id -> latest known version number.
  latestWorkflowColumnFamily =
      zeebeDb.createColumnFamily(
          ZbColumnFamilies.WORKFLOW_CACHE_LATEST_KEY, workflowId, workflowVersion);

  // In-memory caches; NOTE(review): presumably populated lazily elsewhere — confirm.
  deployments = new LongHashSet();
  workflowsByKey = new Long2ObjectHashMap<>();
}
/**
 * {@inheritDoc}
 */
public boolean isEmpty()
{
    return 0 == size();
}
containsAll(otherSet); if (c.size() != size()) return containsAll(c);
/**
 * Resets this component to its initial state, optionally transferring the backing
 * file to a backup location first.
 *
 * @param backupLocation destination for a copy of the backing file, or null to skip the backup.
 * @throws IllegalStateException if any sessions are currently authenticated.
 */
public void reset(final File backupLocation)
{
    // Refuse to wipe state out from under live sessions.
    if (!currentlyAuthenticatedSessionIds.isEmpty())
    {
        throw new IllegalStateException(
            "There are currently authenticated sessions: " + currentlyAuthenticatedSessionIds);
    }

    counter = LOWEST_VALID_SESSION_ID;
    currentlyAuthenticatedSessionIds.clear();
    compositeToContext.clear();

    if (null != backupLocation)
    {
        mappedFile.transferTo(backupLocation);
    }

    // Zero the buffer then lay down a fresh initial layout.
    buffer.setMemory(0, buffer.capacity(), (byte)0);
    initialiseBuffer();
}
/**
 * Clear and populate the supplied LongHashSet with the inodes associated with sockets owned by this process.
 * <p>
 * Reads the symbolic links in {@code /proc/self/fd} and keeps the inode of every
 * link that resolves to a socket.
 *
 * @param targetForOwnedSocketInodes the container for the retrieved inodes
 * @throws UncheckedIOException if the fd directory cannot be listed
 */
@Override
public void accept(final LongHashSet targetForOwnedSocketInodes)
{
    targetForOwnedSocketInodes.clear();
    // Files.list returns a Stream backed by an open directory handle that must be
    // closed explicitly; the previous version leaked one file descriptor per call.
    // Fully-qualified types avoid requiring new imports in this file.
    try (java.util.stream.Stream<java.nio.file.Path> fdEntries = Files.list(Paths.get("/proc/self/fd")))
    {
        fdEntries.
            filter(Files::isSymbolicLink).
            mapToLong(CurrentProcessSocketInodeRetriever::socketLinkInode).
            filter(inode -> inode != NOT_A_SOCKET).
            forEach(targetForOwnedSocketInodes::add);
    }
    catch (final IOException e)
    {
        throw new UncheckedIOException(e);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public void onStatisticsUpdated(
    final InetAddress inetAddress,
    final int port,
    final long socketIdentifier,
    final long inode,
    final long receiveQueueDepth,
    final long transmitQueueDepth,
    final long drops)
{
    // Fast path: inode already classified as belonging to another process.
    if (socketInodesNotOwnedByThisProcess.contains(inode))
    {
        return;
    }

    if (!socketInodesOwnedByThisProcess.contains(inode))
    {
        // Unknown inode: refresh the owned-inode cache and re-check before
        // classifying it as foreign.
        socketInodeRetriever.accept(socketInodesOwnedByThisProcess);
        if (!socketInodesOwnedByThisProcess.contains(inode))
        {
            clearNotOwnedInodeCacheIfTooLarge();
            socketInodesNotOwnedByThisProcess.add(inode);
        }
    }

    // Only forward statistics for sockets owned by this process.
    if (socketInodesOwnedByThisProcess.contains(inode))
    {
        delegate.onStatisticsUpdated(
            inetAddress, port, socketIdentifier, inode, receiveQueueDepth, transmitQueueDepth, drops);
    }
}
if (value != MISSING_VALUE && !other.contains(value)) difference = new LongHashSet(); difference.add(value); difference = new LongHashSet(); difference.add(MISSING_VALUE);