/**
 * Removes and returns the packet stored under the given correlation ID.
 *
 * @param correlationID the ID the packet was registered with
 * @return the removed {@link Packet}, or {@code null} if none was stored
 */
public Packet remove(long correlationID) {
   final Packet removed = store.remove(correlationID);
   return removed;
}
/**
 * Removes and returns the packet stored under the given correlation ID.
 *
 * @param correlationID the ID the packet was registered with
 * @return the removed {@link Packet}, or {@code null} if none was stored
 */
public Packet remove(long correlationID) {
   final Packet removed = store.remove(correlationID);
   return removed;
}
/**
 * Removes and returns the packet stored under the given correlation ID.
 *
 * @param correlationID the ID the packet was registered with
 * @return the removed {@link Packet}, or {@code null} if none was stored
 */
public Packet remove(long correlationID) {
   final Packet removed = store.remove(correlationID);
   return removed;
}
@Override public void onReadRollbackRecord(final long transactionID) throws Exception { TransactionHolder tx = loadTransactions.remove(transactionID); // The rollback could be alone on its own journal-file and the // whole transaction body was reclaimed but the commit-record // So it is completely legal to not find a transaction at this // point if (tx != null) { JournalTransaction tnp = transactions.remove(transactionID); if (tnp == null) { throw new IllegalStateException("Cannot find tx " + transactionID); } // There is no need to validate summaries/holes on // Rollbacks.. We will ignore the data anyway. tnp.rollback(file); hasData.lazySet(true); } }
@Override public void onReadRollbackRecord(final long transactionID) throws Exception { TransactionHolder tx = loadTransactions.remove(transactionID); // The rollback could be alone on its own journal-file and the // whole transaction body was reclaimed but the commit-record // So it is completely legal to not find a transaction at this // point if (tx != null) { JournalTransaction tnp = transactions.remove(transactionID); if (tnp == null) { throw new IllegalStateException("Cannot find tx " + transactionID); } // There is no need to validate summaries/holes on // Rollbacks.. We will ignore the data anyway. tnp.rollback(file); hasData.lazySet(true); } }
/**
 * Replays a delete during compact: drops the record from the journal's
 * live-records table and releases its reference on the file that held it.
 * Logs (rather than fails) when the record is already gone.
 */
@Override
void execute() throws Exception {
   final JournalRecord deleteRecord = journal.getRecords().remove(id);
   if (deleteRecord != null) {
      deleteRecord.delete(usedFile);
   } else {
      ActiveMQJournalLogger.LOGGER.noRecordDuringCompactReplay(id);
   }
}
}
/**
 * Handles a delete record read while loading the journal: marks the file
 * as containing live data, notifies the load manager, and releases the
 * deleted record's file references if it is still tracked.
 *
 * @param recordID the ID of the record being deleted
 */
@Override
public void onReadDeleteRecord(final long recordID) throws Exception {
   hasData.lazySet(true);
   loadManager.deleteRecord(recordID);
   final JournalRecord posFiles = records.remove(recordID);
   if (posFiles == null) {
      return;
   }
   posFiles.delete(file);
}
/**
 * Replays a delete during compact: drops the record from the journal's
 * live-records table and releases its reference on the file that held it.
 * Logs (rather than fails) when the record is already gone.
 */
@Override
void execute() throws Exception {
   final JournalRecord deleteRecord = journal.getRecords().remove(id);
   if (deleteRecord != null) {
      deleteRecord.delete(usedFile);
   } else {
      ActiveMQJournalLogger.LOGGER.noRecordDuringCompactReplay(id);
   }
}
}
/**
 * Handles a delete record read while loading the journal: marks the file
 * as containing live data, notifies the load manager, and releases the
 * deleted record's file references if it is still tracked.
 *
 * @param recordID the ID of the record being deleted
 */
@Override
public void onReadDeleteRecord(final long recordID) throws Exception {
   hasData.lazySet(true);
   loadManager.deleteRecord(recordID);
   final JournalRecord posFiles = records.remove(recordID);
   if (posFiles == null) {
      return;
   }
   posFiles.delete(file);
}
@Override public void onReadRollbackRecord(final long transactionID) throws Exception { if (logger.isTraceEnabled()) { logger.trace("onReadRollbackRecord " + transactionID); } if (pendingTransactions.get(transactionID) != null) { // Sanity check, this should never happen throw new IllegalStateException("Inconsistency during compacting: RollbackRecord ID = " + transactionID + " for an already rolled back transaction during compacting"); } else { JournalTransaction newTransaction = newTransactions.remove(transactionID); if (newTransaction != null) { JournalInternalRecord rollbackRecord = new JournalRollbackRecordTX(transactionID); checkSize(rollbackRecord.getEncodeSize()); writeEncoder(rollbackRecord); newTransaction.rollback(currentFile); } } }
/**
 * Replays a commit: merges any per-transaction state collected while
 * compacting into the live transaction, commits it against the commit
 * file, and drops the compacting-side entry.
 */
@Override
void execute() throws Exception {
   final long txId = liveTransaction.getId();
   final JournalTransaction compacted = newTransactions.get(txId);
   if (compacted != null) {
      liveTransaction.merge(compacted);
      liveTransaction.commit(commitFile);
   }
   newTransactions.remove(txId);
}
/**
 * Replays a rollback: merges any per-transaction state collected while
 * compacting into the live transaction, rolls it back against the
 * rollback file, and drops the compacting-side entry.
 */
@Override
void execute() throws Exception {
   final long txId = liveTransaction.getId();
   final JournalTransaction compacted = newTransactions.get(txId);
   if (compacted != null) {
      liveTransaction.merge(compacted);
      liveTransaction.rollback(rollbackFile);
   }
   newTransactions.remove(txId);
}
/**
 * Replays a commit: merges any per-transaction state collected while
 * compacting into the live transaction, commits it against the commit
 * file, and drops the compacting-side entry.
 */
@Override
void execute() throws Exception {
   final long txId = liveTransaction.getId();
   final JournalTransaction compacted = newTransactions.get(txId);
   if (compacted != null) {
      liveTransaction.merge(compacted);
      liveTransaction.commit(commitFile);
   }
   newTransactions.remove(txId);
}
/**
 * Replays a rollback: merges any per-transaction state collected while
 * compacting into the live transaction, rolls it back against the
 * rollback file, and drops the compacting-side entry.
 */
@Override
void execute() throws Exception {
   final long txId = liveTransaction.getId();
   final JournalTransaction compacted = newTransactions.get(txId);
   if (compacted != null) {
      liveTransaction.merge(compacted);
      liveTransaction.rollback(rollbackFile);
   }
   newTransactions.remove(txId);
}
@Override public void onReadCommitRecord(final long transactionID, final int numberOfRecords) throws Exception { if (logger.isTraceEnabled()) { logger.trace("onReadCommitRecord " + transactionID); } if (pendingTransactions.get(transactionID) != null) { // Sanity check, this should never happen ActiveMQJournalLogger.LOGGER.inconsistencyDuringCompacting(transactionID); } else { JournalTransaction newTransaction = newTransactions.remove(transactionID); if (newTransaction != null) { JournalInternalRecord commitRecord = new JournalCompleteRecordTX(TX_RECORD_TYPE.COMMIT, transactionID, null); checkSize(commitRecord.getEncodeSize()); writeEncoder(commitRecord, newTransaction.getCounter(currentFile)); newTransaction.commit(currentFile); } } }
@Override public void onReadCommitRecord(final long transactionID, final int numberOfRecords) throws Exception { if (logger.isTraceEnabled()) { logger.trace("onReadCommitRecord " + transactionID); } if (pendingTransactions.get(transactionID) != null) { // Sanity check, this should never happen ActiveMQJournalLogger.LOGGER.inconsistencyDuringCompacting(transactionID); } else { JournalTransaction newTransaction = newTransactions.remove(transactionID); if (newTransaction != null) { JournalInternalRecord commitRecord = new JournalCompleteRecordTX(TX_RECORD_TYPE.COMMIT, transactionID, null); checkSize(commitRecord.getEncodeSize()); writeEncoder(commitRecord, newTransaction.getCounter(currentFile)); newTransaction.commit(currentFile); } } }
// Micro-benchmark loop exercising put/get/remove on ConcurrentLongHashMap
// (throughput harness, not a correctness test).
// NOTE(review): the inner loops iterate j over [0, N) but every call uses
// key i, so each pass hits one key N times instead of N distinct keys -
// confirm this single-key access pattern is intentional.
public void benchConcurrentLongHashMap() throws Exception {
   // public static void main(String args[]) {
   ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>(N, 1);
   for (long i = 0; i < Iterations; i++) {
      for (int j = 0; j < N; j++) {
         map.put(i, "value");
      }
      // ReadIterations rounds of reads per write round.
      for (long h = 0; h < ReadIterations; h++) {
         for (int j = 0; j < N; j++) {
            map.get(i);
         }
      }
      for (int j = 0; j < N; j++) {
         map.remove(i);
      }
   }
}
/**
 * Verifies {@code remove(key, value)} only removes a mapping when both the
 * key and the exact value match, and that {@code isEmpty()} tracks it.
 */
@Test
public void testRemove() {
   final ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>();
   assertTrue(map.isEmpty());
   assertNull(map.put(1, "one"));
   assertFalse(map.isEmpty());
   // Wrong key, then wrong value: neither may remove the mapping.
   assertFalse(map.remove(0, "zero"));
   assertFalse(map.remove(1, "uno"));
   assertFalse(map.isEmpty());
   // Matching key/value pair removes it and empties the map.
   assertTrue(map.remove(1, "one"));
   assertTrue(map.isEmpty());
}
/**
 * Verifies the used-bucket counter never goes negative: overwriting a key
 * keeps it at 1, and a second remove of the same key keeps it at 0.
 */
@Test
public void testNegativeUsedBucketCount() {
   final ConcurrentLongHashMap<String> map = new ConcurrentLongHashMap<>(16, 1);
   map.put(0, "zero");
   assertEquals(1, map.getUsedBucketCount());
   // Overwrite of an existing key must not count a new bucket.
   map.put(0, "zero1");
   assertEquals(1, map.getUsedBucketCount());
   map.remove(0);
   assertEquals(0, map.getUsedBucketCount());
   // Removing an absent key must not drive the counter below zero.
   map.remove(0);
   assertEquals(0, map.getUsedBucketCount());
}
/**
 * Verifies rehashing behaves correctly when inserts follow deletes:
 * after filling and clearing half the capacity, adding {@code n} fresh
 * entries doubles the capacity and the size counts only live entries.
 */
@Test
public void testRehashingWithDeletes() {
   int n = 16;
   ConcurrentLongHashMap<Integer> map = new ConcurrentLongHashMap<>(n / 2, 1);
   // JUnit convention is assertEquals(expected, actual); the original had
   // the arguments swapped, which produces misleading failure messages and
   // is inconsistent with the sibling tests in this file.
   assertEquals(n, map.capacity());
   assertEquals(0, map.size());
   for (int i = 0; i < n / 2; i++) {
      map.put(i, i);
   }
   for (int i = 0; i < n / 2; i++) {
      map.remove(i);
   }
   for (int i = n; i < (2 * n); i++) {
      map.put(i, i);
   }
   assertEquals(2 * n, map.capacity());
   assertEquals(n, map.size());
}