/** Returns the next element, adapting the underlying {@code Map.Entry} to a table {@code Entry}. */
@Override
public Entry<K, V> next() {
  Map.Entry<K, V> mapEntry = iterator.next();
  return new Entry<>(mapEntry.getKey(), mapEntry.getValue());
}
/** Advances the backing iterator and wraps its entry as a table {@code Entry}. */
@Override
public Entry<K, V> next() {
  Map.Entry<K, V> current = iterator.next();
  return new Entry<>(current.getKey(), current.getValue());
}
/** Converts the backing iterator's next {@code Map.Entry} into a table {@code Entry}. */
@Override
public Entry<K, V> next() {
  Map.Entry<K, V> nextEntry = iterator.next();
  return new Entry<>(nextEntry.getKey(), nextEntry.getValue());
}
/** Returns the next entry, re-wrapping the delegate iterator's result. */
@Override
public Entry<K, V> next() {
  Map.Entry<K, V> delegateEntry = iterator.next();
  return new Entry<>(delegateEntry.getKey(), delegateEntry.getValue());
}
/** Pulls the next {@code Map.Entry} from the wrapped iterator and exposes it as an {@code Entry}. */
@Override
public Entry<K, V> next() {
  Map.Entry<K, V> source = iterator.next();
  return new Entry<>(source.getKey(), source.getValue());
}
@Override public CompletableFuture<Map<K, V>> getAllAsync(List<K> keys) { readMetrics.numGetAlls.inc(); // Make a copy of entries which might be immutable Map<K, V> getAllResult = new HashMap<>(); List<K> missingKeys = lookupCache(keys, getAllResult); if (missingKeys.isEmpty()) { return CompletableFuture.completedFuture(getAllResult); } long startNs = System.nanoTime(); return rdTable.getAllAsync(missingKeys).handle((records, e) -> { if (e != null) { throw new SamzaException("Failed to get records for " + keys, e); } else { if (records != null) { cache.putAll(records.entrySet().stream() .map(r -> new Entry<>(r.getKey(), r.getValue())) .collect(Collectors.toList())); getAllResult.putAll(records); } readMetrics.getAllNs.update(System.nanoTime() - startNs); return getAllResult; } }); }
@Override public CompletableFuture<Map<K, V>> getAllAsync(List<K> keys) { readMetrics.numGetAlls.inc(); // Make a copy of entries which might be immutable Map<K, V> getAllResult = new HashMap<>(); List<K> missingKeys = lookupCache(keys, getAllResult); if (missingKeys.isEmpty()) { return CompletableFuture.completedFuture(getAllResult); } long startNs = System.nanoTime(); return rdTable.getAllAsync(missingKeys).handle((records, e) -> { if (e != null) { throw new SamzaException("Failed to get records for " + keys, e); } else { if (records != null) { cache.putAll(records.entrySet().stream() .map(r -> new Entry<>(r.getKey(), r.getValue())) .collect(Collectors.toList())); getAllResult.putAll(records); } readMetrics.getAllNs.update(System.nanoTime() - startNs); return getAllResult; } }); }
@Override public CompletableFuture<Map<K, V>> getAllAsync(List<K> keys) { readMetrics.numGetAlls.inc(); // Make a copy of entries which might be immutable Map<K, V> getAllResult = new HashMap<>(); List<K> missingKeys = lookupCache(keys, getAllResult); if (missingKeys.isEmpty()) { return CompletableFuture.completedFuture(getAllResult); } long startNs = System.nanoTime(); return rdTable.getAllAsync(missingKeys).handle((records, e) -> { if (e != null) { throw new SamzaException("Failed to get records for " + keys, e); } else { if (records != null) { cache.putAll(records.entrySet().stream() .map(r -> new Entry<>(r.getKey(), r.getValue())) .collect(Collectors.toList())); getAllResult.putAll(records); } readMetrics.getAllNs.update(System.nanoTime() - startNs); return getAllResult; } }); }
@Override public CompletableFuture<Map<K, V>> getAllAsync(List<K> keys) { readMetrics.numGetAlls.inc(); // Make a copy of entries which might be immutable Map<K, V> getAllResult = new HashMap<>(); List<K> missingKeys = lookupCache(keys, getAllResult); if (missingKeys.isEmpty()) { return CompletableFuture.completedFuture(getAllResult); } long startNs = System.nanoTime(); return rdTable.getAllAsync(missingKeys).handle((records, e) -> { if (e != null) { throw new SamzaException("Failed to get records for " + keys, e); } else { if (records != null) { cache.putAll(records.entrySet().stream() .map(r -> new Entry<>(r.getKey(), r.getValue())) .collect(Collectors.toList())); getAllResult.putAll(records); } readMetrics.getAllNs.update(System.nanoTime() - startNs); return getAllResult; } }); }
@Override public CompletableFuture<Map<K, V>> getAllAsync(List<K> keys) { incCounter(metrics.numGetAlls); // Make a copy of entries which might be immutable Map<K, V> getAllResult = new HashMap<>(); List<K> missingKeys = lookupCache(keys, getAllResult); if (missingKeys.isEmpty()) { return CompletableFuture.completedFuture(getAllResult); } long startNs = clock.nanoTime(); return table.getAllAsync(missingKeys).handle((records, e) -> { if (e != null) { throw new SamzaException("Failed to get records for " + keys, e); } else { if (records != null) { cache.putAll(records.entrySet().stream() .map(r -> new Entry<>(r.getKey(), r.getValue())) .collect(Collectors.toList())); getAllResult.putAll(records); } updateTimer(metrics.getAllNs, clock.nanoTime() - startNs); return getAllResult; } }); }
/** Returns the next entry, deserializing the wrapped store's raw key/value bytes. */
@Override
public Entry<K, V> next() {
  Map.Entry<byte[], byte[]> rawEntry = wrapped.next();
  K key = keySerializer.fromBytes(rawEntry.getKey());
  V value = valSerializer.fromBytes(rawEntry.getValue());
  return new Entry<>(key, value);
}
}
/**
 * Verifies that a retriable putAllAsync failure engages the retry path:
 * the first attempt fails, the retry succeeds, and the retry metrics record
 * exactly one retry with a non-zero retry timer.
 */
@Test
public void testRetryEngagedPut() throws Exception {
  String tableId = "testRetryEngagedPut";
  TableRetryPolicy policy = new TableRetryPolicy();
  policy.withFixedBackoff(Duration.ofMillis(10));
  TableWriteFunction<String, String> writeFn = mock(TableWriteFunction.class);
  doReturn(true).when(writeFn).isRetriable(any());

  List<Entry<String, String>> records = new ArrayList<>();
  records.add(new Entry<>("foo1", "bar1"));
  records.add(new Entry<>("foo2", "bar2"));

  // Fail the first attempt with a retriable error; succeed on the retry.
  // (The previous doReturn(completedFuture(null)) stub was dead code — this
  // doAnswer replaces it entirely. AtomicInteger matches the sibling tests.)
  AtomicInteger attempts = new AtomicInteger();
  doAnswer(invocation -> {
    CompletableFuture<Map<String, String>> future = new CompletableFuture<>();
    if (attempts.getAndIncrement() > 0) {
      future.complete(null);
    } else {
      future.completeExceptionally(new RuntimeException("test exception"));
    }
    return future;
  }).when(writeFn).putAllAsync(any());

  RetriableWriteFunction<String, String> retryIO = new RetriableWriteFunction<>(policy, writeFn, schedExec);
  retryIO.setMetrics(getMetricsUtil(tableId));
  retryIO.putAllAsync(records).get();

  // Two invocations: the initial failure plus the successful retry.
  verify(writeFn, times(2)).putAllAsync(any());
  Assert.assertEquals(1, retryIO.retryMetrics.retryCount.getCount());
  Assert.assertEquals(0, retryIO.retryMetrics.successCount.getCount());
  Assert.assertTrue(retryIO.retryMetrics.retryTimer.getSnapshot().getMax() > 0);
}
// NOTE(review): mid-test fragment — the enclosing test method's signature is outside this view.
// A single-entry putAllAsync appears to be verified against the per-record throttle(String, String)
// overload rather than throttle(List); presumably the table unrolls a one-element batch into a
// single put — confirm against the rate-limited table's putAll implementation.
verify(writeRateLimiter, times(0)).throttle(anyList()); table.putAllAsync(Arrays.asList(new Entry("1", "2"))).get(); verify(writeFn, times(1)).putAllAsync(any()); verify(writeRateLimiter, times(1)).throttle(anyString(), anyString());
/**
 * Verifies that putAllAsync retries exactly once after a retriable failure:
 * attempt one fails, attempt two succeeds, and the retry metrics record a
 * single retry with no permanent failure.
 */
@Test
public void testPutAllWithOneRetry() throws Exception {
  TableRetryPolicy policy = new TableRetryPolicy();
  policy.withFixedBackoff(Duration.ofMillis(10));
  TableReadFunction<String, String> readFn = mock(TableReadFunction.class);
  TableWriteFunction<String, String> writeFn = mock(TableWriteFunction.class);
  doReturn(true).when(writeFn).isRetriable(any());

  // First call fails with a retriable error; every subsequent call succeeds.
  AtomicInteger times = new AtomicInteger();
  doAnswer(invocation -> {
    // Diamond operator instead of the raw CompletableFuture the test used before.
    CompletableFuture<Map<String, String>> future = new CompletableFuture<>();
    if (times.get() > 0) {
      future.complete(null);
    } else {
      times.incrementAndGet();
      future.completeExceptionally(new RuntimeException("test exception"));
    }
    return future;
  }).when(writeFn).putAllAsync(any());

  AsyncReadWriteTable delegate = new AsyncRemoteTable(readFn, writeFn);
  AsyncRetriableTable table = new AsyncRetriableTable("t1", delegate, null, policy, schedExec, readFn, writeFn);
  table.init(TestRemoteTable.getMockContext());

  // String entry matches the String-typed write function (was a raw Entry(1, 2)).
  table.putAllAsync(Arrays.asList(new Entry<>("1", "2"))).get();

  verify(writeFn, times(2)).putAllAsync(any());
  assertEquals(1, table.writeRetryMetrics.retryCount.getCount());
  assertEquals(0, table.writeRetryMetrics.successCount.getCount());
  assertEquals(0, table.writeRetryMetrics.permFailureCount.getCount());
  assertTrue(table.writeRetryMetrics.retryTimer.getSnapshot().getMax() > 0);
}
// NOTE(review): mid-test fragment — `times` and `table` are declared earlier in the enclosing
// test, outside this view. Each successful write op (putAsync, then putAllAsync) is expected to
// bump successCount by exactly one, hence the pre-incremented `++times` comparisons.
verify(writeFn, times(1)).putAsync(any(), any()); assertEquals(++times, table.writeRetryMetrics.successCount.getCount()); table.putAllAsync(Arrays.asList(new Entry("1", "2"))).get(); verify(writeFn, times(1)).putAllAsync(any()); assertEquals(++times, table.writeRetryMetrics.successCount.getCount());
/**
 * Verifies that write retries stop at the configured attempt limit: every
 * attempt fails, the failure surfaces as an ExecutionException, and the
 * metrics record 10 retries plus one permanent failure.
 */
@Test
public void testPutWithPermFailureOnMaxCount() throws Exception {
  TableRetryPolicy policy = new TableRetryPolicy();
  policy.withFixedBackoff(Duration.ofMillis(5));
  policy.withStopAfterAttempts(10);
  TableReadFunction<String, String> readFn = mock(TableReadFunction.class);
  TableWriteFunction<String, String> writeFn = mock(TableWriteFunction.class);
  doReturn(true).when(writeFn).isRetriable(any());

  // Every attempt fails with a retriable error, exhausting the retry budget.
  // Diamond operator instead of the raw CompletableFuture the test used before.
  CompletableFuture<String> future = new CompletableFuture<>();
  future.completeExceptionally(new RuntimeException("test exception"));
  doReturn(future).when(writeFn).putAllAsync(any());

  AsyncReadWriteTable delegate = new AsyncRemoteTable(readFn, writeFn);
  AsyncRetriableTable table = new AsyncRetriableTable("t1", delegate, null, policy, schedExec, readFn, writeFn);
  table.init(TestRemoteTable.getMockContext());
  try {
    // String entry matches the String-typed write function (was a raw Entry(1, 2)).
    table.putAllAsync(Arrays.asList(new Entry<>("1", "2"))).get();
    fail();
  } catch (ExecutionException e) {
    // Expected: retries are exhausted and the terminal failure propagates here.
  }
  // Initial attempt + 10 retries.
  verify(writeFn, atLeast(11)).putAllAsync(any());
  assertEquals(10, table.writeRetryMetrics.retryCount.getCount());
  assertEquals(0, table.writeRetryMetrics.successCount.getCount());
  assertEquals(1, table.writeRetryMetrics.permFailureCount.getCount());
  assertTrue(table.writeRetryMetrics.retryTimer.getSnapshot().getMax() > 0);
}
/**
 * Exercises both the synchronous and asynchronous putAll paths with a mix of
 * upserts and a null-value delete, and checks that only the batch metrics move.
 */
@Test
public void testPutAll() throws Exception {
  ReadWriteTable table = createTable(false);
  // A null value is treated as a delete, so each putAll call should trigger
  // both a store putAll and a store deleteAll.
  List<Entry> batch = Arrays.asList(new Entry("k1", "v1"), new Entry("k2", null));
  table.putAll(batch);
  table.putAllAsync(batch).get();
  verify(kvStore, times(2)).putAll(any());
  verify(kvStore, times(2)).deleteAll(any());
  Assert.assertEquals(2, numPutAlls.getCount());
  Assert.assertEquals(2, numDeleteAlls.getCount());
  Assert.assertTrue(putAllNs.getSnapshot().getAverage() > 0);
  Assert.assertTrue(deleteAllNs.getSnapshot().getAverage() > 0);
  // Single-record, flush, and callback metrics must remain untouched.
  Assert.assertEquals(0, putNs.getSnapshot().getAverage(), 0.001);
  Assert.assertEquals(0, deleteNs.getSnapshot().getAverage(), 0.001);
  Assert.assertEquals(0, flushNs.getSnapshot().getAverage(), 0.001);
  Assert.assertEquals(0, numPuts.getCount());
  Assert.assertEquals(0, numDeletes.getCount());
  Assert.assertEquals(0, numFlushes.getCount());
  Assert.assertEquals(0, putCallbackNs.getSnapshot().getAverage(), 0.001);
  Assert.assertEquals(0, deleteCallbackNs.getSnapshot().getAverage(), 0.001);
}