/**
 * Builds the reader under test: loads every entry of {@code data} into a fresh
 * soplog set, issuing an asynchronous flush on every 13th put, then spins until
 * the counter confirms all issued flushes completed before returning the set.
 */
@Override
protected SortedReader<ByteBuffer> createReader(NavigableMap<byte[], byte[]> data) throws IOException {
  set = createSoplogSet("test");
  FlushCounter counter = new FlushCounter();
  int issuedFlushes = 0;
  int index = 0;
  for (Entry<byte[], byte[]> e : data.entrySet()) {
    set.put(e.getKey(), e.getValue());
    // Kick off a flush for the first put and every 13th one thereafter.
    if (index % 13 == 0) {
      set.flush(null, counter);
      issuedFlushes++;
    }
    index++;
  }
  // Busy-wait until the completion count matches what we issued; the CAS
  // also resets the counter back to zero for any later use.
  boolean allFlushed = false;
  while (!allFlushed) {
    allFlushed = counter.flushes.compareAndSet(issuedFlushes, 0);
  }
  return set;
}
/**
 * Verifies that a flush completes (and is reported to the handler) even when
 * an error is injected into the flush path on both sides.
 */
public void testErrorDuringFlush() throws Exception {
  FlushCounter counter = new FlushCounter();
  // Make the handler itself report an error as well.
  counter.error.set(true);
  SortedOplogSetImpl set = prepSoplogSet("err");
  // Inject a failure inside the flush implementation.
  set.testErrorDuringFlush = true;
  flushAndWait(counter, set);
}
/**
 * Verifies that closing the set while a flush is stalled still terminates the
 * flush: the set ends up closed and the handler sees exactly one completion.
 */
public void testCloseInterruptsFlush() throws Exception {
  FlushCounter counter = new FlushCounter();
  SortedOplogSetImpl set = prepSoplogSet("closeDuringFlush");
  // Stall the in-flight flush so close() races against it.
  set.testDelayDuringFlush = new CountDownLatch(1);
  set.flush(null, counter);
  set.close();
  assertTrue(set.isClosed());
  assertEquals(1, counter.flushes.get());
}
public void testMergedIterator() throws IOException { FlushCounter handler = new FlushCounter(); SortedOplogSet sos = createSoplogSet("merge");
public void testInUse() throws Exception { FlushCounter handler = new FlushCounter(); SortedOplogSet sos = createSoplogSet("inuse"); for (int i = 0; i < 1000; i++) { sos.put(wrapInt(i), wrapInt(i)); } flushAndWait(handler, sos); // start iterating over soplog SortedIterator<ByteBuffer> range = sos.scan(); assertEquals(0, ((SizeTieredCompactor) sos.getCompactor()).countInactiveReaders()); for (int i = 1000; i < 5000; i++) { sos.put(wrapInt(i), wrapInt(i)); if (i % 100 == 0) { sos.flush(null, handler); } } flushAndWait(handler, sos); compactAndWait(sos, false); assertEquals(1, ((SizeTieredCompactor) sos.getCompactor()).countInactiveReaders()); range.close(); compactAndWait(sos, false); assertEquals(0, ((SizeTieredCompactor) sos.getCompactor()).countInactiveReaders()); validate(sos, 5000); sos.close(); }
/**
 * Verifies that tombstoned keys are dropped by a major compaction: 1000 keys
 * are written, the top 100 are overwritten with tombstones, and after
 * compaction only the first 900 remain.
 */
public void testTombstone() throws Exception {
  FlushCounter counter = new FlushCounter();
  SortedOplogFactory factory = new HFileSortedOplogFactory("tombstone", null,
      new SortedOplogStatistics("stats", "tombstone"),
      new HFileStoreStatistics("storeStats", "tombstone"));
  Compactor compactor = new SizeTieredCompactor(factory,
      NonCompactor.createFileset("tombstone", new File(".")),
      new FileTracker(), Executors.newSingleThreadExecutor(), 2, 2);
  SortedOplogSet set = new SortedOplogSetImpl(factory, Executors.newSingleThreadExecutor(), compactor);

  for (int key = 0; key < 1000; key++) {
    set.put(wrapInt(key), wrapInt(key));
  }
  set.flush(null, counter);

  // Tombstone keys 900..999 so compaction removes them.
  for (int key = 900; key < 1000; key++) {
    set.put(wrapInt(key), new byte[] { SoplogToken.TOMBSTONE.toByte() });
  }
  flushAndWait(counter, set);

  // Major compaction (force=true) must purge the tombstoned entries.
  compactAndWait(set, true);
  validate(set, 900);
  set.close();
}
/**
 * Verifies that flushing a set with no buffered writes completes cleanly,
 * including a repeated empty flush.
 */
public void testEmptyFlush() throws Exception {
  FlushCounter counter = new FlushCounter();
  SortedOplogSet set = prepSoplogSet("empty");
  flushAndWait(counter, set);
  // A second flush with nothing new to write must also succeed.
  flushAndWait(counter, set);
}
/**
 * Verifies that clearing the set while a flush is stalled leaves the set
 * empty and that both the interrupted flush and the follow-up flush are
 * reported to the handler.
 */
public void testClearInterruptsFlush() throws Exception {
  FlushCounter counter = new FlushCounter();
  SortedOplogSetImpl set = prepSoplogSet("clearDuringFlush");
  // Stall the in-flight flush so clear() races against it.
  set.testDelayDuringFlush = new CountDownLatch(1);
  set.flush(null, counter);
  set.clear();
  flushAndWait(counter, set);
  validateEmpty(set);
  assertEquals(2, counter.flushes.get());
}
/**
 * Verifies that data written across many incremental flushes survives a
 * compaction pass: all 1000 entries must still be readable afterwards.
 */
public void testWithCompaction() throws IOException, InterruptedException {
  FlushCounter counter = new FlushCounter();
  SortedOplogSet set = createSoplogSet("compact");
  for (int key = 0; key < 1000; key++) {
    set.put(wrapInt(key), wrapInt(key));
    // Flush every 100 puts to produce multiple soplogs for compaction.
    if (key % 100 == 0) {
      set.flush(null, counter);
    }
  }
  flushAndWait(counter, set);
  compactAndWait(set, false);
  validate(set, 1000);
  set.close();
}
/**
 * Verifies that destroying the set while a flush is stalled still terminates
 * the flush: the set ends up closed and the handler sees one completion.
 */
public void testDestroyInterruptsFlush() throws Exception {
  FlushCounter counter = new FlushCounter();
  SortedOplogSetImpl set = prepSoplogSet("destroyDuringFlush");
  // Stall the in-flight flush so destroy() races against it.
  set.testDelayDuringFlush = new CountDownLatch(1);
  set.flush(null, counter);
  set.destroy();
  assertTrue(set.isClosed());
  assertEquals(1, counter.flushes.get());
}