@Test
public void testContainsMatch() {
    // Capture only entries whose message contains the exact,
    // case-sensitive substring "Test Message".
    LogCustomizer customizer = LogCustomizer
            .forLogger("org.apache.jackrabbit.oak.commons.junit.LogCustomizerTest")
            .contains("Test Message")
            .create();
    try {
        customizer.starting();

        // Lower-case variants must not match the case-sensitive filter.
        LOG.info("test message");
        LOG.info("test message 1");
        LOG.info("1 test message");
        List<String> captured = customizer.getLogs();
        assertTrue(captured.isEmpty());

        // An exact occurrence is captured; the list returned by getLogs()
        // reflects subsequent log events as well.
        LOG.info("Test Message");
        assertEquals(1, captured.size());

        // Matches anywhere inside the message count too.
        LOG.info("1Test Message");
        LOG.info("1Test Message2");
        LOG.info("1 Test Message");
        assertEquals(4, captured.size());
    } finally {
        customizer.finished();
    }
}
@Test public void gcCheckDeletedSize() throws Exception { log.info("Starting gcCheckDeletedSize()"); // Capture logs for the second round of gc LogCustomizer customLogs = LogCustomizer .forLogger(MarkSweepGarbageCollector.class.getName()) .enable(Level.INFO) .filter(Level.INFO) .contains("Estimated size recovered for") .create(); customLogs.starting(); Set<String> existingAfterGC = executeGarbageCollection(cluster, cluster.getCollector(0),false); assertEquals(1, customLogs.getLogs().size()); long deletedSize = (cluster.blobStoreState.blobsAdded.size() - cluster.blobStoreState.blobsPresent.size()) * 100; assertTrue(customLogs.getLogs().get(0).contains(String.valueOf(deletedSize))); assertStats(cluster.statsProvider, 1, 0, cluster.blobStoreState.blobsAdded.size() - cluster.blobStoreState.blobsPresent.size(), cluster.blobStoreState.blobsAdded.size() - cluster.blobStoreState.blobsPresent.size(), NAME); assertEquals(deletedSize, getStatCount(cluster.statsProvider, NAME, TOTAL_SIZE_DELETED)); customLogs.finished(); assertTrue(Sets.symmetricDifference(cluster.blobStoreState.blobsPresent, existingAfterGC).isEmpty()); }
@Test
public void queryIteratorNotConsumedTest() throws SQLException, NoSuchMethodException, SecurityException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    // Watch for the DEBUG message emitted when a result iterator is
    // reclaimed by its finalizer without having been consumed.
    LogCustomizer logWatch = LogCustomizer.forLogger(RDBDocumentStoreJDBC.class.getName())
            .enable(Level.DEBUG).contains("finalizing unclosed").create();
    logWatch.starting();
    insertTestResource(this.getClass().getName() + "." + name.getMethodName());

    MyConnectionHandler handler = new MyConnectionHandler(super.rdbDataSource);
    RDBTableMetaData tableMeta = ((RDBDocumentStore) super.ds).getTable(Collection.NODES);
    List<QueryCondition> noConditions = Collections.emptyList();
    Iterator<RDBRow> rows = jdbc.queryAsIterator(handler, tableMeta, null, null,
            RDBDocumentStore.EMPTY_KEY_PATTERN, noConditions, Integer.MAX_VALUE, null);
    assertTrue(rows instanceof Closeable);
    // One connection is held open until the iterator is consumed or closed.
    assertEquals(1, handler.cnt.get());

    // Invoke finalize() reflectively while the iterator is still unconsumed.
    Method finalizeMethod = rows.getClass().getDeclaredMethod("finalize");
    try {
        finalizeMethod.setAccessible(true);
        finalizeMethod.invoke(rows);
        assertTrue("finalizing non-consumed iterator should generate log entry",
                logWatch.getLogs().size() >= 1);
    } finally {
        Utils.closeIfCloseable(rows);
        finalizeMethod.setAccessible(false);
        logWatch.finished();
    }
}
// NOTE(review): truncated in this view — only the method prologue is visible;
// the remainder of the body and the closing brace are outside this chunk.
@Test
public void testSimpleConflictHandling() {
    // Capture DEBUG entries emitted by RDBDocumentStore when a conflicting
    // update presumably makes it invalidate its cache and retry — TODO confirm
    // against the full method body.
    LogCustomizer logCustomizer = LogCustomizer.forLogger(RDBDocumentStore.class.getName()).enable(Level.DEBUG)
            .contains("invalidating cache and retrying").create();
    logCustomizer.starting();
// NOTE(review): fragment of a larger, unseen method — the last statement is
// cut off mid-expression (the builder chain for logLevelSetting is incomplete).
// Two watchers over ChangeProcessor's logger for the queue-full message, one
// at WARN and one at DEBUG, plus a third customizer whose purpose cannot be
// determined from this view.
LogCustomizer warnLogs = LogCustomizer.forLogger(ChangeProcessor.class.getName())
        .filter(Level.WARN)
        .contains(OBS_QUEUE_FULL_WARN)
        .create();
LogCustomizer debugLogs = LogCustomizer.forLogger(ChangeProcessor.class.getName())
        .filter(Level.DEBUG)
        .contains(OBS_QUEUE_FULL_WARN)
        .create();
LogCustomizer logLevelSetting = LogCustomizer.forLogger(ChangeProcessor.class.getName())
// NOTE(review): single declaration from a larger method not visible here.
// Captures WARN entries from ActiveDeletedBlobCollectorFactory whose message
// contains "Exception occurred while " (note the trailing space in the filter).
LogCustomizer warnLogCustomizer = LogCustomizer.forLogger(ActiveDeletedBlobCollectorFactory.class.getName()).enable(Level.WARN)
        .contains("Exception occurred while ")
        .create();
// NOTE(review): truncated in this view — only the method prologue is visible;
// the remainder of the body and the closing brace are outside this chunk.
@Test
public void init12fail() {
    // Capture INFO-level "Attempted to upgrade" messages from
    // RDBDocumentStore; presumably this exercises a failing schema upgrade —
    // TODO confirm against the full method body.
    LogCustomizer logCustomizer = LogCustomizer.forLogger(RDBDocumentStore.class.getName()).enable(Level.INFO)
            .contains("Attempted to upgrade").create();
    logCustomizer.starting();
// NOTE(review): single declaration from a larger method not visible here.
// DEBUG-level variant of the warnLogCustomizer seen elsewhere in this file:
// enables DEBUG on ActiveDeletedBlobCollectorFactory and captures entries
// containing "Exception occurred while " (trailing space intentional).
LogCustomizer warnLogCustomizer = LogCustomizer.forLogger(ActiveDeletedBlobCollectorFactory.class.getName()).enable(Level.DEBUG)
        .contains("Exception occurred while ")
        .create();
@Test
public void dontWarnUnnecesarily() {
    // Preserve the system property up front so it can be restored even when
    // an assertion fails.
    String oldVal = System.getProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM);
    final LogCustomizer custom = LogCustomizer
            .forLogger(BufferedOakDirectory.class.getName())
            .contains("Ignoring configuration ")
            .enable(Level.WARN).create();
    custom.starting();
    try {
        // No CLI property set: the setter must not warn.
        BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(true);
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());

        // CLI property agrees with the setter value: still no warning.
        System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, "true");
        reReadCommandLineParam();
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());
        BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(true);
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());

        // Same agreement check with "false".
        System.clearProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM);
        System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, "false");
        reReadCommandLineParam();
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());
        BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(false);
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());
    } finally {
        // Fix: previously finished() and the property restore were skipped
        // when an assertion failed, leaking state into subsequent tests.
        custom.finished();
        if (oldVal == null) {
            System.clearProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM);
        } else {
            System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, oldVal);
        }
    }
}
@Test
public void settingConfigDifferentFromCLIWarns() {
    // Preserve the system property up front so it can be restored even when
    // an assertion fails.
    String oldVal = System.getProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM);
    final LogCustomizer custom = LogCustomizer
            .forLogger(BufferedOakDirectory.class.getName())
            .contains("Ignoring configuration ")
            .enable(Level.WARN).create();
    try {
        // CLI says "true", setter says false -> exactly one warning.
        custom.starting();
        try {
            System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, "true");
            reReadCommandLineParam();
            BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(false);
            assertEquals("Warn on conflicting config on CLI and set method", 1, custom.getLogs().size());
        } finally {
            // Fix: previously skipped when the assertion failed, leaking the
            // appender into subsequent tests.
            custom.finished();
        }

        // CLI says "false", setter says true -> exactly one warning.
        custom.starting();
        try {
            System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, "false");
            reReadCommandLineParam();
            BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(true);
            assertEquals("Warn on conflicting config on CLI and set method", 1, custom.getLogs().size());
        } finally {
            custom.finished();
        }
    } finally {
        if (oldVal == null) {
            System.clearProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM);
        } else {
            System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, oldVal);
        }
    }
}
@Test
public void init01fail() {
    // Fix: the assumption check now runs BEFORE starting(). Previously a
    // failed assumption aborted the test after starting() but before the
    // try/finally, so finished() was never called and the appender leaked.
    Assume.assumeTrue(ds instanceof RDBDataSourceWrapper);
    LogCustomizer logCustomizer = LogCustomizer.forLogger(RDBDocumentStore.class.getName()).enable(Level.INFO)
            .contains("Attempted to upgrade").create();
    logCustomizer.starting();
    RDBDataSourceWrapper wds = (RDBDataSourceWrapper) ds;
    // Force all ALTER TABLE ... ADD COLUMN statements to fail, so the
    // schema 0 -> 1 upgrade cannot complete.
    wds.setFailAlterTableAddColumnStatements(true);
    RDBOptions op = new RDBOptions().tablePrefix("T01F").initialSchema(0).upgradeToSchema(1).dropTablesOnClose(true);
    RDBDocumentStore rdb = null;
    try {
        rdb = new RDBDocumentStore(this.ds, new DocumentMK.Builder(), op);
        RDBTableMetaData meta = rdb.getTable(Collection.NODES);
        assertEquals(op.getTablePrefix() + "_NODES", meta.getName());
        // Upgrade was forced to fail, so the VERSION column must be absent.
        assertFalse(meta.hasVersion());
        // One "Attempted to upgrade" message per table.
        assertEquals("unexpected # of log entries: " + logCustomizer.getLogs(),
                RDBDocumentStore.getTableNames().size(), logCustomizer.getLogs().size());
        // The store must remain usable despite the failed upgrade.
        UpdateOp testInsert = new UpdateOp(Utils.getIdFromPath("/foo"), true);
        assertTrue(rdb.create(Collection.NODES, Collections.singletonList(testInsert)));
    } finally {
        wds.setFailAlterTableAddColumnStatements(false);
        logCustomizer.finished();
        if (rdb != null) {
            rdb.dispose();
        }
    }
}
@Test
public void queryIteratorConsumedTest() throws SQLException {
    insertTestResource(this.getClass().getName() + "." + name.getMethodName());

    // Expect exactly one DEBUG "Query on " entry once the iterator is drained.
    LogCustomizer logWatch = LogCustomizer.forLogger(RDBDocumentStoreJDBC.class.getName())
            .enable(Level.DEBUG).contains("Query on ").create();
    logWatch.starting();

    MyConnectionHandler handler = new MyConnectionHandler(super.rdbDataSource);
    RDBTableMetaData tableMeta = ((RDBDocumentStore) super.ds).getTable(Collection.NODES);
    List<QueryCondition> noConditions = Collections.emptyList();
    try {
        Iterator<RDBRow> rows = jdbc.queryAsIterator(handler, tableMeta, null, null,
                RDBDocumentStore.EMPTY_KEY_PATTERN, noConditions, Integer.MAX_VALUE, null);
        assertTrue(rows instanceof Closeable);
        // One connection is held open while the iterator is unconsumed.
        assertEquals(1, handler.cnt.get());
        // Drain the iterator; the connection must be released afterwards.
        while (rows.hasNext()) {
            rows.next();
        }
        assertEquals(0, handler.cnt.get());
        assertEquals("should have a DEBUG level log entry", 1, logWatch.getLogs().size());
    } finally {
        logWatch.finished();
    }
}
@Test
public void init12() {
    // Watch INFO messages logged while tables are upgraded to schema 2.
    LogCustomizer logs = LogCustomizer.forLogger(RDBDocumentStore.class.getName())
            .enable(Level.INFO).contains("to DB level 2").create();
    logs.starting();
    RDBOptions options = new RDBOptions().tablePrefix("T12").initialSchema(1)
            .upgradeToSchema(2).dropTablesOnClose(true);
    RDBDocumentStore store = null;
    try {
        store = new RDBDocumentStore(this.ds, new DocumentMK.Builder(), options);
        RDBTableMetaData meta = store.getTable(Collection.NODES);
        assertEquals(options.getTablePrefix() + "_NODES", meta.getName());
        assertTrue(meta.hasSplitDocs());
        // The 1 -> 2 upgrade logs five statements per table.
        int statementsPerTable = 5;
        assertEquals("unexpected # of log entries: " + logs.getLogs(),
                statementsPerTable * RDBDocumentStore.getTableNames().size(), logs.getLogs().size());
    } finally {
        logs.finished();
        if (store != null) {
            store.dispose();
        }
    }
}
@Test
public void init22() {
    // Watch INFO messages logged during any schema upgrade.
    LogCustomizer logs = LogCustomizer.forLogger(RDBDocumentStore.class.getName())
            .enable(Level.INFO).contains("to DB level").create();
    logs.starting();
    RDBOptions options = new RDBOptions().tablePrefix("T" + "22").initialSchema(2)
            .upgradeToSchema(2).dropTablesOnClose(true);
    RDBDocumentStore store = null;
    try {
        store = new RDBDocumentStore(this.ds, new DocumentMK.Builder(), options);
        RDBTableMetaData meta = store.getTable(Collection.NODES);
        assertEquals(options.getTablePrefix() + "_NODES", meta.getName());
        assertTrue(meta.hasVersion());
        assertTrue(meta.hasSplitDocs());
        // Already at schema 2: no upgrade statements may be logged.
        assertEquals("unexpected # of log entries: " + logs.getLogs(), 0, logs.getLogs().size());
    } finally {
        logs.finished();
        if (store != null) {
            store.dispose();
        }
    }
}
@Test
public void init01() {
    // Watch INFO messages logged while tables are upgraded to schema 1.
    LogCustomizer logs = LogCustomizer.forLogger(RDBDocumentStore.class.getName())
            .enable(Level.INFO).contains("to DB level 1").create();
    logs.starting();
    RDBOptions options = new RDBOptions().tablePrefix("T01").initialSchema(0)
            .upgradeToSchema(1).dropTablesOnClose(true);
    RDBDocumentStore store = null;
    try {
        store = new RDBDocumentStore(this.ds, new DocumentMK.Builder(), options);
        RDBTableMetaData meta = store.getTable(Collection.NODES);
        assertEquals(options.getTablePrefix() + "_NODES", meta.getName());
        assertTrue(meta.hasVersion());
        // One upgrade message per table.
        assertEquals("unexpected # of log entries: " + logs.getLogs(),
                RDBDocumentStore.getTableNames().size(), logs.getLogs().size());
    } finally {
        logs.finished();
        if (store != null) {
            store.dispose();
        }
    }
}
@Test
public void logWarnWhenSeekingBackAfterRead() throws Exception {
    byte[] fileBytes = writeFile();
    LogCustomizer logRecorder = LogCustomizer
            .forLogger(OakStreamingIndexFile.class.getName()).enable(Level.WARN)
            .contains("Seeking back on streaming index file").create();
    NodeBuilder fooBuilder = builder.child("foo");
    try (OakStreamingIndexFile readFile = new OakStreamingIndexFile("foo", fooBuilder, "dirDetails",
            modeDependantBlobFactory.getNodeBuilderBlobFactory(fooBuilder))) {
        logRecorder.starting();
        byte[] readBytes = new byte[fileBytes.length];
        readFile.readBytes(readBytes, 0, 10);
        assertEquals("Don't log for simple reads", 0, logRecorder.getLogs().size());
        readFile.seek(12);
        assertEquals("Don't log for forward seeks", 0, logRecorder.getLogs().size());
        readFile.seek(2);
        assertEquals("Log warning for backward seeks", 1, logRecorder.getLogs().size());
    } finally {
        // Fix: finished() was previously placed after the try-with-resources
        // block without a finally, so a failing assertion skipped it and
        // leaked the appender into subsequent tests.
        logRecorder.finished();
    }
}
@Test
public void init11() {
    // Watch INFO messages logged while tables are upgraded to schema 1.
    LogCustomizer logs = LogCustomizer.forLogger(RDBDocumentStore.class.getName())
            .enable(Level.INFO).contains("to DB level 1").create();
    logs.starting();
    RDBOptions options = new RDBOptions().tablePrefix("T11").initialSchema(1)
            .upgradeToSchema(1).dropTablesOnClose(true);
    RDBDocumentStore store = null;
    try {
        store = new RDBDocumentStore(this.ds, new DocumentMK.Builder(), options);
        RDBTableMetaData meta = store.getTable(Collection.NODES);
        assertEquals(options.getTablePrefix() + "_NODES", meta.getName());
        assertTrue(meta.hasVersion());
        // Already at schema 1: no upgrade statements may be logged.
        assertEquals("unexpected # of log entries: " + logs.getLogs(), 0, logs.getLogs().size());
    } finally {
        logs.finished();
        if (store != null) {
            store.dispose();
        }
    }
}
/**
 * Runs {@code DataStoreCommand} with intentionally incorrect arguments and
 * asserts that the expected message (first element of {@code assertMsg}) is
 * logged on the given logger at INFO level.
 *
 * @param argList   command-line arguments to pass to the command
 * @param assertMsg expected log-message substrings; only element 0 is checked
 * @param logger    class whose logger is watched for the message
 */
public static void testIncorrectParams(List<String> argList, ArrayList<String> assertMsg, Class<?> logger) {
    // Fix: parameter widened from raw Class to Class<?> (source-compatible
    // for all callers) to eliminate the raw-type warning.
    LogCustomizer customLogs = LogCustomizer
        .forLogger(logger.getName())
        .enable(Level.INFO)
        .filter(Level.INFO)
        .contains(assertMsg.get(0))
        .create();
    customLogs.starting();
    try {
        DataStoreCommand cmd = new DataStoreCommand();
        try {
            // The command is expected to fail; the failure itself is logged
            // and the interesting signal is the captured log entry below.
            cmd.execute(argList.toArray(new String[0]));
        } catch (Exception e) {
            log.error("", e);
        }
        // Fix: fail with a clear assertion message instead of an
        // IndexOutOfBoundsException when nothing was captured.
        Assert.assertFalse("expected log entry was not captured: " + assertMsg.get(0),
            customLogs.getLogs().isEmpty());
        Assert.assertNotNull(customLogs.getLogs().get(0));
    } finally {
        // Fix: previously finished() was skipped when the assertion failed.
        customLogs.finished();
    }
}
@Test
public void warnOnQueueFull() throws RepositoryException, InterruptedException, ExecutionException {
    LogCustomizer customLogs = LogCustomizer.forLogger(ChangeProcessor.class.getName())
            .filter(Level.WARN)
            .contains(OBS_QUEUE_FULL_WARN)
            .create();
    observationManager.addEventListener(listener, NODE_ADDED, TEST_PATH, true, null, null, false);
    try {
        customLogs.starting();
        try {
            // Overfill the observation queue; ChangeProcessor must warn.
            addNodeToFillObsQueue();
            assertTrue("Observation queue full warning must get logged",
                    customLogs.getLogs().size() > 0);
        } finally {
            // Fix: previously finished() was inside the try body, so a failing
            // assertion skipped it and leaked the appender.
            customLogs.finished();
        }
    } finally {
        observationManager.removeEventListener(listener);
    }
}
// JUnit fixture: runs before each test in this class.
@Before
public void prepare() throws Exception {
    // Capture logs
    // Record INFO entries from UploadStagingCache containing the exact
    // message "Uploads in progress on close [0]".
    customLogs = LogCustomizer
            .forLogger(UploadStagingCache.class.getName())
            .enable(Level.INFO)
            .filter(Level.INFO)
            .contains("Uploads in progress on close [0]")
            .create();
    customLogs.starting();
    // NOTE(review): no matching finished() in this method; presumably it is
    // called from an @After method outside this view — TODO confirm.
    super.prepare();
}