@Test public void gcCheckDeletedSize() throws Exception { log.info("Starting gcCheckDeletedSize()"); // Capture logs for the second round of gc LogCustomizer customLogs = LogCustomizer .forLogger(MarkSweepGarbageCollector.class.getName()) .enable(Level.INFO) .filter(Level.INFO) .contains("Estimated size recovered for") .create(); customLogs.starting(); Set<String> existingAfterGC = executeGarbageCollection(cluster, cluster.getCollector(0),false); assertEquals(1, customLogs.getLogs().size()); long deletedSize = (cluster.blobStoreState.blobsAdded.size() - cluster.blobStoreState.blobsPresent.size()) * 100; assertTrue(customLogs.getLogs().get(0).contains(String.valueOf(deletedSize))); assertStats(cluster.statsProvider, 1, 0, cluster.blobStoreState.blobsAdded.size() - cluster.blobStoreState.blobsPresent.size(), cluster.blobStoreState.blobsAdded.size() - cluster.blobStoreState.blobsPresent.size(), NAME); assertEquals(deletedSize, getStatCount(cluster.statsProvider, NAME, TOTAL_SIZE_DELETED)); customLogs.finished(); assertTrue(Sets.symmetricDifference(cluster.blobStoreState.blobsPresent, existingAfterGC).isEmpty()); }
/**
 * Checks that {@code matchesRegex} records only messages matching the
 * anchored pattern, and that the list returned by {@code getLogs()} reflects
 * messages logged after it was obtained.
 */
@Test
public void testRegexMatch() {
    LogCustomizer logCustomizer = LogCustomizer
            .forLogger("org.apache.jackrabbit.oak.commons.junit.LogCustomizerTest")
            .matchesRegex("^Length is [0-9]* units.$")
            .create();
    try {
        logCustomizer.starting();
        // None of these should be recorded.
        LOG.info("test message");
        LOG.info("test message 1");
        LOG.info("1 test message");
        LOG.info("1 Length is 10 units.");
        LOG.info("Length is 10 units.1");
        LOG.info("Length is abc units.");
        List<String> recorded = logCustomizer.getLogs();
        assertTrue(recorded.isEmpty());
        // These are expected to be captured; note the previously obtained
        // list is consulted again, so it must reflect later log calls.
        LOG.info("Length is 1 units.");
        LOG.info("Length is 20 units.");
        LOG.info("Length is units.");
        assertEquals(3, recorded.size());
    } finally {
        logCustomizer.finished();
    }
}
// NOTE(review): method body is truncated in this view — the remainder of the
// test (and the matching finished() calls for these customizers) is not visible.
@Test public void warnOnRepeatedQueueFull() throws RepositoryException, InterruptedException, ExecutionException {
    // Captures the queue-full warning at WARN level.
    LogCustomizer warnLogs = LogCustomizer.forLogger(ChangeProcessor.class.getName())
            .filter(Level.WARN)
            .contains(OBS_QUEUE_FULL_WARN)
            .create();
    // Captures the same message at DEBUG level (repeated warnings are
    // presumably demoted to DEBUG — confirm in ChangeProcessor).
    LogCustomizer debugLogs = LogCustomizer.forLogger(ChangeProcessor.class.getName())
            .filter(Level.DEBUG)
            .contains(OBS_QUEUE_FULL_WARN)
            .create();
    // Forces the logger to DEBUG so the debug-level messages are emitted at all.
    LogCustomizer logLevelSetting = LogCustomizer.forLogger(ChangeProcessor.class.getName())
            .enable(Level.DEBUG)
            .create();
    logLevelSetting.starting();
// NOTE(review): fragment of a larger method — enclosing declaration and the
// matching finished() calls are outside this view.
if (ds instanceof RDBDocumentStore) {
    // Capture TRACE-level perf log entries for single-document reads.
    LogCustomizer logCustomizerRead = LogCustomizer.forLogger(RDBDocumentStoreJDBC.class.getName() + ".perf")
            .enable(Level.TRACE).matchesRegex("read: .*").create();
    logCustomizerRead.starting();
    // Capture TRACE-level perf log entries for queries ("quer.*" matches
    // e.g. "query" — exact message format defined by RDBDocumentStoreJDBC).
    LogCustomizer logCustomizerQuery = LogCustomizer.forLogger(RDBDocumentStoreJDBC.class.getName() + ".perf")
            .enable(Level.TRACE).matchesRegex("quer.*").create();
    logCustomizerQuery.starting();
/**
 * Verifies that setting {@code reindex=true} on a disabled index does not
 * trigger reindexing (no INFO output from IndexUpdate).
 */
@Test
public void reindexForDisabledIndexes() throws Exception {
    EditorHook hook = new EditorHook(
            new IndexUpdateProvider(new CompositeIndexEditorProvider(
                    new PropertyIndexEditorProvider(),
                    new ReferenceEditorProvider()
            )));
    // Create a property index on "foo" and index some content.
    NodeState before = builder.getNodeState();
    createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "fooIndex",
            true, false, ImmutableSet.of("foo"), null);
    builder.child("testRoot").setProperty("foo", "abc");
    NodeState after = builder.getNodeState();
    NodeState indexed = hook.processCommit(before, after, CommitInfo.EMPTY);

    // Disable the index while simultaneously requesting a reindex.
    before = indexed;
    builder = before.builder();
    builder.getChildNode("oak:index").getChildNode("fooIndex").setProperty(TYPE_PROPERTY_NAME, TYPE_DISABLED);
    builder.getChildNode("oak:index").getChildNode("fooIndex").setProperty(REINDEX_PROPERTY_NAME, true);
    after = builder.getNodeState();

    LogCustomizer customLogs = LogCustomizer.forLogger(IndexUpdate.class.getName()).filter(Level.INFO).create();
    customLogs.starting();
    // try/finally so the appender is detached even if the assertion fails.
    try {
        before = after;
        builder = before.builder();
        builder.child("testRoot2").setProperty("foo", "abc");
        after = builder.getNodeState();
        indexed = hook.processCommit(before, after, CommitInfo.EMPTY);
        // A disabled index must not be reindexed, so no INFO messages expected.
        assertTrue(customLogs.getLogs().isEmpty());
    } finally {
        customLogs.finished();
    }
}
/**
 * Verifies that no "Ignoring configuration" warning is emitted when the set
 * method and the command-line parameter agree (or when the parameter is unset).
 */
@Test
public void dontWarnUnnecesarily() {
    // Remember the original property value so it can be restored afterwards.
    String oldVal = System.getProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM);
    final LogCustomizer custom = LogCustomizer
            .forLogger(BufferedOakDirectory.class.getName())
            .contains("Ignoring configuration ")
            .enable(Level.WARN).create();
    custom.starting();
    // try/finally so the appender is detached and the system property is
    // restored even when an assertion fails mid-way.
    try {
        // No CLI parameter set: set method alone must not warn.
        BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(true);
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());

        // CLI parameter "true" agreeing with the set method: still no warning.
        System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, "true");
        reReadCommandLineParam();
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());
        BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(true);
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());

        // CLI parameter "false" agreeing with the set method: still no warning.
        System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, "false");
        reReadCommandLineParam();
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());
        BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(false);
        assertEquals("Don't warn unnecessarily", 0, custom.getLogs().size());
    } finally {
        custom.finished();
        // Restore the pre-test value of the system property.
        if (oldVal == null) {
            System.clearProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM);
        } else {
            System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, oldVal);
        }
    }
}
@Test public void perfLog() throws Exception{ String logName = DocumentStoreStats.class.getName() + ".perf"; LogCustomizer customLogs = LogCustomizer.forLogger(logName) .filter(Level.TRACE) .create(); enableLevel(logName, Level.INFO); customLogs.starting(); //No logs untill debug enabled stats.doneFindAndModify(100, Collection.NODES, "foo", true, true, 0); assertEquals(0, customLogs.getLogs().size()); stats.doneFindAndModify(TimeUnit.SECONDS.toNanos(10), Collection.NODES, "foo", true, true, 0); assertEquals(0, customLogs.getLogs().size()); //Change level to DEBUG - Now threshold rule applies enableLevel(logName, Level.DEBUG); stats.doneFindAndModify(100, Collection.NODES, "foo", true, true, 0); assertEquals(0, customLogs.getLogs().size()); stats.doneFindAndModify(TimeUnit.SECONDS.toNanos(10), Collection.NODES, "foo", true, true, 0); assertEquals(1, customLogs.getLogs().size()); //With trace level everything is logged enableLevel(logName, Level.TRACE); stats.doneFindAndModify(100, Collection.NODES, "foo", true, true, 0); assertEquals(2, customLogs.getLogs().size()); customLogs.finished(); }
/**
 * Verifies that when ALTER TABLE statements fail during a schema 0 -> 1
 * upgrade, the store still comes up (without version support), logs one
 * "Attempted to upgrade" entry per table, and remains writable.
 */
@Test
public void init01fail() {
    // Check the assumption before attaching the appender: a violated
    // assumption throws immediately, and starting() without a matching
    // finished() would leak the appender into subsequent tests.
    Assume.assumeTrue(ds instanceof RDBDataSourceWrapper);
    LogCustomizer logCustomizer = LogCustomizer.forLogger(RDBDocumentStore.class.getName()).enable(Level.INFO)
            .contains("Attempted to upgrade").create();
    logCustomizer.starting();
    RDBDataSourceWrapper wds = (RDBDataSourceWrapper) ds;
    // Simulate a database that rejects ALTER TABLE ... ADD COLUMN.
    wds.setFailAlterTableAddColumnStatements(true);
    RDBOptions op = new RDBOptions().tablePrefix("T01F").initialSchema(0).upgradeToSchema(1).dropTablesOnClose(true);
    RDBDocumentStore rdb = null;
    try {
        rdb = new RDBDocumentStore(this.ds, new DocumentMK.Builder(), op);
        RDBTableMetaData meta = rdb.getTable(Collection.NODES);
        assertEquals(op.getTablePrefix() + "_NODES", meta.getName());
        // Upgrade failed, so the table must not report version support.
        assertFalse(meta.hasVersion());
        // One failed-upgrade log entry per table.
        assertEquals("unexpected # of log entries: " + logCustomizer.getLogs(),
                RDBDocumentStore.getTableNames().size(), logCustomizer.getLogs().size());
        // The store must still accept writes despite the failed upgrade.
        UpdateOp testInsert = new UpdateOp(Utils.getIdFromPath("/foo"), true);
        assertTrue(rdb.create(Collection.NODES, Collections.singletonList(testInsert)));
    } finally {
        wds.setFailAlterTableAddColumnStatements(false);
        logCustomizer.finished();
        if (rdb != null) {
            rdb.dispose();
        }
    }
}
/**
 * Verifies that a warning is emitted when the value passed to the set method
 * conflicts with the command-line parameter, in both directions.
 */
@Test
public void settingConfigDifferentFromCLIWarns() {
    // Remember the original property value so it can be restored afterwards.
    String oldVal = System.getProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM);
    final LogCustomizer custom = LogCustomizer
            .forLogger(BufferedOakDirectory.class.getName())
            .contains("Ignoring configuration ")
            .enable(Level.WARN).create();
    // Outer try/finally restores the system property; inner ones guarantee
    // the appender is detached even when an assertion fails.
    try {
        custom.starting();
        try {
            // CLI says "true", set method says false -> one warning.
            System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, "true");
            reReadCommandLineParam();
            BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(false);
            assertEquals("Warn on conflicting config on CLI and set method", 1, custom.getLogs().size());
        } finally {
            custom.finished();
        }
        custom.starting();
        try {
            // CLI says "false", set method says true -> one warning.
            System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, "false");
            reReadCommandLineParam();
            BufferedOakDirectory.setEnableWritingSingleBlobIndexFile(true);
            assertEquals("Warn on conflicting config on CLI and set method", 1, custom.getLogs().size());
        } finally {
            custom.finished();
        }
    } finally {
        // Restore the pre-test value of the system property.
        if (oldVal == null) {
            System.clearProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM);
        } else {
            System.setProperty(ENABLE_WRITING_SINGLE_BLOB_INDEX_FILE_PARAM, oldVal);
        }
    }
}
/**
 * Verifies that finalizing a query iterator that was never consumed produces
 * a DEBUG "finalizing unclosed" log entry. The finalizer is invoked directly
 * via reflection to make the check deterministic.
 */
@Test
public void queryIteratorNotConsumedTest() throws SQLException, NoSuchMethodException, SecurityException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    LogCustomizer customLogs = LogCustomizer.forLogger(RDBDocumentStoreJDBC.class.getName()).enable(Level.DEBUG)
            .contains("finalizing unclosed").create();
    customLogs.starting();
    // Outer try/finally so the appender is detached even if setup or the
    // query itself throws (previously only the finalize call was guarded).
    try {
        insertTestResource(this.getClass().getName() + "." + name.getMethodName());
        MyConnectionHandler ch = new MyConnectionHandler(super.rdbDataSource);
        RDBTableMetaData tmd = ((RDBDocumentStore) super.ds).getTable(Collection.NODES);
        List<QueryCondition> conditions = Collections.emptyList();
        Iterator<RDBRow> qi = jdbc.queryAsIterator(ch, tmd, null, null, RDBDocumentStore.EMPTY_KEY_PATTERN,
                conditions, Integer.MAX_VALUE, null);
        assertTrue(qi instanceof Closeable);
        // The iterator holds one open connection while not consumed.
        assertEquals(1, ch.cnt.get());
        Method fin = qi.getClass().getDeclaredMethod("finalize");
        try {
            fin.setAccessible(true);
            fin.invoke(qi);
            assertTrue("finalizing non-consumed iterator should generate log entry",
                    customLogs.getLogs().size() >= 1);
        } finally {
            Utils.closeIfCloseable(qi);
            fin.setAccessible(false);
        }
    } finally {
        customLogs.finished();
    }
}
/**
 * Verifies that fully consuming a query iterator releases its connection and
 * produces exactly one DEBUG "Query on" log entry.
 */
@Test
public void queryIteratorConsumedTest() throws SQLException {
    insertTestResource(this.getClass().getName() + "." + name.getMethodName());
    LogCustomizer customLogs = LogCustomizer.forLogger(RDBDocumentStoreJDBC.class.getName()).enable(Level.DEBUG)
            .contains("Query on ").create();
    customLogs.starting();
    // try/finally from immediately after starting(): previously the setup
    // between starting() and the try block could throw and leak the appender.
    try {
        MyConnectionHandler ch = new MyConnectionHandler(super.rdbDataSource);
        RDBTableMetaData tmd = ((RDBDocumentStore) super.ds).getTable(Collection.NODES);
        List<QueryCondition> conditions = Collections.emptyList();
        Iterator<RDBRow> qi = jdbc.queryAsIterator(ch, tmd, null, null, RDBDocumentStore.EMPTY_KEY_PATTERN,
                conditions, Integer.MAX_VALUE, null);
        assertTrue(qi instanceof Closeable);
        // One connection is held while the iterator is open.
        assertEquals(1, ch.cnt.get());
        while (qi.hasNext()) {
            qi.next();
        }
        // Fully consumed: the connection must have been returned.
        assertEquals(0, ch.cnt.get());
        assertEquals("should have a DEBUG level log entry", 1, customLogs.getLogs().size());
    } finally {
        customLogs.finished();
        // note: the original also nulled the local customLogs here, which has
        // no effect and was dropped
    }
}
/**
 * Creating a store with initial schema 2 and target schema 2 (no upgrade
 * needed) must not emit any "to DB level" upgrade log messages, and the table
 * metadata must report version and split-document support.
 */
@Test
public void init22() {
    LogCustomizer upgradeLogs = LogCustomizer.forLogger(RDBDocumentStore.class.getName()).enable(Level.INFO)
            .contains("to DB level").create();
    upgradeLogs.starting();
    RDBOptions options = new RDBOptions().tablePrefix("T" + "22").initialSchema(2).upgradeToSchema(2)
            .dropTablesOnClose(true);
    RDBDocumentStore store = null;
    try {
        store = new RDBDocumentStore(this.ds, new DocumentMK.Builder(), options);
        RDBTableMetaData nodesMeta = store.getTable(Collection.NODES);
        assertEquals(options.getTablePrefix() + "_NODES", nodesMeta.getName());
        // Schema 2 features must be available.
        assertTrue(nodesMeta.hasVersion());
        assertTrue(nodesMeta.hasSplitDocs());
        // No upgrade took place, so no upgrade messages expected.
        assertEquals("unexpected # of log entries: " + upgradeLogs.getLogs(), 0, upgradeLogs.getLogs().size());
    } finally {
        upgradeLogs.finished();
        if (store != null) {
            store.dispose();
        }
    }
}