/**
 * Creates a fixture named "MEM" backed by an in-memory blob store.
 * The same {@link MemoryBlobStore} instance is handed out on every
 * {@code setUp()} call of the returned fixture.
 *
 * @return an in-memory blob store fixture; {@code size()} is not implemented
 */
public static BlobStoreFixture getMemoryBlobStore() {
    return new BlobStoreFixture("MEM") {
        // single store instance shared across setUp() invocations
        private final MemoryBlobStore memoryStore = new MemoryBlobStore();

        @Override
        public BlobStore setUp() {
            return memoryStore;
        }

        @Override
        public void tearDown() {
            // nothing to release for an in-memory store
        }

        @Override
        public long size() {
            throw new UnsupportedOperationException("Implementation pending");
        }
    };
}
/**
 * Returns a blob store fixture that keeps all data in memory.
 *
 * @return fixture labelled "MEM"; its {@code size()} method is still pending
 */
public static BlobStoreFixture getMemoryBlobStore() {
    return new BlobStoreFixture("MEM") {
        // the one store served by setUp(); tearDown() has nothing to do
        private final MemoryBlobStore store = new MemoryBlobStore();

        @Override
        public BlobStore setUp() {
            return store;
        }

        @Override
        public void tearDown() {
            // in-memory store needs no cleanup
        }

        @Override
        public long size() {
            throw new UnsupportedOperationException("Implementation pending");
        }
    };
}
/**
 * Returns the blob store, lazily creating and configuring an in-memory
 * instance on first access.
 *
 * @return the shared blob store instance
 */
public BlobStore getBlobStore() {
    if (blobStore != null) {
        return blobStore;
    }
    blobStore = new MemoryBlobStore();
    configureBlobStore(blobStore);
    return blobStore;
}
/**
 * Lazy accessor for the blob store; creates and configures a
 * {@link MemoryBlobStore} the first time it is requested.
 *
 * @return the (possibly newly created) blob store
 */
public BlobStore getBlobStore() {
    if (blobStore == null) {
        // assign the field first so configureBlobStore sees the final value
        blobStore = new MemoryBlobStore();
        configureBlobStore(blobStore);
    }
    return blobStore;
}
/**
 * Builds the MBean under test, wired with a no-op deleted-blob collector,
 * an in-memory blob store and a same-thread executor, then injects the
 * test clock so time can be controlled by the test.
 */
private ActiveDeletedBlobCollectorMBean getTestBean(IndexPathService indexPathService,
        AsyncIndexInfoService asyncIndexInfoService) {
    ActiveDeletedBlobCollectorMBeanImpl mbean = new ActiveDeletedBlobCollectorMBeanImpl(
            ActiveDeletedBlobCollectorFactory.NOOP,
            wb,
            nodeStore,
            indexPathService,
            asyncIndexInfoService,
            new MemoryBlobStore(),
            sameThreadExecutor());
    mbean.clock = clock;
    return mbean;
}
}
/** * Argument-less constructor, used for in-memory kernel. */ protected Repository() { this.homeDir = null; DefaultRevisionStore rs = new DefaultRevisionStore(new InMemPersistence(), null); try { rs.initialize(); } catch (Exception e) { /* Not plausible for in-memory operation */ throw new InternalError("Unable to initialize in-memory store"); } this.rs = rs; this.bs = new MemoryBlobStore(); initialized = true; }
/**
 * Creates a DocumentNodeStore for the given cluster id, lazily
 * initializing the shared in-memory document and blob stores.
 */
private DocumentNodeStore createNodeStore(int clusterId) {
    if (ds == null) {
        ds = new MemoryDocumentStore();
    }
    if (bs == null) {
        bs = new MemoryBlobStore();
    }
    return createNodeStore(clusterId, ds, bs);
}
@Override
public void setUp() throws Exception {
    // Configure via a local of the concrete type so both block-size
    // tweaks apply without the awkward (MemoryBlobStore) cast on the
    // more widely typed 'store' field.
    MemoryBlobStore memoryStore = new MemoryBlobStore();
    memoryStore.setBlockSize(128);
    memoryStore.setBlockSizeMin(48);
    store = memoryStore;
}
/**
 * Creates a DocumentMK for the given cluster id and async delay,
 * lazily initializing the shared in-memory stores.
 */
private DocumentMK createMK(int clusterId, int asyncDelay) {
    if (ds == null) {
        ds = new MemoryDocumentStore();
    }
    if (bs == null) {
        bs = new MemoryBlobStore();
    }
    return createMK(clusterId, asyncDelay, ds, bs);
}
}
/**
 * Registers the default OSGi services required by the tests: an
 * in-memory blob store, a secondary node store provider, an executor
 * and a no-op statistics provider; then injects them into the cache
 * service.
 */
@Before
public void configureDefaultServices() {
    context.registerService(BlobStore.class, new MemoryBlobStore());

    NodeStoreProvider secondaryProvider = new NodeStoreProvider() {
        @Override
        public NodeStore getNodeStore() {
            return secondaryStore;
        }
    };
    context.registerService(NodeStoreProvider.class, secondaryProvider,
            ImmutableMap.<String, Object>of("role", "secondary"));

    context.registerService(Executor.class, Executors.newSingleThreadExecutor());
    context.registerService(StatisticsProvider.class, StatisticsProvider.NOOP);
    MockOsgi.injectServices(cacheService, context.bundleContext());
}
/**
 * Verifies that entries stored through the flat file store come back in
 * the expected (preferred-path-aware) sorted order.
 */
@Test
public void basicTest() throws Exception {
    List<String> paths = createTestPaths();
    FlatFileNodeStoreBuilder builder =
            new FlatFileNodeStoreBuilder(TestUtils.createEntries(paths), folder.getRoot());
    FlatFileStore flatStore = builder
            .withBlobStore(new MemoryBlobStore())
            .withPreferredPathElements(preferred)
            .build();

    List<String> actualPaths = StreamSupport.stream(flatStore.spliterator(), false)
            .map(NodeStateEntry::getPath)
            .collect(Collectors.toList());

    List<String> expectedPaths = TestUtils.sortPaths(paths, preferred);
    assertEquals(expectedPaths, actualPaths);
}
@Test public void testGetBinaries() throws Exception { StringBuilder sb = new StringBuilder(); CSVPrinter p = new CSVPrinter(sb, CSVFileBinaryResourceProvider.FORMAT); // BLOB_ID, LENGTH, JCR_MIMETYPE, JCR_ENCODING, JCR_PATH p.printRecord("a", 123, "text/plain", null, "/a"); p.printRecord("a2", 123, "text/plain", null, "/a/c"); p.printRecord("b", null, "text/plain", null, "/b"); p.printRecord(null, null, "text/plain", null, "/c"); File dataFile = temporaryFolder.newFile(); Files.write(sb, dataFile, Charsets.UTF_8); CSVFileBinaryResourceProvider provider = new CSVFileBinaryResourceProvider(dataFile, new MemoryBlobStore()); Map<String, BinaryResource> binaries = provider.getBinaries("/").uniqueIndex(BinarySourceMapper.BY_BLOBID); assertEquals(3, binaries.size()); assertEquals("a", binaries.get("a").getBlobId()); assertEquals("/a", binaries.get("a").getPath()); binaries = provider.getBinaries("/a").uniqueIndex(BinarySourceMapper.BY_BLOBID); assertEquals(1, binaries.size()); provider.close(); } }
/**
 * Creates a DocumentNodeStore on the shared in-memory stores with lease
 * checking disabled. The effective cluster id is {@code clusterId + 1}.
 */
private DocumentNodeStore create(int clusterId) {
    DocumentMK.Builder builder = new DocumentMK.Builder();
    if (ds == null) {
        ds = new MemoryDocumentStore();
    }
    if (bs == null) {
        bs = new MemoryBlobStore();
    }
    builder.setDocumentStore(ds).setBlobStore(bs);
    // Use clusterId + 1, consistent with the createMK variants, instead of
    // mutating the parameter via ++clusterId (same value, clearer intent).
    return builder.setClusterId(clusterId + 1)
            .setLeaseCheckMode(LeaseCheckMode.DISABLED)
            .open()
            .getNodeStore();
}
/**
 * Creates a DocumentMK with async delay disabled, backed either by a
 * fresh MongoDB database (when MONGO_DB is set) or by the shared
 * in-memory stores. The effective cluster id is {@code clusterId + 1}.
 */
private DocumentMK createMK(int clusterId) {
    DocumentMK.Builder builder = new DocumentMK.Builder();
    builder.setAsyncDelay(0);
    if (MONGO_DB) {
        MongoConnection connection = connectionFactory.getConnection();
        // start from a clean database
        MongoUtils.dropCollections(connection.getDBName());
        builder.setMongoDB(connection.getMongoClient(), connection.getDBName());
    } else {
        if (ds == null) {
            ds = new MemoryDocumentStore();
        }
        if (bs == null) {
            bs = new MemoryBlobStore();
        }
        builder.setDocumentStore(ds).setBlobStore(bs);
    }
    return builder.setClusterId(clusterId + 1).open();
}
/**
 * Creates a DocumentMK for the given cluster id and async delay, either
 * against MongoDB (when MONGO_DB is set) or the shared in-memory stores.
 */
private DocumentMK createMK(int clusterId, int asyncDelay) {
    if (MONGO_DB) {
        MongoConnection connection = connectionFactory.getConnection();
        return register(new DocumentMK.Builder()
                .setMongoDB(connection.getMongoClient(), connection.getDBName())
                .setClusterId(clusterId)
                .setAsyncDelay(asyncDelay)
                .open());
    }
    if (ds == null) {
        ds = new MemoryDocumentStore();
    }
    if (bs == null) {
        bs = new MemoryBlobStore();
    }
    return createMK(clusterId, asyncDelay, ds, bs);
}
/**
 * Creates a DocumentMK with lease checking disabled, either against
 * MongoDB (when MONGO_DB is set) or the shared in-memory stores.
 */
DocumentMK createMK(int clusterId, int asyncDelay) {
    if (MONGO_DB) {
        MongoConnection connection = connectionFactory.getConnection();
        return register(new DocumentMK.Builder()
                .setMongoDB(connection.getMongoClient(), connection.getDBName())
                .setLeaseCheckMode(LeaseCheckMode.DISABLED)
                .setClusterId(clusterId)
                .setAsyncDelay(asyncDelay)
                .open());
    }
    if (ds == null) {
        ds = new MemoryDocumentStore();
    }
    if (bs == null) {
        bs = new MemoryBlobStore();
    }
    return createMK(clusterId, asyncDelay, ds, bs);
}
private void doLargeCleanupTest(int offset, int size) throws Exception { Clock clock = new Clock.Virtual(); DocumentMK mk1 = createMK(0 /* clusterId: 0 => uses clusterNodes collection */, 0, new MemoryDocumentStore(), new MemoryBlobStore()); DocumentNodeStore ns1 = mk1.getNodeStore(); // make sure we're visible and marked as active renewClusterIdLease(ns1); JournalGarbageCollector gc = new JournalGarbageCollector(ns1, 0); clock.getTimeIncreasing(); clock.getTimeIncreasing(); gc.gc(); // cleanup everything that might still be there // create entries as parametrized: for(int i=offset; i<size+offset; i++) { mk1.commit("/", "+\"regular"+i+"\": {}", null, null); // always run background ops to 'flush' the change // into the journal: ns1.runBackgroundOperations(); } Thread.sleep(100); // sleep 100millis assertEquals(size, gc.gc()); // should now be able to clean up everything }
/**
 * Generates a CSV from two binary file nodes and checks the CSV-backed
 * provider reads both entries back.
 */
@Test
public void csvGenerator() throws Exception {
    File csv = new File(temporaryFolder.getRoot(), "test.csv");
    BlobStore blobStore = new MemoryBlobStore();

    // two file nodes whose binaries live in the in-memory blob store
    NodeBuilder rootBuilder = EMPTY_NODE.builder();
    createFileNode(rootBuilder, "a", blobOf("foo", blobStore), "text/plain");
    createFileNode(rootBuilder, "b", blobOf("hello", blobStore), "text/plain");
    NodeStore store = new MemoryNodeStore(rootBuilder.getNodeState());

    NodeStoreBinaryResourceProvider extractor =
            new NodeStoreBinaryResourceProvider(store, blobStore);
    new CSVFileGenerator(csv).generate(extractor.getBinaries("/"));

    CSVFileBinaryResourceProvider csvProvider =
            new CSVFileBinaryResourceProvider(csv, blobStore);
    assertEquals(2, csvProvider.getBinaries("/").size());
    csvProvider.close();
}
/**
 * Test for OAK-1202: two cluster nodes committing against the same
 * persisted (in-memory) stores; both merges are expected to succeed.
 */
@Test
public void retryPersisted() throws Exception {
    MemoryDocumentStore docStore = new MemoryDocumentStore();
    MemoryBlobStore blobStore = new MemoryBlobStore();
    DocumentNodeStore ns1 = createMK(1, 1000, docStore, blobStore);
    DocumentNodeStore ns2 = createMK(2, 1000, docStore, blobStore);
    try {
        NodeBuilder builder1 = ns1.getRoot().builder();
        createTree(builder1.child("bar"), 2);
        NodeBuilder builder2 = ns2.getRoot().builder();
        createTree(builder2.child("qux"), 2);
        ns1.merge(builder1, HOOK, CommitInfo.EMPTY);
        ns2.merge(builder2, HOOK, CommitInfo.EMPTY);
    } finally {
        // always dispose both stores, even if a merge fails
        ns1.dispose();
        ns2.dispose();
    }
}
/**
 * Checks that binaries are counted per subtree and that mime type,
 * encoding and blob id survive extraction.
 */
@Test
public void countBinaries() throws Exception {
    NodeBuilder rootBuilder = EMPTY_NODE.builder();
    createFileNode(rootBuilder, "a", new IdBlob("hello", null), "text/plain");
    createFileNode(rootBuilder, "b", new IdBlob("hello", "id1"), "text/plain");
    createFileNode(rootBuilder.child("a2"), "c", new IdBlob("hello", "id2"), "text/foo")
            .setProperty(JcrConstants.JCR_ENCODING, "bar");
    NodeStore store = new MemoryNodeStore(rootBuilder.getNodeState());

    BlobStore blobStore = new MemoryBlobStore();
    NodeStoreBinaryResourceProvider extractor =
            new NodeStoreBinaryResourceProvider(store, blobStore);
    // 3 file nodes created but 2 expected — presumably the null-blob-id
    // node ("a") is skipped by the provider; verify against the impl
    assertEquals(2, extractor.getBinaries("/").size());
    assertEquals(1, extractor.getBinaries("/a2").size());

    BinaryResource bs = extractor.getBinaries("/a2").first().get();
    assertEquals("text/foo", bs.getMimeType());
    assertEquals("bar", bs.getEncoding());
    assertEquals("id2", bs.getBlobId());
}