// NOTE(review): fragment — the enclosing anonymous class (and the outer statement the
// trailing "};" closes) is not visible in this view; left byte-identical.
@Override public void setBytes(byte[] bytes) { blobStore.putBlob(filterKey, bytes); } }; // stores raw bytes under filterKey
@Override public void setBytes(byte[] bytes) { blobStore.putBlob(filterKey, bytes); } }; // duplicate of the line above
public static void putBlob(BlobStore blobStore, String blobName, CharSequence contents) { byte[] blob = contents.toString().getBytes(Charsets.UTF_8); blobStore.putBlob(blobName, blob); }
public static void putBlob(BlobStore blobStore, String blobName, CharSequence contents) { byte[] blob = contents.toString().getBytes(Charsets.UTF_8); blobStore.putBlob(blobName, blob); }
@Override protected Void _call() { BlobStore blobStore = context().blobStore(); byte[] blob = message.getBytes(Charsets.UTF_8); blobStore.putBlob(MergeOp.MERGE_MSG, blob); return null; }
@Override protected Void _call() { BlobStore blobStore = context().blobStore(); byte[] blob = message.getBytes(Charsets.UTF_8); blobStore.putBlob(MergeOp.MERGE_MSG, blob); return null; }
blobStore.putBlob(deleteKey, nowBytes); deleteTokenExecutor.schedule(new Runnable() {
private void createFilterFile(Map<String, String> filters) { String filterBlob = ""; for (Entry<String, String> entry : filters.entrySet()) { String featurePath = entry.getKey(); String filter = entry.getValue(); filterBlob += "[" + featurePath + "]\n"; filterBlob += "type = CQL\n"; filterBlob += "filter = " + filter + "\n"; } try { localGeogig.repo.blobStore().putBlob(Blobs.SPARSE_FILTER_BLOB_KEY, filterBlob.getBytes()); Optional<Remote> remoteInfo = localGeogig.geogig.command(RemoteResolve.class) .setName(REMOTE_NAME).call(); Preconditions.checkState(remoteInfo.isPresent()); LocalMappedRemoteRepo remoteRepo; remoteRepo = spy( new LocalMappedRemoteRepo(remoteInfo.get(), remoteGeogig.envHome.toURI())); doNothing().when(remoteRepo).close(); remoteRepo.setRepository(remoteGeogig.geogig.getRepository()); localGeogig.remoteOverride.put(REMOTE_NAME, remoteRepo); } catch (Exception e) { e.printStackTrace(); throw e; } }
// NOTE(review): fragment — the enclosing try block is not visible; left byte-identical.
// Restores a previously saved sparse-filter blob from oldFilterFile into the blob store.
repository().blobStore().putBlob(Blobs.SPARSE_FILTER_BLOB_KEY, Files.toByteArray(oldFilterFile)); } catch (Exception e) {
repository().blobStore().putBlob(Blobs.SPARSE_FILTER_BLOB_KEY, Files.toByteArray(oldFilterFile)); } catch (Exception e) { // duplicate of the line above
private void createFilterFile(Map<String, String> filters) { String filterBlob = ""; for (Entry<String, String> entry : filters.entrySet()) { String featurePath = entry.getKey(); String filter = entry.getValue(); filterBlob += "[" + featurePath + "]\n"; filterBlob += "type = CQL\n"; filterBlob += "filter = " + filter + "\n"; } try { localGeogig.repo.blobStore().putBlob(Blobs.SPARSE_FILTER_BLOB_KEY, filterBlob.getBytes()); Optional<Remote> remoteInfo = localGeogig.geogig.command(RemoteResolve.class) .setName(REMOTE_NAME).call(); Preconditions.checkState(remoteInfo.isPresent()); LocalMappedRemoteRepo remoteRepo; remoteRepo = spy( new LocalMappedRemoteRepo(remoteInfo.get(), remoteGeogig.envHome.toURI())); doNothing().when(remoteRepo).close(); remoteRepo.setRepository(remoteGeogig.geogig.getRepository()); localGeogig.remoteOverride.put(REMOTE_NAME, remoteRepo); } catch (Exception e) { e.printStackTrace(); throw e; } }
// NOTE(review): fragment of an invisible enclosing method; left byte-identical.
// getBytes() here uses the platform-default charset — elsewhere this codebase encodes
// UTF-8 explicitly; consider getBytes(StandardCharsets.UTF_8) when the full file is edited.
geogig.getRepository().blobStore().putBlob(Blobs.SPARSE_FILTER_BLOB_KEY, filterFile.getBytes());
geogig.getRepository().blobStore().putBlob(Blobs.SPARSE_FILTER_BLOB_KEY, filterFile.getBytes()); // duplicate of the line above
@Test public void testClean() throws Exception { // Set up some refs to clean up geogig.command(UpdateRef.class).setName(Ref.MERGE_HEAD).setNewValue(ObjectId.NULL).call(); geogig.command(UpdateRef.class).setName(Ref.ORIG_HEAD).setNewValue(ObjectId.NULL).call(); geogig.command(UpdateRef.class).setName(Ref.CHERRY_PICK_HEAD).setNewValue(ObjectId.NULL) .call(); geogig.getRepository().blobStore().putBlob(MergeOp.MERGE_MSG, "Merge message".getBytes()); ImmutableList<String> cleanedUp = geogig.command(CleanRefsOp.class).call(); assertEquals(4, cleanedUp.size()); assertTrue(cleanedUp.contains(Ref.MERGE_HEAD)); assertTrue(cleanedUp.contains(Ref.ORIG_HEAD)); assertTrue(cleanedUp.contains(Ref.CHERRY_PICK_HEAD)); assertTrue(cleanedUp.contains(MergeOp.MERGE_MSG)); Optional<Ref> ref = geogig.command(RefParse.class).setName(Ref.MERGE_HEAD).call(); assertFalse(ref.isPresent()); ref = geogig.command(RefParse.class).setName(Ref.ORIG_HEAD).call(); assertFalse(ref.isPresent()); ref = geogig.command(RefParse.class).setName(Ref.CHERRY_PICK_HEAD).call(); assertFalse(ref.isPresent()); Optional<byte[]> mergeMsg = geogig.getRepository().blobStore().getBlob(MergeOp.MERGE_MSG); assertFalse(mergeMsg.isPresent()); // Running it again should result in nothing being cleaned up. cleanedUp = geogig.command(CleanRefsOp.class).call(); assertEquals(0, cleanedUp.size()); }
@Test public void testClean() throws Exception { // Set up some refs to clean up geogig.command(UpdateRef.class).setName(Ref.MERGE_HEAD).setNewValue(ObjectId.NULL).call(); geogig.command(UpdateRef.class).setName(Ref.ORIG_HEAD).setNewValue(ObjectId.NULL).call(); geogig.command(UpdateRef.class).setName(Ref.CHERRY_PICK_HEAD).setNewValue(ObjectId.NULL) .call(); geogig.getRepository().blobStore().putBlob(MergeOp.MERGE_MSG, "Merge message".getBytes()); ImmutableList<String> cleanedUp = geogig.command(CleanRefsOp.class).call(); assertEquals(4, cleanedUp.size()); assertTrue(cleanedUp.contains(Ref.MERGE_HEAD)); assertTrue(cleanedUp.contains(Ref.ORIG_HEAD)); assertTrue(cleanedUp.contains(Ref.CHERRY_PICK_HEAD)); assertTrue(cleanedUp.contains(MergeOp.MERGE_MSG)); Optional<Ref> ref = geogig.command(RefParse.class).setName(Ref.MERGE_HEAD).call(); assertFalse(ref.isPresent()); ref = geogig.command(RefParse.class).setName(Ref.ORIG_HEAD).call(); assertFalse(ref.isPresent()); ref = geogig.command(RefParse.class).setName(Ref.CHERRY_PICK_HEAD).call(); assertFalse(ref.isPresent()); Optional<byte[]> mergeMsg = geogig.getRepository().blobStore().getBlob(MergeOp.MERGE_MSG); assertFalse(mergeMsg.isPresent()); // Running it again should result in nothing being cleaned up. cleanedUp = geogig.command(CleanRefsOp.class).call(); assertEquals(0, cleanedUp.size()); }