/**
 * Looks up the blob stored under {@code blobName} in the given store.
 *
 * @param blobStore the store to query
 * @param blobName the name of the blob to look up
 * @return the blob's contents, or absent if no blob exists under that name
 */
public static Optional<byte[]> getBlob(BlobStore blobStore, final String blobName) {
    return blobStore.getBlob(blobName);
}
// Stores the given raw bytes as the filter blob under filterKey.
// NOTE(review): this method belongs to an anonymous class whose declaration
// starts outside this view; the trailing "};" closes that class.
@Override public void setBytes(byte[] bytes) { blobStore.putBlob(filterKey, bytes); } };
// Deferred cleanup task: after the 60-second delay below, remove the blob
// stored under deleteKey — but only if the repository is still open, to avoid
// touching a closed repository's blob store.
// NOTE(review): the scheduling call that owns this anonymous Runnable starts
// outside this view; the trailing arguments (60, TimeUnit.SECONDS) are the delay.
@Override public void run() { if (repo.isOpen()) { BlobStore blobs = repo.blobStore(); blobs.removeBlob(deleteKey); } } }, 60, TimeUnit.SECONDS);
@Test public void testClean() throws Exception { // Set up some refs to clean up geogig.command(UpdateRef.class).setName(Ref.MERGE_HEAD).setNewValue(ObjectId.NULL).call(); geogig.command(UpdateRef.class).setName(Ref.ORIG_HEAD).setNewValue(ObjectId.NULL).call(); geogig.command(UpdateRef.class).setName(Ref.CHERRY_PICK_HEAD).setNewValue(ObjectId.NULL) .call(); geogig.getRepository().blobStore().putBlob(MergeOp.MERGE_MSG, "Merge message".getBytes()); ImmutableList<String> cleanedUp = geogig.command(CleanRefsOp.class).call(); assertEquals(4, cleanedUp.size()); assertTrue(cleanedUp.contains(Ref.MERGE_HEAD)); assertTrue(cleanedUp.contains(Ref.ORIG_HEAD)); assertTrue(cleanedUp.contains(Ref.CHERRY_PICK_HEAD)); assertTrue(cleanedUp.contains(MergeOp.MERGE_MSG)); Optional<Ref> ref = geogig.command(RefParse.class).setName(Ref.MERGE_HEAD).call(); assertFalse(ref.isPresent()); ref = geogig.command(RefParse.class).setName(Ref.ORIG_HEAD).call(); assertFalse(ref.isPresent()); ref = geogig.command(RefParse.class).setName(Ref.CHERRY_PICK_HEAD).call(); assertFalse(ref.isPresent()); Optional<byte[]> mergeMsg = geogig.getRepository().blobStore().getBlob(MergeOp.MERGE_MSG); assertFalse(mergeMsg.isPresent()); // Running it again should result in nothing being cleaned up. cleanedUp = geogig.command(CleanRefsOp.class).call(); assertEquals(0, cleanedUp.size()); }
/**
 * Reads the stored merge message from the repository's blob store.
 *
 * @return the merge message text, or the empty string if no message blob exists
 */
@Override
protected String _call() {
    BlobStore blobStore = context.blobStore();
    Optional<InputStream> blobAsStream = blobStore.getBlobAsStream(MergeOp.MERGE_MSG);
    if (!blobAsStream.isPresent()) {
        return "";
    }
    try (InputStream in = blobAsStream.get()) {
        // The blob holds UTF-8 text; rejoin its lines with '\n' to normalize
        // whatever line endings it was written with.
        List<String> lines = CharStreams.readLines(new InputStreamReader(in, Charsets.UTF_8));
        return Joiner.on("\n").join(lines);
    } catch (IOException e) {
        // Guava's Throwables.propagate is deprecated; wrap and rethrow directly,
        // matching this command's sibling implementation elsewhere in the codebase.
        throw new RuntimeException(e);
    }
}
@Test public void testClean() throws Exception { // Set up some refs to clean up geogig.command(UpdateRef.class).setName(Ref.MERGE_HEAD).setNewValue(ObjectId.NULL).call(); geogig.command(UpdateRef.class).setName(Ref.ORIG_HEAD).setNewValue(ObjectId.NULL).call(); geogig.command(UpdateRef.class).setName(Ref.CHERRY_PICK_HEAD).setNewValue(ObjectId.NULL) .call(); geogig.getRepository().blobStore().putBlob(MergeOp.MERGE_MSG, "Merge message".getBytes()); ImmutableList<String> cleanedUp = geogig.command(CleanRefsOp.class).call(); assertEquals(4, cleanedUp.size()); assertTrue(cleanedUp.contains(Ref.MERGE_HEAD)); assertTrue(cleanedUp.contains(Ref.ORIG_HEAD)); assertTrue(cleanedUp.contains(Ref.CHERRY_PICK_HEAD)); assertTrue(cleanedUp.contains(MergeOp.MERGE_MSG)); Optional<Ref> ref = geogig.command(RefParse.class).setName(Ref.MERGE_HEAD).call(); assertFalse(ref.isPresent()); ref = geogig.command(RefParse.class).setName(Ref.ORIG_HEAD).call(); assertFalse(ref.isPresent()); ref = geogig.command(RefParse.class).setName(Ref.CHERRY_PICK_HEAD).call(); assertFalse(ref.isPresent()); Optional<byte[]> mergeMsg = geogig.getRepository().blobStore().getBlob(MergeOp.MERGE_MSG); assertFalse(mergeMsg.isPresent()); // Running it again should result in nothing being cleaned up. cleanedUp = geogig.command(CleanRefsOp.class).call(); assertEquals(0, cleanedUp.size()); }
/**
 * Fetches the persisted merge commit message, if any.
 *
 * @return the stored merge message, or {@code ""} when none has been saved
 */
@Override
protected String _call() {
    BlobStore store = context.blobStore();
    Optional<InputStream> stream = store.getBlobAsStream(MergeOp.MERGE_MSG);
    if (!stream.isPresent()) {
        return "";
    }
    try (InputStream in = stream.get()) {
        // Decode the blob as UTF-8 and normalize its line endings to '\n'.
        InputStreamReader reader = new InputStreamReader(in, Charsets.UTF_8);
        List<String> lines = CharStreams.readLines(reader);
        return Joiner.on("\n").join(lines);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Convenience lookup of a named blob from a {@link BlobStore}.
 *
 * @param blobStore the store to read from
 * @param blobName the blob's key
 * @return the blob's bytes, or absent when the key is unknown
 */
public static Optional<byte[]> getBlob(BlobStore blobStore, final String blobName) {
    final Optional<byte[]> result = blobStore.getBlob(blobName);
    return result;
}
// Persists the supplied bytes to the blob store under filterKey.
// NOTE(review): method of an anonymous class declared outside this view; the
// final "};" terminates that anonymous class expression.
@Override public void setBytes(byte[] bytes) { blobStore.putBlob(filterKey, bytes); } };
/**
 * Advances the rebase sequence past the current commit: deletes the blob holding
 * the current commit id and bumps the persisted "next" index by one.
 *
 * @throws IllegalStateException if the rebase index blobs cannot be read or written
 */
private void skipCurrentCommit() {
    List<String> indexLines = Blobs.readLines(context().blobStore(), REBASE_NEXT_BLOB);
    try {
        String currentIdx = indexLines.get(0);
        // Drop the blob that holds the commit being skipped.
        context().blobStore().removeBlob(REBASE_BLOB_PREFIX + currentIdx);
        // Persist the incremented index so the next step picks up the following commit.
        int next = Integer.parseInt(currentIdx) + 1;
        Blobs.putBlob(context().blobStore(), REBASE_NEXT_BLOB, String.valueOf(next));
    } catch (Exception e) {
        throw new IllegalStateException("Cannot read/write rebase commits index", e);
    }
}
/**
 * Returns the raw bytes of the sparse filter blob.
 *
 * @return the filter blob contents
 * @throws IOException if no filter blob is stored under {@code filterKey}
 */
@Override
public byte[] iniBytes() throws IOException {
    Optional<byte[]> stored = blobStore.getBlob(filterKey);
    // Guard clause: a missing filter blob is reported as an I/O failure.
    if (!stored.isPresent()) {
        throw new IOException("Filter blob did not exist.");
    }
    return stored.get();
}
/**
 * Stores a character sequence as a UTF-8 encoded blob.
 *
 * @param blobStore the store to write to
 * @param blobName the key under which to store the contents
 * @param contents the text to persist
 */
public static void putBlob(BlobStore blobStore, String blobName, CharSequence contents) {
    blobStore.putBlob(blobName, contents.toString().getBytes(Charsets.UTF_8));
}
/**
 * Skips over the commit the rebase is currently pointing at by removing its
 * per-commit blob and writing back an index advanced by one.
 *
 * @throws IllegalStateException when the rebase bookkeeping blobs cannot be
 *         read or written
 */
private void skipCurrentCommit() {
    List<String> nextLines = Blobs.readLines(context().blobStore(), REBASE_NEXT_BLOB);
    try {
        final String idx = nextLines.get(0);
        final String blobName = REBASE_BLOB_PREFIX + idx;
        // Remove the blob for the skipped commit, then advance the stored index.
        context().blobStore().removeBlob(blobName);
        final int advanced = Integer.parseInt(idx) + 1;
        Blobs.putBlob(context().blobStore(), REBASE_NEXT_BLOB, String.valueOf(advanced));
    } catch (Exception e) {
        throw new IllegalStateException("Cannot read/write rebase commits index", e);
    }
}
/**
 * Loads the sparse filter blob's bytes from the blob store.
 *
 * @return the stored filter bytes
 * @throws IOException when the blob under {@code filterKey} is absent
 */
@Override
public byte[] iniBytes() throws IOException {
    final Optional<byte[]> filterBytes = blobStore.getBlob(filterKey);
    if (filterBytes.isPresent()) {
        return filterBytes.get();
    } else {
        throw new IOException("Filter blob did not exist.");
    }
}
/**
 * Writes the given text to the blob store, encoded as UTF-8.
 *
 * @param blobStore the target store
 * @param blobName the blob's key
 * @param contents the character data to store
 */
public static void putBlob(BlobStore blobStore, String blobName, CharSequence contents) {
    final byte[] encoded = contents.toString().getBytes(Charsets.UTF_8);
    blobStore.putBlob(blobName, encoded);
}
/**
 * Deletes the transient refs left behind by merge, rebase and cherry-pick
 * operations ({@code MERGE_HEAD}, {@code ORIG_HEAD}, {@code CHERRY_PICK_HEAD})
 * along with the stored merge message blob.
 *
 * @return the names of the items that were actually present and removed
 */
@Override
protected ImmutableList<String> _call() {
    Builder<String> cleaned = new ImmutableList.Builder<String>();
    // Each of these refs only exists while its operation is in progress.
    deleteRefIfPresent(Ref.MERGE_HEAD, cleaned);
    deleteRefIfPresent(Ref.ORIG_HEAD, cleaned);
    deleteRefIfPresent(Ref.CHERRY_PICK_HEAD, cleaned);
    // The merge message is stored as a blob, not a ref, so it is removed separately.
    BlobStore blobStore = context.blobStore();
    Optional<byte[]> blob = Blobs.getBlob(blobStore, MergeOp.MERGE_MSG);
    if (blob.isPresent()) {
        cleaned.add(MergeOp.MERGE_MSG);
        blobStore.removeBlob(MergeOp.MERGE_MSG);
    }
    return cleaned.build();
}

/**
 * Deletes the named ref and records its name in {@code cleaned} when it existed.
 */
private void deleteRefIfPresent(String refName, Builder<String> cleaned) {
    Optional<Ref> ref = command(UpdateRef.class).setDelete(true).setName(refName).call();
    if (ref.isPresent()) {
        cleaned.add(refName);
    }
}
/**
 * Determines whether this repository is a sparse (mapped) clone by checking for
 * the presence of the sparse filter blob.
 *
 * @return {@code true} if the sparse filter blob exists
 */
@Override
public boolean isSparse() {
    Optional<byte[]> sparseFilter = blobStore().getBlob(Blobs.SPARSE_FILTER_BLOB_KEY);
    return sparseFilter.isPresent();
}
/**
 * Persists the merge message to the repository's blob store as UTF-8 text.
 *
 * @return always {@code null}
 */
@Override
protected Void _call() {
    byte[] encoded = message.getBytes(Charsets.UTF_8);
    context().blobStore().putBlob(MergeOp.MERGE_MSG, encoded);
    return null;
}
/**
 * Applies the next queued rebase commit, if one remains.
 * <p>
 * Reads the persisted "next" index, loads the commit id stored under the
 * corresponding rebase blob, applies that commit, then deletes the blob and
 * advances the index.
 *
 * @param useCommitChanges whether to apply the commit's own changes
 * @return {@code true} if a commit was applied, {@code false} when the queue is exhausted
 * @throws IllegalStateException if the advanced index cannot be persisted
 */
private boolean applyNextCommit(boolean useCommitChanges) {
    List<String> indexLines = Blobs.readLines(context().blobStore(), REBASE_NEXT_BLOB);
    if (indexLines.isEmpty()) {
        return false; // no rebase index at all
    }
    final String idx = indexLines.get(0);
    final String blobName = REBASE_BLOB_PREFIX + idx;
    List<String> commitLines = Blobs.readLines(context().blobStore(), blobName);
    if (commitLines.isEmpty()) {
        return false; // index points past the last queued commit
    }
    // The blob's first line is the id of the commit to apply.
    RevCommit queued = objectDatabase().getCommit(ObjectId.valueOf(commitLines.get(0)));
    applyCommit(queued, useCommitChanges);
    // Consume the blob for the applied commit and persist the advanced index.
    context().blobStore().removeBlob(blobName);
    final int advanced = Integer.parseInt(idx) + 1;
    try {
        Blobs.putBlob(context().blobStore(), REBASE_NEXT_BLOB, String.valueOf(advanced));
    } catch (Exception e) {
        throw new IllegalStateException("Cannot read/write rebase commits index", e);
    }
    return true;
}
/**
 * Reports whether this clone was created with a sparse filter.
 *
 * @return {@code true} when the sparse filter blob is present in the blob store
 */
@Override
public boolean isSparse() {
    // A sparse clone is identified solely by the stored sparse filter blob.
    final boolean hasSparseFilter = blobStore()
            .getBlob(Blobs.SPARSE_FILTER_BLOB_KEY).isPresent();
    return hasSparseFilter;
}