// Dumps the blob's descriptive metadata to stdout, one labeled field per line.
System.out.println("Metageneration: " + blob.getMetageneration());
System.out.println("Name: " + blob.getName());
System.out.println("Size: " + blob.getSize());
System.out.println("StorageClass: " + blob.getStorageClass());
// getCreateTime() is epoch millis; wrapping it in java.util.Date gives a
// human-readable timestamp. NOTE(review): java.util.Date is legacy — consider
// Instant.ofEpochMilli(...) if the printed format may change.
System.out.println("TimeCreated: " + new Date(blob.getCreateTime()));
// Builds a CloudStorageReadChannel against fully mocked GCS collaborators:
// the object is stubbed as 42 bytes at generation 2, and the channel is
// expected to (a) fetch only GENERATION and SIZE metadata and (b) open a
// reader pinned to generation 2 so concurrent overwrites are detected.
// The stubbing order matters to Mockito's argument matching — do not reorder.
@Before
public void before() throws IOException {
  when(metadata.getSize()).thenReturn(42L);
  when(metadata.getGeneration()).thenReturn(2L);
  when(gcsStorage.get(
          file, Storage.BlobGetOption.fields(Storage.BlobField.GENERATION, Storage.BlobField.SIZE)))
      .thenReturn(metadata);
  when(gcsStorage.reader(file, Storage.BlobSourceOption.generationMatch(2L)))
      .thenReturn(gcsChannel);
  when(gcsChannel.isOpen()).thenReturn(true);
  // position 0, maxChannelReopens 1 — NOTE(review): assumed meaning of the two
  // numeric args based on typical CloudStorageReadChannel.create signatures; confirm.
  chan =
      CloudStorageReadChannel.create(
          gcsStorage, file, 0, 1, CloudStorageConfiguration.DEFAULT, "");
  // Verify construction performed exactly the stubbed metadata fetch and reader open.
  verify(gcsStorage)
      .get(
          eq(file),
          eq(Storage.BlobGetOption.fields(Storage.BlobField.GENERATION, Storage.BlobField.SIZE)));
  verify(gcsStorage).reader(eq(file), eq(Storage.BlobSourceOption.generationMatch(2L)));
}
// Creates the blob with the given content, sanity-checks the reported size,
// then opens a reader with an explicit chunk size.
// NOTE(review): fragment is truncated — the try-with-resources body continues
// beyond this excerpt; do not treat the try block as complete here.
Blob remoteBlob = storage.create(blob, content);
assertNotNull(remoteBlob);
assertEquals(blobSize, (long) remoteBlob.getSize());
try (ReadChannel reader = storage.reader(blob.getBlobId())) {
  reader.setChunkSize(chunkSize);
// Branches on the listed blob's name: a regular object must carry the uploaded
// content type and byte length; a directory placeholder must be zero-sized and
// flagged as a directory.
// NOTE(review): fragment is truncated — the trailing `else` branch continues
// beyond this excerpt.
if (remoteBlob.getName().equals(blobNames[1])) {
  assertEquals(CONTENT_TYPE, remoteBlob.getContentType());
  assertEquals(BLOB_BYTE_CONTENT.length, (long) remoteBlob.getSize());
  assertFalse(remoteBlob.isDirectory());
} else if (remoteBlob.getName().equals(directoryName + subdirectoryName)) {
  assertEquals(0L, (long) remoteBlob.getSize());
  assertTrue(remoteBlob.isDirectory());
} else {
// First group: a fully-populated blob must expose its stored metadata and the
// Storage options it was created with.
assertEquals(OWNER, blob.getOwner());
assertEquals(SELF_LINK, blob.getSelfLink());
assertEquals(SIZE, blob.getSize());
assertEquals(UPDATE_TIME, blob.getUpdateTime());
assertEquals(storage.getOptions(), blob.getStorage().getOptions());
// Second group: a directory pseudo-blob has no owner/self-link/update time and
// a zero size. NOTE(review): `blob` must be re-assigned between the two groups
// outside this excerpt (e.g. to a directory placeholder) — otherwise these
// assertions contradict the ones above; confirm against the surrounding test.
assertNull(blob.getOwner());
assertNull(blob.getSelfLink());
assertEquals(0L, (long) blob.getSize());
assertNull(blob.getUpdateTime());
assertTrue(blob.isDirectory());
/**
 * Returns the size in bytes of the backing blob.
 *
 * @throws IOException if the blob cannot be resolved
 */
@Override
public long contentLength() throws IOException {
  final Blob resolved = throwExceptionForNullBlob(getBlob());
  return resolved.getSize();
}
/**
 * Reports the byte length of the underlying GCS object.
 *
 * @throws IOException if no blob is available
 */
@Override
public long contentLength() throws IOException {
  final Blob current = getBlob();
  return throwExceptionForNullBlob(current).getSize();
}
/**
 * Formats a blob's size for display. Directory placeholders have no meaningful
 * size, so they render as a dash.
 */
private static String getBlobSize(Blob blob) {
  if (blob.isDirectory()) {
    return "-";
  }
  return toHumanReadableByteSize(blob.getSize());
}
/**
 * Returns the size in bytes of the GCS object at {@code path}.
 *
 * @param path the GCS path (with scheme prefix) identifying the object
 * @return the object's size in bytes
 * @throws IllegalArgumentException if no object exists at {@code path}
 */
@Override
public long length(String path) {
  final BlobId blobId = GcsBlob.of(path).getBlobId();
  // Storage.get returns null for a nonexistent object; the original code would
  // then fail with an uninformative NullPointerException.
  final Blob blob = storageProvider.getStorage().get(blobId);
  if (blob == null) {
    throw new IllegalArgumentException("GCS object not found: " + path);
  }
  return blob.getSize();
}
@Override public void importFiles(String path, String pattern, ArrayList<String> files, ArrayList<String> keys, ArrayList<String> fails, ArrayList<String> dels) { // bk[0] is bucket name, bk[1] is file name - file name is optional. final String bk[] = GcsBlob.removePrefix(path).split("/", 2); if (bk.length < 2) { parseBucket(bk[0], files, keys, fails); } else { try { Blob blob = storageProvider.getStorage().get(bk[0], bk[1]); final GcsBlob gcsBlob = GcsBlob.of(blob.getBlobId()); final Key k = GcsFileVec.make(path, blob.getSize()); keys.add(k.toString()); files.add(path); } catch (Throwable t) { Log.err(t); fails.add(path); } } }
/** A size below 1 KB must render as a plain byte count. */
@Test
public void testBlobSizeDisplay_Bytes() {
  when(binaryBlob.getSize()).thenReturn(100L);
  initEditorWithBlobs(binaryBlob);
  final JTable table = editorPanel.getBucketContentTable();
  final Object displayed = table.getValueAt(0, COL_NAME_TO_INDEX.get("Size"));
  assertThat(displayed).isEqualTo("100 B");
}
/** 100 * 1024 * 1024 bytes must render as "100.0 MB". */
@Test
public void testBlobSizeDisplay_MB() {
  when(binaryBlob.getSize()).thenReturn(104857600L);
  initEditorWithBlobs(binaryBlob);
  final JTable table = editorPanel.getBucketContentTable();
  final Object displayed = table.getValueAt(0, COL_NAME_TO_INDEX.get("Size"));
  assertThat(displayed).isEqualTo("100.0 MB");
}
/** 100 * 1024 bytes must render as "100.0 KB". */
@Test
public void testBlobSizeDisplay_KB() {
  when(binaryBlob.getSize()).thenReturn(102400L);
  initEditorWithBlobs(binaryBlob);
  final JTable table = editorPanel.getBucketContentTable();
  final Object displayed = table.getValueAt(0, COL_NAME_TO_INDEX.get("Size"));
  assertThat(displayed).isEqualTo("100.0 KB");
}
/** 100 * 1024^3 bytes must render as "100.0 GB". */
@Test
public void testBlobSizeDisplay_GB() {
  when(binaryBlob.getSize()).thenReturn(107374182400L);
  initEditorWithBlobs(binaryBlob);
  final JTable table = editorPanel.getBucketContentTable();
  final Object displayed = table.getValueAt(0, COL_NAME_TO_INDEX.get("Size"));
  assertThat(displayed).isEqualTo("100.0 GB");
}
// Common fixture: a logged-in user with three mock blobs — a directory
// placeholder, a 1 KB binary blob, and a blob nested inside the directory.
// Individual tests override specific stubs (e.g. getSize) before rendering.
@Before
public void setUp() {
  GcsTestUtils.setupVirtualFileWithBucketMocks(bucketVirtualFile);
  when(loginService.isLoggedIn()).thenReturn(true);
  when(directoryBlob.isDirectory()).thenReturn(true);
  when(directoryBlob.getName()).thenReturn(DIR_NAME);
  when(binaryBlob.isDirectory()).thenReturn(false);
  when(binaryBlob.getName()).thenReturn(BLOB_NAME);
  when(binaryBlob.getSize()).thenReturn(1024L);
  when(binaryBlob.getContentType()).thenReturn(BLOB_CONTENT_TYPE);
  // Epoch 0 keeps the "last modified" column deterministic across test runs.
  when(binaryBlob.getUpdateTime()).thenReturn(0L);
  when(binaryBlobInDirectory.getName()).thenReturn(NESTED_BLOB_FULL_NAME);
  // TODO: consider shutting down timer instead when clear what is creating the timer.
  ThreadTracker.longRunningThreadCreated(ApplicationManager.getApplication(), "Timer-0");
}
/**
 * Imports every object in {@code bucketId}, creating an H2O key per blob.
 * Per-blob failures are logged and recorded in {@code fails} so one bad object
 * does not abort the whole bucket import.
 *
 * @param bucketId name of the GCS bucket to enumerate
 * @param files accumulates canonical paths that imported successfully
 * @param keys accumulates the created H2O key names
 * @param fails accumulates canonical paths that failed
 */
private void parseBucket(String bucketId, ArrayList<String> files,
                         ArrayList<String> keys, ArrayList<String> fails) {
  // Storage.get returns null for a nonexistent bucket; fail with context
  // instead of an opaque NullPointerException.
  final Bucket bucket = storageProvider.getStorage().get(bucketId);
  if (bucket == null) {
    throw new IllegalArgumentException("GCS bucket not found: " + bucketId);
  }
  for (Blob blob : bucket.list().iterateAll()) {
    final GcsBlob gcsBlob = GcsBlob.of(blob.getBlobId());
    // Hoist the canonical path: it was recomputed up to three times per blob.
    final String canonical = gcsBlob.getCanonical();
    Log.debug("Importing: " + gcsBlob); // implicit toString(); explicit call was redundant
    try {
      final Key k = GcsFileVec.make(canonical, blob.getSize());
      keys.add(k.toString());
      files.add(canonical);
    } catch (Throwable t) {
      Log.err(t);
      fails.add(canonical);
    }
  }
}
/**
 * Resolves the GCS object referenced by {@code uri} and builds its H2O key
 * from the canonical path and the object's size.
 *
 * @param uri GCS URI of the object
 * @return the H2O key backing a file vector for the object
 * @throws IOException if the object does not exist
 */
@Override
public Key uriToKey(URI uri) throws IOException {
  final GcsBlob gcsBlob = GcsBlob.of(uri);
  // Storage.get returns null for a missing object; the original code would
  // then NPE on getSize(). The method already declares IOException, so report
  // the missing object through that channel with context.
  final Blob blob = storageProvider.getStorage().get(gcsBlob.getBlobId());
  if (blob == null) {
    throw new IOException("GCS object not found: " + uri);
  }
  return GcsFileVec.make(gcsBlob.getCanonical(), blob.getSize());
}