/**
 * Checks whether the given blob exists in the named container.
 *
 * @param container name of the blob container to look in
 * @param blobPath  path of the block blob within the container
 * @return {@code true} if the blob exists server-side
 * @throws URISyntaxException if the container or blob name forms an invalid URI
 * @throws StorageException   on a storage-service failure
 */
public boolean getBlobExists(String container, String blobPath) throws URISyntaxException, StorageException {
    CloudBlobContainer target = getCloudBlobContainer(container);
    CloudBlockBlob candidate = target.getBlockBlobReference(blobPath);
    return candidate.exists();
}
}
/**
 * Reports whether the manifest blob exists.
 * A storage failure is logged and treated as "not present" rather than propagated.
 */
@Override
public boolean exists() {
    boolean present = false;
    try {
        present = manifestBlob.exists();
    } catch (StorageException e) {
        log.error("Can't check if the manifest exists", e);
    }
    return present;
}
/**
 * Reports whether the manifest blob exists.
 * On a storage error the failure is logged and {@code false} is returned.
 */
@Override
public boolean exists() {
    try {
        return manifestBlob.exists();
    } catch (StorageException e) {
        // Best-effort check: surface the error in the log, not to the caller.
        log.error("Can't check if the manifest exists", e);
    }
    return false;
}
/**
 * Reports whether both the container and the block blob exist.
 * The container is checked first; the blob lookup is skipped when the container is absent.
 *
 * @throws StorageRuntimeException wrapping any {@link StorageException} from the service
 */
@Override
public boolean exists() {
    try {
        boolean containerPresent = this.blobContainer.exists();
        return containerPresent && blockBlob.exists();
    } catch (StorageException e) {
        log.error(MSG_FAIL_CHECK_EXIST, e);
        throw new StorageRuntimeException(MSG_FAIL_CHECK_EXIST, e);
    }
}
/**
 * Opens a reader for the named segment archive.
 * An archive is only readable if its "closed" marker blob exists; otherwise the
 * archive was not shut down cleanly and an {@link IOException} is raised.
 *
 * @param archiveName name of the archive directory to open
 * @throws IOException if the archive is unclosed or a storage/URI error occurs
 */
@Override
public SegmentArchiveReader open(String archiveName) throws IOException {
    try {
        CloudBlobDirectory archiveDirectory = getDirectory(archiveName);
        CloudBlockBlob closedMarker = archiveDirectory.getBlockBlobReference("closed");
        if (closedMarker.exists()) {
            return new AzureSegmentArchiveReader(archiveDirectory, ioMonitor);
        }
        throw new IOException("The archive " + archiveName + " hasn't been closed correctly.");
    } catch (StorageException | URISyntaxException e) {
        throw new IOException(e);
    }
}
/**
 * Checks whether a metadata record with the given name exists in the Azure container.
 * Any storage/URI/datastore error is logged at debug level and reported as "absent".
 *
 * @param name logical record name; prefixed via {@code addMetaKeyPrefix} before lookup
 * @return {@code true} if the backing blob exists, {@code false} otherwise or on error
 */
@Override
public boolean metadataRecordExists(String name) {
    long startMillis = System.currentTimeMillis();
    // Swap in this class's loader for the Azure call, restoring the caller's afterwards.
    ClassLoader callerLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        CloudBlockBlob record = getAzureContainer().getBlockBlobReference(addMetaKeyPrefix(name));
        boolean exists = record.exists();
        LOG.debug("Metadata record {} exists {}. duration={}", name, exists, (System.currentTimeMillis() - startMillis));
        return exists;
    } catch (DataStoreException | StorageException | URISyntaxException e) {
        LOG.debug("Error checking existence of metadata record = {}", name, e);
    } finally {
        if (callerLoader != null) {
            Thread.currentThread().setContextClassLoader(callerLoader);
        }
    }
    return false;
}
/**
 * Opens the named segment archive for reading.
 * Refuses archives lacking the "closed" marker blob, since those were not
 * finalized correctly and may be truncated.
 *
 * @throws IOException on an unclosed archive, or wrapping any storage/URI failure
 */
@Override
public SegmentArchiveReader open(String archiveName) throws IOException {
    try {
        CloudBlobDirectory archiveDirectory = getDirectory(archiveName);
        boolean closedCleanly = archiveDirectory.getBlockBlobReference("closed").exists();
        if (!closedCleanly) {
            throw new IOException("The archive " + archiveName + " hasn't been closed correctly.");
        }
        return new AzureSegmentArchiveReader(archiveDirectory, ioMonitor);
    } catch (StorageException | URISyntaxException e) {
        throw new IOException(e);
    }
}
/**
 * Returns whether the blob at {@code blobPath} exists inside {@code container}.
 *
 * @throws URISyntaxException on an invalid container/blob name
 * @throws StorageException   on a storage-service failure
 */
public boolean getBlobExists(String container, String blobPath) throws URISyntaxException, StorageException {
    CloudBlockBlob blob = getCloudBlobContainer(container).getBlockBlobReference(blobPath);
    return blob.exists();
}
}
/**
 * Checks whether the archive block for the given id exists under "blocks/".
 * The id is first encoded into its hashed archive name (optionally encrypted).
 *
 * @throws IOException wrapping any lookup failure (also logged)
 */
@Override
public boolean fileExists(long id) throws IOException {
    try {
        String archiveName = EncyptUtils.encHashArchiveName(id, Main.chunkStoreEncryptionEnabled);
        return container.getBlockBlobReference("blocks/" + archiveName).exists();
    } catch (Exception e) {
        SDFSLogger.getLog().error("unable to get id", e);
        throw new IOException(e);
    }
}
/**
 * Reports whether the blob for the given data identifier exists.
 * The thread context class loader is temporarily switched to this class's
 * loader for the Azure call and restored in all cases.
 *
 * @param identifier record identifier, mapped to a blob key via {@code getKeyName}
 * @throws DataStoreException wrapping any failure during the existence check
 */
@Override
public boolean exists(DataIdentifier identifier) throws DataStoreException {
    long startMillis = System.currentTimeMillis();
    String key = getKeyName(identifier);
    ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        boolean exists = getAzureContainer().getBlockBlobReference(key).exists();
        LOG.debug("Blob exists={} identifier={} duration={}", exists, key, (System.currentTimeMillis() - startMillis));
        return exists;
    } catch (Exception e) {
        throw new DataStoreException(e);
    } finally {
        if (previousLoader != null) {
            Thread.currentThread().setContextClassLoader(previousLoader);
        }
    }
}
private AzureBlobLease getLeaseInternal(String partitionId, BlobRequestOptions options) throws URISyntaxException, IOException, StorageException { AzureBlobLease retval = null; CloudBlockBlob leaseBlob = this.consumerGroupDirectory.getBlockBlobReference(partitionId); // getBlockBlobReference does not take options if (leaseBlob.exists(null, options, null)) { retval = downloadLease(leaseBlob, options); } return retval; }
/**
 * Builds a time-limited download link for an uploaded artifact blob.
 * The blob must already exist at {@code projectDir/versionDir/fileName};
 * the link is the container URI plus the blob path and a SAS token.
 *
 * @return the SAS-signed download URL
 * @throws StopExecutionException if the blob does not exist
 * @throws URISyntaxException     on an invalid blob URI
 * @throws StorageException       on a storage-service failure
 */
private String createLink(CloudBlobContainer blobContainer, String projectDir, String versionDir, String fileName) throws URISyntaxException, StorageException {
    // Plain concatenation instead of the legacy synchronized StringBuffer:
    // the compiler emits an efficient StringBuilder chain for this.
    String blobRef = projectDir + "/" + versionDir + "/" + fileName;
    if (!blobContainer.getBlockBlobReference(blobRef).exists()) {
        // Fixed grammar of the original message ("does not exists").
        throw new StopExecutionException("blob file does not exist: " + blobRef);
    }
    return blobContainer.getUri().toASCIIString()
            + "/" + blobRef
            + "?" + generateSASToken(blobContainer, TOKEN_TIME_SPAN);
}
}
/**
 * Reports whether this node holds a claim blob for the given key.
 * Non-clustered deployments have no claim blobs, so everything counts as claimed.
 *
 * @return {@code true} if the claim blob exists (or not clustered); {@code false}
 *         when absent or on a lookup error (which is logged)
 */
@Override
public boolean objectClaimed(String key) throws IOException {
    if (!this.clustered) {
        return true;
    }
    try {
        String claimPath = "claims/" + key + "/" + EncyptUtils.encHashArchiveName(Main.DSEID, Main.chunkStoreEncryptionEnabled);
        return container.getBlockBlobReference(claimPath).exists();
    } catch (Exception e) {
        SDFSLogger.getLog().error("error checking if blob is clamimed", e);
        return false;
    }
}
/**
 * Reports whether the named object is checked out by the given volume,
 * i.e. whether that volume's claim blob exists. Non-clustered deployments
 * always report checked-out.
 *
 * @return claim-blob existence; {@code false} on a lookup error (logged)
 */
@Override
public boolean isCheckedOut(String name, long volumeID) throws IOException {
    if (!this.clustered) {
        return true;
    }
    try {
        String volumeTag = EncyptUtils.encHashArchiveName(volumeID, Main.chunkStoreEncryptionEnabled);
        CloudBlockBlob claimBlob = container.getBlockBlobReference("claims/" + name + "/" + volumeTag);
        return claimBlob.exists();
    } catch (Exception e) {
        SDFSLogger.getLog().error("error checking if blob is clamimed", e);
        return false;
    }
}
/**
 * Reads the named blob fully into a freshly allocated buffer.
 *
 * @return the blob contents, or {@code null} when the blob does not exist
 * @throws IOException wrapping any {@link StorageException}
 */
private Buffer readBlob(String name) throws IOException {
    try {
        CloudBlockBlob blob = getBlob(name);
        if (blob.exists()) {
            int length = (int) blob.getProperties().getLength();
            Buffer buffer = Buffer.allocate(length);
            AzureUtilities.readBufferFully(blob, buffer);
            return buffer;
        }
        return null;
    } catch (StorageException e) {
        throw new IOException(e);
    }
}
/**
 * Loads the entire contents of the named blob.
 *
 * @return a buffer sized to the blob's length, or {@code null} if the blob is absent
 * @throws IOException wrapping any {@link StorageException}
 */
private Buffer readBlob(String name) throws IOException {
    try {
        CloudBlockBlob source = getBlob(name);
        if (!source.exists()) {
            return null;
        }
        // Buffer.allocate takes an int; blob length is narrowed accordingly.
        Buffer target = Buffer.allocate((int) source.getProperties().getLength());
        AzureUtilities.readBufferFully(source, target);
        return target;
    } catch (StorageException e) {
        throw new IOException(e);
    }
}
/**
 * Best-effort delete of a chunk blob: evicts it from the local chunk cache and
 * removes the blob if present. Failures are logged, never propagated.
 *
 * @param hash  chunk hash identifying the blob (name derived via getHashName)
 * @param start unused here; part of the interface signature
 * @param len   unused here; part of the interface signature
 */
@Override
public void deleteChunk(byte[] hash, long start, int len) throws IOException {
    String hashString = this.getHashName(hash, Main.chunkStoreEncryptionEnabled);
    CloudBlobContainer container = null;
    try {
        container = pool.borrowObject();
        this.chunks.invalidate(hashString);
        CloudBlockBlob blob = container.getBlockBlobReference(hashString);
        if (blob.exists()) {
            blob.delete();
        }
    } catch (Exception e) {
        SDFSLogger.getLog().warn("Unable to delete object " + hashString, e);
    } finally {
        // Bug fix: only return what was actually borrowed. If borrowObject()
        // itself threw, container is still null and returning null would
        // corrupt the pool (the original called returnObject unconditionally).
        if (container != null) {
            pool.returnObject(container);
        }
    }
}
/**
 * Checks out the object with the given id by creating its claim blob.
 * If a claim blob already exists this is a no-op; otherwise the metadata of
 * the corresponding "keys/" blob is copied onto a new claim blob whose body
 * is the current timestamp.
 *
 * @throws IOException wrapping any storage failure
 */
@Override
public void checkoutObject(long id, int claims) throws IOException {
    try {
        CloudBlockBlob claimBlob = container.getBlockBlobReference(this.getClaimName(id));
        if (claimBlob.exists()) {
            // Already claimed; nothing to do.
            return;
        }
        String haName = EncyptUtils.encHashArchiveName(id, Main.chunkStoreEncryptionEnabled);
        CloudBlockBlob keyBlob = container.getBlockBlobReference("keys/" + haName);
        keyBlob.downloadAttributes();
        HashMap<String, String> metaData = keyBlob.getMetadata();
        claimBlob.setMetadata(metaData);
        claimBlob.uploadText(Long.toString(System.currentTimeMillis()));
    } catch (Exception e) {
        throw new IOException(e);
    }
}
/**
 * Test callback: when the intercepted request is a DELETE, deletes blob1 and
 * asserts it no longer exists; a StorageException fails the test.
 */
@Override
public void eventOccurred(SendingRequestEvent eventArg) {
    HttpURLConnection connection = (HttpURLConnection) eventArg.getConnectionObject();
    if (connection.getRequestMethod().equals("DELETE")) {
        try {
            blob1.delete();
            assertFalse(blob1.exists());
        } catch (StorageException e) {
            fail("Delete should succeed.");
        }
    }
}
});
// Reports whether the block blob "blocks/<id>" has been restored to the HOT
// tier. As a side effect, it kicks off rehydration when the blob sits in a
// colder tier with no (or unknown) rehydration in progress.
@Override public boolean blockRestored(String id) {
    try {
        CloudBlockBlob blob = container.getBlockBlobReference("blocks/" + id);
        // NOTE(review): a missing blob is reported as "restored" (true) —
        // presumably so callers stop waiting on it; confirm the intended contract.
        if(!blob.exists()) return true;
        blob.downloadAttributes();
        if (blob.getProperties().getStandardBlobTier().equals(StandardBlobTier.HOT)) {
            // Already on the HOT tier: restore complete.
            return true;
        } else {
            // Not yet HOT. If no rehydration status is reported (or it is
            // UNKNOWN), (re)request rehydration by targeting the HOT tier.
            if (blob.getProperties().getRehydrationStatus() == null || blob.getProperties().getRehydrationStatus().equals(RehydrationStatus.UNKNOWN)) {
                SDFSLogger.getLog().warn("rehydration status unknow for " + id + " will attempt to rehydrate");
                blob.uploadStandardBlobTier(StandardBlobTier.HOT);
            }
            // Rehydration still in flight (or just requested).
            return false;
        }
    } catch (Exception e) {
        // Any failure (including a possible NPE if getStandardBlobTier() is
        // null) is logged and treated as not-restored.
        SDFSLogger.getLog().warn("error while checking block [" + id + "] restored", e);
        return false;
    }
}