@Override
public InputStream get() {
  return blob.getInputStream();
}
}
@Override
public DateTime getModifiedDate() {
  return blob.getMetrics().getCreationTime();
}
});
Blob blob = blobStore.get(asset.blobRef().getBlobId());
if (blob != null) {
  asset.createdBy(blob.getHeaders().get(BlobStore.CREATED_BY_HEADER));
  asset.createdByIp(blob.getHeaders().get(BlobStore.CREATED_BY_IP_HEADER));
  asset.blobCreated(blob.getMetrics().getCreationTime());
}
provenance.set(HASHES_NOT_VERIFIED, !assetBlob.getHashesVerified());
Map<String, String> blobHeaders = assetBlob.getBlob().getHeaders();
if (blobHeaders.containsKey(BlobStore.CREATED_BY_HEADER)) {
  asset.createdBy(blobHeaders.get(BlobStore.CREATED_BY_HEADER));
}
@Override
public InputStream openInputStream() throws IOException {
  return blob.getInputStream();
}
/**
 * Deletes this temporary {@link AssetBlob} by clearing any locally ingested blobs.
 *
 * Note this shouldn't stop the current response from serving back temporary content;
 * it just makes sure non-persisted content is eventually cleaned up from the store.
 *
 * @since 3.4
 */
void delete(final String reason) {
  if (ingestedBlob != null) {
    if (canonicalBlob != null) {
      // canonical redirect is in place, so it's safe to hard-delete the temp blob
      blobStore.deleteHard(ingestedBlob.getId());
    }
    else {
      // no redirect, so the temp blob is all we have - use soft-delete so the bytes
      // will still be available on disk for streaming back in the current response,
      // while making sure it gets cleaned up on the next compact
      blobStore.delete(ingestedBlob.getId(), reason);
    }
  }
}
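
// A minimal, self-contained sketch of the soft- vs hard-delete split used above.
// ToyBlobStore and its fields are hypothetical stand-ins, not the Nexus BlobStore
// API: hard delete drops the bytes immediately, while soft delete only marks the
// blob so a later compact pass reclaims it, leaving the bytes readable for any
// in-flight response.
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class ToyBlobStore
{
  private final Map<String, byte[]> blobs = new HashMap<>();

  private final Set<String> softDeleted = new HashSet<>();

  void deleteHard(final String blobId) {
    blobs.remove(blobId); // bytes are gone immediately
  }

  void delete(final String blobId, final String reason) {
    softDeleted.add(blobId); // bytes stay put until the next compact
  }

  void compact() {
    softDeleted.forEach(blobs::remove); // reclaim everything soft-deleted so far
    softDeleted.clear();
  }
}
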
@Override
public long getSize() {
  return blob.getMetrics().getContentSize();
}
@Override
public InputStream getInputStream() {
  return blob.getInputStream();
}
/**
 * Parses the data field of an attachment in the JSON. The data is stored in a temp blob, and the blob ID is
 * returned as a string.
 */
private String parseAttachmentData() throws IOException {
  try {
    TempBlob tempBlob = readBinaryValueIntoTempBlob();
    String id = tempBlob.getBlob().getId().toString();
    tempBlobs.put(id, tempBlob);
    return id;
  }
  catch (Exception e) {
    throw new IOException("failed to process attachment data", e);
  }
}
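
// Self-contained sketch of the id round-trip used above (names are illustrative,
// not from the source): the returned string is only a key into the local
// tempBlobs cache, so a later pass can swap the id back for the actual blob.
import java.util.HashMap;
import java.util.Map;

class AttachmentIndex<B>
{
  private final Map<String, B> tempBlobs = new HashMap<>();

  String register(final String blobId, final B tempBlob) {
    tempBlobs.put(blobId, tempBlob); // keyed by the blob's own id
    return blobId;                   // callers carry only the string around
  }

  B resolve(final String blobId) {
    return tempBlobs.get(blobId);    // swap the id back for the blob
  }
}
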
/**
 * The blob size in bytes.
 */
public long getSize() {
  return getBlob().getMetrics().getContentSize();
}
/**
 * Returns true if the blob's data is accessible, false otherwise.
 */
protected boolean blobDataExists(final Blob blob) {
  try {
    blob.getInputStream().close();
    return true;
  }
  catch (Exception e) { // NOSONAR
    return false;
  }
}
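
// JDK-only analogue of the probe above (hypothetical helper, not part of the
// source): accessibility is tested purely by opening the stream and closing it
// again, so no content is read and any failure to open maps to "missing".
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

final class BlobProbe
{
  static boolean dataExists(final Path blobFile) {
    try (InputStream in = Files.newInputStream(blobFile)) {
      return true; // stream opened, so the backing bytes are reachable
    }
    catch (IOException e) {
      return false; // treat any failure to open as missing data
    }
  }
}
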
@Override
public boolean create(final String path, final String mimeType, final InputStream inputStream) {
  if (exists(path)) {
    return false;
  }
  Map<String, String> blobMetadata = new HashMap<>();
  blobMetadata.put(BlobStore.BLOB_NAME_HEADER, path);
  blobMetadata.put(BlobStore.CREATED_BY_HEADER, "Unknown");
  blobMetadata.put("mimeType", mimeType);
  final Blob blob = blobStore.create(inputStream, blobMetadata);
  final RawBinaryMetadata item = new RawBinaryMetadata(path, blob.getId().asUniqueString(), mimeType);
  ORID rid;
  try (ODatabaseDocumentTx db = openDb()) {
    ODocument doc = entityAdapter.create(db, item);
    rid = doc.getIdentity();
  }
  log.debug("Added item with RID: {}", rid);
  return true;
}
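
// Hypothetical caller sketch for create() above. The Creator interface mirrors
// the signature shown and stands in for the real facet, which is not named here;
// the point is the first-write-wins contract: a duplicate path returns false and
// writes nothing.
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

interface Creator
{
  boolean create(String path, String mimeType, InputStream inputStream);
}

final class CreateExample
{
  static void store(final Creator facet, final Path file) throws Exception {
    try (InputStream in = Files.newInputStream(file)) {
      boolean stored = facet.create("docs/readme.txt", "text/plain", in);
      if (!stored) {
        // path already exists - nothing was written, no overwrite happened
      }
    }
  }
}
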
private void maybeUpdateAsset(final Repository repository, final Asset asset, final Blob blob) {
  Map<String, Object> packageJson = npmPackageParser.parsePackageJson(blob::getInputStream);
  NestedAttributesMap updatedMetadata = createFullPackageMetadata(
      new NestedAttributesMap("metadata", packageJson),
      repository.getName(),
      blob.getMetrics().getSha1Hash(),
      repository,
      extractPackageRootVersionUnlessEmpty);
  updatePackageRootIfShaIncorrect(repository, asset, blob, updatedMetadata,
      NpmPackageId.parse((String) packageJson.get(P_NAME)),
      (String) packageJson.get(P_VERSION));
}
private String calculateIntegrity(final Asset asset, final Blob blob, final String algorithm) {
  try {
    HashCode hash;
    if (algorithm.equalsIgnoreCase(SHA1.name())) {
      hash = hash(SHA1, blob.getInputStream());
    }
    else {
      hash = hash(SHA512, blob.getInputStream());
    }
    return algorithm + "-" + Base64.getEncoder().encodeToString(hash.asBytes());
  }
  catch (IOException e) {
    log.error("Failed to calculate hash for asset {}", asset.name(), e);
  }
  return "";
}
}
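
// Self-contained sketch of the integrity format produced above: "<alg>-<base64
// of the raw digest>", the Subresource Integrity style npm uses. The hash()
// helper and HashCode in the original are assumed to come from a hashing
// utility; this version uses only the JDK, whose MessageDigest names differ
// from the labels ("SHA-512" vs "sha512"), so both are passed explicitly.
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;

final class Integrity
{
  static String of(final InputStream in, final String jdkAlgorithm, final String label)
      throws IOException, NoSuchAlgorithmException
  {
    MessageDigest digest = MessageDigest.getInstance(jdkAlgorithm);
    byte[] buffer = new byte[8192];
    for (int read = in.read(buffer); read != -1; read = in.read(buffer)) {
      digest.update(buffer, 0, read);
    }
    // e.g. Integrity.of(in, "SHA-512", "sha512") -> "sha512-xyz..."
    return label + "-" + Base64.getEncoder().encodeToString(digest.digest());
  }
}
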
@Override
public AssetBlob createBlob(final String blobName,
                            final TempBlob originalBlob,
                            @Nullable final Map<String, String> headers,
                            @Nullable final String declaredContentType,
                            final boolean skipContentVerification)
    throws IOException
{
  checkNotNull(blobName);
  checkNotNull(originalBlob);
  if (!writePolicy.checkCreateAllowed()) {
    throw new IllegalOperationException("Repository is read only: " + repositoryName);
  }
  Map<String, String> storageHeadersMap = buildStorageHeaders(blobName, originalBlob, headers,
      declaredContentType, skipContentVerification);
  return blobTx.createByCopying(
      originalBlob.getBlob().getId(),
      storageHeadersMap,
      originalBlob.getHashes(),
      originalBlob.getHashesVerified());
}
@Override
@Transactional
public Component afterMove(final Component component, final Repository destination) {
  destination.optionalFacet(NpmHostedFacet.class).ifPresent(f -> {
    final StorageTx tx = UnitOfWork.currentTx();
    tx.browseAssets(component).forEach(asset -> {
      Blob blob = checkNotNull(tx.getBlob(asset.blobRef()));
      final Map<String, Object> packageJson = npmPackageParser.parsePackageJson(blob::getInputStream);
      final NpmPackageId packageId = NpmPackageId.parse((String) packageJson.get(P_NAME));
      try {
        final NestedAttributesMap updatedMetadata = createFullPackageMetadata(
            new NestedAttributesMap("metadata", packageJson),
            destination.getName(),
            blob.getMetrics().getSha1Hash(),
            destination,
            extractNewestVersion);
        f.putPackageRoot(packageId, null, updatedMetadata);
      }
      catch (IOException e) {
        log.error("Failed to update package root, packageId: {}", packageId, e);
      }
    });
  });
  return component;
}
}
/**
 * Extracts the contents of the first matching {@code composer.json} file (of which there should only be one) as a
 * map representing the parsed JSON content. If no such file is found then an empty map is returned.
 */
public Map<String, Object> extractFromZip(final Blob blob) throws IOException {
  try (InputStream is = blob.getInputStream()) {
    try (ArchiveInputStream ais = archiveStreamFactory.createArchiveInputStream(ArchiveStreamFactory.ZIP, is)) {
      ArchiveEntry entry = ais.getNextEntry();
      while (entry != null) {
        Map<String, Object> contents = processEntry(ais, entry);
        if (!contents.isEmpty()) {
          return contents;
        }
        entry = ais.getNextEntry();
      }
    }
    return Collections.emptyMap();
  }
  catch (ArchiveException e) {
    throw new IOException("Error reading from archive", e);
  }
}
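
// JDK-only analogue of the scan above (hypothetical; uses java.util.zip instead
// of commons-compress, and matches entries by name since the real matching lives
// inside processEntry): walk entries until the first composer.json and return
// its raw bytes, or null when the archive has none.
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

final class ComposerJsonScanner
{
  static byte[] firstComposerJson(final InputStream archive) throws IOException {
    try (ZipInputStream zip = new ZipInputStream(archive)) {
      for (ZipEntry entry = zip.getNextEntry(); entry != null; entry = zip.getNextEntry()) {
        if (entry.getName().endsWith("composer.json")) {
          ByteArrayOutputStream out = new ByteArrayOutputStream();
          byte[] buffer = new byte[8192];
          for (int read = zip.read(buffer); read != -1; read = zip.read(buffer)) {
            out.write(buffer, 0, read);
          }
          return out.toByteArray(); // first match wins, mirroring the loop above
        }
      }
      return null; // no composer.json present
    }
  }
}
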
/**
 * The blob reference this instance is pointing to.
 */
@Nonnull
public BlobRef getBlobRef() {
  return new BlobRef(
      nodeAccess.getId(),
      blobStore.getBlobStoreConfiguration().getName(),
      getBlob().getId().asUniqueString());
}
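
// Hypothetical sketch of what the BlobRef built above carries: a coordinate of
// (node id, blob store name, blob id), which together are enough to locate the
// same bytes again. ToyBlobRef is an illustration, not the real class.
final class ToyBlobRef
{
  final String node;    // which node ingested the blob

  final String store;   // blob store configuration name

  final String blobId;  // unique id within that store

  ToyBlobRef(final String node, final String store, final String blobId) {
    this.node = node;
    this.store = store;
    this.blobId = blobId;
  }
}
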
String oldBlobSha1 = oldBlob.getMetrics().getSha1Hash();
String newBlobSha1 = assetBlob.getBlob().getMetrics().getSha1Hash();
checksumsMatch = oldBlobSha1.equalsIgnoreCase(newBlobSha1);