/**
 * Wraps a {@link StorageFileItem} so its content can be consumed as an NIO channel.
 *
 * @param file the file item whose content backs this body; must be non-null
 * @throws IOException if the item's input stream cannot be opened
 */
public StorageFileItemBody(final StorageFileItem file) throws IOException {
  this.file = file;
  // Adapt the item's classic InputStream to a ReadableByteChannel.
  this.channel = Channels.newChannel(file.getInputStream());
}
if (!file.isContentGenerated() && !file.isVirtual() && file.getRepositoryItemAttributes().containsKey(StorageFileItem.DIGEST_SHA1_KEY)) { etag = "{SHA1{" + file.getRepositoryItemAttributes().get(StorageFileItem.DIGEST_SHA1_KEY) + "}}"; response.setHeader("Content-Type", file.getMimeType()); response.setDateHeader("Last-Modified", file.getModified()); if (file.getLength() != ContentLocator.UNKNOWN_LENGTH) { response.setHeader("Content-Length", String.valueOf(file.getLength())); if (!file.isContentGenerated() && file.getResourceStoreRequest().getIfModifiedSince() != 0 && file.getModified() <= file.getResourceStoreRequest().getIfModifiedSince()) { else if (!file.isContentGenerated() && file.getResourceStoreRequest().getIfNoneMatch() != null && etag != null && file.getResourceStoreRequest().getIfNoneMatch().equals(etag)) { final List<Range<Long>> ranges = getRequestedRanges(request, file.getLength()); if (ranges.isEmpty()) { if (contentNeeded) { try (final InputStream in = file.getInputStream()) { sendContent(in, response); renderer.renderErrorPage(request, response, file.getResourceStoreRequest(), new UnsupportedOperationException( "Multiple ranges not yet supported!")); response.setStatus(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
/**
 * Expands the item's content as a Velocity template evaluated against the item's
 * own context, returning the expanded text as an in-memory content locator.
 *
 * @throws LocalStorageException if reading or template evaluation fails for any reason
 */
@Override
public ContentLocator generateContent(Repository repository, String path, StorageFileItem item)
    throws IllegalOperationException, ItemNotFoundException, LocalStorageException
{
  InputStreamReader templateReader = null;
  try {
    final StringWriter expanded = new StringWriter();
    final VelocityContext velocityContext = new VelocityContext(item.getItemContext());
    templateReader = new InputStreamReader(item.getInputStream(), "UTF-8");
    // The item UID doubles as the template's log tag for Velocity diagnostics.
    velocity.getEngine().evaluate(velocityContext, expanded, item.getRepositoryItemUid().toString(), templateReader);
    return new StringContentLocator(expanded.toString());
  }
  catch (Exception e) {
    // Wrap everything (I/O, parse, evaluation) into the storage-layer exception type.
    throw new LocalStorageException("Could not expand the template: " + item.getRepositoryItemUid().toString(), e);
  }
  finally {
    // Quiet close; evaluation result (or exception) has already been produced.
    IOUtil.close(templateReader);
  }
}
}
/**
 * Deletes files that are no longer referenced by the current Yum metadata.
 * A file survives only if it is the repomd.xml index itself or its path
 * (without the leading slash) appears in the known {@code locations} set.
 */
@Override
protected void processFileItem(final WalkerContext context, final StorageFileItem item)
    throws Exception
{
  // locations holds paths without a leading '/', hence the substring(1).
  if (!item.getPath().equals("/" + Yum.PATH_OF_REPOMD_XML)
      && !locations.contains(item.getPath().substring(1))) {
    log.trace("Removing obsolete {}:{}", repository.getId(), item.getPath());
    // 'true' — NOTE(review): presumably "from task"/unconditional delete flag; confirm against Repository#deleteItem javadoc.
    repository.deleteItem(true, item.getResourceStoreRequest());
  }
}
});
/**
 * Attaches aggregated member-hash attributes to the merged p2 metadata items
 * (artifacts/content, both .xml and .jar flavors).
 */
@Override
protected void setItemAttributes(final StorageFileItem item, final RequestContext context,
                                 final P2GroupRepository repository)
{
  final String path = item.getPath();
  final boolean isArtifactsItem =
      P2Constants.ARTIFACTS_JAR.equals(path) || P2Constants.ARTIFACTS_XML.equals(path);
  final boolean isContentItem =
      P2Constants.CONTENT_JAR.equals(path) || P2Constants.CONTENT_XML.equals(path);
  if (isArtifactsItem) {
    // Hashes are always keyed off the canonical .xml name, even for the .jar item.
    item.getRepositoryItemAttributes().putAll(getMemberHash(P2Constants.ARTIFACTS_XML, context, repository));
  }
  else if (isContentItem) {
    item.getRepositoryItemAttributes().putAll(getMemberHash(P2Constants.CONTENT_XML, context, repository));
  }
}
/** * Method that will create the DOM for given P2 metadata file. It handles files like "artifacts.xml" and * "content.xml", but also thier JAR counterparts, like "artifacts.jar" and "content.jar" by cranking them up, * getting the entry with same name but with modified extension to ".xml". */ public static Xpp3Dom getMetadataXpp3Dom(final StorageFileItem item) throws IOException, XmlPullParserException { // TODO: if we ever want to have the DOM reused, this method could put the parsed DOM // into item context (to not have it reparsed). For now, this call always parses as // currently we'd go rather to P2 "eat CPU" then "eat heap", as P2 metadata DOM // objects might get very large final Xpp3Dom dom; if (item.getName().endsWith(".jar")) { dom = parseJarItem(item, item.getName().replace(".jar", ".xml")); } else if (item.getName().endsWith(".xml")) { dom = parseXmlItem(item); } else { throw new IOException("Cannot parse the DOM for metadata in item " + item.getRepositoryItemUid()); } return dom; }
public StorageFileItemRepresentation(StorageFileItem file) { super(MediaType.valueOf(file.getMimeType()), file); setSize(file.getLength()); if (file.getRepositoryItemAttributes().containsKey(DigestCalculatingInspector.DIGEST_SHA1_KEY)) { // Shield SHA1 // {SHA1{xxxx}} final String tag = String.format("{SHA1{%s}}", file.getRepositoryItemAttributes().get(DigestCalculatingInspector.DIGEST_SHA1_KEY)); setTag(new Tag(tag, false)); } if (file.getItemContext().containsKey(AbstractResourceStoreContentPlexusResource.OVERRIDE_FILENAME_KEY)) { String filename = file.getItemContext().get(AbstractResourceStoreContentPlexusResource.OVERRIDE_FILENAME_KEY).toString(); setDownloadable(true); setDownloadName(filename); } }
new InputStreamEntity( new InterruptableInputStream( fileItem.getInputStream() ), fileItem.getLength() ); entity.setContentType( fileItem.getMimeType() ); method.setEntity( entity );
fileItem == null ? null : fileItem.getRepositoryItemAttributes() .get(DigestCalculatingInspector.DIGEST_SHA1_KEY); if (checksum != null) { resource.setMd5Hash(fileItem.getRepositoryItemAttributes().get(DigestCalculatingInspector.DIGEST_MD5_KEY)); resource.setSha1Hash(checksum); resource.setLastChanged(fileItem.getModified()); resource.setSize(fileItem.getLength()); resource.setUploaded(fileItem.getCreated()); resource.setUploader(fileItem.getRepositoryItemAttributes().get(AccessManager.REQUEST_USER)); resource.setMimeType(fileItem.getMimeType());
fileItem == null ? null : fileItem.getAttributes().get( DigestCalculatingInspector.DIGEST_SHA1_KEY ); if ( checksum != null ) resource.setMd5Hash( fileItem.getAttributes().get( DigestCalculatingInspector.DIGEST_MD5_KEY ) ); resource.setSha1Hash( checksum ); resource.setLastChanged( fileItem.getModified() ); resource.setSize( fileItem.getLength() ); resource.setUploaded( fileItem.getCreated() ); resource.setUploader( fileItem.getAttributes().get( AccessManager.REQUEST_USER ) ); resource.setMimeType( fileItem.getMimeType() );
@Override public FileTypeValidity isExpectedFileType(final StorageFileItem file) { // only check content from p2 repositories if (file.getRepositoryItemUid().getRepository().adaptToFacet(P2Repository.class) == null) { return FileTypeValidity.NEUTRAL; } if (file.getRepositoryItemUid().getPath().endsWith(".pack.gz")) { try (InputStream input = file.getInputStream();) { final byte[] magicBytes = new byte[4]; if (input.read(magicBytes) > 0) { if (Arrays.equals(magicBytes, PACK200_MAGIC) // real pack.gz || Arrays.equals(magicBytes, JAR_MAGIC)) // plain jar works too { return FileTypeValidity.VALID; } } } catch (final IOException e) { log.error("Unable to read pack200 magic bytes", e); } return FileTypeValidity.INVALID; } return super.isExpectedFileType(file); } }
orig = mdFile.getInputStream(); IOUtil.copy( orig, backup1 ); OutputStreamWriter osw = new OutputStreamWriter( bos ); metadataWriter.write( osw, imd ); mdFile.setContentLocator( new ByteArrayContentLocator( bos.toByteArray(), mdFile.getMimeType() ) ); mdFile.setContentLocator( new ByteArrayContentLocator( backup1.toByteArray(), mdFile.getMimeType() ) );
is = item.getInputStream(); if ( item.getItemContext().getRequestAppRootUrl() != null ) String appRootUrl = item.getItemContext().getRequestAppRootUrl(); item.setLength( result.getByteArray().length );
/**
 * Computes MD5 and SHA1 digests of {@code file} in a single streaming pass and
 * stores them as lowercase hex strings in the item's repository attributes.
 *
 * @param item the storage item receiving the digest attributes
 * @param file the on-disk file whose content is digested
 * @throws Exception on I/O failure or if a digest algorithm is unavailable
 */
public void processStorageFileItem(StorageFileItem item, File file)
    throws Exception
{
  final MessageDigest md5 = MessageDigest.getInstance("MD5");
  final MessageDigest sha1 = MessageDigest.getInstance("SHA1");
  // try-with-resources replaces the previous manual try/finally close.
  try (InputStream fis = new FileInputStream(file)) {
    final byte[] buffer = new byte[1024];
    int numRead;
    while ((numRead = fis.read(buffer)) != -1) {
      md5.update(buffer, 0, numRead);
      sha1.update(buffer, 0, numRead);
    }
  }
  final String md5DigestStr = new String(Hex.encodeHex(md5.digest()));
  final String sha1DigestStr = new String(Hex.encodeHex(sha1.digest()));
  item.getRepositoryItemAttributes().put(DIGEST_MD5_KEY, md5DigestStr);
  item.getRepositoryItemAttributes().put(DIGEST_SHA1_KEY, sha1DigestStr);
}
&& oldSha1.equals(siteItem.getRepositoryItemAttributes().get(StorageFileItem.DIGEST_SHA1_KEY))) { return; site = UpdateSite.read(siteItem.getInputStream());
final StorageFileItem remoteMirrorsItem = getMirrorsItemRemote(mirrorsURL); final ContentLocator content = new PreparedContentLocator(((StorageFileItem) remoteMirrorsItem).getInputStream(), "text/xml", remoteMirrorsItem.getLength()); mirrorsItem = new DefaultStorageFileItem(this, new ResourceStoreRequest(PRIVATE_MIRRORS_PATH),
private TreeMap<String, String> getMemberHash(final String xml, final RequestContext context, final P2GroupRepository repository) { final TreeMap<String, String> memberHash = new TreeMap<String, String>(); int count = 0; List<StorageFileItem> storageItems; try { storageItems = doRetrieveItems(xml, context, repository); } catch (final Exception e) { // assume it has changed, so return an empty map return memberHash; } for (final StorageFileItem storageItem : storageItems) { final String hash = storageItem.getRepositoryItemAttributes().get(DigestCalculatingInspector.DIGEST_SHA1_KEY); if (hash != null) { memberHash.put(ATTR_HASH_PREFIX + count + "." + storageItem.getRepositoryItemUid().toString(), hash); count++; } } return memberHash; } }
( (StorageFileItem) item ).getInputStream(), ( (StorageFileItem) item ).getMimeType() ) );
private static StorageFileItem compressMetadataItem(final Repository repository, final String path, final StorageFileItem metadataXml) throws IOException { final Manifest manifest = new Manifest(); manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0"); // this is a special one: once cached (hence consumed), temp file gets deleted final FileContentLocator fileContentLocator = new FileContentLocator("application/java-archive"); try (OutputStream buffer = fileContentLocator.getOutputStream(); ZipOutputStream out = new ZipOutputStream(buffer); InputStream in = metadataXml.getInputStream()) { out.putNextEntry(new JarEntry(metadataXml.getName())); IOUtils.copy(in, out); } final DefaultStorageFileItem result = new DefaultStorageFileItem( repository, new ResourceStoreRequest(path), true /* isReadable */, false /* isWritable */, fileContentLocator ); return result; }
if ( file.getItemContext().containsKey( XML_DETECTION_LAX_KEY ) ) Boolean.parseBoolean( String.valueOf( file.getItemContext().get( XML_DETECTION_LAX_KEY ) ) ); final String filePath = file.getPath().toLowerCase(); if ( filePath.endsWith( ".pom" ) ) getLogger().debug( "Checking if Maven POM {} is of the correct MIME type.", file.getRepositoryItemUid() ); getLogger().warn( "Cannot access content of StorageFileItem: " + file.getRepositoryItemUid(), e ); file.getRepositoryItemUid() ); getLogger().warn( "Cannot access content of StorageFileItem: " + file.getRepositoryItemUid(), e ); file.getRepositoryItemUid() ); "Cannot detect MIME type and validate content of StorageFileItem: " + file.getRepositoryItemUid(), e ); getLogger().debug( "Checking if Maven checksum {} is valid.", file.getRepositoryItemUid() ); getLogger().warn( "Cannot access content of StorageFileItem: " + file.getRepositoryItemUid(), e ); "Cannot detect MIME type and validate content of StorageFileItem: " + file.getRepositoryItemUid(), e );