@Override
public ContentLocator generateContent(Repository repository, String path, StorageFileItem item)
    throws IllegalOperationException, ItemNotFoundException, LocalStorageException
{
  // Expands the stored item as a Velocity template, using the item's own context as the
  // variable map, and returns the expanded text as a new content locator.
  // try-with-resources replaces the old null-init + finally { IOUtil.close(isr) } pattern,
  // so the reader is closed on the success path as well as on failure.
  try (InputStreamReader isr = new InputStreamReader(item.getInputStream(), "UTF-8")) {
    final StringWriter sw = new StringWriter();
    final VelocityContext vctx = new VelocityContext(item.getItemContext());
    // The UID string is used only as the Velocity "log tag" identifying this template.
    velocity.getEngine().evaluate(vctx, sw, item.getRepositoryItemUid().toString(), isr);
    return new StringContentLocator(sw.toString());
  }
  catch (Exception e) {
    // Velocity throws a variety of unchecked/checked exceptions; wrap them all with the
    // item UID for context, preserving the cause.
    throw new LocalStorageException("Could not expand the template: "
        + item.getRepositoryItemUid().toString(), e);
  }
}
}
@Override public FileTypeValidity isExpectedFileType(final StorageFileItem file) { // only check content from p2 repositories if (file.getRepositoryItemUid().getRepository().adaptToFacet(P2Repository.class) == null) { return FileTypeValidity.NEUTRAL; } if (file.getRepositoryItemUid().getPath().endsWith(".pack.gz")) { try (InputStream input = file.getInputStream();) { final byte[] magicBytes = new byte[4]; if (input.read(magicBytes) > 0) { if (Arrays.equals(magicBytes, PACK200_MAGIC) // real pack.gz || Arrays.equals(magicBytes, JAR_MAGIC)) // plain jar works too { return FileTypeValidity.VALID; } } } catch (final IOException e) { log.error("Unable to read pack200 magic bytes", e); } return FileTypeValidity.INVALID; } return super.isExpectedFileType(file); } }
file.getRepositoryItemUid().toString(), fileTypeValidatorEntry.getKey() );
private TreeMap<String, String> getMemberHash(final String xml, final RequestContext context, final P2GroupRepository repository) { final TreeMap<String, String> memberHash = new TreeMap<String, String>(); int count = 0; List<StorageFileItem> storageItems; try { storageItems = doRetrieveItems(xml, context, repository); } catch (final Exception e) { // assume it has changed, so return an empty map return memberHash; } for (final StorageFileItem storageItem : storageItems) { final String hash = storageItem.getRepositoryItemAttributes().get(DigestCalculatingInspector.DIGEST_SHA1_KEY); if (hash != null) { memberHash.put(ATTR_HASH_PREFIX + count + "." + storageItem.getRepositoryItemUid().toString(), hash); count++; } } return memberHash; } }
/**
 * Removes any recorded dependency-usage data for the Maven artifact backing the given
 * storage item. Items that do not resolve to a GAV are ignored.
 *
 * @param item the stored file whose usage data should be dropped
 * @throws IOException if resolving the artifact for the item fails
 */
public void removeArtifactUsage(StorageFileItem item) throws IOException {
  if (getLogger().isDebugEnabled()) {
    getLogger().debug("Removing artifact usage data for " + item.getRepositoryItemUid().getPath());
  }
  final GAV gav = getArtifactForStorageItem(item);
  if (gav == null) {
    return; // item does not map to a Maven artifact; nothing to remove
  }
  artifactUsageStore.removeArtifact(gav);
}
new Object[] { file.getRepositoryItemUid(), expectedMimeTypes, magicMimeTypes } ); new Object[] { file.getRepositoryItemUid(), expectedMimeTypes, magicMimeTypes } );
/** * Method that will create the DOM for given P2 metadata file. It handles files like "artifacts.xml" and * "content.xml", but also thier JAR counterparts, like "artifacts.jar" and "content.jar" by cranking them up, * getting the entry with same name but with modified extension to ".xml". */ public static Xpp3Dom getMetadataXpp3Dom(final StorageFileItem item) throws IOException, XmlPullParserException { // TODO: if we ever want to have the DOM reused, this method could put the parsed DOM // into item context (to not have it reparsed). For now, this call always parses as // currently we'd go rather to P2 "eat CPU" then "eat heap", as P2 metadata DOM // objects might get very large final Xpp3Dom dom; if (item.getName().endsWith(".jar")) { dom = parseJarItem(item, item.getName().replace(".jar", ".xml")); } else if (item.getName().endsWith(".xml")) { dom = parseXmlItem(item); } else { throw new IOException("Cannot parse the DOM for metadata in item " + item.getRepositoryItemUid()); } return dom; }
public void calculateArtifactUsage(StorageFileItem item) throws IOException { if (getLogger().isDebugEnabled()) { getLogger().debug( "Calculating usage of " + item.getRepositoryItemUid().getPath()); } // don't bother if the file hasn't changed since // the last time it was processed if (this.artifactUsageStore.isAlreadyCalculated(item .getRepositoryItemUid().getPath(), item.getModified())) { return; } // convert to a Maven project GAV artifact = getArtifactForStorageItem(item); if (artifact != null) { artifactUsageStore.addDependencies(artifact, dependencyResolver .resolveDependencies(artifact), item.getRepositoryItemUid() .getPath()); } }
new Object[]{ P2Constants.CONTENT_XML, RepositoryStringUtils.getHumanizedNameString(fileItem.getRepositoryItemUid().getRepository()), e.getMessage(), name }); new Object[]{ P2Constants.CONTENT_XML, RepositoryStringUtils.getHumanizedNameString(fileItem.getRepositoryItemUid().getRepository()), e.getMessage(), name });
if ( filePath.endsWith( ".pom" ) ) getLogger().debug( "Checking if Maven POM {} is of the correct MIME type.", file.getRepositoryItemUid() ); getLogger().warn( "Cannot access content of StorageFileItem: " + file.getRepositoryItemUid(), e ); file.getRepositoryItemUid() ); getLogger().warn( "Cannot access content of StorageFileItem: " + file.getRepositoryItemUid(), e ); file.getRepositoryItemUid() ); "Cannot detect MIME type and validate content of StorageFileItem: " + file.getRepositoryItemUid(), e ); getLogger().debug( "Checking if Maven checksum {} is valid.", file.getRepositoryItemUid() ); getLogger().warn( "Cannot access content of StorageFileItem: " + file.getRepositoryItemUid(), e ); "Cannot detect MIME type and validate content of StorageFileItem: " + file.getRepositoryItemUid(), e );
new Object[]{ P2Constants.ARTIFACTS_XML, RepositoryStringUtils.getHumanizedNameString(fileItem.getRepositoryItemUid().getRepository()), e.getMessage(), name }); new Object[]{ P2Constants.ARTIFACTS_XML, RepositoryStringUtils.getHumanizedNameString(fileItem.getRepositoryItemUid().getRepository()), e.getMessage(), name });
"IOException during parse of metadata UID=\"" + fileItem.getRepositoryItemUid().toString() + "\", will be skipped from aggregation!", e ); + fileItem.getRepositoryItemUid().toString() + "\", will be skipped from aggregation!", e );
if (repoMDItem.getRepositoryItemAttributes().get(YumProxy.PROCESSED) == null) { try { repoMDItem.getRepositoryItemUid().getLock().lock(Action.update); if (repoMDItem.getRepositoryItemAttributes().get(YumProxy.PROCESSED) == null) { MetadataProcessor.processProxiedMetadata((ProxyRepository) repository); repoMDItem.getRepositoryItemUid().getLock().unlock();
final RepositoryItemUidLock itemLock = item.getRepositoryItemUid().getLock(); itemLock.lock(Action.create); try {
createRepositoryReference(request, file.getRepositoryItemUid().getRepository().getId(), file.getRepositoryItemUid().getPath());
MavenRepository repo = fileItem.getRepositoryItemUid().getRepository().adaptToFacet( MavenRepository.class ); RepositoryPolicy policy = repo.getRepositoryPolicy(); if ( metadata != null && metadata.getVersioning() != null )
createRepositoryReference( request, file.getRepositoryItemUid().getRepository().getId(), file.getRepositoryItemUid().getPath() );