/**
 * Builds the error message reported when an entry exceeds the ZIP32
 * 4 GByte size limit.
 *
 * @param ze the oversized entry
 * @return a message naming the offending entry
 */
static String getEntryTooBigMessage(final ZipArchiveEntry ze) {
    final StringBuilder message = new StringBuilder(ze.getName());
    message.append("'s size exceeds the limit of 4GByte.");
    return message.toString();
}
/**
 * Creates an exception for an entry that uses an unsupported feature.
 *
 * @param reason the feature that is not supported
 * @param entry the entry using the feature
 */
public UnsupportedZipFeatureException(final Feature reason, final ZipArchiveEntry entry) {
    super("unsupported feature " + reason + " used in entry " + entry.getName());
    this.entry = entry;
    this.reason = reason;
}
/**
 * Tells whether this entry represents a directory.
 *
 * @return true if the entry name ends with a forward slash
 */
@Override
public boolean isDirectory() {
    final String entryName = getName();
    return entryName.endsWith("/");
}
/** * Get the hashCode of the entry. * This uses the name as the hashcode. * @return a hashcode. */ @Override public int hashCode() { // this method has severe consequences on performance. We cannot rely // on the super.hashCode() method since super.getName() always return // the empty string in the current implemention (there's no setter) // so it is basically draining the performance of a hashmap lookup return getName().hashCode(); }
/**
 * Chooses the encoding for the given entry's name: UTF-8 when the
 * configured encoding cannot represent the name and falling back is
 * allowed, otherwise the configured encoding.
 */
private ZipEncoding getEntryEncoding(final ZipArchiveEntry ze) {
    if (fallbackToUTF8 && !zipEncoding.canEncode(ze.getName())) {
        return ZipEncodingHelper.UTF8_ZIP_ENCODING;
    }
    return zipEncoding;
}
/**
 * Creates an exception for archives that use an unsupported
 * compression algorithm.
 *
 * @param method the method that is not supported
 * @param entry the entry using the feature
 * @since 1.5
 */
public UnsupportedZipFeatureException(final ZipMethod method, final ZipArchiveEntry entry) {
    super("unsupported feature method '" + method.name() + "' used in entry " + entry.getName());
    this.entry = entry;
    this.reason = Feature.METHOD;
}
/**
 * Reads and discards whatever is left of the current entry's compressed
 * data in the underlying stream.
 *
 * @throws IOException if the stream ends before the entry's declared
 *         compressed size has been consumed
 */
private void drainCurrentEntryData() throws IOException {
    long outstanding = current.entry.getCompressedSize() - current.bytesReadFromStream;
    while (outstanding > 0) {
        // Never ask for more than one buffer's worth at a time.
        final int chunk = (int) Math.min(buf.capacity(), outstanding);
        final long read = in.read(buf.array(), 0, chunk);
        if (read < 0) {
            throw new EOFException("Truncated ZIP entry: "
                + ArchiveUtils.sanitize(current.entry.getName()));
        }
        count(read);
        outstanding -= read;
    }
}
/**
 * Reads all the entries from the ZipInputStream
 * into memory, and don't close (since POI 4.0.1) the source stream.
 * We'll then eat lots of memory, but be able to
 * work with the entries at-will.
 */
public ZipInputStreamZipEntrySource(ZipArchiveThresholdInputStream inp) throws IOException {
    ZipArchiveEntry zipEntry;
    while ((zipEntry = inp.getNextEntry()) != null) {
        zipEntries.put(zipEntry.getName(), new ZipArchiveFakeEntry(zipEntry, inp));
    }
}
private void checkThreshold() throws IOException { if (!guardState) { return; } final InputStreamStatistics stats = (InputStreamStatistics)in; final long payloadSize = stats.getUncompressedCount(); final long rawSize = stats.getCompressedCount(); final String entryName = entry == null ? "not set" : entry.getName(); // check the file size first, in case we are working on uncompressed streams if(payloadSize > MAX_ENTRY_SIZE) { throw new IOException(String.format(Locale.ROOT, MAX_ENTRY_SIZE_MSG, payloadSize, rawSize, MAX_ENTRY_SIZE, entryName)); } // don't alert for small expanded size if (payloadSize <= GRACE_ENTRY_SIZE) { return; } double ratio = rawSize / (double)payloadSize; if (ratio >= MIN_INFLATE_RATIO) { return; } // one of the limits was reached, report it throw new IOException(String.format(Locale.ROOT, MIN_INFLATE_RATIO_MSG, payloadSize, rawSize, ratio, MIN_INFLATE_RATIO, entryName)); }
/**
 * Encodes the entry's name with the encoding chosen for that entry.
 */
private ByteBuffer getName(final ZipArchiveEntry ze) throws IOException {
    final ZipEncoding encoding = getEntryEncoding(ze);
    return encoding.encode(ze.getName());
}
/**
 * Snapshots the given entry by reading its decompressed contents into
 * memory up-front; -1 means the size is unknown and the stream is read
 * to its end.
 */
ZipArchiveFakeEntry(ZipArchiveEntry entry, InputStream inp) throws IOException {
    super(entry.getName());
    final long declaredSize = entry.getSize();
    // -1 is the "unknown" sentinel; anything else negative, or too big
    // for a byte array, is rejected.
    if (declaredSize < -1 || declaredSize >= Integer.MAX_VALUE) {
        throw new IOException("ZIP entry size is too large or invalid");
    }
    // Grab the de-compressed contents for later
    if (declaredSize == -1) {
        data = IOUtils.toByteArray(inp);
    } else {
        data = IOUtils.toByteArray(inp, (int) declaredSize);
    }
}
@Override public ZipArchiveEntry getEntry(final String path) { String normalizedPath = path.replace('\\', '/'); final ZipArchiveEntry entry = zipArchive.getEntry(normalizedPath); if (entry != null) { return entry; } // the opc spec allows case-insensitive filename matching (see #49609) for (final ZipArchiveEntry ze : asIterable(asIterator(zipArchive.getEntries()))) { if (normalizedPath.equalsIgnoreCase(ze.getName().replace('\\','/'))) { return ze; } } return null; } }
/** * Parses the given bytes as extra field data and consumes any * unparseable data as an {@link UnparseableExtraFieldData} * instance. * @param extra an array of bytes to be parsed into extra fields * @throws RuntimeException if the bytes cannot be parsed * @throws RuntimeException on error */ @Override public void setExtra(final byte[] extra) throws RuntimeException { try { final ZipExtraField[] local = ExtraFieldUtils.parse(extra, true, ExtraFieldUtils.UnparseableExtraField.READ); mergeExtraFields(local, true); } catch (final ZipException e) { // actually this is not possible as of Commons Compress 1.1 throw new RuntimeException("Error parsing extra fields for entry: " //NOSONAR + getName() + " - " + e.getMessage(), e); } }
/**
 * Captures a zip entry together with its resolved OPC part name and
 * content type. [Content_Types].xml gets no part name (it is not a
 * valid URI); any other name that fails to parse is logged and the
 * part name stays null (degraded mode).
 */
EntryTriple(final ZipArchiveEntry zipArchiveEntry, final ContentTypeManager contentTypeManager) {
    this.zipArchiveEntry = zipArchiveEntry;
    final String entryName = zipArchiveEntry.getName();
    PackagePartName resolvedName = null;
    try {
        // We get an error when we parse [Content_Types].xml
        // because it's not a valid URI.
        if (!CONTENT_TYPES_PART_NAME.equalsIgnoreCase(entryName)) {
            resolvedName = PackagingURIHelper.createPartName(
                ZipHelper.getOPCNameFromZipItemName(entryName));
        }
    } catch (Exception e) {
        // We assume we can continue, even in degraded mode ...
        LOG.log(POILogger.WARN,"Entry " + entryName + " is not valid, so this part won't be add to the package.", e);
    }
    this.partName = resolvedName;
    this.contentType = (resolvedName == null) ? null : contentTypeManager.getContentType(partName);
}
while (entries.hasMoreElements()) { ZipArchiveEntry entry = (ZipArchiveEntry) entries.nextElement(); policePartSize(f, entry.getSize(), entry.getName()); InputStream in = null; try { byte[] bytes = getBytesFromInputStream( zf.getInputStream(entry) ); policePartSize(f, bytes.length, entry.getName()); // in case earlier check ineffective partByteArrays.put(entry.getName(), new ByteArray(bytes) ); } catch (PartTooLargeException e) { throw e;
if (createUnicodeExtraFields == UnicodeExtraFieldPolicy.ALWAYS || !encodable) { ze.addExtraField(new UnicodePathExtraField(ze.getName(), name.array(), name.arrayOffset(),
final ZipArchiveEntry entry = entries.nextElement(); if (!entry.isDirectory()) { final String name = entry.getName().toLowerCase(); if (name.endsWith(".class")) { isJar = true;
/**
 * Writes the local file header for {@code ze} and records the offsets
 * needed later (CRC patch position and start of the entry's data).
 *
 * @param ze the entry whose local file header is written
 * @param phased whether the caller operates in phased mode
 *        (passed through to header creation and the data-descriptor decision)
 * @throws IOException on write failure
 */
private void writeLocalFileHeader(final ZipArchiveEntry ze, final boolean phased) throws IOException {
    // Can the configured encoding represent the raw entry name?
    final boolean encodable = zipEncoding.canEncode(ze.getName());
    final ByteBuffer name = getName(ze);
    if (createUnicodeExtraFields != UnicodeExtraFieldPolicy.NEVER) {
        addUnicodeExtraFields(ze, encodable, name);
    }
    // Offset of this header, captured before any header bytes go out.
    final long localHeaderStart = streamCompressor.getTotalBytesWritten();
    final byte[] localHeader = createLocalFileHeader(ze, name, encodable, phased, localHeaderStart);
    metaData.put(ze, new EntryMetaData(localHeaderStart, usesDataDescriptor(ze.getMethod(), phased)));
    entry.localDataStart = localHeaderStart + LFH_CRC_OFFSET; // At crc offset
    writeCounted(localHeader);
    // Data begins immediately after the header bytes just written.
    entry.dataStart = streamCompressor.getTotalBytesWritten();
}
/**
 * Reads /bad.zip with the encryption flag cleared on every entry and
 * verifies each non-directory entry can be read without an exception.
 *
 * @throws IOException on unexpected read failure
 */
@Test
public void test1() throws IOException {
    // try-with-resources: the original version leaked the stream
    // (it was never closed, even on the happy path).
    try (ZipArchiveInputStream zis = new ZipArchiveInputStream(
            BadZipEntryFlagTest.class.getResourceAsStream("/bad.zip"))) {
        for (ZipArchiveEntry e = zis.getNextZipEntry(); e != null; e = zis.getNextZipEntry()) {
            e.getGeneralPurposeBit().useEncryption(false);
            if (!e.isDirectory()) {
                zis.read();
                System.out.println(e.getName());
            }
        }
    }
}
ZipArchiveEntry zeNew = new ZipArchiveEntry(ze.getName()); zeNew.setComment(ze.getComment()); zeNew.setExtra(ze.getExtra());