/**
 * Computes the digest of the given file (or directory tree) and returns it
 * as a hexadecimal string.
 *
 * @param path file or directory to hash
 * @return hex-encoded digest of the content
 * @throws IOException if the content could not be read
 */
public static String hashFile(Path path) throws IOException {
    // The shared digest instance is guarded by its own monitor so concurrent
    // callers cannot interleave updates.
    synchronized (DIGEST) {
        DIGEST.reset();
        updateDigest(DIGEST, path);
        final byte[] raw = DIGEST.digest();
        return bytesToHexString(raw);
    }
}
/**
 * Visits a directory before its entries. Directories rejected by the filter
 * are pruned from the walk entirely; accepted directories that are empty (or
 * unlistable) are recorded in the contents map, keyed by their root-relative
 * path and hashed from that path's string form.
 */
@Override
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
    final Path relative = root.relativize(dir);
    if (!filter.accept(relative)) {
        return FileVisitResult.SKIP_SUBTREE;
    }
    final String[] children = dir.toFile().list();
    if (children == null || children.length == 0) {
        // Empty directory: no files will be visited under it, so record it here.
        contents.put(relative, HashUtils.hash(relative.toString()));
    }
    return FileVisitResult.CONTINUE;
}
/**
 * Records the content hash of every file accepted by the filter, keyed by
 * the file's path relative to the walk root.
 */
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
    final Path relative = root.relativize(file);
    if (filter.accept(relative)) {
        contents.put(relative, HashUtils.hashFile(file));
    }
    return FileVisitResult.CONTINUE;
}
/**
 * Computes the raw digest bytes of the given file or directory tree.
 *
 * @param path file or directory to hash
 * @return digest bytes of the content
 * @throws IOException if the content could not be read
 */
public static byte[] hashPath(Path path) throws IOException {
    // Serialize access to the shared digest instance across threads.
    synchronized (DIGEST) {
        DIGEST.reset();
        updateDigest(DIGEST, path);
        final byte[] result = DIGEST.digest();
        return result;
    }
}
/**
 * Lazily computes and caches the digest of this entry's path.
 *
 * @return digest bytes of the path content
 * @throws ProvisioningException if hashing the path fails
 */
public byte[] getHash() throws ProvisioningException {
    if (hash == null) {
        try {
            hash = HashUtils.hashPath(p);
        } catch (IOException e) {
            // Fix: preserve the underlying IOException as the cause, consistent
            // with the other hashCalculation call sites in this file.
            throw new ProvisioningException(Errors.hashCalculation(p), e);
        }
    }
    return hash;
}
/**
 * Hashes the given string content (UTF-8 encoded) and returns the digest as
 * a hexadecimal string.
 *
 * @param content text to hash
 * @return hex-encoded digest of the UTF-8 bytes
 * @throws IOException declared for call-site uniformity with the file hashers
 */
public static String hash(String content) throws IOException {
    // Encode outside the critical section; only digest access needs the lock.
    final byte[] raw = content.getBytes(StandardCharsets.UTF_8);
    synchronized (DIGEST) {
        DIGEST.reset();
        DIGEST.update(raw);
        return bytesToHexString(DIGEST.digest());
    }
}
/**
 * Recursively loads pre-computed hashes for the tree rooted at {@code parent}.
 * Each directory may contain a {@code Constants.HASHES} file holding
 * alternating lines of entry name and hex-encoded hash; every other child is
 * treated as a subdirectory to descend into.
 *
 * @param parent entry whose directory is scanned
 * @param dirs   shared work list; this level appends its subdirectories and
 *               then pops exactly the ones it added before returning
 * @throws ProvisioningException if any directory or hashes file cannot be read
 */
private static void readHashes(FsEntry parent, List<FsEntry> dirs) throws ProvisioningException {
    // Number of subdirectory entries THIS invocation appended to the shared list.
    int dirsTotal = 0;
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(parent.getPath())) {
        for (Path child : stream) {
            if (child.getFileName().toString().equals(Constants.HASHES)) {
                try (BufferedReader reader = Files.newBufferedReader(child)) {
                    String line = reader.readLine();
                    while (line != null) {
                        // Pairs of lines: entry name, then its hex hash. The FsEntry
                        // constructor is presumably self-registering with the parent —
                        // the instance itself is discarded (verify against FsEntry).
                        // NOTE(review): an odd number of lines makes the inner
                        // readLine() return null; confirm hexStringToByteArray (or the
                        // file format) guarantees this cannot happen.
                        new FsEntry(parent, line, HashUtils.hexStringToByteArray(reader.readLine()));
                        line = reader.readLine();
                    }
                } catch (IOException e) {
                    throw new ProvisioningException("Failed to read hashes", e);
                }
            } else {
                // Non-HASHES children are queued for the recursive pass below.
                dirs.add(new FsEntry(parent, child));
                ++dirsTotal;
            }
        }
    } catch (IOException e) {
        throw new ProvisioningException("Failed to read hashes", e);
    }
    // Depth-first descent: pop from the tail so only the entries added by this
    // level are consumed, leaving outer levels' entries untouched.
    while (dirsTotal > 0) {
        readHashes(dirs.remove(dirs.size() - 1), dirs);
        --dirsTotal;
    }
}
/**
 * Feeds the content of the given path into the digest. Directories are
 * traversed recursively with children processed in file-name order, so the
 * resulting digest is independent of directory-stream ordering; regular
 * files are streamed through an 8 KB buffer.
 *
 * @param digest digest to update
 * @param path   file or directory to consume
 * @throws IOException if the content could not be read
 */
private static void updateDigest(MessageDigest digest, Path path) throws IOException {
    if (!Files.isDirectory(path)) {
        try (BufferedInputStream in = new BufferedInputStream(Files.newInputStream(path))) {
            final byte[] buf = new byte[8192];
            for (int n = in.read(buf); n > -1; n = in.read(buf)) {
                digest.update(buf, 0, n);
            }
        }
        return;
    }
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
        // TreeMap keeps the children sorted by simple file name.
        final Map<String, Path> ordered = new TreeMap<>();
        for (Path child : stream) {
            ordered.put(child.getFileName().toString(), child);
        }
        for (Path child : ordered.values()) {
            updateDigest(digest, child);
        }
    }
}
targetHash = HashUtils.hashPath(target); } catch (IOException e) { throw new ProvisioningException(Errors.hashCalculation(target), e);
writer.write(HashUtils.bytesToHexString(child.getHash())); writer.newLine(); } else {
/**
 * Computes the digest of a JAR/ZIP archive's contents by walking its zip
 * file system. Top-level entries are sorted by name before hashing so the
 * result is independent of archive entry order.
 *
 * @param jarFile        archive to hash
 * @param ignoreManifest if true, skips the top-level entry whose file name
 *                       equals the literal {@code "META-INF/"}
 * @return digest bytes
 * @throws IOException if the archive could not be opened or read
 */
public static byte[] hashJar(Path jarFile, boolean ignoreManifest) throws IOException {
    synchronized (DIGEST) {
        DIGEST.reset();
        try (FileSystem zipfs = ZipUtils.newFileSystem(jarFile)) {
            for (Path zipRoot : zipfs.getRootDirectories()) {
                // Deterministic order: sort top-level entries by simple name.
                final Map<String, Path> sortedChildren = new TreeMap<String, Path>();
                try (DirectoryStream<Path> stream = Files.newDirectoryStream(zipRoot)) {
                    for (Path p : stream) {
                        final String fileName = p.getFileName().toString();
                        // NOTE(review): this compares against "META-INF/" WITH a
                        // trailing slash. Whether getFileName() of a zipfs directory
                        // retains that slash should be verified — if it yields
                        // "META-INF", the manifest directory is never skipped.
                        if (ignoreManifest && fileName.equals("META-INF/")) {
                            continue;
                        }
                        sortedChildren.put(fileName, p);
                    }
                }
                for (Path child : sortedChildren.values()) {
                    updateDigest(DIGEST, child);
                }
            }
        }
        return DIGEST.digest();
    }
}
final byte[] targetHash; try { targetHash = HashUtils.hashPath(target); } catch (IOException e) { throw new ProvisioningException(Errors.hashCalculation(target), e);
return Collections.singletonMap(root.relativize(root), HashUtils.hashFile(root));