@Override public void run() { byte[] hash = Hashing.sha256().hashBytes(data).asBytes(); String hashStr = BaseEncoding.base16().encode(hash); archive.setHash(hash); String path = new StringBuilder() .append(hashStr.substring(0, 2)) .append('/') .append(hashStr.substring(2)) .toString(); try { try (InputStream in = minioClient.getObject(minioBucket, path)) { return; // already exists } catch (ErrorResponseException ex) { // doesn't exist } minioClient.putObject(minioBucket, path, new ByteArrayInputStream(data), data.length, "binary/octet-stream"); } catch (ErrorResponseException | InsufficientDataException | InternalException | InvalidArgumentException | InvalidBucketNameException | NoResponseException | IOException | InvalidKeyException | NoSuchAlgorithmException | XmlPullParserException ex) { logger.warn("unable to upload data to store", ex); } }
continue; // attribute consumed; advance to the next one
case "hash":
    // The stored value is the Base64 encoding of the archive's hash bytes;
    // decode it back into raw bytes before recording it on the archive.
    archive.setHash(Base64.getDecoder().decode(value));
    continue;
case "compression":
@Override public void load(Store store) throws IOException { List<IndexEntry> indexes = cacheDao.findIndexesForCache(con, cacheEntry); for (IndexEntry indexEntry : indexes) { Index index = store.addIndex(indexEntry.getIndexId()); index.setCrc(indexEntry.getCrc()); index.setRevision(indexEntry.getRevision()); try (ResultSetIterable<ArchiveEntry> archives = cacheDao.findArchivesForIndex(con, indexEntry)) { for (ArchiveEntry archiveEntry : archives) { if (index.getArchive(archiveEntry.getArchiveId()) != null) { throw new IOException("Duplicate archive " + archiveEntry + " on " + indexEntry); } Archive archive = index.addArchive(archiveEntry.getArchiveId()); archive.setNameHash(archiveEntry.getNameHash()); archive.setCrc(archiveEntry.getCrc()); archive.setRevision(archiveEntry.getRevision()); archive.setHash(archiveEntry.getHash()); // File data is not necessary for cache updating } } } }