/** Read a single hash of the given type from the stream. Digest length is
 * fixed per type, so no length prefix is needed on the wire. */
private static HashResult readFrom(HashType h, DataInputStream dis) throws IOException {
    byte[] buf = new byte[h.hashLength];
    dis.readFully(buf);
    return new HashResult(h, buf);
}
/** Deep-copy an array of hashes, e.g. before handing them across a persistence boundary. */
public static HashResult[] copy(HashResult[] hashes) {
    if(hashes == null) return null;
    HashResult[] out = new HashResult[hashes.length];
    for(int i=0;i<hashes.length;i++) {
        out[i] = hashes[i].clone();
    }
    return out;
}
public static byte[] getCrossSegmentSeed(HashResult[] hashes, byte[] hashThisLayerOnly) {
    byte[] hash = hashThisLayerOnly;
    if(hash == null) {
        if(hashes == null || hashes.length == 0 || !HashResult.contains(hashes, HashType.SHA256))
            throw new IllegalArgumentException("No hashes in getCrossSegmentSeed - need hashes to generate splitfile key!");
        hash = HashResult.get(hashes, HashType.SHA256);
    }
    return getCrossSegmentSeed(hash);
}
byte[] hashThisLayerOnly = null;
if(hashes != null && metadata) {
    hashThisLayerOnly = HashResult.get(hashes, HashType.SHA256);
    Logger.debug(this, "Computed hashes for "+this+" for "+block.desiredURI+" size "+origSize);
    for(HashResult res : hashes) {
        Logger.debug(this, res.type.name()+" : "+res.hashAsHex());
    }
    // Copy before publishing, so a persistent event cannot share mutable
    // state with this inserter (same pattern as onHashes() below).
    HashResult[] clientHashes = hashes;
    if(persistent) clientHashes = HashResult.copy(hashes);
    ctx.eventProducer.produceEvent(new ExpectedHashesEvent(clientHashes), context);
}
MultiHashInputStream hashStream = null;
if(hashes != null) {
    hashStream = new MultiHashInputStream(input, HashResult.makeBitmask(hashes));
    input = hashStream;
}
// ... the stream is fully consumed between these two blocks ...
if(hashes != null) {
    HashResult[] results = hashStream.getResults();
    if(!HashResult.strictEquals(results, hashes)) {
        Logger.error(this, "Hashes failed verification (length read is "+hashStream.getReadBytes()+") for "+uri);
        throw new FetchException(FetchExceptionMode.CONTENT_HASH_FAILED);
    }
}
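MultiHashInputStream is Freenet's own class, but the wrap-consume-verify pattern above is standard Java. A minimal sketch of the same idea for a single SHA-256 digest, using only java.security.DigestInputStream; the class and method names here are illustrative, not Freenet's:

import java.io.*;
import java.security.*;

class StreamingVerifier {
    /** Reads all of in, returning its bytes only if their SHA-256 digest
     * matches expected. The digest is computed while streaming, so there
     * is no second pass over the data. */
    static byte[] readVerified(InputStream in, byte[] expected)
            throws IOException, GeneralSecurityException {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        try (DigestInputStream dis = new DigestInputStream(in, md);
             ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            byte[] buf = new byte[32768];
            int n;
            while((n = dis.read(buf)) != -1) out.write(buf, 0, n);
            if(!MessageDigest.isEqual(md.digest(), expected))
                throw new IOException("Content hash failed verification");
            return out.toByteArray();
        }
    }
}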
@Override
public void onHashes(HashResult[] hashes, ClientContext context) {
    synchronized(this) {
        if(this.hashes != null) {
            if(!HashResult.strictEquals(hashes, this.hashes))
                Logger.error(this, "Two sets of hashes?!");
            return;
        }
        this.hashes = hashes;
    }
    // Deep-copy before queueing: a persistent job may outlive this request,
    // so it must not share mutable arrays with it.
    HashResult[] clientHashes = hashes;
    if(persistent()) clientHashes = HashResult.copy(hashes);
    final HashResult[] h = clientHashes;
    context.getJobRunner(persistent()).queueNormalOrDrop(new PersistentJob() {
        @Override
        public boolean run(ClientContext context) {
            ctx.eventProducer.produceEvent(new ExpectedHashesEvent(h), context);
            return false;
        }
    });
}
if(version == 0)
    throw new MetadataParseException("Version 0 does not support hashes");
hashes = HashResult.readHashes(dis);
if(specifySplitfileKey || hashes == null || hashes.length == 0 || !HashResult.contains(hashes, HashType.SHA256)) {
    // No SHA-256 to derive the splitfile key from, so the key must be
    // carried explicitly in the metadata.
    byte[] key = new byte[32];
    dis.readFully(key);
private void readTransientProgressFields(DataInputStream dis) throws IOException, StorageFormatException {
    foundDataLength = dis.readLong();
    if(dis.readBoolean())
        foundDataMimeType = dis.readUTF();
    else
        foundDataMimeType = null;
    compatMode = new CompatibilityAnalyser(dis);
    HashResult[] hashes = HashResult.readHashes(dis);
    if(hashes == null || hashes.length == 0) {
        expectedHashes = null;
    } else {
        expectedHashes = new ExpectedHashes(hashes, identifier, global);
    }
}
/** Parse a set of hashes: an int bitmask naming the types present, followed
 * by the raw digests in HashType order. Returns null if the bitmask is empty. */
public static HashResult[] readHashes(DataInputStream dis) throws IOException {
    int bitmask = dis.readInt();
    if(bitmask == 0) return null;
    int count = 0;
    for(HashType h : HashType_values) {
        if((bitmask & h.bitmask) == h.bitmask) {
            count++;
        }
    }
    HashResult[] results = new HashResult[count];
    int x = 0;
    for(HashType h : HashType_values) {
        if((bitmask & h.bitmask) == h.bitmask) {
            results[x++] = HashResult.readFrom(h, dis);
        }
    }
    return results;
}
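readHashes() fixes the wire format: an int bitmask, then fixed-length digests in HashType declaration order, so no per-hash length or delimiter is needed. Below is a self-contained sketch of that format with a stand-in enum (Freenet's HashType carries bitmask and hashLength fields the same way); the writer is my reconstruction of the inverse operation, not Freenet's actual serializer:

import java.io.*;
import java.util.*;

// Stand-in for HashType: per-type bit and fixed digest length.
enum StandInType {
    SHA1(1, 20), MD5(2, 16), SHA256(4, 32);
    final int bitmask, hashLength;
    StandInType(int bitmask, int hashLength) { this.bitmask = bitmask; this.hashLength = hashLength; }
}

class HashWireFormat {
    static void write(Map<StandInType, byte[]> hashes, DataOutputStream dos) throws IOException {
        int bitmask = 0;
        for(StandInType t : hashes.keySet()) bitmask |= t.bitmask;
        dos.writeInt(bitmask);
        for(StandInType t : StandInType.values())   // same order the reader iterates
            if(hashes.containsKey(t)) dos.write(hashes.get(t));
    }

    static Map<StandInType, byte[]> read(DataInputStream dis) throws IOException {
        int bitmask = dis.readInt();
        Map<StandInType, byte[]> out = new EnumMap<>(StandInType.class);
        for(StandInType t : StandInType.values()) {
            if((bitmask & t.bitmask) == t.bitmask) {
                byte[] buf = new byte[t.hashLength];
                dis.readFully(buf);                 // fixed length, so no delimiter needed
                out.put(t, buf);
            }
        }
        return out;
    }
}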
@Override
public SimpleFieldSet getFieldSet() {
    if(hashes == null) {
        Logger.error(this, "Hashes == null, possibly persistence issue caused prior to build 1411 on "+this);
        return null;
    }
    SimpleFieldSet fs = new SimpleFieldSet(false);
    SimpleFieldSet values = new SimpleFieldSet(false);
    for(HashResult hash : hashes) {
        if(hash == null) {
            Logger.error(this, "Hash == null, possibly persistence issue caused prior to build 1411 on "+this);
            return null;
        }
        values.putOverwrite(hash.type.name(), hash.hashAsHex());
    }
    fs.put("Hashes", values);
    fs.putOverwrite("Identifier", identifier);
    fs.put("Global", global);
    return fs;
}
try {
    is = data.getInputStream();
    MultiHashInputStream hasher = new MultiHashInputStream(is, HashResult.makeBitmask(hashes));
    byte[] buf = new byte[32768];
    // Drain the stream purely for its side effect of feeding the digests.
    while(hasher.read(buf) > 0);
    is = null;
    HashResult[] results = hasher.getResults();
    if(!HashResult.strictEquals(results, hashes)) {
        onFailure(new FetchException(FetchExceptionMode.CONTENT_HASH_FAILED), SingleFileFetcher.this, context);
        return;
    }
origDataSize = dis.readLong();
origCompressedDataSize = dis.readLong();
hashes = HashResult.readHashes(dis);
dis.close();
this.hasPaddedLastBlock = (dataLength % CHKBlock.DATA_LENGTH != 0);
public static byte[] getCryptoKey(HashResult[] hashes) {
    if(hashes == null || hashes.length == 0 || !HashResult.contains(hashes, HashType.SHA256))
        throw new IllegalArgumentException("No hashes in getCryptoKey - need hashes to generate splitfile key!");
    byte[] hash = HashResult.get(hashes, HashType.SHA256);
    return getCryptoKey(hash);
}
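getCryptoKey() refuses to derive a splitfile key unless a SHA-256 result is present. A hedged usage sketch of how a caller could satisfy that precondition, assuming only the (HashType, byte[]) HashResult constructor visible in readFrom() above; the class owning getCryptoKey() is not shown in these excerpts, so Metadata.getCryptoKey is an assumption:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

byte[] data = "example payload".getBytes(StandardCharsets.UTF_8);
byte[] digest;
try {
    digest = MessageDigest.getInstance("SHA-256").digest(data);
} catch (NoSuchAlgorithmException e) {
    throw new AssertionError("every JVM ships SHA-256", e);
}
// (HashType, byte[]) constructor as seen in readFrom() above.
HashResult[] hashes = new HashResult[] { new HashResult(HashType.SHA256, digest) };
byte[] splitfileKey = Metadata.getCryptoKey(hashes); // owning class is an assumption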
HashResult getResult() {
    HashResult result = new HashResult(hashType, digest.digest());
    // digest() finalizes the MessageDigest; hand it back via recycle() and
    // null the field so getResult() cannot be called twice.
    hashType.recycle(digest);
    digest = null;
    return result;
}
/** Deep copy those fields that need to be deep copied after clone().
 * Note: at this point the fields still alias orig's, so checking
 * this.hashes/manifestEntries for null is equivalent to checking orig's. */
private void finishClone(Metadata orig) {
    if(orig.segments != null) {
        segments = new SplitFileSegmentKeys[orig.segments.length];
        for(int i=0;i<segments.length;i++) {
            segments[i] = orig.segments[i].clone();
        }
    }
    if(hashes != null) {
        hashes = new HashResult[orig.hashes.length];
        for(int i=0;i<hashes.length;i++)
            hashes[i] = orig.hashes[i].clone();
    }
    if(manifestEntries != null) {
        manifestEntries = new HashMap<String, Metadata>(orig.manifestEntries);
        for(Map.Entry<String, Metadata> entry : manifestEntries.entrySet()) {
            entry.setValue((Metadata)entry.getValue().clone());
        }
    }
    if(clientMetadata != null)
        clientMetadata = clientMetadata.clone();
}
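The clone()-then-fix-up idiom above is easy to get wrong, because Object.clone() is shallow and mutable fields keep aliasing the original until the fix-up replaces them. A minimal self-contained sketch of the same pattern (names here are illustrative, not Freenet's):

class Box implements Cloneable {
    byte[] payload;
    Box(byte[] payload) { this.payload = payload; }

    @Override
    public Box clone() {
        try {
            Box copy = (Box) super.clone();   // shallow: copy.payload == this.payload
            copy.finishClone(this);           // deep-copy the mutable state
            return copy;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e);      // we implement Cloneable, so unreachable
        }
    }

    private void finishClone(Box orig) {
        // As in Metadata.finishClone(): the field still aliases orig's array
        // here, so this.payload != null is equivalent to orig.payload != null.
        if(payload != null) payload = orig.payload.clone();
    }
}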