private void forceStartGet() { // Use a LinkedHashSet so insertion order is iteration order. // This may help a provider that loads sequentially in the // set's iterator order to load in the order we want data. // LinkedHashSet<ChunkKey> toLoad = new LinkedHashSet<ChunkKey>(); while (bytesReady + bytesLoading < highWaterMark && !queue.isEmpty()) { ChunkKey key = queue.removeFirst(); stats.access(key).cntPrefetcher_Load++; toLoad.add(key); status.put(key, Status.LOADING); bytesLoading += averageChunkSize; // For the first chunk, start immediately to reduce the // startup latency associated with additional chunks. if (first) break; } if (!toLoad.isEmpty() && error == null) db.chunk().get(Context.LOCAL, toLoad, this); if (first) { first = false; maybeStartGet(); } }
private void forceStartGet() { // Use a LinkedHashSet so insertion order is iteration order. // This may help a provider that loads sequentially in the // set's iterator order to load in the order we want data. // LinkedHashSet<ChunkKey> toLoad = new LinkedHashSet<ChunkKey>(); while (bytesReady + bytesLoading < highWaterMark && !queue.isEmpty()) { ChunkKey key = queue.removeFirst(); stats.access(key).cntPrefetcher_Load++; toLoad.add(key); status.put(key, Status.LOADING); bytesLoading += averageChunkSize; // For the first chunk, start immediately to reduce the // startup latency associated with additional chunks. if (first) break; } if (!toLoad.isEmpty() && error == null) db.chunk().get(Context.LOCAL, toLoad, this); if (first) { first = false; maybeStartGet(); } }
public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp, boolean validate) throws IOException, StoredObjectRepresentationNotAvailableException { DhtObjectToPack obj = (DhtObjectToPack) otp; try { PackChunk chunk = recentChunks.get(obj.chunk); if (chunk == null) { chunk = prefetcher.get(obj.chunk); if (chunk == null) { // This should never happen during packing, it implies // the fetch plan was incorrect. Unfortunately that can // occur if objects need to be recompressed on the fly. // stats.access(obj.chunk).cntCopyObjectAsIs_PrefetchMiss++; chunk = getChunk(obj.chunk); } if (!chunk.isFragment()) recentChunk(chunk); } chunk.copyObjectAsIs(out, obj, validate, this); } catch (DhtMissingChunkException missingChunk) { stats.access(missingChunk.getChunkKey()).cntCopyObjectAsIs_InvalidChunk++; throw new StoredObjectRepresentationNotAvailableException(otp); } }
public void copyObjectAsIs(PackOutputStream out, ObjectToPack otp, boolean validate) throws IOException, StoredObjectRepresentationNotAvailableException { DhtObjectToPack obj = (DhtObjectToPack) otp; try { PackChunk chunk = recentChunks.get(obj.chunk); if (chunk == null) { chunk = prefetcher.get(obj.chunk); if (chunk == null) { // This should never happen during packing, it implies // the fetch plan was incorrect. Unfortunately that can // occur if objects need to be recompressed on the fly. // stats.access(obj.chunk).cntCopyObjectAsIs_PrefetchMiss++; chunk = getChunk(obj.chunk); } if (!chunk.isFragment()) recentChunk(chunk); } chunk.copyObjectAsIs(out, obj, validate, this); } catch (DhtMissingChunkException missingChunk) { stats.access(missingChunk.getChunkKey()).cntCopyObjectAsIs_InvalidChunk++; throw new StoredObjectRepresentationNotAvailableException(otp); } }
private PackChunk load(ChunkKey chunkKey) throws DhtException {
	// Optionally capture a stack trace the first time this reader
	// loads the chunk, to help debug unexpected cache misses.
	if (0 == stats.access(chunkKey).cntReader_Load++
			&& readerOptions.isTrackFirstChunkLoad())
		stats.access(chunkKey).locReader_Load = new Throwable("first");

	Context opt = Context.READ_REPAIR;
	Sync<Collection<PackChunk.Members>> sync = Sync.create();
	db.chunk().get(opt, Collections.singleton(chunkKey), sync);
	try {
		Collection<PackChunk.Members> c = sync.get(getOptions()
				.getTimeout());
		if (c.isEmpty())
			return null;
		// Exactly one key was requested, so at most one member is
		// returned; iterator().next() covers every Collection type,
		// making the former List special case unnecessary.
		return c.iterator().next().build();
	} catch (InterruptedException e) {
		// Restore the interrupt flag so callers further up the
		// stack can still observe that the thread was interrupted.
		Thread.currentThread().interrupt();
		throw new DhtTimeoutException(e);
	} catch (TimeoutException e) {
		throw new DhtTimeoutException(e);
	}
}
private PackChunk load(ChunkKey chunkKey) throws DhtException {
	// Optionally capture a stack trace the first time this reader
	// loads the chunk, to help debug unexpected cache misses.
	if (0 == stats.access(chunkKey).cntReader_Load++
			&& readerOptions.isTrackFirstChunkLoad())
		stats.access(chunkKey).locReader_Load = new Throwable("first");

	Context opt = Context.READ_REPAIR;
	Sync<Collection<PackChunk.Members>> sync = Sync.create();
	db.chunk().get(opt, Collections.singleton(chunkKey), sync);
	try {
		Collection<PackChunk.Members> c = sync.get(getOptions()
				.getTimeout());
		if (c.isEmpty())
			return null;
		// Exactly one key was requested, so at most one member is
		// returned; iterator().next() covers every Collection type,
		// making the former List special case unnecessary.
		return c.iterator().next().build();
	} catch (InterruptedException e) {
		// Restore the interrupt flag so callers further up the
		// stack can still observe that the thread was interrupted.
		Thread.currentThread().interrupt();
		throw new DhtTimeoutException(e);
	} catch (TimeoutException e) {
		throw new DhtTimeoutException(e);
	}
}