// NOTE(review): fragment of a fromConfig(Config)-style builder method — the
// method signature and closing brace are outside this view. Each setter reads
// a "core.dht.*" key from rc, falling back to the current field value as the
// default, then the method returns this for call chaining.
setTimeout(Timeout.getTimeout(rc, "core", "dht", "timeout", getTimeout()));
setPrefetchFollowEdgeHints(rc.getBoolean("core", "dht", "prefetchFollowEdgeHints", isPrefetchFollowEdgeHints()));
setChunkLimit(rc.getInt("core", "dht", "chunkLimit", getChunkLimit()));
setOpenQueuePrefetchRatio(rc.getInt("core", "dht", "openQueuePrefetchRatio", getOpenQueuePrefetchRatio()));
setWalkCommitsPrefetchRatio(rc.getInt("core", "dht", "walkCommitsPrefetchRatio", getWalkCommitsPrefetchRatio()));
setWalkTreesPrefetchRatio(rc.getInt("core", "dht", "walkTreesPrefetchRatio", getWalkTreesPrefetchRatio()));
setWriteObjectsPrefetchRatio(rc.getInt("core", "dht", "writeObjectsPrefetchRatio", getWriteObjectsPrefetchRatio()));
setObjectIndexConcurrentBatches(rc.getInt("core", "dht", "objectIndexConcurrentBatches", getObjectIndexConcurrentBatches()));
setObjectIndexBatchSize(rc.getInt("core", "dht", "objectIndexBatchSize", getObjectIndexBatchSize()));
setDeltaBaseCacheSize(rc.getInt("core", "dht", "deltaBaseCacheSize", getDeltaBaseCacheSize()));
setDeltaBaseCacheLimit(rc.getInt("core", "dht", "deltaBaseCacheLimit", getDeltaBaseCacheLimit()));
setRecentInfoCacheSize(rc.getInt("core", "dht", "recentInfoCacheSize", getRecentInfoCacheSize()));
// Debug-only flag: records a stack trace on the first load of each chunk.
setTrackFirstChunkLoad(rc.getBoolean("core", "dht", "debugTrackFirstChunkLoad", isTrackFirstChunkLoad()));
return this;
/**
 * Creates a delta base cache sized from the reader's configured options.
 *
 * @param reader
 *            the reader whose statistics and options configure this cache.
 */
DeltaBaseCache(DhtReader reader) {
	DhtReaderOptions opt = reader.getOptions();
	stats = reader.getStatistics();
	maxByteCount = opt.getDeltaBaseCacheLimit();
	table = new Slot[opt.getDeltaBaseCacheSize()];
}
/**
 * Blocks until every outstanding metadata batch has completed.
 * <p>
 * Reacquiring all {@code batchSize} permits proves no batch still holds
 * one. Afterwards any error recorded by a batch worker is rethrown.
 *
 * @throws InterruptedException
 *             the calling thread was interrupted while waiting.
 * @throws DhtTimeoutException
 *             the configured timeout elapsed before the batches finished.
 * @throws DhtException
 *             a batch worker failed; its recorded error is rethrown.
 */
private void awaitPendingBatches() throws InterruptedException,
		DhtTimeoutException, DhtException {
	Timeout to = ctx.getOptions().getTimeout();
	if (!metaBatches.tryAcquire(batchSize, to.getTime(), to.getUnit()))
		throw new DhtTimeoutException(DhtText.get().timeoutChunkMeta);

	// Read the reference exactly once: the original called get() twice,
	// so a concurrent update between the null-check and the throw could
	// surface a different error — or throw null, raising an NPE.
	DhtException err = metaError.get();
	if (err != null)
		throw err;
}
/**
 * Creates the recent-chunk cache for the given reader.
 *
 * @param reader
 *            owning reader; supplies the statistics sink and the byte
 *            budget ({@code core.dht.chunkLimit}) this cache may hold.
 */
RecentChunks(DhtReader reader) {
	this.reader = reader;
	this.stats = reader.getStatistics();
	this.maxBytes = reader.getOptions().getChunkLimit();
	this.byKey = new HashMap<ChunkKey, Node>();
}
/**
 * Drains the pending lookups, grouping them by owning chunk, and primes
 * the prefetcher with each chunk key as it is first seen.
 */
private void init() throws IOException {
	ObjectWithInfo<T> c;
	while ((c = nextObjectWithInfo()) != null) {
		ChunkKey chunkKey = c.chunkKey;
		Collection<ObjectWithInfo<T>> list = byChunk.get(chunkKey);
		if (list == null) {
			list = new ArrayList<ObjectWithInfo<T>>();
			byChunk.put(chunkKey, list);
			if (prefetcher == null) {
				// Split the total chunk byte budget between the recent-chunk
				// cache and the prefetcher, per the configured percentage.
				int limit = reader.getOptions().getChunkLimit();
				int ratio = reader.getOptions().getOpenQueuePrefetchRatio();
				int prefetchLimit = (int) (limit * (ratio / 100.0));
				reader.getRecentChunks().setMaxBytes(limit - prefetchLimit);
				prefetcher = new Prefetcher(reader, 0, prefetchLimit);
			}
			prefetcher.push(chunkKey);
		}
		list.add(c);
	}
	chunkItr = byChunk.values().iterator();
}
// NOTE(review): stray closing brace carried over from the original line; it
// presumably closes the enclosing scope — confirm against the full file.
}
/**
 * Loads a single chunk from the database, requesting read-repair.
 *
 * @param chunkKey
 *            key of the chunk to fetch.
 * @return the parsed chunk, or {@code null} if the database returned no
 *         members for the key.
 * @throws DhtException
 *             the fetch timed out, or the calling thread was interrupted
 *             while waiting for the database.
 */
private PackChunk load(ChunkKey chunkKey) throws DhtException {
	// Optionally record where the first load of each chunk happened,
	// to help debug redundant loads (debugTrackFirstChunkLoad).
	if (0 == stats.access(chunkKey).cntReader_Load++
			&& readerOptions.isTrackFirstChunkLoad())
		stats.access(chunkKey).locReader_Load = new Throwable("first");

	Context opt = Context.READ_REPAIR;
	Sync<Collection<PackChunk.Members>> sync = Sync.create();
	db.chunk().get(opt, Collections.singleton(chunkKey), sync);
	try {
		Collection<PackChunk.Members> c = sync.get(getOptions()
				.getTimeout());
		if (c.isEmpty())
			return null;
		if (c instanceof List)
			return ((List<PackChunk.Members>) c).get(0).build();
		return c.iterator().next().build();
	} catch (InterruptedException e) {
		// Restore the interrupt flag so callers further up the stack can
		// still observe the interruption; the original swallowed it.
		Thread.currentThread().interrupt();
		throw new DhtTimeoutException(e);
	} catch (TimeoutException e) {
		throw new DhtTimeoutException(e);
	}
}
/**
 * Creates a writer that resolves object metadata in bounded batches.
 *
 * @param ctx
 *            reader supplying options, including the metadata batch size.
 * @param prefetch
 *            prefetcher that will supply chunk data ahead of use.
 */
ObjectWriter(DhtReader ctx, Prefetcher prefetch) {
	this.ctx = ctx;
	this.prefetch = prefetch;

	// One semaphore permit per in-flight metadata batch.
	batchSize = ctx.getOptions().getObjectIndexBatchSize();
	metaBatches = new Semaphore(batchSize);
	metaError = new AtomicReference<DhtException>();

	allVisits = new LinkedHashMap<ChunkKey, Integer>();
	allMeta = new HashMap<ChunkKey, ChunkMeta>();
	metaMissing = new HashSet<ChunkKey>();
	metaToRead = new HashSet<ChunkKey>();
	curVisit = 1;
}
/**
 * Creates a queued object lookup bound to the given reader.
 *
 * @param reader
 *            reader supplying the repository key, database, and options.
 * @param reportMissing
 *            if true, objects absent from the index are reported to the
 *            caller rather than silently dropped.
 */
QueueObjectLookup(DhtReader reader, boolean reportMissing) {
	this.reader = reader;
	this.repo = reader.getRepositoryKey();
	this.db = reader.getDatabase();
	this.options = reader.getOptions();
	this.concurrentBatches = options.getObjectIndexConcurrentBatches();
	this.reportMissing = reportMissing;

	// First pass may skip slow replicas; misses are retried later.
	this.context = Context.FAST_MISSING_OK;
	this.tmp = new ArrayList<ObjectInfo>(4);
	this.toRetry = new ArrayList<T>();
}
@Override public void walkAdviceBeginCommits(RevWalk rw, Collection<RevCommit> roots) throws IOException { endPrefetch(); // Don't assign the prefetcher right away. Delay until its // configured as push might invoke our own methods that may // try to call back into the active prefetcher. // Prefetcher p = prefetch(OBJ_COMMIT, readerOptions.getWalkCommitsPrefetchRatio()); p.push(this, roots); prefetcher = p; }
/**
 * Creates an access-ordered (LRU) cache of recently used object info lists.
 *
 * @param options
 *            supplies the maximum number of entries to retain.
 */
RecentInfoCache(DhtReaderOptions options) {
	final int max = options.getRecentInfoCacheSize();
	// accessOrder=true makes iteration order least-recently-accessed first,
	// so removeEldestEntry evicts the LRU entry once over capacity.
	infoCache = new LinkedHashMap<ObjectId, List<ObjectInfo>>(max, 0.75f, true) {
		private static final long serialVersionUID = 1L;

		@Override
		protected boolean removeEldestEntry(Entry<ObjectId, List<ObjectInfo>> e) {
			return size() > max;
		}
	};
}
@Override public B setup() throws IllegalArgumentException, DhtException, RepositoryNotFoundException { if (getDatabase() == null) throw new IllegalArgumentException(DhtText.get().databaseRequired); if (getReaderOptions() == null) setReaderOptions(new DhtReaderOptions()); if (getInserterOptions() == null) setInserterOptions(new DhtInserterOptions()); if (getRepositoryKey() == null) { if (getRepositoryName() == null) throw new IllegalArgumentException(DhtText.get().nameRequired); RepositoryKey r; try { r = getDatabase().repositoryIndex().get( RepositoryName.create(name)); } catch (TimeoutException e) { throw new DhtTimeoutException(MessageFormat.format( DhtText.get().timeoutLocatingRepository, name), e); } if (isMustExist() && r == null) throw new RepositoryNotFoundException(getRepositoryName()); if (r != null) setRepositoryKey(r); } return self(); }
/**
 * Blocks until every outstanding metadata batch has completed.
 * <p>
 * Reacquiring all {@code batchSize} permits proves no batch still holds
 * one. Afterwards any error recorded by a batch worker is rethrown.
 *
 * @throws InterruptedException
 *             the calling thread was interrupted while waiting.
 * @throws DhtTimeoutException
 *             the configured timeout elapsed before the batches finished.
 * @throws DhtException
 *             a batch worker failed; its recorded error is rethrown.
 */
private void awaitPendingBatches() throws InterruptedException,
		DhtTimeoutException, DhtException {
	Timeout to = ctx.getOptions().getTimeout();
	if (!metaBatches.tryAcquire(batchSize, to.getTime(), to.getUnit()))
		throw new DhtTimeoutException(DhtText.get().timeoutChunkMeta);

	// Read the reference exactly once: the original called get() twice,
	// so a concurrent update between the null-check and the throw could
	// surface a different error — or throw null, raising an NPE.
	DhtException err = metaError.get();
	if (err != null)
		throw err;
}
/**
 * Creates the recent-chunk cache for the given reader.
 *
 * @param reader
 *            owning reader; supplies the statistics sink and the byte
 *            budget ({@code core.dht.chunkLimit}) this cache may hold.
 */
RecentChunks(DhtReader reader) {
	this.reader = reader;
	this.stats = reader.getStatistics();
	this.maxBytes = reader.getOptions().getChunkLimit();
	this.byKey = new HashMap<ChunkKey, Node>();
}
/**
 * Creates a delta base cache sized from the reader's configured options.
 *
 * @param reader
 *            the reader whose statistics and options configure this cache.
 */
DeltaBaseCache(DhtReader reader) {
	DhtReaderOptions opt = reader.getOptions();
	stats = reader.getStatistics();
	maxByteCount = opt.getDeltaBaseCacheLimit();
	table = new Slot[opt.getDeltaBaseCacheSize()];
}
/**
 * Drains the pending lookups, grouping them by owning chunk, and primes
 * the prefetcher with each chunk key as it is first seen.
 */
private void init() throws IOException {
	ObjectWithInfo<T> c;
	while ((c = nextObjectWithInfo()) != null) {
		ChunkKey chunkKey = c.chunkKey;
		Collection<ObjectWithInfo<T>> list = byChunk.get(chunkKey);
		if (list == null) {
			list = new ArrayList<ObjectWithInfo<T>>();
			byChunk.put(chunkKey, list);
			if (prefetcher == null) {
				// Split the total chunk byte budget between the recent-chunk
				// cache and the prefetcher, per the configured percentage.
				int limit = reader.getOptions().getChunkLimit();
				int ratio = reader.getOptions().getOpenQueuePrefetchRatio();
				int prefetchLimit = (int) (limit * (ratio / 100.0));
				reader.getRecentChunks().setMaxBytes(limit - prefetchLimit);
				prefetcher = new Prefetcher(reader, 0, prefetchLimit);
			}
			prefetcher.push(chunkKey);
		}
		list.add(c);
	}
	chunkItr = byChunk.values().iterator();
}
// NOTE(review): stray closing brace carried over from the original line; it
// presumably closes the enclosing scope — confirm against the full file.
}
/**
 * Loads a single chunk from the database, requesting read-repair.
 *
 * @param chunkKey
 *            key of the chunk to fetch.
 * @return the parsed chunk, or {@code null} if the database returned no
 *         members for the key.
 * @throws DhtException
 *             the fetch timed out, or the calling thread was interrupted
 *             while waiting for the database.
 */
private PackChunk load(ChunkKey chunkKey) throws DhtException {
	// Optionally record where the first load of each chunk happened,
	// to help debug redundant loads (debugTrackFirstChunkLoad).
	if (0 == stats.access(chunkKey).cntReader_Load++
			&& readerOptions.isTrackFirstChunkLoad())
		stats.access(chunkKey).locReader_Load = new Throwable("first");

	Context opt = Context.READ_REPAIR;
	Sync<Collection<PackChunk.Members>> sync = Sync.create();
	db.chunk().get(opt, Collections.singleton(chunkKey), sync);
	try {
		Collection<PackChunk.Members> c = sync.get(getOptions()
				.getTimeout());
		if (c.isEmpty())
			return null;
		if (c instanceof List)
			return ((List<PackChunk.Members>) c).get(0).build();
		return c.iterator().next().build();
	} catch (InterruptedException e) {
		// Restore the interrupt flag so callers further up the stack can
		// still observe the interruption; the original swallowed it.
		Thread.currentThread().interrupt();
		throw new DhtTimeoutException(e);
	} catch (TimeoutException e) {
		throw new DhtTimeoutException(e);
	}
}
/**
 * Creates a writer that resolves object metadata in bounded batches.
 *
 * @param ctx
 *            reader supplying options, including the metadata batch size.
 * @param prefetch
 *            prefetcher that will supply chunk data ahead of use.
 */
ObjectWriter(DhtReader ctx, Prefetcher prefetch) {
	this.ctx = ctx;
	this.prefetch = prefetch;

	// One semaphore permit per in-flight metadata batch.
	batchSize = ctx.getOptions().getObjectIndexBatchSize();
	metaBatches = new Semaphore(batchSize);
	metaError = new AtomicReference<DhtException>();

	allVisits = new LinkedHashMap<ChunkKey, Integer>();
	allMeta = new HashMap<ChunkKey, ChunkMeta>();
	metaMissing = new HashSet<ChunkKey>();
	metaToRead = new HashSet<ChunkKey>();
	curVisit = 1;
}
/**
 * Creates a queued object lookup bound to the given reader.
 *
 * @param reader
 *            reader supplying the repository key, database, and options.
 * @param reportMissing
 *            if true, objects absent from the index are reported to the
 *            caller rather than silently dropped.
 */
QueueObjectLookup(DhtReader reader, boolean reportMissing) {
	this.reader = reader;
	this.repo = reader.getRepositoryKey();
	this.db = reader.getDatabase();
	this.options = reader.getOptions();
	this.concurrentBatches = options.getObjectIndexConcurrentBatches();
	this.reportMissing = reportMissing;

	// First pass may skip slow replicas; misses are retried later.
	this.context = Context.FAST_MISSING_OK;
	this.tmp = new ArrayList<ObjectInfo>(4);
	this.toRetry = new ArrayList<T>();
}
@Override public void walkAdviceBeginCommits(RevWalk rw, Collection<RevCommit> roots) throws IOException { endPrefetch(); // Don't assign the prefetcher right away. Delay until its // configured as push might invoke our own methods that may // try to call back into the active prefetcher. // Prefetcher p = prefetch(OBJ_COMMIT, readerOptions.getWalkCommitsPrefetchRatio()); p.push(this, roots); prefetcher = p; }
/**
 * Creates an access-ordered (LRU) cache of recently used object info lists.
 *
 * @param options
 *            supplies the maximum number of entries to retain.
 */
RecentInfoCache(DhtReaderOptions options) {
	final int max = options.getRecentInfoCacheSize();
	// accessOrder=true makes iteration order least-recently-accessed first,
	// so removeEldestEntry evicts the LRU entry once over capacity.
	infoCache = new LinkedHashMap<ObjectId, List<ObjectInfo>>(max, 0.75f, true) {
		private static final long serialVersionUID = 1L;

		@Override
		protected boolean removeEldestEntry(Entry<ObjectId, List<ObjectInfo>> e) {
			return size() > max;
		}
	};
}