/**
 * Reading from the cache after its directory was deleted behind Glide's back and the cache was
 * cleared must not throw.
 */
@Test
public void get_afterDeleteDirectoryOutsideGlideAndClose_doesNotThrow() {
  // On Windows an open handle on the cache's files prevents deleting the directory.
  assumeTrue("A file handle is likely open, so cannot delete dir", !Util.isWindows());

  DiskCache diskCache = DiskLruCacheWrapper.create(dir, 1024 * 1024);
  // Touch the cache once so it is opened before the directory disappears.
  diskCache.get(mock(Key.class));

  deleteRecursive(dir);
  diskCache.clear();

  // A read after clear on the vanished directory should still be a no-op, not a crash.
  diskCache.get(mock(Key.class));
}
}
/** An entry must not be committed (and so not be readable) when the writer reports failure. */
@Test
public void testDoesNotCommitIfWriterReturnsFalse() {
  DiskCache.Writer failingWriter =
      new DiskCache.Writer() {
        @Override
        public boolean write(@NonNull File file) {
          // Signal failure without writing anything.
          return false;
        }
      };
  cache.put(key, failingWriter);

  assertNull(cache.get(key));
}
/**
 * Clearing the disk cache after its directory was removed outside of Glide must not throw.
 */
@Test
public void clearDiskCache_afterOpeningDiskCache_andDeleteDirectoryOutsideGlide_doesNotThrow() {
  DiskCache diskCache = DiskLruCacheWrapper.create(cacheDir, 1024 * 1024);
  // Read once so the cache is opened before its backing directory is deleted.
  diskCache.get(mock(Key.class));

  deleteRecursively(cacheDir);

  diskCache.clear();
}
/**
 * Even if the writer actually writes data, returning {@code false} must abandon the edit so the
 * entry is never readable.
 */
@Test
public void testDoesNotCommitIfWriterWritesButReturnsFalse() {
  DiskCache.Writer writeThenFail =
      new DiskCache.Writer() {
        @Override
        public boolean write(@NonNull File file) {
          try {
            Util.writeFile(file, data);
          } catch (IOException e) {
            fail(e.toString());
          }
          // The bytes were written, but report failure anyway.
          return false;
        }
      };
  cache.put(key, writeThenFail);

  assertNull(cache.get(key));
}
cacheFile = helper.getDiskCache().get(originalKey); if (cacheFile != null) { this.sourceKey = sourceId;
/**
 * Clearing the disk cache after its directory was removed outside of Glide must not throw.
 */
@Test
public void clearDiskCache_afterOpeningDiskCache_andDeleteDirectoryOutsideGlide_doesNotThrow() {
  // On Windows an open handle on the cache's files prevents deleting the directory.
  assumeTrue("A file handle is likely open, so cannot delete dir", !Util.isWindows());

  DiskCache diskCache = DiskLruCacheWrapper.create(dir, 1024 * 1024);
  // Read once so the cache is opened before its backing directory is deleted.
  diskCache.get(mock(Key.class));

  deleteRecursive(dir);

  diskCache.clear();
}
resourceClass, helper.getOptions()); cacheFile = helper.getDiskCache().get(currentKey); if (cacheFile != null) { sourceKey = sourceId;
/** A successfully written entry round-trips: the bytes read back equal the bytes written. */
@Test
public void testCanInsertAndGet() throws IOException {
  DiskCache.Writer successfulWriter =
      new DiskCache.Writer() {
        @Override
        public boolean write(@NonNull File file) {
          try {
            Util.writeFile(file, data);
          } catch (IOException e) {
            fail(e.toString());
          }
          return true;
        }
      };
  cache.put(key, successfulWriter);

  byte[] roundTripped = Util.readFile(cache.get(key), data.length);
  assertArrayEquals(data, roundTripped);
}
/**
 * A full Glide load still succeeds when the disk cache's directory was deleted out from under an
 * already-opened cache.
 */
@Test
public void loadFromCache_afterDiskCacheDeleted_doesNotFail() {
  final DiskCache cache = DiskLruCacheWrapper.create(cacheDir, 1024 * 1024);
  // Open the cache, then pull its directory out from under it.
  cache.get(mock(Key.class));
  deleteRecursively(cacheDir);

  // Hand the already-broken cache instance to Glide.
  Glide.init(
      context,
      new GlideBuilder()
          .setDiskCache(
              new Factory() {
                @Override
                public DiskCache build() {
                  return cache;
                }
              }));

  Drawable loaded =
      concurrency.get(
          Glide.with(context)
              .load(raw.canonical)
              .submit());
  assertThat(loaded).isNotNull();
}
@Test public void testEditIsAbortedIfWriterThrows() throws IOException { try { cache.put(key, new DiskCache.Writer() { @Override public boolean write(@NonNull File file) { throw new RuntimeException("test"); } }); } catch (RuntimeException e) { // Expected. } cache.put(key, new DiskCache.Writer() { @Override public boolean write(@NonNull File file) { try { Util.writeFile(file, data); } catch (IOException e) { fail(e.toString()); } return true; } }); byte[] received = Util.readFile(cache.get(key), data.length); assertArrayEquals(data, received); }
/**
 * A full Glide load still succeeds when the disk cache's directory was deleted out from under an
 * already-opened cache and the cache was then cleared.
 */
@Test
public void loadFromCache_afterDiskCacheDeletedAndCleared_doesNotFail() {
  final DiskCache cache = DiskLruCacheWrapper.create(cacheDir, 1024 * 1024);
  // Open the cache, delete its directory behind its back, then clear it.
  cache.get(mock(Key.class));
  deleteRecursively(cacheDir);
  cache.clear();

  // Hand the already-broken cache instance to Glide.
  Glide.init(
      context,
      new GlideBuilder()
          .setDiskCache(
              new Factory() {
                @Override
                public DiskCache build() {
                  return cache;
                }
              }));

  Drawable loaded =
      concurrency.get(
          Glide.with(context)
              .load(ResourceIds.raw.canonical)
              .submit());
  assertThat(loaded).isNotNull();
}
/**
 * Attempts to decode a resource for {@code key} from the disk cache.
 *
 * <p>Returns {@code null} when the key is missing or decoding fails. A failed decode also deletes
 * the entry, so an unreadable file is not retried on subsequent loads.
 */
public <Z> Resource<Z> load(Key key, ResourceDecoder<File, Z> decoder, int width, int height) {
  File cached = diskCache.get(key);
  if (cached == null) {
    return null;
  }

  Resource<Z> decoded = null;
  try {
    decoded = decoder.decode(cached, width, height);
  } catch (IOException e) {
    if (Log.isLoggable(TAG, Log.DEBUG)) {
      Log.d(TAG, "Exception decoding image from cache", e);
    }
  }

  if (decoded == null) {
    if (Log.isLoggable(TAG, Log.DEBUG)) {
      Log.d(TAG, "Failed to decode image from cache or not present in cache");
    }
    // Evict the entry so future loads don't keep hitting an undecodable file.
    diskCache.delete(key);
  }
  return decoded;
}
}
/**
 * Decodes a resource for {@code key} from the disk cache, or returns {@code null} if the key is
 * absent. If decoding produces nothing (including by throwing), the cache entry is deleted so
 * unusable bytes are not retried.
 */
private Resource<T> loadFromCache(Key key) throws IOException {
  File cacheFile = diskCacheProvider.getDiskCache().get(key);
  if (cacheFile == null) {
    return null;
  }

  Resource<T> decoded = null;
  try {
    decoded = loadProvider.getCacheDecoder().decode(cacheFile, width, height);
  } finally {
    // Runs on both the normal and the exceptional path: no result means the entry is bad.
    if (decoded == null) {
      diskCacheProvider.getDiskCache().delete(key);
    }
  }
  return decoded;
}
// Stateful generator: advances sourceIdIndex/modelLoaderIndex across calls, so statement order
// and the field mutations below are load-bearing. Code left byte-identical; comments only.
@Override public boolean startNext() {
  // Advance through cache keys until one has a cached file with at least one ModelLoader.
  while (modelLoaders == null || !hasNextModelLoader()) {
    sourceIdIndex++;
    if (sourceIdIndex >= cacheKeys.size()) {
      // Exhausted every key without starting a fetch.
      return false;
    }
    Key sourceId = cacheKeys.get(sourceIdIndex);
    // Original data is keyed by the source id combined with the request signature.
    Key originalKey = new DataCacheKey(sourceId, helper.getSignature());
    cacheFile = helper.getDiskCache().get(originalKey);
    if (cacheFile != null) {
      this.sourceKey = sourceId;
      modelLoaders = helper.getModelLoaders(cacheFile);
      // Restart the loader scan for this new file.
      modelLoaderIndex = 0;
    }
  }
  loadData = null;
  boolean started = false;
  // Try the remaining ModelLoaders for the current cacheFile until one can be started.
  while (!started && hasNextModelLoader()) {
    ModelLoader<File, ?> modelLoader = modelLoaders.get(modelLoaderIndex++);
    loadData = modelLoader.buildLoadData(cacheFile, helper.getWidth(), helper.getHeight(), helper.getOptions());
    // Only start a fetch whose produced data class we actually have a load path for.
    if (loadData != null && helper.hasLoadPath(loadData.fetcher.getDataClass())) {
      started = true;
      loadData.fetcher.loadData(helper.getPriority(), this);
    }
  }
  return started;
}
cacheFile = helper.getDiskCache().get(currentKey); if (cacheFile != null) { this.sourceKey = sourceId;