/**
 * Clears the disk cache if one has already been created; does nothing otherwise.
 *
 * <p>Synchronized so the null check and clear are atomic with respect to lazy creation.
 */
@VisibleForTesting
synchronized void clearDiskCacheIfCreated() {
  // Avoid instantiating the cache just to clear it.
  if (diskCache != null) {
    diskCache.clear();
  }
}
// NOTE(review): fragment — the enclosing method begins and ends outside this view.
// Looks up the cached file for the original (pre-transformation) key and, on a hit,
// records which source key produced it — presumably part of a startNext()-style
// loop in a cache generator; confirm against the full method.
cacheFile = helper.getDiskCache().get(originalKey); if (cacheFile != null) { this.sourceKey = sourceId;
/**
 * Writes {@code toEncode} to the disk cache under {@code key} using the configured encoder.
 *
 * <p>The resource is unlocked in {@code finally} so it is released even if the cache write
 * throws; the trace section is closed on the same path.
 *
 * @param diskCacheProvider Supplies the disk cache to write to.
 * @param options Options passed through to the {@link DataCacheWriter}.
 */
void encode(DiskCacheProvider diskCacheProvider, Options options) {
  GlideTrace.beginSection("DecodeJob.encode");
  try {
    diskCacheProvider.getDiskCache().put(key, new DataCacheWriter<>(encoder, toEncode, options));
  } finally {
    toEncode.unlock();
    GlideTrace.endSection();
  }
}
/**
 * Attempts to decode a resource of type {@code Z} from the disk cache.
 *
 * <p>Returns {@code null} when the key is absent or decoding fails. Entries that cannot be
 * decoded are evicted so subsequent loads do not retry a known-bad file.
 *
 * @param key Cache key identifying the entry.
 * @param decoder Decoder used to read the cached file.
 * @param width Target width passed to the decoder.
 * @param height Target height passed to the decoder.
 * @return The decoded resource, or {@code null} on miss or decode failure.
 */
public <Z> Resource<Z> load(Key key, ResourceDecoder<File, Z> decoder, int width, int height) {
  File cached = diskCache.get(key);
  if (cached == null) {
    return null;
  }

  Resource<Z> decoded = null;
  try {
    decoded = decoder.decode(cached, width, height);
  } catch (IOException e) {
    if (Log.isLoggable(TAG, Log.DEBUG)) {
      Log.d(TAG, "Exception decoding image from cache", e);
    }
  }

  if (decoded == null) {
    if (Log.isLoggable(TAG, Log.DEBUG)) {
      Log.d(TAG, "Failed to decode image from cache or not present in cache");
    }
    // Evict entries we cannot decode so we do not repeatedly fail on them.
    diskCache.delete(key);
  }
  return decoded;
}
}
/** Removes every entry from the underlying disk cache. */
public void clearDiskCache() {
  DiskCache diskCache = diskCacheProvider.getDiskCache();
  diskCache.clear();
}
// NOTE(review): fragment — the statement begins on a line outside this view
// (trailing arguments of a key construction). Probes the disk cache for the
// computed result key and, on a hit, records the producing source key —
// presumably a ResourceCacheGenerator-style startNext() loop; verify in the
// full file.
resourceClass, helper.getOptions()); cacheFile = helper.getDiskCache().get(currentKey); if (cacheFile != null) { sourceKey = sourceId;
/**
 * Encodes the fetched source data into the disk cache, then prepares a
 * {@link DataCacheGenerator} to decode it back out of the cache.
 *
 * <p>The fetcher is cleaned up in {@code finally} so it is released even when encoding throws.
 *
 * @param dataToCache Raw source data returned by the fetcher.
 */
private void cacheData(Object dataToCache) {
  long start = LogTime.getLogTime();
  try {
    Encoder<Object> sourceEncoder = helper.getSourceEncoder(dataToCache);
    DataCacheWriter<Object> cacheWriter =
        new DataCacheWriter<>(sourceEncoder, dataToCache, helper.getOptions());
    originalKey = new DataCacheKey(loadData.sourceKey, helper.getSignature());
    helper.getDiskCache().put(originalKey, cacheWriter);
    if (Log.isLoggable(TAG, Log.VERBOSE)) {
      Log.v(
          TAG,
          "Finished encoding source to cache"
              + ", key: "
              + originalKey
              + ", data: "
              + dataToCache
              + ", encoder: "
              + sourceEncoder
              + ", duration: "
              + LogTime.getElapsedMillis(start));
    }
  } finally {
    // Always release the fetcher, even if encoding fails.
    loadData.fetcher.cleanup();
  }

  sourceCacheGenerator =
      new DataCacheGenerator(Collections.singletonList(loadData.sourceKey), helper, this);
}
/**
 * Decodes the resource previously cached under {@code key}, or returns {@code null} on a miss.
 *
 * <p>If decoding yields {@code null} or throws, the entry is deleted in {@code finally} so a
 * broken cache file is not retried; any exception still propagates to the caller.
 *
 * @param key Cache key identifying the entry.
 * @return The decoded resource, or {@code null} if nothing usable was cached.
 * @throws IOException If the cache decoder fails while reading the file.
 */
private Resource<T> loadFromCache(Key key) throws IOException {
  File cached = diskCacheProvider.getDiskCache().get(key);
  if (cached == null) {
    return null;
  }

  Resource<T> decoded = null;
  try {
    decoded = loadProvider.getCacheDecoder().decode(cached, width, height);
  } finally {
    if (decoded == null) {
      diskCacheProvider.getDiskCache().delete(key);
    }
  }
  return decoded;
}
@After
public void tearDown() {
  // Clear the cache first, but guarantee the temp directory is removed
  // even if clear() throws.
  try {
    cache.clear();
  } finally {
    deleteRecursive(dir);
  }
}
/** Deleting the cache directory out from under an open DiskCache must not break loads. */
@Test
public void loadFromCache_afterDiskCacheDeleted_doesNotFail() {
  final DiskCache cache = DiskLruCacheWrapper.create(cacheDir, 1024 * 1024);
  // Force the underlying cache to open before removing its directory.
  cache.get(mock(Key.class));
  deleteRecursively(cacheDir);

  Glide.init(
      context,
      new GlideBuilder()
          .setDiskCache(
              new Factory() {
                @Override
                public DiskCache build() {
                  return cache;
                }
              }));

  Drawable result = concurrency.get(Glide.with(context).load(raw.canonical).submit());

  assertThat(result).isNotNull();
}
/**
 * Caches the transformed resource under the result key, if caching results is enabled.
 *
 * <p>No-ops when the resource is {@code null} or the strategy does not cache results.
 *
 * @param transformed The transformed resource to persist; may be {@code null}.
 */
private void writeTransformedToCache(Resource<T> transformed) {
  boolean shouldCache = transformed != null && diskCacheStrategy.cacheResult();
  if (!shouldCache) {
    return;
  }

  long start = LogTime.getLogTime();
  SourceWriter<Resource<T>> resultWriter =
      new SourceWriter<Resource<T>>(loadProvider.getEncoder(), transformed);
  diskCacheProvider.getDiskCache().put(resultKey, resultWriter);
  if (Log.isLoggable(TAG, Log.VERBOSE)) {
    logWithTimeAndKey("Wrote transformed from source to cache", start);
  }
}
/** Externally deleting the cache directory, then clearing, must not make later gets throw. */
@Test
public void get_afterDeleteDirectoryOutsideGlideAndClose_doesNotThrow() {
  assumeTrue("A file handle is likely open, so cannot delete dir", !Util.isWindows());
  DiskCache wrapper = DiskLruCacheWrapper.create(dir, 1024 * 1024);
  // Open the underlying cache, then remove its directory behind its back.
  wrapper.get(mock(Key.class));
  deleteRecursive(dir);

  wrapper.clear();
  wrapper.get(mock(Key.class));
}
}
@Test public void testEditIsAbortedIfWriterThrows() throws IOException { try { cache.put(key, new DiskCache.Writer() { @Override public boolean write(@NonNull File file) { throw new RuntimeException("test"); } }); } catch (RuntimeException e) { // Expected. } cache.put(key, new DiskCache.Writer() { @Override public boolean write(@NonNull File file) { try { Util.writeFile(file, data); } catch (IOException e) { fail(e.toString()); } return true; } }); byte[] received = Util.readFile(cache.get(key), data.length); assertArrayEquals(data, received); }
/** Delegates to the provider's disk cache to wipe all cached entries. */
public void clearDiskCache() {
  DiskCache cache = diskCacheProvider.getDiskCache();
  cache.clear();
}
/**
 * Advances to the next cached source and starts a fetch for it.
 *
 * <p>First loops through {@code cacheKeys} until it finds a key whose data is present in the
 * disk cache and for which at least one {@link ModelLoader} exists; then loops through those
 * loaders until one produces usable {@code loadData} with a registered load path, and starts
 * that fetch.
 *
 * @return {@code true} if a fetch was started, {@code false} when the keys are exhausted.
 */
@Override
public boolean startNext() {
  // Walk cache keys until we find a cached file with at least one applicable ModelLoader.
  while (modelLoaders == null || !hasNextModelLoader()) {
    sourceIdIndex++;
    if (sourceIdIndex >= cacheKeys.size()) {
      return false;
    }

    Key sourceId = cacheKeys.get(sourceIdIndex);
    // The signature is part of the key so entries written under a prior signature miss here.
    Key originalKey = new DataCacheKey(sourceId, helper.getSignature());
    cacheFile = helper.getDiskCache().get(originalKey);
    if (cacheFile != null) {
      this.sourceKey = sourceId;
      modelLoaders = helper.getModelLoaders(cacheFile);
      modelLoaderIndex = 0;
    }
  }

  loadData = null;
  boolean started = false;
  // Try each remaining loader until one yields loadData with a registered load path.
  while (!started && hasNextModelLoader()) {
    ModelLoader<File, ?> modelLoader = modelLoaders.get(modelLoaderIndex++);
    loadData =
        modelLoader.buildLoadData(
            cacheFile, helper.getWidth(), helper.getHeight(), helper.getOptions());
    if (loadData != null && helper.hasLoadPath(loadData.fetcher.getDataClass())) {
      started = true;
      // Callbacks arrive on this object asynchronously once the fetch starts.
      loadData.fetcher.loadData(helper.getPriority(), this);
    }
  }
  return started;
}
/**
 * Writes the source data to the disk cache under the original key, then decodes it back out of
 * the cache into a resource.
 *
 * @param data Raw source data to persist and decode.
 * @return The resource decoded from the freshly written cache entry, or {@code null}.
 * @throws IOException If reading the cached entry back fails.
 */
private Resource<T> cacheAndDecodeSourceData(A data) throws IOException {
  long writeStart = LogTime.getLogTime();
  SourceWriter<A> sourceWriter = new SourceWriter<A>(loadProvider.getSourceEncoder(), data);
  diskCacheProvider.getDiskCache().put(resultKey.getOriginalKey(), sourceWriter);
  if (Log.isLoggable(TAG, Log.VERBOSE)) {
    logWithTimeAndKey("Wrote source to cache", writeStart);
  }

  long decodeStart = LogTime.getLogTime();
  Resource<T> decoded = loadFromCache(resultKey.getOriginalKey());
  if (Log.isLoggable(TAG, Log.VERBOSE) && decoded != null) {
    logWithTimeAndKey("Decoded source from cache", decodeStart);
  }
  return decoded;
}