/**
 * Reverts the optimistic update identified by {@code mutationId} from every journal
 * in this cache, evicting journals whose history becomes empty.
 *
 * @param mutationId id of the optimistic mutation to roll back
 * @return cache keys whose records changed as a result of the revert
 */
@NotNull public Set<String> removeOptimisticUpdates(@NotNull final UUID mutationId) {
  checkNotNull(mutationId, "mutationId == null");

  final Set<String> changedCacheKeys = new HashSet<>();
  final Set<String> emptyJournalKeys = new HashSet<>();
  for (Map.Entry<String, RecordJournal> journalEntry : lruCache.asMap().entrySet()) {
    changedCacheKeys.addAll(journalEntry.getValue().revert(mutationId));
    if (journalEntry.getValue().history.isEmpty()) {
      emptyJournalKeys.add(journalEntry.getKey());
    }
  }
  // Evict after iteration completes so we never mutate the map mid-loop.
  lruCache.invalidateAll(emptyJournalKeys);
  return changedCacheKeys;
}
/**
 * Loads the record for {@code key} from this LRU layer, falling through to the
 * chained cache on a miss. A miss everywhere returns {@code null}.
 *
 * @param key          cache key of the record to load
 * @param cacheHeaders headers controlling cache behavior for this read
 * @return the cached record, or {@code null} if not found in any layer
 */
@Nullable @Override public Record loadRecord(@NotNull final String key, @NotNull final CacheHeaders cacheHeaders) {
  final Record record;
  try {
    record = lruCache.get(key, new Callable<Record>() {
      @Override public Record call() throws Exception {
        // Ask the next cache in the chain; Optional.get() throws when the
        // chain has no record, which is caught below and treated as a miss.
        return nextCache().flatMap(new Function<NormalizedCache, Optional<Record>>() {
          @NotNull @Override public Optional<Record> apply(@NotNull NormalizedCache cache) {
            return Optional.fromNullable(cache.loadRecord(key, cacheHeaders));
          }
        }).get(); // lruCache.get(key, callable) requires non-null.
      }
    });
  } catch (Exception ignore) {
    // Broad catch is deliberate: a loader failure (including the absent-Optional
    // case above) is reported to the caller as a plain cache miss.
    return null;
  }
  if (cacheHeaders.hasHeader(ApolloCacheHeaders.EVICT_AFTER_READ)) {
    // Caller asked for read-once semantics: drop the entry after returning it.
    lruCache.invalidate(key);
  }
  return record;
}
@NotNull protected Set<String> performMerge(@NotNull final Record apolloRecord, @NotNull final CacheHeaders cacheHeaders) { final Record oldRecord = lruCache.getIfPresent(apolloRecord.key()); if (oldRecord == null) { lruCache.put(apolloRecord.key(), apolloRecord); return apolloRecord.keys(); } else { Set<String> changedKeys = oldRecord.mergeWith(apolloRecord); //re-insert to trigger new weight calculation lruCache.put(apolloRecord.key(), oldRecord); return changedKeys; } }
/**
 * Removes the journal for {@code cacheKey} from this layer and from the chained
 * cache, optionally cascading through records referenced by the snapshot.
 *
 * @param cacheKey key of the record to remove
 * @param cascade  when true, also remove every record the snapshot references
 * @return true if anything was removed in any layer
 */
@Override public boolean remove(@NotNull final CacheKey cacheKey, final boolean cascade) {
  checkNotNull(cacheKey, "cacheKey == null");

  // Delegate to the chained cache first; an absent chain contributes false.
  boolean removed = nextCache().map(new Function<NormalizedCache, Boolean>() {
    @NotNull @Override public Boolean apply(@NotNull NormalizedCache cache) {
      return cache.remove(cacheKey, cascade);
    }
  }).or(Boolean.FALSE);

  final RecordJournal journal = lruCache.getIfPresent(cacheKey.key());
  if (journal == null) {
    return removed;
  }

  lruCache.invalidate(cacheKey.key());
  removed = true;
  if (cascade) {
    for (CacheReference reference : journal.snapshot.referencedFields()) {
      // Non-short-circuit '&' so every referenced record gets a removal
      // attempt even after one of them reports false.
      removed = removed & remove(CacheKey.from(reference.key()), true);
    }
  }
  return removed;
}
/**
 * Loads the record for {@code key}, overlaying any optimistic updates tracked in
 * this layer on top of the record from the chained (non-optimistic) cache.
 *
 * @param key          cache key of the record to load
 * @param cacheHeaders headers controlling cache behavior for this read
 * @return the (possibly optimistically patched) record, or {@code null} on a miss
 *         or any read failure
 */
@Nullable @Override public Record loadRecord(@NotNull final String key, @NotNull final CacheHeaders cacheHeaders) {
  checkNotNull(key, "key == null");
  checkNotNull(cacheHeaders, "cacheHeaders == null");
  try {
    // Record as the chained cache sees it, before optimistic patches.
    final Optional<Record> nonOptimisticRecord = nextCache()
        .flatMap(new Function<NormalizedCache, Optional<Record>>() {
          @NotNull @Override public Optional<Record> apply(@NotNull NormalizedCache cache) {
            return Optional.fromNullable(cache.loadRecord(key, cacheHeaders));
          }
        });
    final RecordJournal journal = lruCache.getIfPresent(key);
    if (journal != null) {
      // Overlay the optimistic snapshot onto a clone so neither the cached
      // record nor the journal's snapshot is mutated by the merge.
      return nonOptimisticRecord.map(new Function<Record, Record>() {
        @NotNull @Override public Record apply(@NotNull Record record) {
          Record result = record.clone();
          result.mergeWith(journal.snapshot);
          return result;
        }
      }).or(journal.snapshot.clone()); // only optimistic data exists for this key
    } else {
      return nonOptimisticRecord.orNull();
    }
  } catch (Exception ignore) {
    // Deliberate best-effort: any failure reads as a cache miss.
    return null;
  }
}
/** Drops every entry held by this cache layer; the chained cache (if any) is untouched. */
void clearCurrentCache() { lruCache.invalidateAll(); }
/**
 * Produces a diagnostic dump of this layer (current journal snapshots keyed by
 * cache key) followed by the dumps of any chained caches.
 *
 * @return map from cache class to an unmodifiable view of its records
 */
@Override public Map<Class, Map<String, Record>> dump() {
  // Flatten each journal to its current snapshot.
  final Map<String, Record> snapshots = new LinkedHashMap<>();
  for (Map.Entry<String, RecordJournal> journalEntry : lruCache.asMap().entrySet()) {
    snapshots.put(journalEntry.getKey(), journalEntry.getValue().snapshot);
  }

  final Map<Class, Map<String, Record>> dump = new LinkedHashMap<>();
  dump.put(getClass(), Collections.unmodifiableMap(snapshots));
  if (nextCache().isPresent()) {
    dump.putAll(nextCache().get().dump());
  }
  return dump;
}
/** Evicts any cached network response stored under {@code key}. */
@Override public void clear(@Nonnull Key key) { networkResponses.invalidate(key); }
/**
 * Forwards to {@code delegate().get(key, valueLoader)}; all caching behavior is
 * defined by the delegate.
 *
 * @param key         key to look up
 * @param valueLoader loader invoked by the delegate on a miss
 * @return the cached or freshly loaded value
 * @throws ExecutionException if the delegate reports a loader failure
 * @since 11.0
 */
@Nullable @Override public V get(K key, Callable<? extends V> valueLoader) throws ExecutionException { return delegate().get(key, valueLoader); }
/**
 * Only update memory after persister has been successfully updated
 *
 * @param key  cache key to store under
 * @param data parsed value to cache, wrapped in an already-completed Observable
 */
void updateMemory(@Nonnull final Key key, final Parsed data) { memCache.put(key, Observable.just(data)); }
/**
 * Removes the record for {@code cacheKey} from this layer and from the chained
 * cache, optionally cascading through the record's references.
 *
 * @param cacheKey key of the record to remove
 * @param cascade  when true, also remove every record this record references
 * @return true if anything was removed in any layer
 */
@Override public boolean remove(@NotNull final CacheKey cacheKey, final boolean cascade) {
  checkNotNull(cacheKey, "cacheKey == null");

  // Chained cache goes first; an absent chain contributes false.
  boolean removed = nextCache().map(new Function<NormalizedCache, Boolean>() {
    @NotNull @Override public Boolean apply(@NotNull NormalizedCache cache) {
      return cache.remove(cacheKey, cascade);
    }
  }).or(Boolean.FALSE);

  final Record cachedRecord = lruCache.getIfPresent(cacheKey.key());
  if (cachedRecord == null) {
    return removed;
  }

  lruCache.invalidate(cacheKey.key());
  removed = true;
  if (cascade) {
    for (CacheReference reference : cachedRecord.referencedFields()) {
      // Non-short-circuit '&' so every referenced record gets a removal attempt.
      removed = removed & remove(CacheKey.from(reference.key()), true);
    }
  }
  return removed;
}
/**
 * Reads the cached network response for {@code key}.
 *
 * @param key key of the response to read
 * @return the cached Observable, or an error Observable carrying a
 *         {@link FileNotFoundException} on a miss
 */
@Nonnull @Override public Observable<Raw> read(@Nonnull Key key) {
  final Observable<Raw> cached = networkResponses.getIfPresent(key);
  if (cached != null) {
    return cached;
  }
  // Miss: surface it as an error so callers can fall back to another source.
  return Observable.error(new FileNotFoundException());
}
// Clears this layer, then asks the chained cache (if present) to clear itself.
@SuppressWarnings("ResultOfMethodCallIgnored") @Override public void clearAll() {
  lruCache.invalidateAll();
  //noinspection ResultOfMethodCallIgnored
  nextCache().apply(new Action<NormalizedCache>() {
    @Override public void apply(@NotNull NormalizedCache cache) {
      cache.clearAll();
    }
  });
}
/**
 * Produces a diagnostic dump of this layer's records followed by the dumps of
 * any chained caches.
 *
 * @return map from cache class to an unmodifiable view of its records
 */
@Override public Map<Class, Map<String, Record>> dump() {
  final Map<Class, Map<String, Record>> dump = new LinkedHashMap<>();
  // Copy the live map so callers can never mutate the cache through the dump.
  dump.put(getClass(), Collections.unmodifiableMap(new LinkedHashMap<>(lruCache.asMap())));
  if (nextCache().isPresent()) {
    dump.putAll(nextCache().get().dump());
  }
  return dump;
}
}
/** Forwards the invalidation of {@code key} to the backing cache. */
@Override public void invalidate(Object key) { delegate().invalidate(key); }
/**
 * Returns the memoized parsed value for {@code key}, loading it from disk on a
 * miss. A loader failure degrades to an empty Observable rather than propagating.
 *
 * @param key key to look up
 * @return cached or freshly loaded Observable, or {@code Observable.empty()} on failure
 */
Observable<Parsed> cache(@Nonnull final Key key) {
  try {
    // The loader runs only when the key is absent from the memory cache.
    return memCache.get(key, new Callable<Observable<Parsed>>() {
      @Override public Observable<Parsed> call() {
        return disk(key);
      }
    });
  } catch (ExecutionException e) {
    // Deliberate best-effort: a failed disk load yields an empty stream.
    return Observable.empty();
  }
}
/**
 * Caches {@code raw} under {@code key}.
 *
 * @param key cache key to store under
 * @param raw raw network response to cache
 * @return an Observable that emits {@code true}; the in-memory put cannot fail
 */
@Nonnull @Override public Observable<Boolean> write(@Nonnull Key key, @Nonnull Raw raw) { networkResponses.put(key, Observable.just(raw)); return Observable.just(true); }
/**
 * Records an optimistic update for {@code record}, creating a new journal for
 * its key if none exists yet.
 *
 * @param record optimistic record to merge
 * @return cache keys changed by this update
 */
@NotNull public Set<String> mergeOptimisticUpdate(@NotNull final Record record) {
  checkNotNull(record, "record == null");

  final RecordJournal existingJournal = lruCache.getIfPresent(record.key());
  if (existingJournal != null) {
    // Append the update to the journal's history.
    return existingJournal.commit(record);
  }
  // First optimistic write for this key: seed a new journal with the record.
  lruCache.put(record.key(), new RecordJournal(record));
  return Collections.singleton(record.key());
}
/**
 * Removes the record for {@code cacheKey} from this layer and the chained cache.
 *
 * @param cacheKey key of the record to remove
 * @return true if anything was removed in any layer
 */
@Override public boolean remove(@Nonnull final CacheKey cacheKey) {
  checkNotNull(cacheKey, "cacheKey == null");

  // Chained cache first; an absent chain contributes false.
  boolean removed = nextCache().map(new Function<NormalizedCache, Boolean>() {
    @Nonnull @Override public Boolean apply(@Nonnull NormalizedCache cache) {
      return cache.remove(cacheKey);
    }
  }).or(Boolean.FALSE);

  if (lruCache.getIfPresent(cacheKey.key()) != null) {
    lruCache.invalidate(cacheKey.key());
    removed = true;
  }
  return removed;
}
/**
 * Loads the record for {@code key} from this LRU layer, falling through to the
 * chained cache on a miss. A miss everywhere returns {@code null}.
 *
 * @param key          cache key of the record to load
 * @param cacheHeaders headers controlling cache behavior for this read
 * @return the cached record, or {@code null} if not found in any layer
 */
@Nullable @Override public Record loadRecord(@Nonnull final String key, @Nonnull final CacheHeaders cacheHeaders) {
  final Record record;
  try {
    record = lruCache.get(key, new Callable<Record>() {
      @Override public Record call() throws Exception {
        // Ask the next cache in the chain; Optional.get() throws when the
        // chain has no record, which is caught below and treated as a miss.
        return nextCache().flatMap(new Function<NormalizedCache, Optional<Record>>() {
          @Nonnull @Override public Optional<Record> apply(@Nonnull NormalizedCache cache) {
            return Optional.fromNullable(cache.loadRecord(key, cacheHeaders));
          }
        }).get(); // lruCache.get(key, callable) requires non-null.
      }
    });
  } catch (Exception ignore) {
    // Broad catch is deliberate: a loader failure (including the absent-Optional
    // case above) is reported to the caller as a plain cache miss.
    return null;
  }
  if (cacheHeaders.hasHeader(GraphQLCacheHeaders.EVICT_AFTER_READ)) {
    // Caller asked for read-once semantics: drop the entry after returning it.
    lruCache.invalidate(key);
  }
  return record;
}