/**
 * @return A set of all field keys. A field key incorporates any GraphQL arguments in addition to the field name.
 */
public Set<String> keys() {
  Set<String> fieldKeys = new HashSet<>();
  // The record key is constant across the loop; compute it once.
  String recordKey = key();
  for (String fieldName : fields.keySet()) {
    fieldKeys.add(recordKey + "." + fieldName);
  }
  return fieldKeys;
}
@NotNull protected Set<String> performMerge(@NotNull final Record apolloRecord, @NotNull final CacheHeaders cacheHeaders) { final Record oldRecord = lruCache.getIfPresent(apolloRecord.key()); if (oldRecord == null) { lruCache.put(apolloRecord.key(), apolloRecord); return apolloRecord.keys(); } else { Set<String> changedKeys = oldRecord.mergeWith(apolloRecord); //re-insert to trigger new weight calculation lruCache.put(apolloRecord.key(), oldRecord); return changedKeys; } }
@NotNull @Override
public CacheKey resolveCacheKey(@NotNull ResponseField field, @NotNull Record record) {
  // The normalized record's own key doubles as its cache key.
  final String recordKey = record.key();
  return CacheKey.from(recordKey);
}
/**
 * @return a {@link Builder} pre-populated with this record's key, fields and mutation id.
 */
public Builder toBuilder() {
  final String recordKey = key();
  return new Builder(recordKey, this.fields, mutationId);
}
/**
 * Estimates the in-memory footprint of {@code record} in bytes: a fixed per-record overhead
 * plus the encoded length of the key, each field name, and the weight of each field value.
 */
public static int calculateBytes(Record record) {
  // Encoded with the platform default charset, matching the original estimate's behavior.
  final Charset charset = Charset.defaultCharset();
  int size = SIZE_OF_RECORD_OVERHEAD + record.key().getBytes(charset).length;
  for (Map.Entry<String, Object> field : record.fields().entrySet()) {
    size += field.getKey().getBytes(charset).length;
    size += weighField(field.getValue());
  }
  return size;
}
/**
 * Applies {@code record} as an optimistic update: commits it onto an existing journal for the
 * same key, or starts a new journal when none exists yet.
 *
 * @return the set of keys changed by the update.
 */
@NotNull public Set<String> mergeOptimisticUpdate(@NotNull final Record record) {
  checkNotNull(record, "record == null");
  final RecordJournal existing = lruCache.getIfPresent(record.key());
  if (existing != null) {
    return existing.commit(record);
  }
  // No history for this key yet: open a journal; the record key itself is the only change.
  lruCache.put(record.key(), new RecordJournal(record));
  return Collections.singleton(record.key());
}
/**
 * @param otherRecord The record to merge into this record.
 * @return A set of field keys which have changed, or were added. A field key incorporates any GraphQL arguments in
 * addition to the field name.
 */
public Set<String> mergeWith(Record otherRecord) {
  Set<String> changedKeys = new HashSet<>();
  for (Map.Entry<String, Object> entry : otherRecord.fields.entrySet()) {
    String fieldName = entry.getKey();
    Object incomingValue = entry.getValue();
    Object currentValue = this.fields.get(fieldName);
    // A field counts as changed when it is new, or its value differs (null-safe compare).
    boolean changed;
    if (!this.fields.containsKey(fieldName)) {
      changed = true;
    } else if (currentValue == null) {
      changed = incomingValue != null;
    } else {
      changed = !currentValue.equals(incomingValue);
    }
    if (changed) {
      this.fields.put(fieldName, incomingValue);
      changedKeys.add(key() + "." + fieldName);
      adjustSizeEstimate(incomingValue, currentValue);
    }
  }
  mutationId = otherRecord.mutationId;
  return changedKeys;
}
/**
 * Lookups record by mutation id, if it's found removes it from the history and invalidates snapshot record.
 * Snapshot record is superposition of all record versions in the history.
 */
Set<String> revert(UUID mutationId) {
  // Locate the history entry written by this mutation.
  int recordIndex = -1;
  for (int i = 0; i < history.size(); i++) {
    if (mutationId.equals(history.get(i).mutationId())) {
      recordIndex = i;
      break;
    }
  }
  if (recordIndex == -1) {
    // Unknown mutation id: nothing recorded, so nothing changes.
    return Collections.emptySet();
  }
  Set<String> changedKeys = new HashSet<>();
  // NOTE(review): this adds the removed record's *record* key, not per-field keys — the other
  // change-set methods here produce field keys; confirm callers accept both forms.
  changedKeys.add(history.remove(recordIndex).key());
  // Rebuild the snapshot starting from the entry just before the removed one (or from the new
  // head when index 0 was removed): the first visited entry becomes the fresh base via clone(),
  // every later entry is re-applied on top and its changed keys accumulated.
  for (int i = Math.max(0, recordIndex - 1); i < history.size(); i++) {
    Record record = history.get(i);
    if (i == Math.max(0, recordIndex - 1)) {
      snapshot = record.clone();
    } else {
      changedKeys.addAll(snapshot.mergeWith(record));
    }
  }
  // NOTE(review): if the removed entry was the only one, the loop body never runs and
  // `snapshot` keeps its previous value — presumably the journal is discarded by the caller
  // when history is empty; verify.
  return changedKeys;
}
}
@Override public void didResolveObject(ResponseField field, Optional<R> objectSource) {
  path = pathStack.pop();
  if (objectSource.isPresent()) {
    // A non-null object finished resolving: seal its record, reference it from the parent
    // value stack, and register it for merging.
    final Record completed = currentRecordBuilder.build();
    final String completedKey = completed.key();
    valueStack.push(new CacheReference(completedKey));
    dependentKeys.add(completedKey);
    recordSet.merge(completed);
  }
  // Resume building the parent record.
  currentRecordBuilder = recordStack.pop().toBuilder();
}
/** Asserts that {@code store} can load {@code testRecord} and that key and fields round-trip intact. */
private void assertTestRecordPresentAndAccurate(Record testRecord, NormalizedCache store) {
  final Record loaded = store.loadRecord(testRecord.key(), CacheHeaders.NONE);
  assertThat(loaded.key()).isEqualTo(testRecord.key());
  assertThat(loaded.field("a")).isEqualTo(testRecord.field("a"));
  assertThat(loaded.field("b")).isEqualTo(testRecord.field("b"));
}
@Test public void testDualCacheMultipleRecord() { LruNormalizedCacheFactory secondaryCacheFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION); NormalizedCache primaryCache = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION) .chain(secondaryCacheFactory).createChain(basicFieldAdapter); Record.Builder recordBuilder = Record.builder("root1"); recordBuilder.addField("bar", "bar"); final Record record1 = recordBuilder.build(); recordBuilder = Record.builder("root2"); recordBuilder.addField("bar", "bar"); final Record record2 = recordBuilder.build(); recordBuilder = Record.builder("root3"); recordBuilder.addField("bar", "bar"); final Record record3 = recordBuilder.build(); Collection<Record> records = Arrays.asList(record1, record2, record3); Collection<String> keys = Arrays.asList(record1.key(), record2.key(), record3.key()); primaryCache.merge(records, CacheHeaders.NONE); assertThat(primaryCache.loadRecords(keys, CacheHeaders.NONE).size()).isEqualTo(3); //verify write through behavior assertThat(primaryCache.loadRecords(keys, CacheHeaders.NONE).size()).isEqualTo(3); assertThat(primaryCache.nextCache().get() .loadRecords(keys, CacheHeaders.NONE).size()).isEqualTo(3); }
@Nonnull @Override
public CacheKey resolveCacheKey(@Nonnull ResponseField field, @Nonnull Record record) {
  // Resolve references straight to the record's own key.
  final String recordKey = record.key();
  return CacheKey.from(recordKey);
}
};
/**
 * Estimates the in-memory size of {@code record}: a fixed per-record overhead plus the byte
 * length of the key, each field name, and the weight of each field value.
 */
public static int calculateBytes(Record record) {
  int total = SIZE_OF_RECORD_OVERHEAD;
  total += record.key().getBytes().length;
  for (Map.Entry<String, Object> field : record.fields().entrySet()) {
    total += field.getKey().getBytes().length;
    total += weighField(field.getValue());
  }
  return total;
}
/**
 * Computes the weight of {@code record} in bytes: fixed record overhead, plus key bytes,
 * plus the bytes of every field name and the weighed size of every field value.
 */
public static int calculateBytes(Record record) {
  int weight = SIZE_OF_RECORD_OVERHEAD + record.key().getBytes().length;
  for (Map.Entry<String, Object> entry : record.fields().entrySet()) {
    int nameBytes = entry.getKey().getBytes().length;
    weight += nameBytes + weighField(entry.getValue());
  }
  return weight;
}
/**
 * Applies {@code record} as an optimistic update: commits onto the existing journal for its
 * key, or starts a fresh journal when none exists.
 *
 * @return the set of keys changed by this update.
 */
@Nonnull public Set<String> mergeOptimisticUpdate(@Nonnull final Record record) {
  checkNotNull(record, "record == null");
  final RecordJournal existing = lruCache.getIfPresent(record.key());
  if (existing != null) {
    return existing.commit(record);
  }
  // First optimistic write for this key: the record key itself is the only change.
  lruCache.put(record.key(), new RecordJournal(record));
  return Collections.singleton(record.key());
}
/**
 * Records {@code record} as an optimistic update, journaling it against any prior versions
 * stored under the same key.
 *
 * @return the keys affected by the update.
 */
@Nonnull public Set<String> mergeOptimisticUpdate(@Nonnull final Record record) {
  checkNotNull(record, "record == null");
  final String recordKey = record.key();
  final RecordJournal journal = lruCache.getIfPresent(recordKey);
  if (journal == null) {
    // No journal yet for this key: create one seeded with this record.
    lruCache.put(recordKey, new RecordJournal(record));
    return Collections.singleton(recordKey);
  }
  return journal.commit(record);
}
@Override public void didResolveObject(ResponseField field, Optional<R> objectSource) {
  path = pathStack.pop();
  if (objectSource.isPresent()) {
    // Finish the record for the resolved object, push a reference to it for the parent,
    // and queue it for merging into the record set.
    final Record completedRecord = currentRecordBuilder.build();
    dependentKeys.add(completedRecord.key());
    valueStack.push(new CacheReference(completedRecord.key()));
    recordSet.merge(completedRecord);
  }
  // Pop back to the parent record and keep building it.
  currentRecordBuilder = recordStack.pop().toBuilder();
}
@Override public void didResolveObject(ResponseField field, Optional<R> objectSource) {
  path = pathStack.pop();
  if (objectSource.isPresent()) {
    // The nested object resolved to a value: build its record, link it from the parent via
    // a cache reference, and track its key as a dependency.
    final Record built = currentRecordBuilder.build();
    final String builtKey = built.key();
    valueStack.push(new CacheReference(builtKey));
    dependentKeys.add(builtKey);
    recordSet.merge(built);
  }
  // Restore the enclosing record's builder.
  currentRecordBuilder = recordStack.pop().toBuilder();
}