Diffstat (limited to 'synapse/util/caches')
-rw-r--r--  synapse/util/caches/__init__.py              14
-rw-r--r--  synapse/util/caches/deferred_cache.py        14
-rw-r--r--  synapse/util/caches/dictionary_cache.py      24
-rw-r--r--  synapse/util/caches/lrucache.py               5
-rw-r--r--  synapse/util/caches/stream_change_cache.py    2
-rw-r--r--  synapse/util/caches/treecache.py             16
6 files changed, 40 insertions, 35 deletions
diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py
index 9012034b..cab1bf0c 100644
--- a/synapse/util/caches/__init__.py
+++ b/synapse/util/caches/__init__.py
@@ -64,32 +64,32 @@ class CacheMetric:
evicted_size = attr.ib(default=0)
memory_usage = attr.ib(default=None)
- def inc_hits(self):
+ def inc_hits(self) -> None:
self.hits += 1
- def inc_misses(self):
+ def inc_misses(self) -> None:
self.misses += 1
- def inc_evictions(self, size=1):
+ def inc_evictions(self, size: int = 1) -> None:
self.evicted_size += size
- def inc_memory_usage(self, memory: int):
+ def inc_memory_usage(self, memory: int) -> None:
if self.memory_usage is None:
self.memory_usage = 0
self.memory_usage += memory
- def dec_memory_usage(self, memory: int):
+ def dec_memory_usage(self, memory: int) -> None:
self.memory_usage -= memory
- def clear_memory_usage(self):
+ def clear_memory_usage(self) -> None:
if self.memory_usage is not None:
self.memory_usage = 0
def describe(self):
return []
- def collect(self):
+ def collect(self) -> None:
try:
if self._cache_type == "response_cache":
response_cache_size.labels(self._cache_name).set(len(self._cache))
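
The __init__.py hunk above only adds return-type annotations. As a rough illustration of what that buys, here is a standalone sketch (illustrative class, not Synapse's actual CacheMetric) showing mypy rejecting code that misuses a method now declared to return None:

# Minimal sketch: "-> None" on mutating methods lets mypy catch callers
# that try to use a return value that does not exist.
import attr


@attr.s
class MetricSketch:
    hits = attr.ib(default=0)
    misses = attr.ib(default=0)

    def inc_hits(self) -> None:
        self.hits += 1

    def inc_misses(self) -> None:
        self.misses += 1


m = MetricSketch()
m.inc_hits()
m.inc_misses()
print(m.hits, m.misses)  # 1 1
# With the annotation in place, something like `total = m.inc_hits() + 1`
# is flagged by mypy because inc_hits() is declared to return None, rather
# than failing silently at runtime.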
diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py
index b6456392..f05590da 100644
--- a/synapse/util/caches/deferred_cache.py
+++ b/synapse/util/caches/deferred_cache.py
@@ -93,7 +93,7 @@ class DeferredCache(Generic[KT, VT]):
TreeCache, "MutableMapping[KT, CacheEntry]"
] = cache_type()
- def metrics_cb():
+ def metrics_cb() -> None:
cache_pending_metric.labels(name).set(len(self._pending_deferred_cache))
# cache is used for completed results and maps to the result itself, rather than
@@ -113,7 +113,7 @@ class DeferredCache(Generic[KT, VT]):
def max_entries(self):
return self.cache.max_size
- def check_thread(self):
+ def check_thread(self) -> None:
expected_thread = self.thread
if expected_thread is None:
self.thread = threading.current_thread()
@@ -235,7 +235,7 @@ class DeferredCache(Generic[KT, VT]):
self._pending_deferred_cache[key] = entry
- def compare_and_pop():
+ def compare_and_pop() -> bool:
"""Check if our entry is still the one in _pending_deferred_cache, and
if so, pop it.
@@ -256,7 +256,7 @@ class DeferredCache(Generic[KT, VT]):
return False
- def cb(result):
+ def cb(result) -> None:
if compare_and_pop():
self.cache.set(key, result, entry.callbacks)
else:
@@ -268,7 +268,7 @@ class DeferredCache(Generic[KT, VT]):
# not have been. Either way, let's double-check now.
entry.invalidate()
- def eb(_fail):
+ def eb(_fail) -> None:
compare_and_pop()
entry.invalidate()
@@ -314,7 +314,7 @@ class DeferredCache(Generic[KT, VT]):
for entry in iterate_tree_cache_entry(entry):
entry.invalidate()
- def invalidate_all(self):
+ def invalidate_all(self) -> None:
self.check_thread()
self.cache.clear()
for entry in self._pending_deferred_cache.values():
@@ -332,7 +332,7 @@ class CacheEntry:
self.callbacks = set(callbacks)
self.invalidated = False
- def invalidate(self):
+ def invalidate(self) -> None:
if not self.invalidated:
self.invalidated = True
for callback in self.callbacks:
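
The deferred_cache.py hunks annotate a handful of small nested callbacks. The pattern they implement - promote a pending result into the real cache only if the pending entry has not been invalidated or replaced in the meantime - can be sketched without Deferreds roughly as follows (names and structure here are assumptions for illustration, not Synapse's actual API):

# Rough sketch of the compare-and-pop pattern from deferred_cache.py above,
# with plain objects standing in for CacheEntry/Deferred.
from typing import Any, Dict

pending: Dict[str, object] = {}
completed: Dict[str, Any] = {}


def start_lookup(key: str) -> object:
    entry = object()  # stand-in for the CacheEntry wrapping a Deferred
    pending[key] = entry
    return entry


def on_success(key: str, entry: object, result: Any) -> None:
    # compare_and_pop: only promote the result if our entry is still the
    # one registered for this key; otherwise it was invalidated or replaced
    # while the lookup was in flight, and the result must be dropped.
    if pending.get(key) is entry:
        del pending[key]
        completed[key] = result


e = start_lookup("k")
on_success("k", e, 42)
print(completed)  # {'k': 42}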
diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py
index 3f852edd..ade088aa 100644
--- a/synapse/util/caches/dictionary_cache.py
+++ b/synapse/util/caches/dictionary_cache.py
@@ -27,10 +27,14 @@ logger = logging.getLogger(__name__)
KT = TypeVar("KT")
# The type of the dictionary keys.
DKT = TypeVar("DKT")
+# The type of the dictionary values.
+DV = TypeVar("DV")
+# This class can't be generic because it uses slots with attrs.
+# See: https://github.com/python-attrs/attrs/issues/313
@attr.s(slots=True)
-class DictionaryEntry:
+class DictionaryEntry: # should be: Generic[DKT, DV].
"""Returned when getting an entry from the cache
Attributes:
@@ -43,10 +47,10 @@ class DictionaryEntry:
"""
full = attr.ib(type=bool)
- known_absent = attr.ib()
- value = attr.ib()
+ known_absent = attr.ib(type=Set[Any]) # should be: Set[DKT]
+ value = attr.ib(type=Dict[Any, Any]) # should be: Dict[DKT, DV]
- def __len__(self):
+ def __len__(self) -> int:
return len(self.value)
@@ -56,7 +60,7 @@ class _Sentinel(enum.Enum):
sentinel = object()
-class DictionaryCache(Generic[KT, DKT]):
+class DictionaryCache(Generic[KT, DKT, DV]):
"""Caches key -> dictionary lookups, supporting caching partial dicts, i.e.
fetching a subset of dictionary keys for a particular key.
"""
@@ -87,7 +91,7 @@ class DictionaryCache(Generic[KT, DKT]):
Args:
key
- dict_key: If given a set of keys then return only those keys
+ dict_keys: If given a set of keys then return only those keys
that exist in the cache.
Returns:
@@ -125,7 +129,7 @@ class DictionaryCache(Generic[KT, DKT]):
self,
sequence: int,
key: KT,
- value: Dict[DKT, Any],
+ value: Dict[DKT, DV],
fetched_keys: Optional[Set[DKT]] = None,
) -> None:
"""Updates the entry in the cache
@@ -151,15 +155,15 @@ class DictionaryCache(Generic[KT, DKT]):
self._update_or_insert(key, value, fetched_keys)
def _update_or_insert(
- self, key: KT, value: Dict[DKT, Any], known_absent: Set[DKT]
+ self, key: KT, value: Dict[DKT, DV], known_absent: Set[DKT]
) -> None:
# We pop and reinsert as we need to tell the cache the size may have
# changed
- entry = self.cache.pop(key, DictionaryEntry(False, set(), {}))
+ entry: DictionaryEntry = self.cache.pop(key, DictionaryEntry(False, set(), {}))
entry.value.update(value)
entry.known_absent.update(known_absent)
self.cache[key] = entry
- def _insert(self, key: KT, value: Dict[DKT, Any], known_absent: Set[DKT]) -> None:
+ def _insert(self, key: KT, value: Dict[DKT, DV], known_absent: Set[DKT]) -> None:
self.cache[key] = DictionaryEntry(True, known_absent, value)
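
dictionary_cache.py gains a value TypeVar (DV) and typed attrs fields; as the inline comments above note, DictionaryEntry itself cannot yet be made generic because of the attrs/slots limitation. The pop/merge/reinsert step typed in _update_or_insert looks roughly like this in isolation (stand-in names, not the real classes):

# Minimal sketch of _update_or_insert's merge step: pop the existing entry,
# merge in newly fetched values and known-absent keys, then reinsert so the
# cache can recompute the entry's size.
from typing import Any, Dict, Set

import attr


@attr.s(slots=True, auto_attribs=True)
class EntrySketch:
    full: bool
    known_absent: Set[Any]
    value: Dict[Any, Any]


cache: Dict[str, EntrySketch] = {}


def update_or_insert(key: str, value: Dict[Any, Any], known_absent: Set[Any]) -> None:
    entry = cache.pop(key, EntrySketch(False, set(), {}))
    entry.value.update(value)
    entry.known_absent.update(known_absent)
    cache[key] = entry


update_or_insert("!room:hs", {"m.room.name": "A room"}, {"m.room.topic"})
print(cache["!room:hs"].value)         # {'m.room.name': 'A room'}
print(cache["!room:hs"].known_absent)  # {'m.room.topic'}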
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 5c65d187..39dce9dd 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -35,6 +35,7 @@ from typing import (
from typing_extensions import Literal
from twisted.internet import reactor
+from twisted.internet.interfaces import IReactorTime
from synapse.config import cache as cache_config
from synapse.metrics.background_process_metrics import wrap_as_background_process
@@ -341,7 +342,7 @@ class LruCache(Generic[KT, VT]):
# Default `clock` to something sensible. Note that we rename it to
# `real_clock` so that mypy doesn't think its still `Optional`.
if clock is None:
- real_clock = Clock(reactor)
+ real_clock = Clock(cast(IReactorTime, reactor))
else:
real_clock = clock
@@ -384,7 +385,7 @@ class LruCache(Generic[KT, VT]):
lock = threading.Lock()
- def evict():
+ def evict() -> None:
while cache_len() > self.max_size:
# Get the last node in the list (i.e. the oldest node).
todelete = list_root.prev_node
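
The only non-trivial change in lrucache.py is the cast of the global reactor. A small sketch of why that cast is needed (the helper function here is an assumption for illustration, not Synapse code):

# Sketch of the cast used above: the module-level `reactor` object has no
# precise static type, so it is cast to the narrow interface the typed
# consumer actually requires.
from typing import cast

from twisted.internet import reactor
from twisted.internet.interfaces import IReactorTime


def schedule_noop(clock: IReactorTime) -> None:
    # callLater is declared on IReactorTime, so mypy accepts this call.
    clock.callLater(0, lambda: None)


# Passing `reactor` directly would not type-check cleanly; the cast asserts
# that the installed reactor does provide IReactorTime.
schedule_noop(cast(IReactorTime, reactor))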
diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py
index 3a41a8ba..27b1da23 100644
--- a/synapse/util/caches/stream_change_cache.py
+++ b/synapse/util/caches/stream_change_cache.py
@@ -195,7 +195,7 @@ class StreamChangeCache:
for entity in r:
del self._entity_to_key[entity]
- def _evict(self):
+ def _evict(self) -> None:
while len(self._cache) > self._max_size:
k, r = self._cache.popitem(0)
self._earliest_known_stream_pos = max(k, self._earliest_known_stream_pos)
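
stream_change_cache.py only annotates _evict, whose body is visible above. For reference, the same eviction loop in isolation (standalone values, not the real class state; the real method also drops the reverse entity mapping, as the hunk shows):

# Standalone version of the eviction loop annotated above: while the cache
# holds more stream positions than allowed, drop the oldest one and advance
# the earliest position we still have data for.
from sortedcontainers import SortedDict

cache = SortedDict({1: {"@a:hs"}, 2: {"@b:hs"}, 3: {"@c:hs"}})
earliest_known_stream_pos = 0
max_size = 2

while len(cache) > max_size:
    k, entities = cache.popitem(0)  # popitem(0) removes the smallest key
    earliest_known_stream_pos = max(k, earliest_known_stream_pos)

print(dict(cache))                # {2: {'@b:hs'}, 3: {'@c:hs'}}
print(earliest_known_stream_pos)  # 1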
diff --git a/synapse/util/caches/treecache.py b/synapse/util/caches/treecache.py
index 4138931e..563845f8 100644
--- a/synapse/util/caches/treecache.py
+++ b/synapse/util/caches/treecache.py
@@ -35,17 +35,17 @@ class TreeCache:
root = {key_1: {key_2: _value}}
"""
- def __init__(self):
- self.size = 0
+ def __init__(self) -> None:
+ self.size: int = 0
self.root = TreeCacheNode()
- def __setitem__(self, key, value):
- return self.set(key, value)
+ def __setitem__(self, key, value) -> None:
+ self.set(key, value)
- def __contains__(self, key):
+ def __contains__(self, key) -> bool:
return self.get(key, SENTINEL) is not SENTINEL
- def set(self, key, value):
+ def set(self, key, value) -> None:
if isinstance(value, TreeCacheNode):
# this would mean we couldn't tell where our tree ended and the value
# started.
@@ -73,7 +73,7 @@ class TreeCache:
return default
return node.get(key[-1], default)
- def clear(self):
+ def clear(self) -> None:
self.size = 0
self.root = TreeCacheNode()
@@ -128,7 +128,7 @@ class TreeCache:
def values(self):
return iterate_tree_cache_entry(self.root)
- def __len__(self):
+ def __len__(self) -> int:
return self.size
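
treecache.py's docstring describes the layout as root = {key_1: {key_2: _value}}. A minimal sketch of that nested-dict scheme (helper names are illustrative; unlike the real TreeCache this does not track size or guard against nesting node values):

# Minimal sketch of the nested-dict layout described in the TreeCache
# docstring above: a tuple key is walked one component at a time, creating
# intermediate dicts on demand.
from typing import Any, Dict, Tuple

root: Dict[Any, Any] = {}


def tree_set(key: Tuple[Any, ...], value: Any) -> None:
    node = root
    for part in key[:-1]:
        node = node.setdefault(part, {})
    node[key[-1]] = value


def tree_get(key: Tuple[Any, ...], default: Any = None) -> Any:
    node = root
    for part in key[:-1]:
        node = node.get(part)
        if node is None:
            return default
    return node.get(key[-1], default)


tree_set(("!room:hs", "@alice:hs"), "join")
print(tree_get(("!room:hs", "@alice:hs")))          # join
print(tree_get(("!room:hs", "@bob:hs"), "<miss>"))  # <miss>
print(root)  # {'!room:hs': {'@alice:hs': 'join'}}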