Skip to content
This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Commit

Permalink
Only use GLOBAL_LIST if time based eviction is enabled
Browse files Browse the repository at this point in the history
  • Loading branch information
erikjohnston committed Jun 18, 2021
1 parent d8b04e8 commit f057ad8
Showing 1 changed file with 18 additions and 4 deletions.
22 changes: 18 additions & 4 deletions synapse/util/caches/lrucache.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,10 @@ def get_cache_entry(self) -> Optional[P]:
return self.cache_entry()


# Whether to insert new cache entries to the global list. We only add to it if
# time based eviction is enabled.
# NOTE: flipped to True by `expire_lru_cache_entries_after` when the expiry
# background job is started; nodes created before that are never tracked.
USE_GLOBAL_LIST = False

# A linked list of all cache entries, allowing efficient time based eviction.
# Sentinel root node: entries are inserted after it and walked from here by
# the expiry job.
GLOBAL_ROOT = _ListNode[_CacheEntry]()

Expand Down Expand Up @@ -247,6 +251,9 @@ def expire_lru_cache_entries_after(hs: "HomeServer"):
"""Start a background job that expires all cache entries if they have not
been accessed for the given number of seconds.
"""
global USE_GLOBAL_LIST
USE_GLOBAL_LIST = True

clock = hs.get_clock()
clock.looping_call(
_expire_old_entries, 30 * 1000, clock, hs.config.caches.expiry_time_msec / 1000
Expand Down Expand Up @@ -275,7 +282,9 @@ def __init__(
):
self_ref = weakref.ref(self, lambda _: self.drop_from_lists())
self.list_node = _ListNode.insert_after(self_ref, root)
self.global_list_node = _ListNode.insert_after(self_ref, GLOBAL_ROOT)
self.global_list_node = None
if USE_GLOBAL_LIST:
self.global_list_node = _ListNode.insert_after(self_ref, GLOBAL_ROOT)

# We store a weak reference to the cache object so that this _Node can
# remove itself from the cache. If the cache is dropped we ensure we
Expand Down Expand Up @@ -304,12 +313,14 @@ def __init__(
_get_size_of(key)
+ _get_size_of(value)
+ _get_size_of(self.list_node, recurse=False)
+ _get_size_of(self.global_list_node, recurse=False)
+ _get_size_of(self.callbacks, recurse=False)
+ _get_size_of(self, recurse=False)
)
self.memory += _get_size_of(self.memory, recurse=False)

if self.global_list_node:
self.memory += _get_size_of(self.global_list_node, recurse=False)

def add_callbacks(self, callbacks: Collection[Callable[[], None]]) -> None:
"""Add to stored list of callbacks, removing duplicates."""

Expand Down Expand Up @@ -347,7 +358,9 @@ def drop_from_cache(self) -> None:
def drop_from_lists(self) -> None:
    """Remove this node from the cache lists.

    Always unlinks the per-cache ``list_node``. The global list node is
    only unlinked when it exists: ``global_list_node`` is None when time
    based eviction is disabled (see ``USE_GLOBAL_LIST``), so an
    unconditional remove would raise ``AttributeError``.
    """
    self.list_node.remove_from_list()

    # Guard: only nodes created while time based eviction was enabled
    # were ever inserted into the global list.
    if self.global_list_node:
        self.global_list_node.remove_from_list()


class LruCache(Generic[KT, VT]):
Expand Down Expand Up @@ -478,7 +491,8 @@ def add_node(key, value, callbacks: Collection[Callable[[], None]] = ()):

def move_node_to_front(node: _Node):
    """Mark *node* as most recently used.

    Moves it to the front of the per-cache list and, when the node is
    tracked for time based eviction, to the front of the global list.
    ``global_list_node`` is None when time based eviction is disabled,
    so the global move must be guarded.
    """
    node.list_node.move_after(list_root)

    # Only nodes created while USE_GLOBAL_LIST was set have a global
    # list node; an unconditional move_after would AttributeError here.
    if node.global_list_node:
        node.global_list_node.move_after(GLOBAL_ROOT)

def delete_node(node: _Node) -> int:
node.drop_from_lists()
Expand Down

0 comments on commit f057ad8

Please sign in to comment.