Fix caching behavior for relations push rules. (#12859)
The fix is to always return all requested values from the function wrapped by `cachedList`; otherwise implicit `None` values get added into the cache, which is unexpected.
commit 759f9c09e1
parent 4cbcd4a999

3 changed files with 12 additions and 9 deletions
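
For context, a minimal sketch of the failure mode the commit message describes. This is illustrative only and not Synapse's actual cache code; the function and variable names are hypothetical. A `cachedList`-style wrapper records an entry for every requested key, so any key the wrapped batch function omits ends up cached as `None`.

    from typing import Dict, Iterable, Optional

    def store_batch_result(
        requested: Iterable[str], batch_result: Dict[str, set]
    ) -> Dict[str, Optional[set]]:
        # Stand-in for the cache-filling step: every requested key gets stored,
        # and keys missing from the batch result are stored as None.
        return {key: batch_result.get(key) for key in requested}

    requested = ["m.annotation", "m.thread"]

    # Old behaviour: the wrapped function only returned keys that had matching rows.
    print(store_batch_result(requested, {"m.annotation": {("@alice:example.com", "m.reaction")}}))
    # -> {'m.annotation': {...}, 'm.thread': None}   (implicit None gets cached)

    # Fixed behaviour: the wrapped function returns an entry for every requested key.
    print(store_batch_result(requested, {"m.annotation": {("@alice:example.com", "m.reaction")}, "m.thread": set()}))
    # -> {'m.annotation': {...}, 'm.thread': set()}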
changelog.d/12859.feature (new file)

@@ -0,0 +1 @@
+Experimental support for [MSC3772](https://github.com/matrix-org/matrix-spec-proposals/pull/3772): Push rule for mutually related events.
synapse/storage/databases/main/relations.py

@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import logging
-from collections import defaultdict
 from typing import (
     Collection,
     Dict,

@@ -810,7 +809,9 @@ class RelationsWorkerStore(SQLBaseStore):
             txn: LoggingTransaction,
         ) -> Dict[str, Set[Tuple[str, str]]]:
             txn.execute(sql, [event_id] + rel_type_args)
-            result = defaultdict(set)
+            result: Dict[str, Set[Tuple[str, str]]] = {
+                rel_type: set() for rel_type in relation_types
+            }
             for rel_type, sender, type in txn.fetchall():
                 result[rel_type].add((sender, type))
             return result
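The practical difference in the hunk above: with `defaultdict(set)`, a relation type that matched no rows never became a key of `result`, so the `cachedList` layer treated it as missing and cached `None` for it; pre-seeding the dict guarantees an (empty) entry for every requested relation type. A quick, self-contained illustration of just that dict behaviour, using made-up example data:

    from collections import defaultdict

    relation_types = ["m.annotation", "m.thread"]
    rows = [("m.annotation", "@alice:example.com", "m.reaction")]  # no m.thread rows

    # Old: only relation types that had rows appear in the result at all.
    old_result = defaultdict(set)
    for rel_type, sender, type in rows:
        old_result[rel_type].add((sender, type))
    print(dict(old_result))   # {'m.annotation': {...}} - 'm.thread' key is absent

    # New: every requested relation type is present, possibly as an empty set.
    new_result = {rel_type: set() for rel_type in relation_types}
    for rel_type, sender, type in rows:
        new_result[rel_type].add((sender, type))
    print(new_result)         # {'m.annotation': {...}, 'm.thread': set()}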
synapse/util/caches/descriptors.py

@@ -595,13 +595,14 @@ def cached(
 def cachedList(
     *, cached_method_name: str, list_name: str, num_args: Optional[int] = None
 ) -> Callable[[F], _CachedFunction[F]]:
-    """Creates a descriptor that wraps a function in a `CacheListDescriptor`.
+    """Creates a descriptor that wraps a function in a `DeferredCacheListDescriptor`.
 
-    Used to do batch lookups for an already created cache. A single argument
+    Used to do batch lookups for an already created cache. One of the arguments
     is specified as a list that is iterated through to lookup keys in the
     original cache. A new tuple consisting of the (deduplicated) keys that weren't in
-    the cache gets passed to the original function, the result of which is stored in the
-    cache.
+    the cache gets passed to the original function, which is expected to result
+    in a map of key to value for each passed value. The new results are stored in the
+    original cache. Note that any missing values are cached as None.
 
     Args:
         cached_method_name: The name of the single-item lookup method.
@@ -614,11 +615,11 @@ def cachedList(
     Example:
 
         class Example:
-            @cached(num_args=2)
-            def do_something(self, first_arg):
+            @cached()
+            def do_something(self, first_arg, second_arg):
                 ...
 
-            @cachedList(do_something.cache, list_name="second_args", num_args=2)
+            @cachedList(cached_method_name="do_something", list_name="second_args")
             def batch_do_something(self, first_arg, second_args):
                 ...
     """
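Filling in the docstring's `Example` as a rough sketch of the contract this commit tightens. This assumes Synapse's `descriptors` module is importable; `self._load` is a hypothetical helper, and the methods are written `async` as they typically are in Synapse stores. The point is that the `cachedList`-wrapped method should return a mapping with an entry for every value in `second_args`, since anything it omits is cached as `None`.

    from typing import Dict, Iterable

    from synapse.util.caches.descriptors import cached, cachedList

    class Example:
        @cached()
        async def do_something(self, first_arg: str, second_arg: str) -> int:
            return await self._load(first_arg, second_arg)  # hypothetical helper

        @cachedList(cached_method_name="do_something", list_name="second_args")
        async def batch_do_something(
            self, first_arg: str, second_args: Iterable[str]
        ) -> Dict[str, int]:
            # Return a value for *every* requested key; per the updated docstring,
            # any key omitted here would be cached as None.
            return {arg: await self._load(first_arg, arg) for arg in second_args}

        async def _load(self, first_arg: str, second_arg: str) -> int:
            ...  # hypothetical data access, not part of the real API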