String intern

parent fbdeb1778d
commit 75daede92f

2 changed files with 17 additions and 3 deletions
synapse/storage/state.py
@@ -15,6 +15,7 @@
 from ._base import SQLBaseStore
 from synapse.util.caches.descriptors import cached, cachedList
+from synapse.util.caches import intern_string

 from twisted.internet import defer

@@ -155,7 +156,9 @@ class StateStore(SQLBaseStore):
     @defer.inlineCallbacks
     def get_current_state_for_key(self, room_id, event_type, state_key):
-        event_ids = yield self._get_current_state_for_key(room_id, event_type, state_key)
+        event_ids = yield self._get_current_state_for_key(
+            room_id, intern_string(event_type), intern_string(state_key)
+        )
         events = yield self._get_events(event_ids, get_prev_content=False)
         defer.returnValue(events)
@@ -202,7 +205,7 @@ class StateStore(SQLBaseStore):
             results = {}
             for row in rows:
-                key = (row["type"], row["state_key"])
+                key = (intern_string(row["type"]), intern_string(row["state_key"]))
                 results.setdefault(row["state_group"], {})[key] = row["event_id"]
             return results
@@ -393,7 +396,10 @@ class StateStore(SQLBaseStore):
                 # cache absence of the key, on the assumption that if we've
                 # explicitly asked for some types then we will probably ask
                 # for them again.
-                state_dict = {key: None for key in types}
+                state_dict = {
+                    (intern_string(etype), intern_string(state_key)): None
+                    for (etype, state_key) in types
+                }
                 state_dict.update(results[group])
                 results[group] = state_dict
             else:
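A note on why the StateStore changes above help: the state tables hold the same few event types and state keys over and over, but each database row hands back fresh str objects, so the per-state-group dicts end up keeping thousands of equal-but-distinct strings alive. Routing the values through an interning cache makes equal strings share one object. A minimal sketch of the effect, assuming nothing beyond the standard library; the dict-backed intern_string and the sample rows below are illustrative stand-ins, not part of the commit:

    _interned = {}


    def intern_string(s):
        # First sighting of a value is stored; every later, equal string
        # maps back to that same stored object.
        return _interned.setdefault(s, s)


    # Simulated rows from the state table.  .strip() just forces a fresh str
    # object per row, mimicking values freshly decoded from the database.
    rows = [
        {"type": " m.room.member ".strip(), "state_key": "@user%d:hs" % (i % 2)}
        for i in range(1000)
    ]

    plain = [(r["type"], r["state_key"]) for r in rows]
    interned = [(intern_string(r["type"]), intern_string(r["state_key"])) for r in rows]

    # Without interning every key holds its own copies of the same few values;
    # with interning the 1000 keys share just three string objects.
    print(len({id(s) for key in plain for s in key}))     # 2000
    print(len({id(s) for key in interned for s in key}))  # 3
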
synapse/util/caches/__init__.py
@@ -14,6 +14,7 @@
 # limitations under the License.

 import synapse.metrics
+from lrucache import LruCache

 DEBUG_CACHES = False

@@ -25,3 +26,10 @@ cache_counter = metrics.register_cache(
     lambda: {(name,): len(caches_by_name[name]) for name in caches_by_name.keys()},
     labels=["name"],
 )
+
+_string_cache = LruCache(5000)
+caches_by_name["string_cache"] = _string_cache
+
+
+def intern_string(string):
+    return _string_cache.setdefault(string, string)
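The new helper leans entirely on setdefault: the first string seen for a given value is stored and becomes the canonical object, later equal strings are answered from the cache, and the LruCache bound of 5000 entries keeps the mapping from growing without limit. A minimal sketch of that contract, with a plain dict standing in for LruCache so it runs on its own (object-identity behaviour as in CPython; the names below are illustrative):

    # Stand-in for the LruCache above: any mapping with setdefault is enough
    # to show the contract the commit relies on.
    _string_cache = {}


    def intern_string(string):
        # Store the string on first sight; return the stored object thereafter.
        return _string_cache.setdefault(string, string)


    first = "".join(["m.room.", "member"])    # built at runtime: a fresh str object
    second = "".join(["m.room.", "member"])   # equal value, different object
    assert first == second and first is not second

    canonical = intern_string(first)
    assert canonical is first                 # the first sighting becomes canonical
    assert intern_string(second) is first     # later equal strings collapse onto it
    assert intern_string(second) is not second

With the real LruCache(5000) behind it, at most 5000 distinct strings are held this way, and values that stop being looked up eventually age out of the cache.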