0
0
Fork 1
mirror of https://mau.dev/maunium/synapse.git synced 2024-12-15 00:33:50 +01:00

Add metrics to the threadpools (#11178)

This commit is contained in:
Erik Johnston 2021-11-01 11:21:36 +00:00 committed by GitHub
parent 2451003f6f
commit 82d2168a15
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 49 additions and 1 deletions

View file

@ -0,0 +1 @@
Add metrics for thread pool usage.

View file

@@ -45,6 +45,7 @@ from synapse.events.spamcheck import load_legacy_spam_checkers
from synapse.events.third_party_rules import load_legacy_third_party_event_rules
from synapse.handlers.auth import load_legacy_password_auth_providers
from synapse.logging.context import PreserveLoggingContext
from synapse.metrics import register_threadpool
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.metrics.jemalloc import setup_jemalloc_stats
from synapse.util.caches.lrucache import setup_expire_lru_cache_entries
@@ -351,6 +352,10 @@ async def start(hs: "HomeServer"):
GAIResolver(reactor, getThreadPool=lambda: resolver_threadpool)
)
# Register the threadpools with our metrics.
register_threadpool("default", reactor.getThreadPool())
register_threadpool("gai_resolver", resolver_threadpool)
# Set up the SIGHUP machinery.
if hasattr(signal, "SIGHUP"):

View file

@@ -32,6 +32,7 @@ from prometheus_client.core import (
)
from twisted.internet import reactor
from twisted.python.threadpool import ThreadPool
import synapse
from synapse.metrics._exposition import (
@@ -526,6 +527,42 @@ threepid_send_requests = Histogram(
labelnames=("type", "reason"),
)
# Gauges describing each registered threadpool, distinguished by a
# caller-supplied "name" label.  The min/max bounds are static and set once
# at registration time; the total/working counts are read lazily whenever
# metrics are collected (see register_threadpool below).
threadpool_total_threads = Gauge(
    name="synapse_threadpool_total_threads",
    documentation="Total number of threads currently in the threadpool",
    labelnames=["name"],
)

threadpool_total_working_threads = Gauge(
    name="synapse_threadpool_working_threads",
    documentation="Number of threads currently working in the threadpool",
    labelnames=["name"],
)

threadpool_total_min_threads = Gauge(
    name="synapse_threadpool_min_threads",
    documentation="Minimum number of threads configured in the threadpool",
    labelnames=["name"],
)

threadpool_total_max_threads = Gauge(
    name="synapse_threadpool_max_threads",
    documentation="Maximum number of threads configured in the threadpool",
    labelnames=["name"],
)
def register_threadpool(name: str, threadpool: ThreadPool) -> None:
    """Expose Prometheus gauges for the given threadpool under *name*.

    The configured min/max bounds are recorded once, immediately.  The
    current and busy thread counts change over time, so they are wired up
    via ``set_function`` and sampled fresh on every metrics collection.
    """
    # Static configuration: set once at registration.
    threadpool_total_min_threads.labels(name).set(threadpool.min)
    threadpool_total_max_threads.labels(name).set(threadpool.max)

    # Live counts: evaluated lazily at scrape time.
    def _count_threads() -> int:
        return len(threadpool.threads)

    def _count_working() -> int:
        return len(threadpool.working)

    threadpool_total_threads.labels(name).set_function(_count_threads)
    threadpool_total_working_threads.labels(name).set_function(_count_working)
class ReactorLastSeenMetric:
def collect(self):

View file

@@ -48,6 +48,7 @@ from synapse.logging.context import (
current_context,
make_deferred_yieldable,
)
from synapse.metrics import register_threadpool
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.background_updates import BackgroundUpdater
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
@@ -104,13 +105,17 @@ def make_pool(
LoggingDatabaseConnection(conn, engine, "on_new_connection")
)
connection_pool = adbapi.ConnectionPool(
db_config.config["name"],
cp_reactor=reactor,
cp_openfun=_on_new_connection,
**db_args,
)
register_threadpool(f"database-{db_config.name}", connection_pool.threadpool)
return connection_pool
def make_conn(
db_config: DatabaseConnectionConfig,