cleanup

commit 53cc2cde1f
parent 071206304d

9 changed files with 64 additions and 29 deletions
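The changes below are formatting cleanups: long prometheus_client metric declarations are wrapped to fit the line-length limit, with no behavioural change. As a minimal sketch (metric names invented here, not taken from the commit), a declaration split across lines is used exactly like a single-line one:

from prometheus_client import Counter

# hypothetical example metric, declared across two lines as in the cleanup below
example_responses_counter = Counter(
    "example_http_responses", "", ["method", "code"])

# usage is identical regardless of how the declaration is wrapped
example_responses_counter.labels("GET", "200").inc()
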
@@ -74,9 +74,8 @@ class FederationRemoteSendQueue(object):
         # lambda binds to the queue rather than to the name of the queue which
         # changes. ARGH.
         def register(name, queue):
-            LaterGauge("synapse_federation_send_queue_%s_size" % (queue_name,), "",
-                       lambda: len(queue),
-                       )
+            LaterGauge("synapse_federation_send_queue_%s_size" % (queue_name,),
+                       "", lambda: len(queue))
 
         for queue_name in [
             "presence_map", "presence_changed", "keyed_edu", "keyed_edu_changed",
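The "lambda binds to the queue" comment in this hunk refers to Python's late-binding closures: a lambda created directly in a loop sees whatever the loop variable points to at call time, which is why the code routes the queue through a helper function. A short illustrative sketch (not part of the commit, helper name invented):

# every lambda closes over the loop variable, not its value at creation time
sizes = [lambda: len(q) for q in ([1], [1, 2], [1, 2, 3])]
print([f() for f in sizes])  # [3, 3, 3] - every lambda sees the final q

# passing the object through a function parameter, as register(name, queue)
# does above, captures the intended queue
def make_size_fn(queue):
    return lambda: len(queue)

sizes = [make_size_fn(q) for q in ([1], [1, 2], [1, 2, 3])]
print([f() for f in sizes])  # [1, 2, 3]
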
@@ -127,12 +127,15 @@ class ApplicationServicesHandler(object):
                    now = self.clock.time_msec()
                    ts = yield self.store.get_received_ts(events[-1].event_id)
 
-                    synapse.metrics.event_processing_positions.labels("appservice_sender").set(upper_bound)
+                    synapse.metrics.event_processing_positions.labels(
+                        "appservice_sender").set(upper_bound)
 
                    events_processed_counter.inc(len(events))
 
-                    synapse.metrics.event_processing_lag.labels("appservice_sender").set(now - ts)
-                    synapse.metrics.event_processing_last_ts.labels("appservice_sender").set(ts)
+                    synapse.metrics.event_processing_lag.labels(
+                        "appservice_sender").set(now - ts)
+                    synapse.metrics.event_processing_last_ts.labels(
+                        "appservice_sender").set(ts)
            finally:
                self.is_processing = False
 
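This hunk only re-wraps calls on the labelled Gauges defined in synapse.metrics. A rough sketch of the pattern with an invented metric name: .labels(...) selects a per-name child series and .set(...) records the value.

from prometheus_client import Gauge

example_processing_lag = Gauge("example_event_processing_lag", "", ["name"])
example_processing_lag.labels("appservice_sender").set(1234)  # lag in ms
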
@@ -47,7 +47,8 @@ logger = logging.getLogger(__name__)
 
 
 notified_presence_counter = Counter("synapse_handler_presence_notified_presence", "")
-federation_presence_out_counter = Counter("synapse_handler_presence_federation_presence_out", "")
+federation_presence_out_counter = Counter(
+    "synapse_handler_presence_federation_presence_out", "")
 presence_updates_counter = Counter("synapse_handler_presence_presence_updates", "")
 timers_fired_counter = Counter("synapse_handler_presence_timers_fired", "")
 federation_presence_counter = Counter("synapse_handler_presence_federation_presence", "")
@@ -55,8 +56,10 @@ bump_active_time_counter = Counter("synapse_handler_presence_bump_active_time",
 
 get_updates_counter = Counter("synapse_handler_presence_get_updates", "", ["type"])
 
-notify_reason_counter = Counter("synapse_handler_presence_notify_reason", "", ["reason"])
-state_transition_counter = Counter("synapse_handler_presence_state_transition", "", ["from", "to"]
+notify_reason_counter = Counter(
+    "synapse_handler_presence_notify_reason", "", ["reason"])
+state_transition_counter = Counter(
+    "synapse_handler_presence_state_transition", "", ["from", "to"]
 )
 
 
@@ -213,7 +216,8 @@ class PresenceHandler(object):
            60 * 1000,
        )
 
-        LaterGauge("synapse_handlers_presence_wheel_timer_size", "", [], lambda: len(self.wheel_timer))
+        LaterGauge("synapse_handlers_presence_wheel_timer_size", "", [],
+                   lambda: len(self.wheel_timer))
 
    @defer.inlineCallbacks
    def _on_shutdown(self):
@@ -50,7 +50,8 @@ import urllib
 logger = logging.getLogger(__name__)
 
 outgoing_requests_counter = Counter("synapse_http_client_requests", "", ["method"])
-incoming_responses_counter = Counter("synapse_http_client_responses", "", ["method", "code"])
+incoming_responses_counter = Counter("synapse_http_client_responses", "",
+                                     ["method", "code"])
 
 
 class SimpleHttpClient(object):
@@ -48,8 +48,10 @@ from prometheus_client import Counter
 logger = logging.getLogger(__name__)
 outbound_logger = logging.getLogger("synapse.http.outbound")
 
-outgoing_requests_counter = Counter("synapse_http_matrixfederationclient_requests", "", ["method"])
-incoming_responses_counter = Counter("synapse_http_matrixfederationclient_responses", "", ["method", "code"])
+outgoing_requests_counter = Counter("synapse_http_matrixfederationclient_requests",
+                                    "", ["method"])
+incoming_responses_counter = Counter("synapse_http_matrixfederationclient_responses",
+                                     "", ["method", "code"])
 
 
 MAX_LONG_RETRIES = 10
@@ -21,15 +21,14 @@ import platform
 import attr
 
 from prometheus_client import Gauge, Histogram, Counter
-from prometheus_client.core import (
-    GaugeMetricFamily, CounterMetricFamily, REGISTRY)
+from prometheus_client.core import GaugeMetricFamily, CounterMetricFamily, REGISTRY
 
 from twisted.internet import reactor
 
 
 logger = logging.getLogger(__name__)
 
-running_on_pypy = platform.python_implementation() == 'PyPy'
+running_on_pypy = platform.python_implementation() == "PyPy"
 all_metrics = []
 all_collectors = []
 all_gauges = {}
@@ -87,9 +86,16 @@ class LaterGauge(object):
 #
 
 gc_unreachable = Gauge("python_gc_unreachable_total", "Unreachable GC objects", ["gen"])
-gc_time = Histogram("python_gc_time", "Time taken to GC (ms)", ["gen"], buckets=[1, 2, 5, 10, 25, 50, 100, 250, 500, 1000])
+gc_time = Histogram(
+    "python_gc_time",
+    "Time taken to GC (ms)",
+    ["gen"],
+    buckets=[1, 2, 5, 10, 25, 50, 100, 250, 500, 1000],
+)
 
 
 class GCCounts(object):
 
    def collect(self):
        gc_counts = gc.get_count()
@@ -99,14 +105,23 @@ class GCCounts(object):
 
        yield cm
 
 
 REGISTRY.register(GCCounts())
 
 #
 # Twisted reactor metrics
 #
 
-tick_time = Histogram("python_twisted_reactor_tick_time", "Tick time of the Twisted reactor (ms)", buckets=[1, 2, 5, 10, 50, 100, 250, 500, 1000, 2000])
-pending_calls_metric = Histogram("python_twisted_reactor_pending_calls", "Pending calls", buckets=[1, 2, 5, 10, 25, 50, 100, 250, 500, 1000])
+tick_time = Histogram(
+    "python_twisted_reactor_tick_time",
+    "Tick time of the Twisted reactor (ms)",
+    buckets=[1, 2, 5, 10, 50, 100, 250, 500, 1000, 2000],
+)
+pending_calls_metric = Histogram(
+    "python_twisted_reactor_pending_calls",
+    "Pending calls",
+    buckets=[1, 2, 5, 10, 25, 50, 100, 250, 500, 1000],
+)
 
 #
 # Federation Metrics
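The two reactor Histograms above keep the same names, descriptions, and bucket boundaries; only the constructor calls are spread over several lines. A usage sketch with an invented metric name, assuming the standard prometheus_client Histogram API:

from prometheus_client import Histogram

example_tick_time = Histogram(
    "example_reactor_tick_time_ms",
    "Tick time (ms)",
    buckets=[1, 2, 5, 10, 50, 100, 250, 500, 1000, 2000],
)
example_tick_time.observe(3.2)  # record one tick duration in ms
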
@@ -134,6 +149,7 @@ event_processing_last_ts = Gauge("synapse_event_processing_last_ts", "", ["name"
 # finished being processed.
 event_processing_lag = Gauge("synapse_event_processing_lag", "", ["name"])
 
+
 def runUntilCurrentTimer(func):
 
    @functools.wraps(func)
@@ -36,8 +36,10 @@ logger = logging.getLogger(__name__)
 rules_by_room = {}
 
 
-push_rules_invalidation_counter = Counter("synapse_push_bulk_push_role_evaluator_push_rules_invalidation_counter", "")
-push_rules_state_size_counter = Counter("synapse_push_bulk_push_role_evaluator_push_rules_state_size_counter", "")
+push_rules_invalidation_counter = Counter(
+    "synapse_push_bulk_push_role_evaluator_push_rules_invalidation_counter", "")
+push_rules_state_size_counter = Counter(
+    "synapse_push_bulk_push_role_evaluator_push_rules_state_size_counter", "")
 
 # Measures whether we use the fast path of using state deltas, or if we have to
 # recalculate from scratch
@@ -45,19 +45,22 @@ from prometheus_client import Counter
 logger = logging.getLogger(__name__)
 
 persist_event_counter = Counter("synapse_storage_events_persisted_events", "")
-event_counter = Counter("synapse_storage_events_persisted_events_sep", "", ["type", "origin_type", "origin_entity"])
+event_counter = Counter("synapse_storage_events_persisted_events_sep", "",
+                        ["type", "origin_type", "origin_entity"])
 
 # The number of times we are recalculating the current state
 state_delta_counter = Counter("synapse_storage_events_state_delta", "")
 
 # The number of times we are recalculating state when there is only a
 # single forward extremity
-state_delta_single_event_counter = Counter("synapse_storage_events_state_delta_single_event", "")
+state_delta_single_event_counter = Counter(
+    "synapse_storage_events_state_delta_single_event", "")
 
 # The number of times we are reculating state when we could have resonably
 # calculated the delta when we calculated the state for an event we were
 # persisting.
-state_delta_reuse_delta_counter = Counter("synapse_storage_events_state_delta_reuse_delta", "")
+state_delta_reuse_delta_counter = Counter(
+    "synapse_storage_events_state_delta_reuse_delta", "")
 
 
 def encode_json(json_object):
@@ -28,17 +28,22 @@ block_counter = Counter("synapse_util_metrics_block_count", "", ["block_name"])
 
 block_timer = Counter("synapse_util_metrics_block_time_seconds", "", ["block_name"])
 
-block_ru_utime = Counter("synapse_util_metrics_block_ru_utime_seconds", "", ["block_name"])
+block_ru_utime = Counter(
+    "synapse_util_metrics_block_ru_utime_seconds", "", ["block_name"])
 
-block_ru_stime = Counter("synapse_util_metrics_block_ru_stime_seconds", "", ["block_name"])
+block_ru_stime = Counter(
+    "synapse_util_metrics_block_ru_stime_seconds", "", ["block_name"])
 
-block_db_txn_count = Counter("synapse_util_metrics_block_db_txn_count", "", ["block_name"])
+block_db_txn_count = Counter(
+    "synapse_util_metrics_block_db_txn_count", "", ["block_name"])
 
 # seconds spent waiting for db txns, excluding scheduling time, in this block
-block_db_txn_duration = Counter("synapse_util_metrics_block_db_txn_duration_seconds", "", ["block_name"])
+block_db_txn_duration = Counter(
+    "synapse_util_metrics_block_db_txn_duration_seconds", "", ["block_name"])
 
 # seconds spent waiting for a db connection, in this block
-block_db_sched_duration = Counter("synapse_util_metrics_block_db_sched_duration_seconds", "", ["block_name"])
+block_db_sched_duration = Counter(
+    "synapse_util_metrics_block_db_sched_duration_seconds", "", ["block_name"])
 
 
 def measure_func(name):
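The block counters above are labelled by block name and, per the comments in this hunk, accumulate elapsed seconds rather than simple counts. A small sketch of that usage pattern (metric name invented, assuming the same prometheus_client API):

from prometheus_client import Counter

example_block_timer = Counter("example_block_time_seconds", "", ["block_name"])
example_block_timer.labels("persist_events").inc(0.042)  # add elapsed seconds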