Amber Brown 2018-05-22 16:28:23 -05:00
parent 228f1f584e
commit 85ba83eb51
7 changed files with 52 additions and 24 deletions


@@ -34,6 +34,7 @@ from synapse.module_api import ModuleApi
 from synapse.http.additional_resource import AdditionalResource
 from synapse.http.server import RootRedirect
 from synapse.http.site import SynapseSite
+from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX
 from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, \
     check_requirements
@@ -60,6 +61,8 @@ from twisted.web.resource import EncodingResourceWrapper, NoResource
 from twisted.web.server import GzipEncoderFactory
 from twisted.web.static import File
+from prometheus_client.twisted import MetricsResource

 logger = logging.getLogger("synapse.app.homeserver")
@@ -229,8 +232,7 @@ class SynapseHomeServer(HomeServer):
             resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self)

         if name == "metrics" and self.get_config().enable_metrics:
-            from prometheus_client.twisted import MetricsResource
-            resources[METRICS_PREFIX] = MetricsResource()
+            resources[METRICS_PREFIX] = MetricsResource(RegistryProxy())

         if name == "replication":
             resources[REPLICATION_PREFIX] = ReplicationRestResource(self)
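The change above drops the inline metrics wiring in favour of prometheus_client's Twisted integration, handing it a proxy object whose collect() filters the global registry. A minimal sketch of the same wiring, assuming a stand-alone Twisted site (FilteringRegistry and the port are illustrative stand-ins, not Synapse's code):

    from prometheus_client import REGISTRY
    from prometheus_client.twisted import MetricsResource
    from twisted.internet import reactor
    from twisted.web.resource import Resource
    from twisted.web.server import Site

    class FilteringRegistry(object):
        """Hypothetical stand-in for RegistryProxy: anything with collect() works."""

        def collect(self):
            # Hide placeholder families such as the "__unused" gauges
            # yielded by the cache collector later in this commit.
            for metric in REGISTRY.collect():
                if not metric.name.startswith("__"):
                    yield metric

    root = Resource()
    # MetricsResource accepts any registry-like object that exposes collect().
    root.putChild(b"metrics", MetricsResource(FilteringRegistry()))
    reactor.listenTCP(9090, Site(root))
    # reactor.run()  # GET /metrics now serves the filtered registry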


@@ -64,7 +64,7 @@ class TransactionQueue(object):
         # done
         self.pending_transactions = {}
-        LaterGauge("pending_destinations", "", [],
+        LaterGauge("synapse_federation_client_pending_destinations", "", [],
                    lambda: len(self.pending_transactions),
                    )
@@ -89,11 +89,11 @@ class TransactionQueue(object):
         self.pending_edus_keyed_by_dest = edus_keyed = {}
         LaterGauge(
-            "pending_pdus", "", [],
+            "synapse_federation_client_pending_pdus", "", [],
             lambda: sum(map(len, pdus.values())),
         )
         LaterGauge(
-            "pending_edus", "", [],
+            "synapse_federation_client_pending_edus", "", [],
             lambda: (
                 sum(map(len, edus.values()))
                 + sum(map(len, presence.values()))
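These hunks (like the notifier and pusher hunks further down) only rename the metrics: prometheus_client does not prefix names for you, so the full synapse_federation_client_* namespace is spelled out where the gauge is defined. LaterGauge itself is Synapse's helper, defined in the metrics hunk below; with stock prometheus_client a comparable scrape-time callback looks roughly like this (the pending_transactions dict is a hypothetical stand-in):

    from prometheus_client import Gauge

    # Hypothetical state standing in for TransactionQueue.pending_transactions.
    pending_transactions = {"example.org": ["txn-1"]}

    pending_destinations = Gauge(
        "synapse_federation_client_pending_destinations", "")
    # The value is recomputed from the callback every time the registry is scraped.
    pending_destinations.set_function(lambda: len(pending_transactions))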


@@ -29,12 +29,20 @@ from twisted.internet import reactor
 logger = logging.getLogger(__name__)
 running_on_pypy = platform.python_implementation() == 'PyPy'
 all_metrics = []
 all_collectors = []
 all_gauges = {}

+class RegistryProxy(object):
+
+    def collect(self):
+        for metric in REGISTRY.collect():
+            if not metric.name.startswith("__"):
+                yield metric

 @attr.s(hash=True)
 class LaterGauge(object):
@@ -45,7 +53,7 @@ class LaterGauge(object):
     def collect(self):
-        g = GaugeMetricFamily(self.name, self.desc, self.labels)
+        g = GaugeMetricFamily(self.name, self.desc, labels=self.labels)
         try:
             calls = self.caller()
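RegistryProxy exists so that placeholder families (the "__unused" gauges yielded by the cache collector later in this commit) never reach the exporter, and the LaterGauge fix passes labels= by keyword because GaugeMetricFamily's third positional parameter is an initial value, not the label list. A self-contained sketch of the same collector pattern, assuming the attributes shown above (the class name is illustrative):

    import attr
    from prometheus_client.core import REGISTRY, GaugeMetricFamily

    @attr.s(hash=True)
    class CallbackGauge(object):
        """Illustrative LaterGauge-style collector: the value comes from caller()."""

        name = attr.ib()
        desc = attr.ib()
        labels = attr.ib(hash=False)
        caller = attr.ib()

        def collect(self):
            # labels must be passed by keyword; the third positional slot is a value.
            g = GaugeMetricFamily(self.name, self.desc, labels=self.labels)
            try:
                g.add_metric([], self.caller())
            except Exception:
                pass
            yield g

    queue = []
    REGISTRY.register(CallbackGauge("demo_queue_depth", "", [], lambda: len(queue)))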


@@ -194,14 +194,14 @@ class Notifier(object):
                 all_user_streams.add(x)
             return sum(stream.count_listeners() for stream in all_user_streams)
-        LaterGauge("listeners", "", [], count_listeners)
+        LaterGauge("synapse_notifier_listeners", "", [], count_listeners)
         LaterGauge(
-            "rooms", "", [],
+            "synapse_notifier_rooms", "", [],
             lambda: count(bool, self.room_to_user_streams.values()),
         )
         LaterGauge(
-            "users", "", [],
+            "synapse_notifier_users", "", [],
             lambda: len(self.user_to_user_stream),
         )


@@ -28,9 +28,9 @@ from prometheus_client import Counter
 logger = logging.getLogger(__name__)

-http_push_processed_counter = Counter("http_pushes_processed", "")
+http_push_processed_counter = Counter("synapse_http_httppusher_http_pushes_processed", "")

-http_push_failed_counter = Counter("http_pushes_failed", "")
+http_push_failed_counter = Counter("synapse_http_httppusher_http_pushes_failed", "")

 class HttpPusher(object):
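The pusher counters get the same fully qualified names. Counters are created once at import time and incremented at the call sites; a hedged sketch of the usage pattern (the helper function is illustrative, not the pusher's real code path):

    from prometheus_client import Counter

    http_push_processed_counter = Counter(
        "synapse_http_httppusher_http_pushes_processed", "")
    http_push_failed_counter = Counter(
        "synapse_http_httppusher_http_pushes_failed", "")

    def record_push_result(succeeded):
        # Hypothetical call site: count each push attempt by outcome.
        if succeeded:
            http_push_processed_counter.inc()
        else:
            http_push_failed_counter.inc()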


@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from prometheus_client.core import GaugeMetricFamily, REGISTRY
+from prometheus_client.core import Gauge, REGISTRY, GaugeMetricFamily

 import os
@@ -22,10 +22,20 @@ CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.5))
 caches_by_name = {}
 collectors_by_name = {}

-def register_cache(name, cache_name, cache):
+cache_size = Gauge("synapse_util_caches_cache:size", "", ["name"])
+cache_hits = Gauge("synapse_util_caches_cache:hits", "", ["name"])
+cache_evicted = Gauge("synapse_util_caches_cache:evicted_size", "", ["name"])
+cache_total = Gauge("synapse_util_caches_cache:total", "", ["name"])
+
+response_cache_size = Gauge("synapse_util_caches_response_cache:size", "", ["name"])
+response_cache_hits = Gauge("synapse_util_caches_response_cache:hits", "", ["name"])
+response_cache_evicted = Gauge("synapse_util_caches_response_cache:evicted_size", "", ["name"])
+response_cache_total = Gauge("synapse_util_caches_response_cache:total", "", ["name"])
+
+
+def register_cache(cache_type, cache_name, cache):

     # Check if the metric is already registered. Unregister it, if so.
-    metric_name = "synapse_util_caches_%s:%s" % (name, cache_name,)
+    metric_name = "cache_%s_%s" % (cache_type, cache_name,)
     if metric_name in collectors_by_name.keys():
         REGISTRY.unregister(collectors_by_name[metric_name])
@@ -44,15 +54,22 @@ def register_cache(name, cache_name, cache):
         def inc_evictions(self, size=1):
             self.evicted_size += size

-        def collect(self):
-            cache_size = len(cache)
-            gm = GaugeMetricFamily(metric_name, "", labels=["size", "hits", "misses", "total"])
-            gm.add_metric(["size"], cache_size)
-            gm.add_metric(["hits"], self.hits)
-            gm.add_metric(["misses"], self.misses)
-            gm.add_metric(["total"], self.hits + self.misses)
-            yield gm
+        def describe(self):
+            return []
+
+        def collect(self):
+            if cache_type == "response_cache":
+                response_cache_size.labels(cache_name).set(len(cache))
+                response_cache_hits.labels(cache_name).set(self.hits)
+                response_cache_evicted.labels(cache_name).set(self.evicted_size)
+                response_cache_total.labels(cache_name).set(self.hits + self.misses)
+            else:
+                cache_size.labels(cache_name).set(len(cache))
+                cache_hits.labels(cache_name).set(self.hits)
+                cache_evicted.labels(cache_name).set(self.evicted_size)
+                cache_total.labels(cache_name).set(self.hits + self.misses)
+            yield GaugeMetricFamily("__unused", "")

     metric = CacheMetric()
     REGISTRY.register(metric)
@@ -60,6 +77,7 @@ def register_cache(name, cache_name, cache):
     collectors_by_name[metric_name] = metric
     return metric

 KNOWN_KEYS = {
     key: key for key in
     (
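The rewrite above trades the per-cache GaugeMetricFamily for shared Gauges labelled by cache name, updated from the collector at scrape time; describe() returning an empty list stops the registry from invoking collect() eagerly during register(), and the "__unused" family it yields (a placeholder so collect() still produces an iterable of families) is what RegistryProxy filters back out. A reduced sketch of the pattern under those assumptions (metric and class names here are illustrative):

    from prometheus_client import Gauge
    from prometheus_client.core import REGISTRY, GaugeMetricFamily

    # One labelled gauge shared by every cache, keyed by the cache's name.
    demo_cache_size = Gauge("demo_cache:size", "", ["name"])

    def register_demo_cache(cache_name, cache):
        class CacheCollector(object):
            def describe(self):
                # Empty description: register() will not call collect() eagerly.
                return []

            def collect(self):
                # Push the current value into the shared labelled gauge...
                demo_cache_size.labels(cache_name).set(len(cache))
                # ...and yield a throwaway family the proxy later filters out.
                yield GaugeMetricFamily("__unused", "")

        collector = CacheCollector()
        REGISTRY.register(collector)
        return collector

    register_demo_cache("get_users", {"alice": 1})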


@@ -80,7 +80,7 @@ class Cache(object):
         self.name = name
         self.keylen = keylen
         self.thread = None
-        self.metrics = register_cache("descriptor", name, self.cache)
+        self.metrics = register_cache("cache", name, self.cache)

     def _on_evicted(self, evicted_count):
         self.metrics.inc_evictions(evicted_count)
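With the cache type switched from "descriptor" to "cache", descriptor caches report through the synapse_util_caches_cache:* gauges rather than the response-cache ones. A minimal sketch of how a consumer drives the returned metrics object, using only the pieces visible in this diff (DemoCacheMetric, register_demo_cache, and BoundedCache are hypothetical stand-ins):

    from collections import OrderedDict

    class DemoCacheMetric(object):
        """Stand-in for the object returned by register_cache."""

        def __init__(self):
            self.evicted_size = 0

        def inc_evictions(self, size=1):
            self.evicted_size += size

    def register_demo_cache(cache_type, cache_name, cache):
        # The real helper also registers a prometheus collector for `cache`.
        return DemoCacheMetric()

    class BoundedCache(object):
        def __init__(self, name, max_entries=2):
            self.cache = OrderedDict()
            self.max_entries = max_entries
            self.metrics = register_demo_cache("cache", name, self.cache)

        def set(self, key, value):
            self.cache[key] = value
            while len(self.cache) > self.max_entries:
                # Evict the oldest entry and record it, like _on_evicted above.
                self.cache.popitem(last=False)
                self.metrics.inc_evictions(1)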