Trace how long it takes for the send transaction to complete, including retries (#5986)

Jorik Schellekens 2019-09-05 17:44:55 +01:00 committed by GitHub
parent bc604e7f94
commit f7c873a643
4 changed files with 56 additions and 36 deletions

changelog.d/5986.feature (new file)

@@ -0,0 +1 @@
+Trace replication send times.

synapse/http/client.py

@@ -46,6 +46,7 @@ from synapse.http import (
     redact_uri,
 )
 from synapse.logging.context import make_deferred_yieldable
+from synapse.logging.opentracing import set_tag, start_active_span, tags
 from synapse.util.async_helpers import timeout_deferred
 from synapse.util.caches import CACHE_SIZE_FACTOR
@@ -269,42 +270,56 @@ class SimpleHttpClient(object):
         # log request but strip `access_token` (AS requests for example include this)
         logger.info("Sending request %s %s", method, redact_uri(uri))

-        try:
-            body_producer = None
-            if data is not None:
-                body_producer = QuieterFileBodyProducer(BytesIO(data))
+        with start_active_span(
+            "outgoing-client-request",
+            tags={
+                tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
+                tags.HTTP_METHOD: method,
+                tags.HTTP_URL: uri,
+            },
+            finish_on_close=True,
+        ):
+            try:
+                body_producer = None
+                if data is not None:
+                    body_producer = QuieterFileBodyProducer(BytesIO(data))

-            request_deferred = treq.request(
-                method,
-                uri,
-                agent=self.agent,
-                data=body_producer,
-                headers=headers,
-                **self._extra_treq_args
-            )
-            request_deferred = timeout_deferred(
-                request_deferred,
-                60,
-                self.hs.get_reactor(),
-                cancelled_to_request_timed_out_error,
-            )
-            response = yield make_deferred_yieldable(request_deferred)
+                request_deferred = treq.request(
+                    method,
+                    uri,
+                    agent=self.agent,
+                    data=body_producer,
+                    headers=headers,
+                    **self._extra_treq_args
+                )
+                request_deferred = timeout_deferred(
+                    request_deferred,
+                    60,
+                    self.hs.get_reactor(),
+                    cancelled_to_request_timed_out_error,
+                )
+                response = yield make_deferred_yieldable(request_deferred)

-            incoming_responses_counter.labels(method, response.code).inc()
-            logger.info(
-                "Received response to %s %s: %s", method, redact_uri(uri), response.code
-            )
-            return response
-        except Exception as e:
-            incoming_responses_counter.labels(method, "ERR").inc()
-            logger.info(
-                "Error sending request to %s %s: %s %s",
-                method,
-                redact_uri(uri),
-                type(e).__name__,
-                e.args[0],
-            )
-            raise
+                incoming_responses_counter.labels(method, response.code).inc()
+                logger.info(
+                    "Received response to %s %s: %s",
+                    method,
+                    redact_uri(uri),
+                    response.code,
+                )
+                return response
+            except Exception as e:
+                incoming_responses_counter.labels(method, "ERR").inc()
+                logger.info(
+                    "Error sending request to %s %s: %s %s",
+                    method,
+                    redact_uri(uri),
+                    type(e).__name__,
+                    e.args[0],
+                )
+                set_tag(tags.ERROR, True)
+                set_tag("error_reason", e.args[0])
+                raise

     @defer.inlineCallbacks
     def post_urlencoded_get_json(self, uri, args={}, headers=None):
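The hunk above wraps the whole outgoing request, including the error path, in a single client span, so the span's duration covers the full attempt and failures are tagged on it. A minimal sketch of the same pattern, written against the reference `opentracing` package rather than Synapse's `synapse.logging.opentracing` wrappers; `do_request` is a hypothetical stand-in for the treq call:

import opentracing
from opentracing.ext import tags


def do_request(method, uri):
    # Hypothetical stand-in for the real treq/agent call.
    raise NotImplementedError


def traced_request(method, uri):
    # One span covers the whole attempt; it is finished automatically
    # when the `with` block exits, whether the request succeeded or not.
    with opentracing.global_tracer().start_active_span(
        "outgoing-client-request",
        tags={
            tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
            tags.HTTP_METHOD: method,
            tags.HTTP_URL: uri,
        },
        finish_on_close=True,
    ) as scope:
        try:
            return do_request(method, uri)
        except Exception as e:
            # Tag the span so the failure is visible in the trace.
            scope.span.set_tag(tags.ERROR, True)
            scope.span.set_tag("error_reason", str(e))
            raise

With no tracer configured, `opentracing.global_tracer()` returns a no-op tracer, so the wrapper adds essentially no overhead until tracing is enabled.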

synapse/http/matrixfederationclient.py

@@ -345,7 +345,6 @@ class MatrixFederationHttpClient(object):
         else:
             query_bytes = b""

-        # Retreive current span
         scope = start_active_span(
             "outgoing-federation-request",
             tags={
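The federation client assigns this scope to a variable instead of using it as a context manager, which lets the span stay open across whatever retries follow; per the commit title, that is how the total send time, retries included, gets measured. A rough sketch of that shape, again using the plain `opentracing` API; `attempt_send` and the retry policy here are hypothetical:

import opentracing
from opentracing.ext import tags


def attempt_send(destination, path):
    # Hypothetical single network attempt.
    raise TimeoutError(path)


def send_with_retries(destination, path, max_attempts=3):
    # Open the scope once and close it in `finally`, so the span's
    # duration covers every attempt and any backoff between them.
    scope = opentracing.global_tracer().start_active_span(
        "outgoing-federation-request",
        tags={
            tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
            tags.PEER_ADDRESS: destination,
        },
        finish_on_close=True,
    )
    try:
        for attempt in range(max_attempts):
            try:
                return attempt_send(destination, path)
            except TimeoutError:
                # Record the retry on the span before trying again.
                scope.span.log_kv({"event": "retry", "attempt": attempt})
                if attempt == max_attempts - 1:
                    scope.span.set_tag(tags.ERROR, True)
                    raise
    finally:
        scope.close()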

synapse/replication/http/_base.py

@@ -28,7 +28,11 @@ from synapse.api.errors import (
     RequestSendFailed,
     SynapseError,
 )
-from synapse.logging.opentracing import inject_active_span_byte_dict, trace_servlet
+from synapse.logging.opentracing import (
+    inject_active_span_byte_dict,
+    trace,
+    trace_servlet,
+)
 from synapse.util.caches.response_cache import ResponseCache
 from synapse.util.stringutils import random_string
@@ -129,6 +133,7 @@ class ReplicationEndpoint(object):
         client = hs.get_simple_http_client()

+        @trace(opname="outgoing_replication_request")
         @defer.inlineCallbacks
         def send_request(**kwargs):
             data = yield cls._serialize_payload(**kwargs)
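The new `@trace(opname="outgoing_replication_request")` decorator runs each outgoing replication request inside its own span, which is what makes the replication send times show up in traces. A simplified sketch of what such a decorator can look like for a plain callable (Synapse's real helper presumably also has to deal with the Deferred that `send_request` returns, which this sketch does not):

import functools

import opentracing


def trace(opname):
    # Decorator factory: run the wrapped callable inside a span named
    # `opname`, using whichever tracer has been installed globally.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with opentracing.global_tracer().start_active_span(opname):
                return func(*args, **kwargs)

        return wrapper

    return decorator


@trace(opname="outgoing_replication_request")
def send_request(**kwargs):
    # Stand-in for the replication call; the decorator times it.
    return kwargs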