This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Commit

Merge commit 'a0f574f3c' into anoa/dinsic_release_1_21_x
* commit 'a0f574f3c':
  Reduce INFO logging (#8050)
anoadragon453 committed Oct 19, 2020
2 parents (7600983 + a0f574f), commit ff0170e
Showing 4 changed files with 74 additions and 25 deletions.
1 change: 1 addition & 0 deletions changelog.d/8050.misc
@@ -0,0 +1 @@
Reduce amount of outbound request logging at INFO level.
2 changes: 1 addition & 1 deletion synapse/http/client.py
@@ -297,7 +297,7 @@ async def request(self, method, uri, data=None, headers=None):
outgoing_requests_counter.labels(method).inc()

# log request but strip `access_token` (AS requests for example include this)
logger.info("Sending request %s %s", method, redact_uri(uri))
logger.debug("Sending request %s %s", method, redact_uri(uri))

with start_active_span(
"outgoing-client-request",
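The hunk above only changes a log level, but the intent is worth spelling out: per-request lines move to DEBUG, so they disappear under the default INFO configuration, and the URI is passed through redact_uri so an access_token query parameter never reaches the logs. The following standalone sketch illustrates both effects; redact_uri_example and the logger name are made up for the illustration and are not Synapse's actual helpers.

import logging
import re

logger = logging.getLogger("demo.http.client")

# Hypothetical stand-in for a redact_uri-style helper: blank out any
# access_token query parameter before the URI is logged.
_ACCESS_TOKEN_RE = re.compile(r"(access_token=)[^&]+")

def redact_uri_example(uri: str) -> str:
    return _ACCESS_TOKEN_RE.sub(r"\1<redacted>", uri)

def send_request(method: str, uri: str) -> None:
    # At DEBUG, this line is dropped unless the logger is explicitly
    # configured to emit debug output.
    logger.debug("Sending request %s %s", method, redact_uri_example(uri))

logging.basicConfig(level=logging.INFO)
send_request("GET", "https://hs.example/_matrix/client/r0/sync?access_token=secret")  # silent
logger.setLevel(logging.DEBUG)
send_request("GET", "https://hs.example/_matrix/client/r0/sync?access_token=secret")  # logged, token redacted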
2 changes: 1 addition & 1 deletion synapse/http/federation/matrix_federation_agent.py
@@ -247,7 +247,7 @@ async def _do_connect(self, protocol_factory):
port = server.port

try:
logger.info("Connecting to %s:%i", host.decode("ascii"), port)
logger.debug("Connecting to %s:%i", host.decode("ascii"), port)
endpoint = HostnameEndpoint(self._reactor, host, port)
if self._tls_options:
endpoint = wrapClientTLS(self._tls_options, endpoint)
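For context, the surrounding code builds a plain HostnameEndpoint and wraps it with TLS only when TLS options are configured. A stripped-down sketch of that endpoint pattern, with a placeholder protocol factory and none of the Matrix-specific server resolution, might look like this (the connect() helper below is illustrative, not Synapse code):

from twisted.internet import reactor
from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
from twisted.internet.protocol import Factory, Protocol
from twisted.internet.ssl import optionsForClientTLS

def connect(host: str, port: int, use_tls: bool):
    # Plain TCP endpoint, as in _do_connect above.
    endpoint = HostnameEndpoint(reactor, host, port)
    if use_tls:
        # wrapClientTLS layers TLS over whatever the inner endpoint
        # produces, mirroring the self._tls_options branch.
        endpoint = wrapClientTLS(optionsForClientTLS(host), endpoint)
    # connect() returns a Deferred that fires with the connected protocol.
    return endpoint.connect(Factory.forProtocol(Protocol))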
94 changes: 71 additions & 23 deletions synapse/http/matrixfederationclient.py
@@ -29,10 +29,11 @@

from twisted.internet import defer, protocol
from twisted.internet.error import DNSLookupError
from twisted.internet.interfaces import IReactorPluggableNameResolver
from twisted.internet.interfaces import IReactorPluggableNameResolver, IReactorTime
from twisted.internet.task import _EPSILON, Cooperator
from twisted.web._newclient import ResponseDone
from twisted.web.http_headers import Headers
from twisted.web.iweb import IResponse

import synapse.metrics
import synapse.util.retryutils
@@ -74,7 +75,7 @@
_next_id = 1


@attr.s
@attr.s(frozen=True)
class MatrixFederationRequest(object):
method = attr.ib()
"""HTTP method
@@ -110,26 +111,52 @@ class MatrixFederationRequest(object):
:type: str|None
"""

uri = attr.ib(init=False, type=bytes)
"""The URI of this request
"""

def __attrs_post_init__(self):
global _next_id
self.txn_id = "%s-O-%s" % (self.method, _next_id)
txn_id = "%s-O-%s" % (self.method, _next_id)
_next_id = (_next_id + 1) % (MAXINT - 1)

object.__setattr__(self, "txn_id", txn_id)

destination_bytes = self.destination.encode("ascii")
path_bytes = self.path.encode("ascii")
if self.query:
query_bytes = encode_query_args(self.query)
else:
query_bytes = b""

# The object is frozen so we can pre-compute this.
uri = urllib.parse.urlunparse(
(b"matrix", destination_bytes, path_bytes, None, query_bytes, b"")
)
object.__setattr__(self, "uri", uri)

def get_json(self):
if self.json_callback:
return self.json_callback()
return self.json


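MatrixFederationRequest is now a frozen (immutable) attrs class, which is why the post-init hook assigns txn_id and the pre-computed uri through object.__setattr__ rather than plain attribute assignment. A self-contained toy version of that pattern, using made-up class and field names rather than the real request class:

import urllib.parse

import attr

@attr.s(frozen=True)
class FrozenRequest:
    method = attr.ib(type=str)
    destination = attr.ib(type=str)
    path = attr.ib(type=str)

    # Derived field: excluded from __init__ and filled in after construction.
    uri = attr.ib(init=False, type=bytes)

    def __attrs_post_init__(self):
        # A frozen instance rejects normal attribute assignment, so the
        # pre-computed value is installed via object.__setattr__.
        uri = urllib.parse.urlunparse(
            (b"matrix", self.destination.encode("ascii"),
             self.path.encode("ascii"), None, b"", b"")
        )
        object.__setattr__(self, "uri", uri)

req = FrozenRequest(method="GET", destination="remote.example", path="/_matrix/federation/v1/version")
print(req.uri)  # b'matrix://remote.example/_matrix/federation/v1/version'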
async def _handle_json_response(reactor, timeout_sec, request, response):
async def _handle_json_response(
reactor: IReactorTime,
timeout_sec: float,
request: MatrixFederationRequest,
response: IResponse,
start_ms: int,
):
"""
Reads the JSON body of a response, with a timeout
Args:
reactor (IReactor): twisted reactor, for the timeout
timeout_sec (float): number of seconds to wait for response to complete
request (MatrixFederationRequest): the request that triggered the response
response (IResponse): response to the request
reactor: twisted reactor, for the timeout
timeout_sec: number of seconds to wait for response to complete
request: the request that triggered the response
response: response to the request
start_ms: Timestamp when request was made
Returns:
dict: parsed JSON response
@@ -143,23 +170,35 @@ async def _handle_json_response(reactor, timeout_sec, request, response):
body = await make_deferred_yieldable(d)
except TimeoutError as e:
logger.warning(
"{%s} [%s] Timed out reading response", request.txn_id, request.destination,
"{%s} [%s] Timed out reading response - %s %s",
request.txn_id,
request.destination,
request.method,
request.uri.decode("ascii"),
)
raise RequestSendFailed(e, can_retry=True) from e
except Exception as e:
logger.warning(
"{%s} [%s] Error reading response: %s",
"{%s} [%s] Error reading response %s %s: %s",
request.txn_id,
request.destination,
request.method,
request.uri.decode("ascii"),
e,
)
raise

time_taken_secs = reactor.seconds() - start_ms / 1000

logger.info(
"{%s} [%s] Completed: %d %s",
"{%s} [%s] Completed request: %d %s in %.2f secs - %s %s",
request.txn_id,
request.destination,
response.code,
response.phrase.decode("ascii", errors="replace"),
time_taken_secs,
request.method,
request.uri.decode("ascii"),
)
return body

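The reworked _handle_json_response threads through the request start time so completions, timeouts and read errors are all logged with the method, URI and elapsed seconds. A rough analogue of that control flow, written against plain asyncio instead of Synapse's Twisted and clock helpers (every name below is therefore an assumption, not the real API), could look like:

import asyncio
import json
import logging
import time

logger = logging.getLogger(__name__)

async def handle_json_response(reader: asyncio.StreamReader, timeout_sec: float,
                               txn_id: str, destination: str, start_ms: int) -> dict:
    try:
        # Bound the body read with a timeout, mirroring the timeout handling above.
        raw = await asyncio.wait_for(reader.read(), timeout=timeout_sec)
        body = json.loads(raw)
    except asyncio.TimeoutError:
        logger.warning("{%s} [%s] Timed out reading response", txn_id, destination)
        raise
    except Exception as e:
        logger.warning("{%s} [%s] Error reading response: %s", txn_id, destination, e)
        raise

    # Same arithmetic as above: start_ms is in milliseconds, the clock reads
    # seconds, so convert before subtracting.
    time_taken_secs = time.time() - start_ms / 1000
    logger.info("{%s} [%s] Completed request in %.2f secs", txn_id, destination, time_taken_secs)
    return body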
@@ -261,7 +300,9 @@ async def _send_request_with_optional_trailing_slash(
# 'M_UNRECOGNIZED' which some endpoints can return when omitting a
# trailing slash on Synapse <= v0.99.3.
logger.info("Retrying request with trailing slash")
request.path += "/"

# Request is frozen so we create a new instance
request = attr.evolve(request, path=request.path + "/")

response = await self._send_request(request, **send_request_args)

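Since the request object is frozen, the trailing-slash retry can no longer do request.path += "/"; attr.evolve builds a fresh instance with the altered field instead. A minimal, self-contained illustration with a toy class:

import attr

@attr.s(frozen=True)
class ToyRequest:  # illustrative stand-in for the frozen request class
    method = attr.ib()
    path = attr.ib()

req = ToyRequest(method="GET", path="/_matrix/key/v2/server")

# In-place mutation would raise attr.exceptions.FrozenInstanceError;
# evolve returns a copy with the changed field.
retry_req = attr.evolve(req, path=req.path + "/")

assert req.path == "/_matrix/key/v2/server"          # original untouched
assert retry_req.path == "/_matrix/key/v2/server/"   # new instance for the retry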
@@ -373,9 +414,7 @@ async def _send_request(
else:
retries_left = MAX_SHORT_RETRIES

url_bytes = urllib.parse.urlunparse(
(b"matrix", destination_bytes, path_bytes, None, query_bytes, b"")
)
url_bytes = request.uri
url_str = url_bytes.decode("ascii")

url_to_sign_bytes = urllib.parse.urlunparse(
@@ -402,7 +441,7 @@

headers_dict[b"Authorization"] = auth_headers

logger.info(
logger.debug(
"{%s} [%s] Sending request: %s %s; timeout %fs",
request.txn_id,
request.destination,
@@ -436,7 +475,6 @@ async def _send_request(
except DNSLookupError as e:
raise RequestSendFailed(e, can_retry=retry_on_dns_fail) from e
except Exception as e:
logger.info("Failed to send request: %s", e)
raise RequestSendFailed(e, can_retry=True) from e

incoming_responses_counter.labels(
@@ -496,7 +534,7 @@ async def _send_request(

break
except RequestSendFailed as e:
logger.warning(
logger.info(
"{%s} [%s] Request failed: %s %s: %s",
request.txn_id,
request.destination,
@@ -654,6 +692,8 @@ async def put_json(
json=data,
)

start_ms = self.clock.time_msec()

response = await self._send_request_with_optional_trailing_slash(
request,
try_trailing_slash_on_400,
@@ -664,7 +704,7 @@ async def put_json(
)

body = await _handle_json_response(
self.reactor, self.default_timeout, request, response
self.reactor, self.default_timeout, request, response, start_ms
)

return body
@@ -720,6 +760,8 @@ async def post_json(
method="POST", destination=destination, path=path, query=args, json=data
)

start_ms = self.clock.time_msec()

response = await self._send_request(
request,
long_retries=long_retries,
@@ -733,7 +775,7 @@ async def post_json(
_sec_timeout = self.default_timeout

body = await _handle_json_response(
self.reactor, _sec_timeout, request, response
self.reactor, _sec_timeout, request, response, start_ms,
)
return body

@@ -786,6 +828,8 @@ async def get_json(
method="GET", destination=destination, path=path, query=args
)

start_ms = self.clock.time_msec()

response = await self._send_request_with_optional_trailing_slash(
request,
try_trailing_slash_on_400,
@@ -796,7 +840,7 @@ async def get_json(
)

body = await _handle_json_response(
self.reactor, self.default_timeout, request, response
self.reactor, self.default_timeout, request, response, start_ms
)

return body
@@ -846,6 +890,8 @@ async def delete_json(
method="DELETE", destination=destination, path=path, query=args
)

start_ms = self.clock.time_msec()

response = await self._send_request(
request,
long_retries=long_retries,
@@ -854,7 +900,7 @@ async def delete_json(
)

body = await _handle_json_response(
self.reactor, self.default_timeout, request, response
self.reactor, self.default_timeout, request, response, start_ms
)
return body

@@ -914,12 +960,14 @@ async def get_file(
)
raise
logger.info(
"{%s} [%s] Completed: %d %s [%d bytes]",
"{%s} [%s] Completed: %d %s [%d bytes] %s %s",
request.txn_id,
request.destination,
response.code,
response.phrase.decode("ascii", errors="replace"),
length,
request.method,
request.uri.decode("ascii"),
)
return (length, headers)
