From 823c34a940affaaae6a4dd06b9859ce72d1cbe0e Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 22 Jul 2019 13:29:15 +0100
Subject: [PATCH 01/46] One tracing decorator to rule them all.

---
 synapse/logging/opentracing.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index d2c209c471fa..a9b808854636 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -157,7 +157,7 @@ def deferred_function(*args, **kwargs):
 
 from canonicaljson import json
 
-from twisted.internet import defer
+from twisted.internet import defer,
 
 from synapse.config import ConfigError
 

From 565544b6039c619cc4e9af8ad7f04793c4525b70 Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Fri, 28 Jun 2019 10:34:53 +0100
Subject: [PATCH 02/46] Trace e2e

---
 synapse/handlers/e2e_keys.py         | 50 ++++++++++++++++++++++++++--
 synapse/handlers/e2e_room_keys.py    | 10 ++++++
 synapse/rest/client/v2_alpha/keys.py | 13 ++++++++
 synapse/storage/e2e_room_keys.py     |  9 +++++
 synapse/storage/end_to_end_keys.py   | 35 +++++++++++++++++++
 5 files changed, 115 insertions(+), 2 deletions(-)

diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 1300b540e34c..8b2249fc5be5 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import logging
+from synapse.util.tracerutils import TracerUtil, trace_defered_function
 
 from six import iteritems
 
@@ -45,6 +46,7 @@ def __init__(self, hs):
             "client_keys", self.on_federation_query_client_keys
         )
 
+    @trace_defered_function
     @defer.inlineCallbacks
     def query_devices(self, query_body, timeout):
         """ Handle a device key query from a client
@@ -65,6 +67,7 @@ def query_devices(self, query_body, timeout):
             }
         }
         """
+
         device_keys_query = query_body.get("device_keys", {})
 
         # separate users by domain.
@@ -79,6 +82,9 @@ def query_devices(self, query_body, timeout):
             else:
                 remote_queries[user_id] = device_ids
 
+        TracerUtil.set_tag("local_key_query", local_query)
+        TracerUtil.set_tag("remote_key_query", remote_queries)
+
         # First get local devices.
failures = {} results = {} @@ -119,9 +125,12 @@ def query_devices(self, query_body, timeout): r[user_id] = remote_queries[user_id] # Now fetch any devices that we don't have in our cache + @trace_defered_function @defer.inlineCallbacks def do_remote_query(destination): destination_query = remote_queries_not_in_cache[destination] + + TracerUtil.set_tag("key_query", destination_query) try: remote_result = yield self.federation.query_client_keys( destination, {"device_keys": destination_query}, timeout=timeout @@ -132,7 +141,10 @@ def do_remote_query(destination): results[user_id] = keys except Exception as e: - failures[destination] = _exception_to_failure(e) + failure = _exception_to_failure(e) + failures[destination] = failure + TracerUtil.set_tag("error", True) + TracerUtil.set_tag("reason", failure) yield make_deferred_yieldable( defer.gatherResults( @@ -146,6 +158,7 @@ def do_remote_query(destination): return {"device_keys": results, "failures": failures} + @trace_defered_function @defer.inlineCallbacks def query_local_devices(self, query): """Get E2E device keys for local users @@ -158,6 +171,7 @@ def query_local_devices(self, query): defer.Deferred: (resolves to dict[string, dict[string, dict]]): map from user_id -> device_id -> device details """ + TracerUtil.set_tag("local_query", query) local_query = [] result_dict = {} @@ -165,6 +179,14 @@ def query_local_devices(self, query): # we use UserID.from_string to catch invalid user ids if not self.is_mine(UserID.from_string(user_id)): logger.warning("Request for keys for non-local user %s", user_id) + TracerUtil.log_kv( + { + "message": "Requested a local key for a user which" + + " was not local to the homeserver", + "user_id": user_id, + } + ) + TracerUtil.set_tag("error", True) raise SynapseError(400, "Not a user here") if not device_ids: @@ -189,6 +211,7 @@ def query_local_devices(self, query): r["unsigned"]["device_display_name"] = display_name result_dict[user_id][device_id] = r + TracerUtil.log_kv(results) return result_dict @defer.inlineCallbacks @@ -199,6 +222,7 @@ def on_federation_query_client_keys(self, query_body): res = yield self.query_local_devices(device_keys_query) return {"device_keys": res} + @trace_defered_function @defer.inlineCallbacks def claim_one_time_keys(self, query, timeout): local_query = [] @@ -213,6 +237,9 @@ def claim_one_time_keys(self, query, timeout): domain = get_domain_from_id(user_id) remote_queries.setdefault(domain, {})[user_id] = device_keys + TracerUtil.set_tag("local_key_query", local_query) + TracerUtil.set_tag("remote_key_query", remote_queries) + results = yield self.store.claim_e2e_one_time_keys(local_query) json_result = {} @@ -224,8 +251,10 @@ def claim_one_time_keys(self, query, timeout): key_id: json.loads(json_bytes) } + @trace_defered_function @defer.inlineCallbacks def claim_client_keys(destination): + TracerUtil.set_tag("destination", destination) device_keys = remote_queries[destination] try: remote_result = yield self.federation.claim_client_keys( @@ -234,8 +263,12 @@ def claim_client_keys(destination): for user_id, keys in remote_result["one_time_keys"].items(): if user_id in device_keys: json_result[user_id] = keys + except Exception as e: - failures[destination] = _exception_to_failure(e) + failure = _exception_to_failure(e) + failures[destination] = failure + TracerUtil.set_tag("error", True) + TracerUtil.set_tag("reason", failure) yield make_deferred_yieldable( defer.gatherResults( @@ -259,14 +292,21 @@ def claim_client_keys(destination): ), ) + 
TracerUtil.log_kv({"one_time_keys": json_result, "failures": failures}) return {"one_time_keys": json_result, "failures": failures} + @trace_defered_function @defer.inlineCallbacks def upload_keys_for_user(self, user_id, device_id, keys): + TracerUtil.set_tag("user_id", user_id) + TracerUtil.set_tag("device_id", device_id) + TracerUtil.set_tag("keys", keys) + time_now = self.clock.time_msec() # TODO: Validate the JSON to make sure it has the right keys. device_keys = keys.get("device_keys", None) + TracerUtil.set_tag("device_keys", device_keys) if device_keys: logger.info( "Updating device_keys for device %r for user %s at %d", @@ -287,6 +327,10 @@ def upload_keys_for_user(self, user_id, device_id, keys): yield self._upload_one_time_keys_for_user( user_id, device_id, time_now, one_time_keys ) + else: + TracerUtil.log_kv( + {"event": "did not upload one_time_keys", "reason": "no keys given"} + ) # the device should have been registered already, but it may have been # deleted due to a race with a DELETE request. Or we may be using an @@ -297,8 +341,10 @@ def upload_keys_for_user(self, user_id, device_id, keys): result = yield self.store.count_e2e_one_time_keys(user_id, device_id) + TracerUtil.set_tag("one_time_key_counts", result) return {"one_time_key_counts": result} + @trace_defered_function @defer.inlineCallbacks def _upload_one_time_keys_for_user( self, user_id, device_id, time_now, one_time_keys diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 41b871fc5953..06837bb12647 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -27,6 +27,7 @@ SynapseError, ) from synapse.util.async_helpers import Linearizer +from synapse.util.tracerutils import TracerUtil, trace_defered_function logger = logging.getLogger(__name__) @@ -49,6 +50,7 @@ def __init__(self, hs): # changed. self._upload_linearizer = Linearizer("upload_room_keys_lock") + @trace_defered_function @defer.inlineCallbacks def get_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk get the E2E room keys for a given backup, optionally filtered to a given @@ -84,8 +86,10 @@ def get_room_keys(self, user_id, version, room_id=None, session_id=None): user_id, version, room_id, session_id ) + TracerUtil.log_kv(results) return results + @trace_defered_function @defer.inlineCallbacks def delete_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk delete the E2E room keys for a given backup, optionally filtered to a given @@ -107,6 +111,7 @@ def delete_room_keys(self, user_id, version, room_id=None, session_id=None): with (yield self._upload_linearizer.queue(user_id)): yield self.store.delete_e2e_room_keys(user_id, version, room_id, session_id) + @trace_defered_function @defer.inlineCallbacks def upload_room_keys(self, user_id, version, room_keys): """Bulk upload a list of room keys into a given backup version, asserting @@ -174,6 +179,7 @@ def upload_room_keys(self, user_id, version, room_keys): user_id, version, room_id, session_id, session ) + @trace_defered_function @defer.inlineCallbacks def _upload_room_key(self, user_id, version, room_id, session_id, room_key): """Upload a given room_key for a given room and session into a given @@ -236,6 +242,7 @@ def _should_replace_room_key(current_room_key, room_key): return False return True + @trace_defered_function @defer.inlineCallbacks def create_version(self, user_id, version_info): """Create a new backup version. 
This automatically becomes the new @@ -264,6 +271,7 @@ def create_version(self, user_id, version_info): ) return new_version + @trace_defered_function @defer.inlineCallbacks def get_version_info(self, user_id, version=None): """Get the info about a given version of the user's backup @@ -294,6 +302,7 @@ def get_version_info(self, user_id, version=None): raise return res + @trace_defered_function @defer.inlineCallbacks def delete_version(self, user_id, version=None): """Deletes a given version of the user's e2e_room_keys backup @@ -314,6 +323,7 @@ def delete_version(self, user_id, version=None): else: raise + @trace_defered_function @defer.inlineCallbacks def update_version(self, user_id, version, version_info): """Update the info about a given version of the user's backup diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 6008adec7cf3..8de9e12df493 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -25,6 +25,10 @@ parse_string, ) from synapse.types import StreamToken +from synapse.util.tracerutils import ( + TracerUtil, + trace_defered_function_using_operation_name, +) from ._base import client_patterns @@ -68,6 +72,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() + @trace_defered_function_using_operation_name("upload_keys") @defer.inlineCallbacks def on_POST(self, request, device_id): requester = yield self.auth.get_user_by_req(request, allow_guest=True) @@ -78,6 +83,14 @@ def on_POST(self, request, device_id): # passing the device_id here is deprecated; however, we allow it # for now for compatibility with older clients. if requester.device_id is not None and device_id != requester.device_id: + TracerUtil.set_tag("error", True) + TracerUtil.log_kv( + { + "message": "Client uploading keys for a different device", + "logged_in_id": requester.device_id, + "key_being_uploaded": device_id, + } + ) logger.warning( "Client uploading keys for a different device " "(logged in as %s, uploading for %s)", diff --git a/synapse/storage/e2e_room_keys.py b/synapse/storage/e2e_room_keys.py index 99128f2df70e..b0e426d30ee8 100644 --- a/synapse/storage/e2e_room_keys.py +++ b/synapse/storage/e2e_room_keys.py @@ -18,11 +18,13 @@ from twisted.internet import defer from synapse.api.errors import StoreError +from synapse.util.tracerutils import trace_defered_function, trace_function from ._base import SQLBaseStore class EndToEndRoomKeyStore(SQLBaseStore): + @trace_defered_function @defer.inlineCallbacks def get_e2e_room_key(self, user_id, version, room_id, session_id): """Get the encrypted E2E room key for a given session from a given @@ -63,6 +65,7 @@ def get_e2e_room_key(self, user_id, version, room_id, session_id): return row + @trace_defered_function @defer.inlineCallbacks def set_e2e_room_key(self, user_id, version, room_id, session_id, room_key): """Replaces or inserts the encrypted E2E room key for a given session in @@ -95,6 +98,7 @@ def set_e2e_room_key(self, user_id, version, room_id, session_id, room_key): lock=False, ) + @trace_defered_function @defer.inlineCallbacks def get_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk get the E2E room keys for a given backup, optionally filtered to a given @@ -153,6 +157,7 @@ def get_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): return sessions + @trace_defered_function @defer.inlineCallbacks def delete_e2e_room_keys(self, user_id, version, room_id=None, 
session_id=None): """Bulk delete the E2E room keys for a given backup, optionally filtered to a given @@ -194,6 +199,7 @@ def _get_current_version(txn, user_id): raise StoreError(404, "No current backup version") return row[0] + @trace_function def get_e2e_room_keys_version_info(self, user_id, version=None): """Get info metadata about a version of our room_keys backup. @@ -236,6 +242,7 @@ def _get_e2e_room_keys_version_info_txn(txn): "get_e2e_room_keys_version_info", _get_e2e_room_keys_version_info_txn ) + @trace_function def create_e2e_room_keys_version(self, user_id, info): """Atomically creates a new version of this user's e2e_room_keys store with the given version info. @@ -276,6 +283,7 @@ def _create_e2e_room_keys_version_txn(txn): "create_e2e_room_keys_version_txn", _create_e2e_room_keys_version_txn ) + @trace_function def update_e2e_room_keys_version(self, user_id, version, info): """Update a given backup version @@ -292,6 +300,7 @@ def update_e2e_room_keys_version(self, user_id, version, info): desc="update_e2e_room_keys_version", ) + @trace_function def delete_e2e_room_keys_version(self, user_id, version=None): """Delete a given backup version of the user's room keys. Doesn't delete their actual key data. diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 1e07474e706a..9779cb70c987 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -22,8 +22,11 @@ from ._base import SQLBaseStore, db_to_json +from synapse.util.tracerutils import TracerUtil, trace_defered_function, trace_function + class EndToEndKeyWorkerStore(SQLBaseStore): + @trace_defered_function @defer.inlineCallbacks def get_e2e_device_keys( self, query_list, include_all_devices=False, include_deleted_devices=False @@ -40,6 +43,7 @@ def get_e2e_device_keys( Dict mapping from user-id to dict mapping from device_id to dict containing "key_json", "device_display_name". """ + TracerUtil.set_tag("query_list", query_list) if not query_list: return {} @@ -57,9 +61,13 @@ def get_e2e_device_keys( return results + @trace_function def _get_e2e_device_keys_txn( self, txn, query_list, include_all_devices=False, include_deleted_devices=False ): + TracerUtil.set_tag("include_all_devices", include_all_devices) + TracerUtil.set_tag("include_deleted_devices", include_deleted_devices) + query_clauses = [] query_params = [] @@ -104,8 +112,10 @@ def _get_e2e_device_keys_txn( for user_id, device_id in deleted_devices: result.setdefault(user_id, {})[device_id] = None + TracerUtil.log_kv(result) return result + @trace_defered_function @defer.inlineCallbacks def get_e2e_one_time_keys(self, user_id, device_id, key_ids): """Retrieve a number of one-time keys for a user @@ -121,6 +131,10 @@ def get_e2e_one_time_keys(self, user_id, device_id, key_ids): key_id) to json string for key """ + TracerUtil.set_tag("user_id", user_id) + TracerUtil.set_tag("device_id", device_id) + TracerUtil.set_tag("key_ids", key_ids) + rows = yield self._simple_select_many_batch( table="e2e_one_time_keys_json", column="key_id", @@ -145,7 +159,11 @@ def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys): (algorithm, key_id, key json) """ + @trace_function def _add_e2e_one_time_keys(txn): + TracerUtil.set_tag("user_id", user_id) + TracerUtil.set_tag("device_id", device_id) + TracerUtil.set_tag("new_keys", new_keys) # We are protected from race between lookup and insertion due to # a unique constraint. 
If there is a race of two calls to # `add_e2e_one_time_keys` then they'll conflict and we will only @@ -201,7 +219,13 @@ def set_e2e_device_keys(self, user_id, device_id, time_now, device_keys): or the keys were already in the database. """ + @trace_function def _set_e2e_device_keys_txn(txn): + TracerUtil.set_tag("user_id", user_id) + TracerUtil.set_tag("device_id", device_id) + TracerUtil.set_tag("time_now", time_now) + TracerUtil.set_tag("device_keys", device_keys) + old_key_json = self._simple_select_one_onecol_txn( txn, table="e2e_device_keys_json", @@ -215,6 +239,7 @@ def _set_e2e_device_keys_txn(txn): new_key_json = encode_canonical_json(device_keys).decode("utf-8") if old_key_json == new_key_json: + TracerUtil.set_tag("error", True) return False self._simple_upsert_txn( @@ -231,6 +256,7 @@ def _set_e2e_device_keys_txn(txn): def claim_e2e_one_time_keys(self, query_list): """Take a list of one time keys out of the database""" + @trace_function def _claim_e2e_one_time_keys(txn): sql = ( "SELECT key_id, key_json FROM e2e_one_time_keys_json" @@ -252,7 +278,13 @@ def _claim_e2e_one_time_keys(txn): " AND key_id = ?" ) for user_id, device_id, algorithm, key_id in delete: + TracerUtil.log_kv( + {"message": "executing claim transaction on database"} + ) txn.execute(sql, (user_id, device_id, algorithm, key_id)) + TracerUtil.log_kv( + {"message": "finished executing and invalidating cache"} + ) self._invalidate_cache_and_stream( txn, self.count_e2e_one_time_keys, (user_id, device_id) ) @@ -261,7 +293,10 @@ def _claim_e2e_one_time_keys(txn): return self.runInteraction("claim_e2e_one_time_keys", _claim_e2e_one_time_keys) def delete_e2e_keys_by_device(self, user_id, device_id): + @trace_function def delete_e2e_keys_by_device_txn(txn): + TracerUtil.set_tag("user_id", user_id) + TracerUtil.set_tag("device_id", device_id) self._simple_delete_txn( txn, table="e2e_device_keys_json", From 1e7099d04c62a8515c6f13e86a026cce7d17bd21 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Fri, 28 Jun 2019 15:39:18 +0100 Subject: [PATCH 03/46] Fix e2e bugs --- synapse/storage/e2e_room_keys.py | 10 +++++----- synapse/storage/end_to_end_keys.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/synapse/storage/e2e_room_keys.py b/synapse/storage/e2e_room_keys.py index b0e426d30ee8..ced6dd04ded0 100644 --- a/synapse/storage/e2e_room_keys.py +++ b/synapse/storage/e2e_room_keys.py @@ -18,7 +18,7 @@ from twisted.internet import defer from synapse.api.errors import StoreError -from synapse.util.tracerutils import trace_defered_function, trace_function +from synapse.util.tracerutils import trace_defered_function from ._base import SQLBaseStore @@ -199,7 +199,7 @@ def _get_current_version(txn, user_id): raise StoreError(404, "No current backup version") return row[0] - @trace_function + @trace_defered_function def get_e2e_room_keys_version_info(self, user_id, version=None): """Get info metadata about a version of our room_keys backup. @@ -242,7 +242,7 @@ def _get_e2e_room_keys_version_info_txn(txn): "get_e2e_room_keys_version_info", _get_e2e_room_keys_version_info_txn ) - @trace_function + @trace_defered_function def create_e2e_room_keys_version(self, user_id, info): """Atomically creates a new version of this user's e2e_room_keys store with the given version info. 
@@ -283,7 +283,7 @@ def _create_e2e_room_keys_version_txn(txn): "create_e2e_room_keys_version_txn", _create_e2e_room_keys_version_txn ) - @trace_function + @trace_defered_function def update_e2e_room_keys_version(self, user_id, version, info): """Update a given backup version @@ -300,7 +300,7 @@ def update_e2e_room_keys_version(self, user_id, version, info): desc="update_e2e_room_keys_version", ) - @trace_function + @trace_defered_function def delete_e2e_room_keys_version(self, user_id, version=None): """Delete a given backup version of the user's room keys. Doesn't delete their actual key data. diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 9779cb70c987..acfe3da0ad07 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -239,7 +239,7 @@ def _set_e2e_device_keys_txn(txn): new_key_json = encode_canonical_json(device_keys).decode("utf-8") if old_key_json == new_key_json: - TracerUtil.set_tag("error", True) + TracerUtil.log_kv({"event", "key already stored"}) return False self._simple_upsert_txn( From d876cda7253cbf05deb0f0c8a3baab68f0c19f20 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Fri, 28 Jun 2019 17:09:25 +0100 Subject: [PATCH 04/46] Trace more e2e stuff and less e2e stuff --- synapse/handlers/e2e_room_keys.py | 1 - synapse/rest/client/v2_alpha/room_keys.py | 5 +++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 06837bb12647..07724d1c4a1e 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -271,7 +271,6 @@ def create_version(self, user_id, version_info): ) return new_version - @trace_defered_function @defer.inlineCallbacks def get_version_info(self, user_id, version=None): """Get the info about a given version of the user's backup diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 10dec96208b6..34ab74d3a4e8 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -23,6 +23,10 @@ parse_json_object_from_request, parse_string, ) +from synapse.util.tracerutils import ( + TracerUtil, + trace_defered_function_using_operation_name, +) from ._base import client_patterns @@ -311,6 +315,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_room_keys_handler = hs.get_e2e_room_keys_handler() + @trace_defered_function_using_operation_name("get_room_keys_version") @defer.inlineCallbacks def on_GET(self, request, version): """ From fd669e5e444e79a6116bc93d6df764b26ba3ab91 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Fri, 28 Jun 2019 17:15:31 +0100 Subject: [PATCH 05/46] Trace devices --- synapse/handlers/device.py | 38 +++++++++++++++++++++++++++++++++++++- synapse/storage/devices.py | 5 +++++ 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index d6ab33778393..2383ac670cfc 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -31,6 +31,7 @@ from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.metrics import measure_func from synapse.util.retryutils import NotRetryingDestination +from synapse.util.tracerutils import TracerUtil, trace_defered_function from ._base import BaseHandler @@ -45,6 +46,7 @@ def __init__(self, hs): self.state = hs.get_state_handler() self._auth_handler = hs.get_auth_handler() + @trace_defered_function @defer.inlineCallbacks 
def get_devices_by_user(self, user_id): """ @@ -56,6 +58,7 @@ def get_devices_by_user(self, user_id): defer.Deferred: list[dict[str, X]]: info on each device """ + TracerUtil.set_tag("user_id", user_id) device_map = yield self.store.get_devices_by_user(user_id) ips = yield self.store.get_last_client_ip_by_device(user_id, device_id=None) @@ -64,8 +67,10 @@ def get_devices_by_user(self, user_id): for device in devices: _update_device_from_client_ips(device, ips) + TracerUtil.log_kv(device_map) return devices + @trace_defered_function @defer.inlineCallbacks def get_device(self, user_id, device_id): """ Retrieve the given device @@ -85,9 +90,14 @@ def get_device(self, user_id, device_id): raise errors.NotFoundError ips = yield self.store.get_last_client_ip_by_device(user_id, device_id) _update_device_from_client_ips(device, ips) + + TracerUtil.set_tag("device", device) + TracerUtil.set_tag("ips", ips) + return device @measure_func("device.get_user_ids_changed") + @trace_defered_function @defer.inlineCallbacks def get_user_ids_changed(self, user_id, from_token): """Get list of users that have had the devices updated, or have newly @@ -97,6 +107,9 @@ def get_user_ids_changed(self, user_id, from_token): user_id (str) from_token (StreamToken) """ + + TracerUtil("user_id", user_id) + TracerUtil.set_tag("from_token", from_token) now_room_key = yield self.store.get_room_events_max_id() room_ids = yield self.store.get_rooms_for_user(user_id) @@ -133,7 +146,7 @@ def get_user_ids_changed(self, user_id, from_token): if etype != EventTypes.Member: continue possibly_left.add(state_key) - continue + continue # Fetch the current state at the time. try: @@ -148,6 +161,9 @@ def get_user_ids_changed(self, user_id, from_token): # special-case for an empty prev state: include all members # in the changed list if not event_ids: + TracerUtil.log_kv( + {"event": "encountered empty previous state", "room_id": room_id} + ) for key, event_id in iteritems(current_state_ids): etype, state_key = key if etype != EventTypes.Member: @@ -200,6 +216,10 @@ def get_user_ids_changed(self, user_id, from_token): possibly_joined = [] possibly_left = [] + TracerUtil.log_kv( + {"changed": list(possibly_joined), "left": list(possibly_left)} + ) + return {"changed": list(possibly_joined), "left": list(possibly_left)} @@ -267,6 +287,7 @@ def check_device_registered( raise errors.StoreError(500, "Couldn't generate a device ID.") + @trace_defered_function @defer.inlineCallbacks def delete_device(self, user_id, device_id): """ Delete the given device @@ -284,6 +305,8 @@ def delete_device(self, user_id, device_id): except errors.StoreError as e: if e.code == 404: # no match + TracerUtil.set_tag("error", True) + TracerUtil.set_tag("reason", "User doesn't have that device id.") pass else: raise @@ -296,6 +319,7 @@ def delete_device(self, user_id, device_id): yield self.notify_device_update(user_id, [device_id]) + @trace_defered_function @defer.inlineCallbacks def delete_all_devices_for_user(self, user_id, except_device_id=None): """Delete all of the user's devices @@ -331,6 +355,8 @@ def delete_devices(self, user_id, device_ids): except errors.StoreError as e: if e.code == 404: # no match + TracerUtil.set_tag("error", True) + TracerUtil.set_tag("reason", "User doesn't have that device id.") pass else: raise @@ -451,12 +477,15 @@ def __init__(self, hs, device_handler): iterable=True, ) + @trace_defered_function @defer.inlineCallbacks def incoming_device_list_update(self, origin, edu_content): """Called on incoming device list update from 
federation. Responsible for parsing the EDU and adding to pending updates list. """ + TracerUtil.set_tag("origin", origin) + TracerUtil.set_tag("edu_content", edu_content) user_id = edu_content.pop("user_id") device_id = edu_content.pop("device_id") stream_id = str(edu_content.pop("stream_id")) # They may come as ints @@ -477,6 +506,13 @@ def incoming_device_list_update(self, origin, edu_content): if not room_ids: # We don't share any rooms with this user. Ignore update, as we # probably won't get any further updates. + TracerUtil.set_tag("error", True) + TracerUtil.log_kv( + { + "message": "Got an update from a user which " + + "doesn't share a room with the current user." + } + ) logger.warning( "Got device list update edu for %r/%r, but don't share a room", user_id, diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 8f72d9289555..4627bc049a72 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging +from synapse.util.tracerutils import TracerUtil, trace_defered_function from six import iteritems @@ -299,6 +300,7 @@ def _mark_as_sent_devices_by_remote_txn(self, txn, destination, stream_id): def get_device_stream_token(self): return self._device_list_id_gen.get_current_token() + @trace_defered_function @defer.inlineCallbacks def get_user_devices_from_cache(self, query_list): """Get the devices (and keys if any) for remote users from the cache. @@ -330,6 +332,9 @@ def get_user_devices_from_cache(self, query_list): else: results[user_id] = yield self._get_cached_devices_for_user(user_id) + TracerUtil.set_tag("in_cache", results) + TracerUtil.set_tag("not_in_cache", user_ids_not_in_cache) + return (user_ids_not_in_cache, results) @cachedInlineCallbacks(num_args=2, tree=True) From c988c02c7cafb503f36c119d343b0a33b8171330 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Fri, 28 Jun 2019 17:16:07 +0100 Subject: [PATCH 06/46] Trace device messages. 
--- synapse/handlers/devicemessage.py | 4 ++++ synapse/rest/client/v2_alpha/sendtodevice.py | 8 +++++++ synapse/storage/deviceinbox.py | 23 ++++++++++++++++++++ 3 files changed, 35 insertions(+) diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index e1ebb6346c3a..c99980518423 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -20,6 +20,7 @@ from synapse.api.errors import SynapseError from synapse.types import UserID, get_domain_from_id from synapse.util.stringutils import random_string +from synapse.util.tracerutils import TracerUtil, trace_defered_function logger = logging.getLogger(__name__) @@ -76,6 +77,7 @@ def on_direct_to_device_edu(self, origin, content): "to_device_key", stream_id, users=local_messages.keys() ) + @trace_defered_function @defer.inlineCallbacks def send_device_message(self, sender_user_id, message_type, messages): @@ -109,6 +111,7 @@ def send_device_message(self, sender_user_id, message_type, messages): "message_id": message_id, } + TracerUtil.log_kv(local_messages) stream_id = yield self.store.add_messages_to_device_inbox( local_messages, remote_edu_contents ) @@ -117,6 +120,7 @@ def send_device_message(self, sender_user_id, message_type, messages): "to_device_key", stream_id, users=local_messages.keys() ) + TracerUtil.log_kv(remote_messages) for destination in remote_messages.keys(): # Enqueue a new federation transaction to send the new # device messages to each remote destination. diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index 2613648d821b..a7782cbd7cb3 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -20,6 +20,11 @@ from synapse.http import servlet from synapse.http.servlet import parse_json_object_from_request from synapse.rest.client.transactions import HttpTransactionCache +from synapse.util.tracerutils import ( + TracerUtil, + trace_defered_function_using_operation_name, + tag_args, +) from ._base import client_patterns @@ -42,7 +47,10 @@ def __init__(self, hs): self.txns = HttpTransactionCache(hs) self.device_message_handler = hs.get_device_message_handler() + @trace_defered_function_using_operation_name("sendToDevice") def on_PUT(self, request, message_type, txn_id): + TracerUtil.set_tag("message_type", message_type) + TracerUtil.set_tag("txn_id", txn_id) return self.txns.fetch_or_execute_request( request, self._put, request, message_type, txn_id ) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 79bb0ea46db5..4a45926c45d3 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -22,6 +22,7 @@ from synapse.storage._base import SQLBaseStore from synapse.storage.background_updates import BackgroundUpdateStore from synapse.util.caches.expiringcache import ExpiringCache +from synapse.util.tracerutils import TracerUtil, trace_defered_function, trace_function logger = logging.getLogger(__name__) @@ -72,6 +73,7 @@ def get_new_messages_for_device_txn(txn): "get_new_messages_for_device", get_new_messages_for_device_txn ) + @trace_defered_function @defer.inlineCallbacks def delete_messages_for_device(self, user_id, device_id, up_to_stream_id): """ @@ -87,11 +89,15 @@ def delete_messages_for_device(self, user_id, device_id, up_to_stream_id): last_deleted_stream_id = self._last_device_delete_cache.get( (user_id, device_id), None ) + + TracerUtil.set_tag("last_deleted_stream_id", last_deleted_stream_id) + if 
last_deleted_stream_id: has_changed = self._device_inbox_stream_cache.has_entity_changed( user_id, last_deleted_stream_id ) if not has_changed: + TracerUtil.log_kv({"message": "No changes in cache since last check"}) return 0 def delete_messages_for_device_txn(txn): @@ -107,6 +113,10 @@ def delete_messages_for_device_txn(txn): "delete_messages_for_device", delete_messages_for_device_txn ) + TracerUtil.log_kv( + {"message": "deleted {} messages for device".format(count), "count": count} + ) + # Update the cache, ensuring that we only ever increase the value last_deleted_stream_id = self._last_device_delete_cache.get( (user_id, device_id), 0 @@ -117,6 +127,7 @@ def delete_messages_for_device_txn(txn): return count + @trace_defered_function def get_new_device_msgs_for_remote( self, destination, last_stream_id, current_stream_id, limit ): @@ -132,16 +143,23 @@ def get_new_device_msgs_for_remote( in the stream the messages got to. """ + TracerUtil.set_tag("destination", destination) + TracerUtil.set_tag("last_stream_id", last_stream_id) + TracerUtil.set_tag("current_stream_id", current_stream_id) + TracerUtil.set_tag("limit", limit) + has_changed = self._device_federation_outbox_stream_cache.has_entity_changed( destination, last_stream_id ) if not has_changed or last_stream_id == current_stream_id: + TracerUtil.log_kv({"message": "No new messages in stream"}) return defer.succeed(([], current_stream_id)) if limit <= 0: # This can happen if we run out of room for EDUs in the transaction. return defer.succeed(([], last_stream_id)) + @trace_function def get_new_messages_for_remote_destination_txn(txn): sql = ( "SELECT stream_id, messages_json FROM device_federation_outbox" @@ -156,6 +174,9 @@ def get_new_messages_for_remote_destination_txn(txn): stream_pos = row[0] messages.append(json.loads(row[1])) if len(messages) < limit: + TracerUtil.log_kv( + {"message": "Set stream position to current position"} + ) stream_pos = current_stream_id return (messages, stream_pos) @@ -164,6 +185,7 @@ def get_new_messages_for_remote_destination_txn(txn): get_new_messages_for_remote_destination_txn, ) + @trace_defered_function def delete_device_msgs_for_remote(self, destination, up_to_stream_id): """Used to delete messages when the remote destination acknowledges their receipt. 
@@ -214,6 +236,7 @@ def __init__(self, db_conn, hs): expiry_ms=30 * 60 * 1000, ) + @trace_defered_function @defer.inlineCallbacks def add_messages_to_device_inbox( self, local_messages_by_user_then_device, remote_messages_by_destination From 21940cadf05d6c1fa55c30b7bda07bc6643b7bf2 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Tue, 2 Jul 2019 17:34:48 +0100 Subject: [PATCH 07/46] Update to new access pattern --- synapse/handlers/device.py | 46 ++++++++-------- synapse/handlers/devicemessage.py | 8 +-- synapse/handlers/e2e_keys.py | 58 ++++++++++---------- synapse/handlers/e2e_room_keys.py | 18 +++--- synapse/rest/client/v2_alpha/keys.py | 12 ++-- synapse/rest/client/v2_alpha/room_keys.py | 7 +-- synapse/rest/client/v2_alpha/sendtodevice.py | 12 ++-- synapse/storage/deviceinbox.py | 30 +++++----- synapse/storage/devices.py | 8 +-- synapse/storage/end_to_end_keys.py | 54 +++++++++--------- 10 files changed, 121 insertions(+), 132 deletions(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 2383ac670cfc..fa9669c47b95 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -31,7 +31,7 @@ from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.metrics import measure_func from synapse.util.retryutils import NotRetryingDestination -from synapse.util.tracerutils import TracerUtil, trace_defered_function +import synapse.util.tracerutils as tracerutils from ._base import BaseHandler @@ -46,7 +46,7 @@ def __init__(self, hs): self.state = hs.get_state_handler() self._auth_handler = hs.get_auth_handler() - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def get_devices_by_user(self, user_id): """ @@ -58,7 +58,7 @@ def get_devices_by_user(self, user_id): defer.Deferred: list[dict[str, X]]: info on each device """ - TracerUtil.set_tag("user_id", user_id) + tracerutils.set_tag("user_id", user_id) device_map = yield self.store.get_devices_by_user(user_id) ips = yield self.store.get_last_client_ip_by_device(user_id, device_id=None) @@ -67,10 +67,10 @@ def get_devices_by_user(self, user_id): for device in devices: _update_device_from_client_ips(device, ips) - TracerUtil.log_kv(device_map) + tracerutils.log_kv(device_map) return devices - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def get_device(self, user_id, device_id): """ Retrieve the given device @@ -91,13 +91,13 @@ def get_device(self, user_id, device_id): ips = yield self.store.get_last_client_ip_by_device(user_id, device_id) _update_device_from_client_ips(device, ips) - TracerUtil.set_tag("device", device) - TracerUtil.set_tag("ips", ips) + tracerutils.set_tag("device", device) + tracerutils.set_tag("ips", ips) return device @measure_func("device.get_user_ids_changed") - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def get_user_ids_changed(self, user_id, from_token): """Get list of users that have had the devices updated, or have newly @@ -108,8 +108,8 @@ def get_user_ids_changed(self, user_id, from_token): from_token (StreamToken) """ - TracerUtil("user_id", user_id) - TracerUtil.set_tag("from_token", from_token) + tracerutils("user_id", user_id) + tracerutils.set_tag("from_token", from_token) now_room_key = yield self.store.get_room_events_max_id() room_ids = yield self.store.get_rooms_for_user(user_id) @@ -161,7 +161,7 @@ def get_user_ids_changed(self, user_id, from_token): # special-case for an empty prev state: include all members # in the changed list 
if not event_ids: - TracerUtil.log_kv( + tracerutils.log_kv( {"event": "encountered empty previous state", "room_id": room_id} ) for key, event_id in iteritems(current_state_ids): @@ -216,7 +216,7 @@ def get_user_ids_changed(self, user_id, from_token): possibly_joined = [] possibly_left = [] - TracerUtil.log_kv( + tracerutils.log_kv( {"changed": list(possibly_joined), "left": list(possibly_left)} ) @@ -287,7 +287,7 @@ def check_device_registered( raise errors.StoreError(500, "Couldn't generate a device ID.") - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def delete_device(self, user_id, device_id): """ Delete the given device @@ -305,8 +305,8 @@ def delete_device(self, user_id, device_id): except errors.StoreError as e: if e.code == 404: # no match - TracerUtil.set_tag("error", True) - TracerUtil.set_tag("reason", "User doesn't have that device id.") + tracerutils.set_tag("error", True) + tracerutils.set_tag("reason", "User doesn't have that device id.") pass else: raise @@ -319,7 +319,7 @@ def delete_device(self, user_id, device_id): yield self.notify_device_update(user_id, [device_id]) - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def delete_all_devices_for_user(self, user_id, except_device_id=None): """Delete all of the user's devices @@ -355,8 +355,8 @@ def delete_devices(self, user_id, device_ids): except errors.StoreError as e: if e.code == 404: # no match - TracerUtil.set_tag("error", True) - TracerUtil.set_tag("reason", "User doesn't have that device id.") + tracerutils.set_tag("error", True) + tracerutils.set_tag("reason", "User doesn't have that device id.") pass else: raise @@ -477,15 +477,15 @@ def __init__(self, hs, device_handler): iterable=True, ) - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def incoming_device_list_update(self, origin, edu_content): """Called on incoming device list update from federation. Responsible for parsing the EDU and adding to pending updates list. """ - TracerUtil.set_tag("origin", origin) - TracerUtil.set_tag("edu_content", edu_content) + tracerutils.set_tag("origin", origin) + tracerutils.set_tag("edu_content", edu_content) user_id = edu_content.pop("user_id") device_id = edu_content.pop("device_id") stream_id = str(edu_content.pop("stream_id")) # They may come as ints @@ -506,8 +506,8 @@ def incoming_device_list_update(self, origin, edu_content): if not room_ids: # We don't share any rooms with this user. Ignore update, as we # probably won't get any further updates. - TracerUtil.set_tag("error", True) - TracerUtil.log_kv( + tracerutils.set_tag("error", True) + tracerutils.log_kv( { "message": "Got an update from a user which " + "doesn't share a room with the current user." 
diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index c99980518423..de22b37b9ee2 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -20,7 +20,7 @@ from synapse.api.errors import SynapseError from synapse.types import UserID, get_domain_from_id from synapse.util.stringutils import random_string -from synapse.util.tracerutils import TracerUtil, trace_defered_function +import synapse.util.tracerutils as tracerutils logger = logging.getLogger(__name__) @@ -77,7 +77,7 @@ def on_direct_to_device_edu(self, origin, content): "to_device_key", stream_id, users=local_messages.keys() ) - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def send_device_message(self, sender_user_id, message_type, messages): @@ -111,7 +111,7 @@ def send_device_message(self, sender_user_id, message_type, messages): "message_id": message_id, } - TracerUtil.log_kv(local_messages) + tracerutils.log_kv(local_messages) stream_id = yield self.store.add_messages_to_device_inbox( local_messages, remote_edu_contents ) @@ -120,7 +120,7 @@ def send_device_message(self, sender_user_id, message_type, messages): "to_device_key", stream_id, users=local_messages.keys() ) - TracerUtil.log_kv(remote_messages) + tracerutils.log_kv(remote_messages) for destination in remote_messages.keys(): # Enqueue a new federation transaction to send the new # device messages to each remote destination. diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 8b2249fc5be5..6bf9d955bc57 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -15,7 +15,7 @@ # limitations under the License. import logging -from synapse.util.tracerutils import TracerUtil, trace_defered_function +import synapse.util.tracerutils as tracerutils from six import iteritems @@ -46,7 +46,7 @@ def __init__(self, hs): "client_keys", self.on_federation_query_client_keys ) - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def query_devices(self, query_body, timeout): """ Handle a device key query from a client @@ -82,8 +82,8 @@ def query_devices(self, query_body, timeout): else: remote_queries[user_id] = device_ids - TracerUtil.set_tag("local_key_query", local_query) - TracerUtil.set_tag("remote_key_query", remote_queries) + tracerutils.set_tag("local_key_query", local_query) + tracerutils.set_tag("remote_key_query", remote_queries) # First get local devices. 
failures = {} @@ -125,12 +125,12 @@ def query_devices(self, query_body, timeout): r[user_id] = remote_queries[user_id] # Now fetch any devices that we don't have in our cache - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def do_remote_query(destination): destination_query = remote_queries_not_in_cache[destination] - TracerUtil.set_tag("key_query", destination_query) + tracerutils.set_tag("key_query", destination_query) try: remote_result = yield self.federation.query_client_keys( destination, {"device_keys": destination_query}, timeout=timeout @@ -143,8 +143,8 @@ def do_remote_query(destination): except Exception as e: failure = _exception_to_failure(e) failures[destination] = failure - TracerUtil.set_tag("error", True) - TracerUtil.set_tag("reason", failure) + tracerutils.set_tag("error", True) + tracerutils.set_tag("reason", failure) yield make_deferred_yieldable( defer.gatherResults( @@ -158,7 +158,7 @@ def do_remote_query(destination): return {"device_keys": results, "failures": failures} - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def query_local_devices(self, query): """Get E2E device keys for local users @@ -171,7 +171,7 @@ def query_local_devices(self, query): defer.Deferred: (resolves to dict[string, dict[string, dict]]): map from user_id -> device_id -> device details """ - TracerUtil.set_tag("local_query", query) + tracerutils.set_tag("local_query", query) local_query = [] result_dict = {} @@ -179,14 +179,14 @@ def query_local_devices(self, query): # we use UserID.from_string to catch invalid user ids if not self.is_mine(UserID.from_string(user_id)): logger.warning("Request for keys for non-local user %s", user_id) - TracerUtil.log_kv( + tracerutils.log_kv( { "message": "Requested a local key for a user which" + " was not local to the homeserver", "user_id": user_id, } ) - TracerUtil.set_tag("error", True) + tracerutils.set_tag("error", True) raise SynapseError(400, "Not a user here") if not device_ids: @@ -211,7 +211,7 @@ def query_local_devices(self, query): r["unsigned"]["device_display_name"] = display_name result_dict[user_id][device_id] = r - TracerUtil.log_kv(results) + tracerutils.log_kv(results) return result_dict @defer.inlineCallbacks @@ -222,7 +222,7 @@ def on_federation_query_client_keys(self, query_body): res = yield self.query_local_devices(device_keys_query) return {"device_keys": res} - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def claim_one_time_keys(self, query, timeout): local_query = [] @@ -237,8 +237,8 @@ def claim_one_time_keys(self, query, timeout): domain = get_domain_from_id(user_id) remote_queries.setdefault(domain, {})[user_id] = device_keys - TracerUtil.set_tag("local_key_query", local_query) - TracerUtil.set_tag("remote_key_query", remote_queries) + tracerutils.set_tag("local_key_query", local_query) + tracerutils.set_tag("remote_key_query", remote_queries) results = yield self.store.claim_e2e_one_time_keys(local_query) @@ -251,10 +251,10 @@ def claim_one_time_keys(self, query, timeout): key_id: json.loads(json_bytes) } - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def claim_client_keys(destination): - TracerUtil.set_tag("destination", destination) + tracerutils.set_tag("destination", destination) device_keys = remote_queries[destination] try: remote_result = yield self.federation.claim_client_keys( @@ -267,8 +267,8 @@ def claim_client_keys(destination): except Exception as e: failure 
= _exception_to_failure(e) failures[destination] = failure - TracerUtil.set_tag("error", True) - TracerUtil.set_tag("reason", failure) + tracerutils.set_tag("error", True) + tracerutils.set_tag("reason", failure) yield make_deferred_yieldable( defer.gatherResults( @@ -292,21 +292,21 @@ def claim_client_keys(destination): ), ) - TracerUtil.log_kv({"one_time_keys": json_result, "failures": failures}) + tracerutils.log_kv({"one_time_keys": json_result, "failures": failures}) return {"one_time_keys": json_result, "failures": failures} - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def upload_keys_for_user(self, user_id, device_id, keys): - TracerUtil.set_tag("user_id", user_id) - TracerUtil.set_tag("device_id", device_id) - TracerUtil.set_tag("keys", keys) + tracerutils.set_tag("user_id", user_id) + tracerutils.set_tag("device_id", device_id) + tracerutils.set_tag("keys", keys) time_now = self.clock.time_msec() # TODO: Validate the JSON to make sure it has the right keys. device_keys = keys.get("device_keys", None) - TracerUtil.set_tag("device_keys", device_keys) + tracerutils.set_tag("device_keys", device_keys) if device_keys: logger.info( "Updating device_keys for device %r for user %s at %d", @@ -328,7 +328,7 @@ def upload_keys_for_user(self, user_id, device_id, keys): user_id, device_id, time_now, one_time_keys ) else: - TracerUtil.log_kv( + tracerutils.log_kv( {"event": "did not upload one_time_keys", "reason": "no keys given"} ) @@ -341,10 +341,10 @@ def upload_keys_for_user(self, user_id, device_id, keys): result = yield self.store.count_e2e_one_time_keys(user_id, device_id) - TracerUtil.set_tag("one_time_key_counts", result) + tracerutils.set_tag("one_time_key_counts", result) return {"one_time_key_counts": result} - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def _upload_one_time_keys_for_user( self, user_id, device_id, time_now, one_time_keys diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 07724d1c4a1e..a90ec2f9fcca 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -27,7 +27,7 @@ SynapseError, ) from synapse.util.async_helpers import Linearizer -from synapse.util.tracerutils import TracerUtil, trace_defered_function +import synapse.util.tracerutils as tracerutils logger = logging.getLogger(__name__) @@ -50,7 +50,7 @@ def __init__(self, hs): # changed. 
self._upload_linearizer = Linearizer("upload_room_keys_lock") - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def get_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk get the E2E room keys for a given backup, optionally filtered to a given @@ -86,10 +86,10 @@ def get_room_keys(self, user_id, version, room_id=None, session_id=None): user_id, version, room_id, session_id ) - TracerUtil.log_kv(results) + tracerutils.log_kv(results) return results - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def delete_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk delete the E2E room keys for a given backup, optionally filtered to a given @@ -111,7 +111,7 @@ def delete_room_keys(self, user_id, version, room_id=None, session_id=None): with (yield self._upload_linearizer.queue(user_id)): yield self.store.delete_e2e_room_keys(user_id, version, room_id, session_id) - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def upload_room_keys(self, user_id, version, room_keys): """Bulk upload a list of room keys into a given backup version, asserting @@ -179,7 +179,7 @@ def upload_room_keys(self, user_id, version, room_keys): user_id, version, room_id, session_id, session ) - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def _upload_room_key(self, user_id, version, room_id, session_id, room_key): """Upload a given room_key for a given room and session into a given @@ -242,7 +242,7 @@ def _should_replace_room_key(current_room_key, room_key): return False return True - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def create_version(self, user_id, version_info): """Create a new backup version. This automatically becomes the new @@ -301,7 +301,7 @@ def get_version_info(self, user_id, version=None): raise return res - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def delete_version(self, user_id, version=None): """Deletes a given version of the user's e2e_room_keys backup @@ -322,7 +322,7 @@ def delete_version(self, user_id, version=None): else: raise - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def update_version(self, user_id, version, version_info): """Update the info about a given version of the user's backup diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 8de9e12df493..40052e7a05af 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -25,11 +25,7 @@ parse_string, ) from synapse.types import StreamToken -from synapse.util.tracerutils import ( - TracerUtil, - trace_defered_function_using_operation_name, -) - +import synapse.util.tracerutils as tracerutils from ._base import client_patterns logger = logging.getLogger(__name__) @@ -72,7 +68,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - @trace_defered_function_using_operation_name("upload_keys") + @tracerutils.trace_defered_function_using_operation_name("upload_keys") @defer.inlineCallbacks def on_POST(self, request, device_id): requester = yield self.auth.get_user_by_req(request, allow_guest=True) @@ -83,8 +79,8 @@ def on_POST(self, request, device_id): # passing the device_id here is deprecated; however, we allow it # for now for compatibility with older clients. 
if requester.device_id is not None and device_id != requester.device_id: - TracerUtil.set_tag("error", True) - TracerUtil.log_kv( + tracerutils.set_tag("error", True) + tracerutils.log_kv( { "message": "Client uploading keys for a different device", "logged_in_id": requester.device_id, diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 34ab74d3a4e8..ac03d5889977 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -23,10 +23,7 @@ parse_json_object_from_request, parse_string, ) -from synapse.util.tracerutils import ( - TracerUtil, - trace_defered_function_using_operation_name, -) +import synapse.util.tracerutils as tracerutils from ._base import client_patterns @@ -315,7 +312,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_room_keys_handler = hs.get_e2e_room_keys_handler() - @trace_defered_function_using_operation_name("get_room_keys_version") + @tracerutils.trace_defered_function_using_operation_name("get_room_keys_version") @defer.inlineCallbacks def on_GET(self, request, version): """ diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index a7782cbd7cb3..9df38a8f5ef6 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -20,11 +20,7 @@ from synapse.http import servlet from synapse.http.servlet import parse_json_object_from_request from synapse.rest.client.transactions import HttpTransactionCache -from synapse.util.tracerutils import ( - TracerUtil, - trace_defered_function_using_operation_name, - tag_args, -) +import synapse.util.tracerutils as tracerutils from ._base import client_patterns @@ -47,10 +43,10 @@ def __init__(self, hs): self.txns = HttpTransactionCache(hs) self.device_message_handler = hs.get_device_message_handler() - @trace_defered_function_using_operation_name("sendToDevice") + @tracerutils.trace_defered_function_using_operation_name("sendToDevice") def on_PUT(self, request, message_type, txn_id): - TracerUtil.set_tag("message_type", message_type) - TracerUtil.set_tag("txn_id", txn_id) + tracerutils.set_tag("message_type", message_type) + tracerutils.set_tag("txn_id", txn_id) return self.txns.fetch_or_execute_request( request, self._put, request, message_type, txn_id ) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 4a45926c45d3..8687b5a046c2 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -22,7 +22,7 @@ from synapse.storage._base import SQLBaseStore from synapse.storage.background_updates import BackgroundUpdateStore from synapse.util.caches.expiringcache import ExpiringCache -from synapse.util.tracerutils import TracerUtil, trace_defered_function, trace_function +import synapse.util.tracerutils as tracerutils logger = logging.getLogger(__name__) @@ -73,7 +73,7 @@ def get_new_messages_for_device_txn(txn): "get_new_messages_for_device", get_new_messages_for_device_txn ) - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def delete_messages_for_device(self, user_id, device_id, up_to_stream_id): """ @@ -90,14 +90,14 @@ def delete_messages_for_device(self, user_id, device_id, up_to_stream_id): (user_id, device_id), None ) - TracerUtil.set_tag("last_deleted_stream_id", last_deleted_stream_id) + tracerutils.set_tag("last_deleted_stream_id", last_deleted_stream_id) if last_deleted_stream_id: has_changed = 
self._device_inbox_stream_cache.has_entity_changed( user_id, last_deleted_stream_id ) if not has_changed: - TracerUtil.log_kv({"message": "No changes in cache since last check"}) + tracerutils.log_kv({"message": "No changes in cache since last check"}) return 0 def delete_messages_for_device_txn(txn): @@ -113,7 +113,7 @@ def delete_messages_for_device_txn(txn): "delete_messages_for_device", delete_messages_for_device_txn ) - TracerUtil.log_kv( + tracerutils.log_kv( {"message": "deleted {} messages for device".format(count), "count": count} ) @@ -127,7 +127,7 @@ def delete_messages_for_device_txn(txn): return count - @trace_defered_function + @tracerutils.trace_defered_function def get_new_device_msgs_for_remote( self, destination, last_stream_id, current_stream_id, limit ): @@ -143,23 +143,23 @@ def get_new_device_msgs_for_remote( in the stream the messages got to. """ - TracerUtil.set_tag("destination", destination) - TracerUtil.set_tag("last_stream_id", last_stream_id) - TracerUtil.set_tag("current_stream_id", current_stream_id) - TracerUtil.set_tag("limit", limit) + tracerutils.set_tag("destination", destination) + tracerutils.set_tag("last_stream_id", last_stream_id) + tracerutils.set_tag("current_stream_id", current_stream_id) + tracerutils.set_tag("limit", limit) has_changed = self._device_federation_outbox_stream_cache.has_entity_changed( destination, last_stream_id ) if not has_changed or last_stream_id == current_stream_id: - TracerUtil.log_kv({"message": "No new messages in stream"}) + tracerutils.log_kv({"message": "No new messages in stream"}) return defer.succeed(([], current_stream_id)) if limit <= 0: # This can happen if we run out of room for EDUs in the transaction. return defer.succeed(([], last_stream_id)) - @trace_function + @tracerutils.trace_function def get_new_messages_for_remote_destination_txn(txn): sql = ( "SELECT stream_id, messages_json FROM device_federation_outbox" @@ -174,7 +174,7 @@ def get_new_messages_for_remote_destination_txn(txn): stream_pos = row[0] messages.append(json.loads(row[1])) if len(messages) < limit: - TracerUtil.log_kv( + tracerutils.log_kv( {"message": "Set stream position to current position"} ) stream_pos = current_stream_id @@ -185,7 +185,7 @@ def get_new_messages_for_remote_destination_txn(txn): get_new_messages_for_remote_destination_txn, ) - @trace_defered_function + @tracerutils.trace_defered_function def delete_device_msgs_for_remote(self, destination, up_to_stream_id): """Used to delete messages when the remote destination acknowledges their receipt. @@ -236,7 +236,7 @@ def __init__(self, db_conn, hs): expiry_ms=30 * 60 * 1000, ) - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def add_messages_to_device_inbox( self, local_messages_by_user_then_device, remote_messages_by_destination diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 4627bc049a72..12fa70d416cd 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -from synapse.util.tracerutils import TracerUtil, trace_defered_function +import synapse.util.tracerutils as tracerutils from six import iteritems @@ -300,7 +300,7 @@ def _mark_as_sent_devices_by_remote_txn(self, txn, destination, stream_id): def get_device_stream_token(self): return self._device_list_id_gen.get_current_token() - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def get_user_devices_from_cache(self, query_list): """Get the devices (and keys if any) for remote users from the cache. @@ -332,8 +332,8 @@ def get_user_devices_from_cache(self, query_list): else: results[user_id] = yield self._get_cached_devices_for_user(user_id) - TracerUtil.set_tag("in_cache", results) - TracerUtil.set_tag("not_in_cache", user_ids_not_in_cache) + tracerutils.set_tag("in_cache", results) + tracerutils.set_tag("not_in_cache", user_ids_not_in_cache) return (user_ids_not_in_cache, results) diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index acfe3da0ad07..98246c347d02 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -22,11 +22,11 @@ from ._base import SQLBaseStore, db_to_json -from synapse.util.tracerutils import TracerUtil, trace_defered_function, trace_function +import synapse.util.tracerutils as tracerutils class EndToEndKeyWorkerStore(SQLBaseStore): - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def get_e2e_device_keys( self, query_list, include_all_devices=False, include_deleted_devices=False @@ -43,7 +43,7 @@ def get_e2e_device_keys( Dict mapping from user-id to dict mapping from device_id to dict containing "key_json", "device_display_name". """ - TracerUtil.set_tag("query_list", query_list) + tracerutils.set_tag("query_list", query_list) if not query_list: return {} @@ -61,12 +61,12 @@ def get_e2e_device_keys( return results - @trace_function + @tracerutils.trace_function def _get_e2e_device_keys_txn( self, txn, query_list, include_all_devices=False, include_deleted_devices=False ): - TracerUtil.set_tag("include_all_devices", include_all_devices) - TracerUtil.set_tag("include_deleted_devices", include_deleted_devices) + tracerutils.set_tag("include_all_devices", include_all_devices) + tracerutils.set_tag("include_deleted_devices", include_deleted_devices) query_clauses = [] query_params = [] @@ -112,10 +112,10 @@ def _get_e2e_device_keys_txn( for user_id, device_id in deleted_devices: result.setdefault(user_id, {})[device_id] = None - TracerUtil.log_kv(result) + tracerutils.log_kv(result) return result - @trace_defered_function + @tracerutils.trace_defered_function @defer.inlineCallbacks def get_e2e_one_time_keys(self, user_id, device_id, key_ids): """Retrieve a number of one-time keys for a user @@ -131,9 +131,9 @@ def get_e2e_one_time_keys(self, user_id, device_id, key_ids): key_id) to json string for key """ - TracerUtil.set_tag("user_id", user_id) - TracerUtil.set_tag("device_id", device_id) - TracerUtil.set_tag("key_ids", key_ids) + tracerutils.set_tag("user_id", user_id) + tracerutils.set_tag("device_id", device_id) + tracerutils.set_tag("key_ids", key_ids) rows = yield self._simple_select_many_batch( table="e2e_one_time_keys_json", @@ -159,11 +159,11 @@ def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys): (algorithm, key_id, key json) """ - @trace_function + @tracerutils.strace_function def _add_e2e_one_time_keys(txn): - TracerUtil.set_tag("user_id", user_id) - TracerUtil.set_tag("device_id", 
device_id) - TracerUtil.set_tag("new_keys", new_keys) + tracerutils.set_tag("user_id", user_id) + tracerutils.set_tag("device_id", device_id) + tracerutils.set_tag("new_keys", new_keys) # We are protected from race between lookup and insertion due to # a unique constraint. If there is a race of two calls to # `add_e2e_one_time_keys` then they'll conflict and we will only @@ -219,12 +219,12 @@ def set_e2e_device_keys(self, user_id, device_id, time_now, device_keys): or the keys were already in the database. """ - @trace_function + @tracerutils.trace_function def _set_e2e_device_keys_txn(txn): - TracerUtil.set_tag("user_id", user_id) - TracerUtil.set_tag("device_id", device_id) - TracerUtil.set_tag("time_now", time_now) - TracerUtil.set_tag("device_keys", device_keys) + tracerutils.set_tag("user_id", user_id) + tracerutils.set_tag("device_id", device_id) + tracerutils.set_tag("time_now", time_now) + tracerutils.set_tag("device_keys", device_keys) old_key_json = self._simple_select_one_onecol_txn( txn, @@ -239,7 +239,7 @@ def _set_e2e_device_keys_txn(txn): new_key_json = encode_canonical_json(device_keys).decode("utf-8") if old_key_json == new_key_json: - TracerUtil.log_kv({"event", "key already stored"}) + tracerutils.log_kv({"event", "key already stored"}) return False self._simple_upsert_txn( @@ -256,7 +256,7 @@ def _set_e2e_device_keys_txn(txn): def claim_e2e_one_time_keys(self, query_list): """Take a list of one time keys out of the database""" - @trace_function + @tracerutils.trace_function def _claim_e2e_one_time_keys(txn): sql = ( "SELECT key_id, key_json FROM e2e_one_time_keys_json" @@ -278,11 +278,11 @@ def _claim_e2e_one_time_keys(txn): " AND key_id = ?" ) for user_id, device_id, algorithm, key_id in delete: - TracerUtil.log_kv( + tracerutils.log_kv( {"message": "executing claim transaction on database"} ) txn.execute(sql, (user_id, device_id, algorithm, key_id)) - TracerUtil.log_kv( + tracerutils.log_kv( {"message": "finished executing and invalidating cache"} ) self._invalidate_cache_and_stream( @@ -293,10 +293,10 @@ def _claim_e2e_one_time_keys(txn): return self.runInteraction("claim_e2e_one_time_keys", _claim_e2e_one_time_keys) def delete_e2e_keys_by_device(self, user_id, device_id): - @trace_function + @tracerutils.trace_function def delete_e2e_keys_by_device_txn(txn): - TracerUtil.set_tag("user_id", user_id) - TracerUtil.set_tag("device_id", device_id) + tracerutils.set_tag("user_id", user_id) + tracerutils.set_tag("device_id", device_id) self._simple_delete_txn( txn, table="e2e_device_keys_json", From d94897e8185b112d3963dca00d8c2e329a515a24 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 3 Jul 2019 10:53:02 +0100 Subject: [PATCH 08/46] Include servletname in incoming-request trace --- synapse/federation/transport/server.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 663264dec48d..c2188390aed0 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -297,6 +297,7 @@ async def new_func(request, *args, **kwargs): opentracing.tags.HTTP_URL: request.get_redacted_uri(), opentracing.tags.PEER_HOST_IPV6: request.getClientIP(), "authenticated_entity": origin, + "servlet_name": request.request_metrics.name, }, ): if origin: From 28113ad335c66753d3d3b9c52574b19c4e91fef1 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 3 Jul 2019 11:09:42 +0100 Subject: [PATCH 09/46] typo --- synapse/storage/end_to_end_keys.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 98246c347d02..9fbff3ebf47c 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -159,7 +159,7 @@ def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys): (algorithm, key_id, key json) """ - @tracerutils.strace_function + @tracerutils.trace_function def _add_e2e_one_time_keys(txn): tracerutils.set_tag("user_id", user_id) tracerutils.set_tag("device_id", device_id) From d9f0c7ff470fe650922cb6a53397f392777e68f3 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 3 Jul 2019 11:52:27 +0100 Subject: [PATCH 10/46] How did that half of the statement get deleted? --- synapse/handlers/device.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index fa9669c47b95..81f9795315b9 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -108,7 +108,7 @@ def get_user_ids_changed(self, user_id, from_token): from_token (StreamToken) """ - tracerutils("user_id", user_id) + tracerutils.set_tag("user_id", user_id) tracerutils.set_tag("from_token", from_token) now_room_key = yield self.store.get_room_events_max_id() From 7ae7e796ff3b77c865a843ff36b4fd479be41dd9 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Thu, 4 Jul 2019 14:24:39 +0100 Subject: [PATCH 11/46] These functions were not deferreds! --- synapse/rest/client/v2_alpha/sendtodevice.py | 2 +- synapse/storage/deviceinbox.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index 9df38a8f5ef6..d58291fc856a 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -43,7 +43,7 @@ def __init__(self, hs): self.txns = HttpTransactionCache(hs) self.device_message_handler = hs.get_device_message_handler() - @tracerutils.trace_defered_function_using_operation_name("sendToDevice") + @tracerutils.trace_function_using_operation_name("sendToDevice") def on_PUT(self, request, message_type, txn_id): tracerutils.set_tag("message_type", message_type) tracerutils.set_tag("txn_id", txn_id) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 8687b5a046c2..6011312af9e4 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -127,7 +127,7 @@ def delete_messages_for_device_txn(txn): return count - @tracerutils.trace_defered_function + @tracerutils.trace_function def get_new_device_msgs_for_remote( self, destination, last_stream_id, current_stream_id, limit ): From bfc50050fd314897c1f04dcc489893a6be2466ce Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Thu, 4 Jul 2019 17:11:46 +0100 Subject: [PATCH 12/46] The great logging/ migration --- synapse/handlers/device.py | 46 ++++++++-------- synapse/handlers/devicemessage.py | 8 +-- synapse/handlers/e2e_keys.py | 58 ++++++++++---------- synapse/handlers/e2e_room_keys.py | 18 +++--- synapse/rest/client/v2_alpha/keys.py | 8 +-- synapse/rest/client/v2_alpha/room_keys.py | 4 +- synapse/rest/client/v2_alpha/sendtodevice.py | 8 +-- synapse/storage/deviceinbox.py | 30 +++++----- synapse/storage/devices.py | 8 +-- synapse/storage/e2e_room_keys.py | 2 +- synapse/storage/end_to_end_keys.py | 54 +++++++++--------- 11 files changed, 122 insertions(+), 122 deletions(-) diff --git a/synapse/handlers/device.py 
b/synapse/handlers/device.py index 81f9795315b9..746e2179e14a 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -31,7 +31,7 @@ from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.metrics import measure_func from synapse.util.retryutils import NotRetryingDestination -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing from ._base import BaseHandler @@ -46,7 +46,7 @@ def __init__(self, hs): self.state = hs.get_state_handler() self._auth_handler = hs.get_auth_handler() - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def get_devices_by_user(self, user_id): """ @@ -58,7 +58,7 @@ def get_devices_by_user(self, user_id): defer.Deferred: list[dict[str, X]]: info on each device """ - tracerutils.set_tag("user_id", user_id) + opentracing.set_tag("user_id", user_id) device_map = yield self.store.get_devices_by_user(user_id) ips = yield self.store.get_last_client_ip_by_device(user_id, device_id=None) @@ -67,10 +67,10 @@ def get_devices_by_user(self, user_id): for device in devices: _update_device_from_client_ips(device, ips) - tracerutils.log_kv(device_map) + opentracing.log_kv(device_map) return devices - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def get_device(self, user_id, device_id): """ Retrieve the given device @@ -91,13 +91,13 @@ def get_device(self, user_id, device_id): ips = yield self.store.get_last_client_ip_by_device(user_id, device_id) _update_device_from_client_ips(device, ips) - tracerutils.set_tag("device", device) - tracerutils.set_tag("ips", ips) + opentracing.set_tag("device", device) + opentracing.set_tag("ips", ips) return device @measure_func("device.get_user_ids_changed") - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def get_user_ids_changed(self, user_id, from_token): """Get list of users that have had the devices updated, or have newly @@ -108,8 +108,8 @@ def get_user_ids_changed(self, user_id, from_token): from_token (StreamToken) """ - tracerutils.set_tag("user_id", user_id) - tracerutils.set_tag("from_token", from_token) + opentracing.set_tag("user_id", user_id) + opentracing.set_tag("from_token", from_token) now_room_key = yield self.store.get_room_events_max_id() room_ids = yield self.store.get_rooms_for_user(user_id) @@ -161,7 +161,7 @@ def get_user_ids_changed(self, user_id, from_token): # special-case for an empty prev state: include all members # in the changed list if not event_ids: - tracerutils.log_kv( + opentracing.log_kv( {"event": "encountered empty previous state", "room_id": room_id} ) for key, event_id in iteritems(current_state_ids): @@ -216,7 +216,7 @@ def get_user_ids_changed(self, user_id, from_token): possibly_joined = [] possibly_left = [] - tracerutils.log_kv( + opentracing.log_kv( {"changed": list(possibly_joined), "left": list(possibly_left)} ) @@ -287,7 +287,7 @@ def check_device_registered( raise errors.StoreError(500, "Couldn't generate a device ID.") - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def delete_device(self, user_id, device_id): """ Delete the given device @@ -305,8 +305,8 @@ def delete_device(self, user_id, device_id): except errors.StoreError as e: if e.code == 404: # no match - tracerutils.set_tag("error", True) - tracerutils.set_tag("reason", "User doesn't have that device id.") + opentracing.set_tag("error", True) + 
opentracing.set_tag("reason", "User doesn't have that device id.") pass else: raise @@ -319,7 +319,7 @@ def delete_device(self, user_id, device_id): yield self.notify_device_update(user_id, [device_id]) - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def delete_all_devices_for_user(self, user_id, except_device_id=None): """Delete all of the user's devices @@ -355,8 +355,8 @@ def delete_devices(self, user_id, device_ids): except errors.StoreError as e: if e.code == 404: # no match - tracerutils.set_tag("error", True) - tracerutils.set_tag("reason", "User doesn't have that device id.") + opentracing.set_tag("error", True) + opentracing.set_tag("reason", "User doesn't have that device id.") pass else: raise @@ -477,15 +477,15 @@ def __init__(self, hs, device_handler): iterable=True, ) - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def incoming_device_list_update(self, origin, edu_content): """Called on incoming device list update from federation. Responsible for parsing the EDU and adding to pending updates list. """ - tracerutils.set_tag("origin", origin) - tracerutils.set_tag("edu_content", edu_content) + opentracing.set_tag("origin", origin) + opentracing.set_tag("edu_content", edu_content) user_id = edu_content.pop("user_id") device_id = edu_content.pop("device_id") stream_id = str(edu_content.pop("stream_id")) # They may come as ints @@ -506,8 +506,8 @@ def incoming_device_list_update(self, origin, edu_content): if not room_ids: # We don't share any rooms with this user. Ignore update, as we # probably won't get any further updates. - tracerutils.set_tag("error", True) - tracerutils.log_kv( + opentracing.set_tag("error", True) + opentracing.log_kv( { "message": "Got an update from a user which " + "doesn't share a room with the current user." diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index de22b37b9ee2..6463d900cdf0 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -20,7 +20,7 @@ from synapse.api.errors import SynapseError from synapse.types import UserID, get_domain_from_id from synapse.util.stringutils import random_string -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing logger = logging.getLogger(__name__) @@ -77,7 +77,7 @@ def on_direct_to_device_edu(self, origin, content): "to_device_key", stream_id, users=local_messages.keys() ) - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def send_device_message(self, sender_user_id, message_type, messages): @@ -111,7 +111,7 @@ def send_device_message(self, sender_user_id, message_type, messages): "message_id": message_id, } - tracerutils.log_kv(local_messages) + opentracing.log_kv(local_messages) stream_id = yield self.store.add_messages_to_device_inbox( local_messages, remote_edu_contents ) @@ -120,7 +120,7 @@ def send_device_message(self, sender_user_id, message_type, messages): "to_device_key", stream_id, users=local_messages.keys() ) - tracerutils.log_kv(remote_messages) + opentracing.log_kv(remote_messages) for destination in remote_messages.keys(): # Enqueue a new federation transaction to send the new # device messages to each remote destination. 
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 6bf9d955bc57..01d5d383bfa5 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -15,7 +15,7 @@ # limitations under the License. import logging -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing from six import iteritems @@ -46,7 +46,7 @@ def __init__(self, hs): "client_keys", self.on_federation_query_client_keys ) - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def query_devices(self, query_body, timeout): """ Handle a device key query from a client @@ -82,8 +82,8 @@ def query_devices(self, query_body, timeout): else: remote_queries[user_id] = device_ids - tracerutils.set_tag("local_key_query", local_query) - tracerutils.set_tag("remote_key_query", remote_queries) + opentracing.set_tag("local_key_query", local_query) + opentracing.set_tag("remote_key_query", remote_queries) # First get local devices. failures = {} @@ -125,12 +125,12 @@ def query_devices(self, query_body, timeout): r[user_id] = remote_queries[user_id] # Now fetch any devices that we don't have in our cache - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def do_remote_query(destination): destination_query = remote_queries_not_in_cache[destination] - tracerutils.set_tag("key_query", destination_query) + opentracing.set_tag("key_query", destination_query) try: remote_result = yield self.federation.query_client_keys( destination, {"device_keys": destination_query}, timeout=timeout @@ -143,8 +143,8 @@ def do_remote_query(destination): except Exception as e: failure = _exception_to_failure(e) failures[destination] = failure - tracerutils.set_tag("error", True) - tracerutils.set_tag("reason", failure) + opentracing.set_tag("error", True) + opentracing.set_tag("reason", failure) yield make_deferred_yieldable( defer.gatherResults( @@ -158,7 +158,7 @@ def do_remote_query(destination): return {"device_keys": results, "failures": failures} - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def query_local_devices(self, query): """Get E2E device keys for local users @@ -171,7 +171,7 @@ def query_local_devices(self, query): defer.Deferred: (resolves to dict[string, dict[string, dict]]): map from user_id -> device_id -> device details """ - tracerutils.set_tag("local_query", query) + opentracing.set_tag("local_query", query) local_query = [] result_dict = {} @@ -179,14 +179,14 @@ def query_local_devices(self, query): # we use UserID.from_string to catch invalid user ids if not self.is_mine(UserID.from_string(user_id)): logger.warning("Request for keys for non-local user %s", user_id) - tracerutils.log_kv( + opentracing.log_kv( { "message": "Requested a local key for a user which" + " was not local to the homeserver", "user_id": user_id, } ) - tracerutils.set_tag("error", True) + opentracing.set_tag("error", True) raise SynapseError(400, "Not a user here") if not device_ids: @@ -211,7 +211,7 @@ def query_local_devices(self, query): r["unsigned"]["device_display_name"] = display_name result_dict[user_id][device_id] = r - tracerutils.log_kv(results) + opentracing.log_kv(results) return result_dict @defer.inlineCallbacks @@ -222,7 +222,7 @@ def on_federation_query_client_keys(self, query_body): res = yield self.query_local_devices(device_keys_query) return {"device_keys": res} - @tracerutils.trace_defered_function + 
@opentracing.trace_defered_function @defer.inlineCallbacks def claim_one_time_keys(self, query, timeout): local_query = [] @@ -237,8 +237,8 @@ def claim_one_time_keys(self, query, timeout): domain = get_domain_from_id(user_id) remote_queries.setdefault(domain, {})[user_id] = device_keys - tracerutils.set_tag("local_key_query", local_query) - tracerutils.set_tag("remote_key_query", remote_queries) + opentracing.set_tag("local_key_query", local_query) + opentracing.set_tag("remote_key_query", remote_queries) results = yield self.store.claim_e2e_one_time_keys(local_query) @@ -251,10 +251,10 @@ def claim_one_time_keys(self, query, timeout): key_id: json.loads(json_bytes) } - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def claim_client_keys(destination): - tracerutils.set_tag("destination", destination) + opentracing.set_tag("destination", destination) device_keys = remote_queries[destination] try: remote_result = yield self.federation.claim_client_keys( @@ -267,8 +267,8 @@ def claim_client_keys(destination): except Exception as e: failure = _exception_to_failure(e) failures[destination] = failure - tracerutils.set_tag("error", True) - tracerutils.set_tag("reason", failure) + opentracing.set_tag("error", True) + opentracing.set_tag("reason", failure) yield make_deferred_yieldable( defer.gatherResults( @@ -292,21 +292,21 @@ def claim_client_keys(destination): ), ) - tracerutils.log_kv({"one_time_keys": json_result, "failures": failures}) + opentracing.log_kv({"one_time_keys": json_result, "failures": failures}) return {"one_time_keys": json_result, "failures": failures} - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def upload_keys_for_user(self, user_id, device_id, keys): - tracerutils.set_tag("user_id", user_id) - tracerutils.set_tag("device_id", device_id) - tracerutils.set_tag("keys", keys) + opentracing.set_tag("user_id", user_id) + opentracing.set_tag("device_id", device_id) + opentracing.set_tag("keys", keys) time_now = self.clock.time_msec() # TODO: Validate the JSON to make sure it has the right keys. 
device_keys = keys.get("device_keys", None) - tracerutils.set_tag("device_keys", device_keys) + opentracing.set_tag("device_keys", device_keys) if device_keys: logger.info( "Updating device_keys for device %r for user %s at %d", @@ -328,7 +328,7 @@ def upload_keys_for_user(self, user_id, device_id, keys): user_id, device_id, time_now, one_time_keys ) else: - tracerutils.log_kv( + opentracing.log_kv( {"event": "did not upload one_time_keys", "reason": "no keys given"} ) @@ -341,10 +341,10 @@ def upload_keys_for_user(self, user_id, device_id, keys): result = yield self.store.count_e2e_one_time_keys(user_id, device_id) - tracerutils.set_tag("one_time_key_counts", result) + opentracing.set_tag("one_time_key_counts", result) return {"one_time_key_counts": result} - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def _upload_one_time_keys_for_user( self, user_id, device_id, time_now, one_time_keys diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index a90ec2f9fcca..0459d52a3c81 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -27,7 +27,7 @@ SynapseError, ) from synapse.util.async_helpers import Linearizer -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing logger = logging.getLogger(__name__) @@ -50,7 +50,7 @@ def __init__(self, hs): # changed. self._upload_linearizer = Linearizer("upload_room_keys_lock") - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def get_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk get the E2E room keys for a given backup, optionally filtered to a given @@ -86,10 +86,10 @@ def get_room_keys(self, user_id, version, room_id=None, session_id=None): user_id, version, room_id, session_id ) - tracerutils.log_kv(results) + opentracing.log_kv(results) return results - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def delete_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk delete the E2E room keys for a given backup, optionally filtered to a given @@ -111,7 +111,7 @@ def delete_room_keys(self, user_id, version, room_id=None, session_id=None): with (yield self._upload_linearizer.queue(user_id)): yield self.store.delete_e2e_room_keys(user_id, version, room_id, session_id) - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def upload_room_keys(self, user_id, version, room_keys): """Bulk upload a list of room keys into a given backup version, asserting @@ -179,7 +179,7 @@ def upload_room_keys(self, user_id, version, room_keys): user_id, version, room_id, session_id, session ) - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def _upload_room_key(self, user_id, version, room_id, session_id, room_key): """Upload a given room_key for a given room and session into a given @@ -242,7 +242,7 @@ def _should_replace_room_key(current_room_key, room_key): return False return True - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def create_version(self, user_id, version_info): """Create a new backup version. 
This automatically becomes the new @@ -301,7 +301,7 @@ def get_version_info(self, user_id, version=None): raise return res - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def delete_version(self, user_id, version=None): """Deletes a given version of the user's e2e_room_keys backup @@ -322,7 +322,7 @@ def delete_version(self, user_id, version=None): else: raise - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def update_version(self, user_id, version, version_info): """Update the info about a given version of the user's backup diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 40052e7a05af..00260cde7351 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -25,7 +25,7 @@ parse_string, ) from synapse.types import StreamToken -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing from ._base import client_patterns logger = logging.getLogger(__name__) @@ -68,7 +68,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - @tracerutils.trace_defered_function_using_operation_name("upload_keys") + @opentracing.trace_defered_function_using_operation_name("upload_keys") @defer.inlineCallbacks def on_POST(self, request, device_id): requester = yield self.auth.get_user_by_req(request, allow_guest=True) @@ -79,8 +79,8 @@ def on_POST(self, request, device_id): # passing the device_id here is deprecated; however, we allow it # for now for compatibility with older clients. if requester.device_id is not None and device_id != requester.device_id: - tracerutils.set_tag("error", True) - tracerutils.log_kv( + opentracing.set_tag("error", True) + opentracing.log_kv( { "message": "Client uploading keys for a different device", "logged_in_id": requester.device_id, diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index ac03d5889977..51bd4f12fff0 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -23,7 +23,7 @@ parse_json_object_from_request, parse_string, ) -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing from ._base import client_patterns @@ -312,7 +312,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_room_keys_handler = hs.get_e2e_room_keys_handler() - @tracerutils.trace_defered_function_using_operation_name("get_room_keys_version") + @opentracing.trace_defered_function_using_operation_name("get_room_keys_version") @defer.inlineCallbacks def on_GET(self, request, version): """ diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index d58291fc856a..68bbcf4a48c2 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -20,7 +20,7 @@ from synapse.http import servlet from synapse.http.servlet import parse_json_object_from_request from synapse.rest.client.transactions import HttpTransactionCache -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing from ._base import client_patterns @@ -43,10 +43,10 @@ def __init__(self, hs): self.txns = HttpTransactionCache(hs) self.device_message_handler = hs.get_device_message_handler() - @tracerutils.trace_function_using_operation_name("sendToDevice") + 
@opentracing.trace_function_using_operation_name("sendToDevice") def on_PUT(self, request, message_type, txn_id): - tracerutils.set_tag("message_type", message_type) - tracerutils.set_tag("txn_id", txn_id) + opentracing.set_tag("message_type", message_type) + opentracing.set_tag("txn_id", txn_id) return self.txns.fetch_or_execute_request( request, self._put, request, message_type, txn_id ) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 6011312af9e4..91c6cd4cf60c 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -22,7 +22,7 @@ from synapse.storage._base import SQLBaseStore from synapse.storage.background_updates import BackgroundUpdateStore from synapse.util.caches.expiringcache import ExpiringCache -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing logger = logging.getLogger(__name__) @@ -73,7 +73,7 @@ def get_new_messages_for_device_txn(txn): "get_new_messages_for_device", get_new_messages_for_device_txn ) - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def delete_messages_for_device(self, user_id, device_id, up_to_stream_id): """ @@ -90,14 +90,14 @@ def delete_messages_for_device(self, user_id, device_id, up_to_stream_id): (user_id, device_id), None ) - tracerutils.set_tag("last_deleted_stream_id", last_deleted_stream_id) + opentracing.set_tag("last_deleted_stream_id", last_deleted_stream_id) if last_deleted_stream_id: has_changed = self._device_inbox_stream_cache.has_entity_changed( user_id, last_deleted_stream_id ) if not has_changed: - tracerutils.log_kv({"message": "No changes in cache since last check"}) + opentracing.log_kv({"message": "No changes in cache since last check"}) return 0 def delete_messages_for_device_txn(txn): @@ -113,7 +113,7 @@ def delete_messages_for_device_txn(txn): "delete_messages_for_device", delete_messages_for_device_txn ) - tracerutils.log_kv( + opentracing.log_kv( {"message": "deleted {} messages for device".format(count), "count": count} ) @@ -127,7 +127,7 @@ def delete_messages_for_device_txn(txn): return count - @tracerutils.trace_function + @opentracing.trace_function def get_new_device_msgs_for_remote( self, destination, last_stream_id, current_stream_id, limit ): @@ -143,23 +143,23 @@ def get_new_device_msgs_for_remote( in the stream the messages got to. """ - tracerutils.set_tag("destination", destination) - tracerutils.set_tag("last_stream_id", last_stream_id) - tracerutils.set_tag("current_stream_id", current_stream_id) - tracerutils.set_tag("limit", limit) + opentracing.set_tag("destination", destination) + opentracing.set_tag("last_stream_id", last_stream_id) + opentracing.set_tag("current_stream_id", current_stream_id) + opentracing.set_tag("limit", limit) has_changed = self._device_federation_outbox_stream_cache.has_entity_changed( destination, last_stream_id ) if not has_changed or last_stream_id == current_stream_id: - tracerutils.log_kv({"message": "No new messages in stream"}) + opentracing.log_kv({"message": "No new messages in stream"}) return defer.succeed(([], current_stream_id)) if limit <= 0: # This can happen if we run out of room for EDUs in the transaction. 
return defer.succeed(([], last_stream_id)) - @tracerutils.trace_function + @opentracing.trace_function def get_new_messages_for_remote_destination_txn(txn): sql = ( "SELECT stream_id, messages_json FROM device_federation_outbox" @@ -174,7 +174,7 @@ def get_new_messages_for_remote_destination_txn(txn): stream_pos = row[0] messages.append(json.loads(row[1])) if len(messages) < limit: - tracerutils.log_kv( + opentracing.log_kv( {"message": "Set stream position to current position"} ) stream_pos = current_stream_id @@ -185,7 +185,7 @@ def get_new_messages_for_remote_destination_txn(txn): get_new_messages_for_remote_destination_txn, ) - @tracerutils.trace_defered_function + @opentracing.trace_defered_function def delete_device_msgs_for_remote(self, destination, up_to_stream_id): """Used to delete messages when the remote destination acknowledges their receipt. @@ -236,7 +236,7 @@ def __init__(self, db_conn, hs): expiry_ms=30 * 60 * 1000, ) - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def add_messages_to_device_inbox( self, local_messages_by_user_then_device, remote_messages_by_destination diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 12fa70d416cd..e88e3f69baba 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing from six import iteritems @@ -300,7 +300,7 @@ def _mark_as_sent_devices_by_remote_txn(self, txn, destination, stream_id): def get_device_stream_token(self): return self._device_list_id_gen.get_current_token() - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def get_user_devices_from_cache(self, query_list): """Get the devices (and keys if any) for remote users from the cache. 
@@ -332,8 +332,8 @@ def get_user_devices_from_cache(self, query_list): else: results[user_id] = yield self._get_cached_devices_for_user(user_id) - tracerutils.set_tag("in_cache", results) - tracerutils.set_tag("not_in_cache", user_ids_not_in_cache) + opentracing.set_tag("in_cache", results) + opentracing.set_tag("not_in_cache", user_ids_not_in_cache) return (user_ids_not_in_cache, results) diff --git a/synapse/storage/e2e_room_keys.py b/synapse/storage/e2e_room_keys.py index ced6dd04ded0..f18605dfc488 100644 --- a/synapse/storage/e2e_room_keys.py +++ b/synapse/storage/e2e_room_keys.py @@ -18,7 +18,7 @@ from twisted.internet import defer from synapse.api.errors import StoreError -from synapse.util.tracerutils import trace_defered_function +from synapse.logging.opentracing import trace_defered_function from ._base import SQLBaseStore diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 9fbff3ebf47c..4fa694e06a0d 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -22,11 +22,11 @@ from ._base import SQLBaseStore, db_to_json -import synapse.util.tracerutils as tracerutils +import synapse.logging.opentracing as opentracing class EndToEndKeyWorkerStore(SQLBaseStore): - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def get_e2e_device_keys( self, query_list, include_all_devices=False, include_deleted_devices=False @@ -43,7 +43,7 @@ def get_e2e_device_keys( Dict mapping from user-id to dict mapping from device_id to dict containing "key_json", "device_display_name". """ - tracerutils.set_tag("query_list", query_list) + opentracing.set_tag("query_list", query_list) if not query_list: return {} @@ -61,12 +61,12 @@ def get_e2e_device_keys( return results - @tracerutils.trace_function + @opentracing.trace_function def _get_e2e_device_keys_txn( self, txn, query_list, include_all_devices=False, include_deleted_devices=False ): - tracerutils.set_tag("include_all_devices", include_all_devices) - tracerutils.set_tag("include_deleted_devices", include_deleted_devices) + opentracing.set_tag("include_all_devices", include_all_devices) + opentracing.set_tag("include_deleted_devices", include_deleted_devices) query_clauses = [] query_params = [] @@ -112,10 +112,10 @@ def _get_e2e_device_keys_txn( for user_id, device_id in deleted_devices: result.setdefault(user_id, {})[device_id] = None - tracerutils.log_kv(result) + opentracing.log_kv(result) return result - @tracerutils.trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def get_e2e_one_time_keys(self, user_id, device_id, key_ids): """Retrieve a number of one-time keys for a user @@ -131,9 +131,9 @@ def get_e2e_one_time_keys(self, user_id, device_id, key_ids): key_id) to json string for key """ - tracerutils.set_tag("user_id", user_id) - tracerutils.set_tag("device_id", device_id) - tracerutils.set_tag("key_ids", key_ids) + opentracing.set_tag("user_id", user_id) + opentracing.set_tag("device_id", device_id) + opentracing.set_tag("key_ids", key_ids) rows = yield self._simple_select_many_batch( table="e2e_one_time_keys_json", @@ -159,11 +159,11 @@ def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys): (algorithm, key_id, key json) """ - @tracerutils.trace_function + @opentracing.trace_function def _add_e2e_one_time_keys(txn): - tracerutils.set_tag("user_id", user_id) - tracerutils.set_tag("device_id", device_id) - tracerutils.set_tag("new_keys", new_keys) + 
opentracing.set_tag("user_id", user_id) + opentracing.set_tag("device_id", device_id) + opentracing.set_tag("new_keys", new_keys) # We are protected from race between lookup and insertion due to # a unique constraint. If there is a race of two calls to # `add_e2e_one_time_keys` then they'll conflict and we will only @@ -219,12 +219,12 @@ def set_e2e_device_keys(self, user_id, device_id, time_now, device_keys): or the keys were already in the database. """ - @tracerutils.trace_function + @opentracing.trace_function def _set_e2e_device_keys_txn(txn): - tracerutils.set_tag("user_id", user_id) - tracerutils.set_tag("device_id", device_id) - tracerutils.set_tag("time_now", time_now) - tracerutils.set_tag("device_keys", device_keys) + opentracing.set_tag("user_id", user_id) + opentracing.set_tag("device_id", device_id) + opentracing.set_tag("time_now", time_now) + opentracing.set_tag("device_keys", device_keys) old_key_json = self._simple_select_one_onecol_txn( txn, @@ -239,7 +239,7 @@ def _set_e2e_device_keys_txn(txn): new_key_json = encode_canonical_json(device_keys).decode("utf-8") if old_key_json == new_key_json: - tracerutils.log_kv({"event", "key already stored"}) + opentracing.log_kv({"event", "key already stored"}) return False self._simple_upsert_txn( @@ -256,7 +256,7 @@ def _set_e2e_device_keys_txn(txn): def claim_e2e_one_time_keys(self, query_list): """Take a list of one time keys out of the database""" - @tracerutils.trace_function + @opentracing.trace_function def _claim_e2e_one_time_keys(txn): sql = ( "SELECT key_id, key_json FROM e2e_one_time_keys_json" @@ -278,11 +278,11 @@ def _claim_e2e_one_time_keys(txn): " AND key_id = ?" ) for user_id, device_id, algorithm, key_id in delete: - tracerutils.log_kv( + opentracing.log_kv( {"message": "executing claim transaction on database"} ) txn.execute(sql, (user_id, device_id, algorithm, key_id)) - tracerutils.log_kv( + opentracing.log_kv( {"message": "finished executing and invalidating cache"} ) self._invalidate_cache_and_stream( @@ -293,10 +293,10 @@ def _claim_e2e_one_time_keys(txn): return self.runInteraction("claim_e2e_one_time_keys", _claim_e2e_one_time_keys) def delete_e2e_keys_by_device(self, user_id, device_id): - @tracerutils.trace_function + @opentracing.trace_function def delete_e2e_keys_by_device_txn(txn): - tracerutils.set_tag("user_id", user_id) - tracerutils.set_tag("device_id", device_id) + opentracing.set_tag("user_id", user_id) + opentracing.set_tag("device_id", device_id) self._simple_delete_txn( txn, table="e2e_device_keys_json", From 957cd77e95207398d56c4abb827fb3434df1e2dd Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 17 Jul 2019 13:53:22 +0100 Subject: [PATCH 13/46] Opentracing across streams --- synapse/federation/federation_server.py | 15 +- .../sender/per_destination_queue.py | 209 +++++++++++------- synapse/handlers/devicemessage.py | 24 +- synapse/storage/devices.py | 20 +- .../delta/55/add_spans_to_device_lists.sql | 16 ++ 5 files changed, 186 insertions(+), 98 deletions(-) create mode 100644 synapse/storage/schema/delta/55/add_spans_to_device_lists.sql diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index b4b9a05ca629..1095483a66a6 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -53,6 +53,8 @@ from synapse.util.async_helpers import Linearizer, concurrently_execute from synapse.util.caches.response_cache import ResponseCache +import synapse.logging.opentracing as opentracing + # when processing 
incoming transactions, we try to handle multiple rooms in # parallel, up to this limit. TRANSACTION_CONCURRENCY_LIMIT = 10 @@ -808,12 +810,13 @@ def on_edu(self, edu_type, origin, content): if not handler: logger.warn("No handler registered for EDU type %s", edu_type) - try: - yield handler(origin, content) - except SynapseError as e: - logger.info("Failed to handle edu %r: %r", edu_type, e) - except Exception: - logger.exception("Failed to handle edu %r", edu_type) + with opentracing.start_active_span_from_edu(content, "handle_edu"): + try: + yield handler(origin, content) + except SynapseError as e: + logger.info("Failed to handle edu %r: %r", edu_type, e) + except Exception: + logger.exception("Failed to handle edu %r", edu_type) def on_query(self, query_type, args): handler = self.query_handlers.get(query_type) diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index fad980b89307..734a908ca2f8 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -16,10 +16,12 @@ import datetime import logging +from canonicaljson import json from prometheus_client import Counter from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.api.errors import ( FederationDeniedError, HttpResponseException, @@ -204,97 +206,142 @@ def _transaction_transmission_loop(self): pending_edus = device_update_edus + to_device_edus - # BEGIN CRITICAL SECTION - # - # In order to avoid a race condition, we need to make sure that - # the following code (from popping the queues up to the point - # where we decide if we actually have any pending messages) is - # atomic - otherwise new PDUs or EDUs might arrive in the - # meantime, but not get sent because we hold the - # transmission_loop_running flag. - - pending_pdus = self._pending_pdus - - # We can only include at most 50 PDUs per transactions - pending_pdus, self._pending_pdus = pending_pdus[:50], pending_pdus[50:] - - pending_edus.extend(self._get_rr_edus(force_flush=False)) - pending_presence = self._pending_presence - self._pending_presence = {} - if pending_presence: - pending_edus.append( - Edu( - origin=self._server_name, - destination=self._destination, - edu_type="m.presence", - content={ - "push": [ - format_user_presence_state( - presence, self._clock.time_msec() - ) - for presence in pending_presence.values() - ] - }, - ) + # Make a transaction sending span, this span follows on from all the + # edus in that transaction. This needs to be done because if the edus + # are never received on the remote the span effectively has no causality. 
+ + span_contexts = [ + opentracing.extract_text_map( + json.loads( + edu.get_dict().get("content", {}).get("context", "{}") + ).get("opentracing", {}) ) + for edu in pending_edus + ] - pending_edus.extend( - self._pop_pending_edus(MAX_EDUS_PER_TRANSACTION - len(pending_edus)) - ) - while ( - len(pending_edus) < MAX_EDUS_PER_TRANSACTION - and self._pending_edus_keyed + with opentracing.start_active_span_follows_from( + "send_transaction", span_contexts ): - _, val = self._pending_edus_keyed.popitem() - pending_edus.append(val) - - if pending_pdus: - logger.debug( - "TX [%s] len(pending_pdus_by_dest[dest]) = %d", - self._destination, - len(pending_pdus), + # Link each sent edu to this transaction's span + _pending_edus = [] + for edu in pending_edus: + edu_dict = edu.get_dict() + span_context = json.loads( + edu_dict.get("content", {}).get("context", "{}") + ).get("opentracing", {}) + # If there is no span context then we are either blacklisting + # this destination or we are not tracing + if not span_context == {}: + if not "references" in span_context: + span_context["references"] = [ + opentracing.active_span_context_as_string() + ] + else: + span_context["references"].append( + opentracing.active_span_context_as_string() + ) + edu_dict["content"]["context"] = json.dumps( + {"opentracing": span_context} + ) + _pending_edus.append(Edu(**edu_dict)) + pending_edus = _pending_edus + + # BEGIN CRITICAL SECTION + # + # In order to avoid a race condition, we need to make sure that + # the following code (from popping the queues up to the point + # where we decide if we actually have any pending messages) is + # atomic - otherwise new PDUs or EDUs might arrive in the + # meantime, but not get sent because we hold the + # transmission_loop_running flag. + + pending_pdus = self._pending_pdus + + # We can only include at most 50 PDUs per transactions + pending_pdus, self._pending_pdus = ( + pending_pdus[:50], + pending_pdus[50:], ) - if not pending_pdus and not pending_edus: - logger.debug("TX [%s] Nothing to send", self._destination) - self._last_device_stream_id = device_stream_id - return - - # if we've decided to send a transaction anyway, and we have room, we - # may as well send any pending RRs - if len(pending_edus) < MAX_EDUS_PER_TRANSACTION: - pending_edus.extend(self._get_rr_edus(force_flush=True)) - - # END CRITICAL SECTION - - success = yield self._transaction_manager.send_new_transaction( - self._destination, pending_pdus, pending_edus - ) - if success: - sent_transactions_counter.inc() - sent_edus_counter.inc(len(pending_edus)) - for edu in pending_edus: - sent_edus_by_type.labels(edu.edu_type).inc() - # Remove the acknowledged device messages from the database - # Only bother if we actually sent some device messages - if to_device_edus: - yield self._store.delete_device_msgs_for_remote( - self._destination, device_stream_id + pending_edus.extend(self._get_rr_edus(force_flush=False)) + pending_presence = self._pending_presence + self._pending_presence = {} + if pending_presence: + pending_edus.append( + Edu( + origin=self._server_name, + destination=self._destination, + edu_type="m.presence", + content={ + "push": [ + format_user_presence_state( + presence, self._clock.time_msec() + ) + for presence in pending_presence.values() + ] + }, + ) ) - # also mark the device updates as sent - if device_update_edus: - logger.info( - "Marking as sent %r %r", self._destination, dev_list_id + pending_edus.extend( + self._pop_pending_edus( + MAX_EDUS_PER_TRANSACTION - len(pending_edus) ) - yield 
self._store.mark_as_sent_devices_by_remote( - self._destination, dev_list_id + ) + while ( + len(pending_edus) < MAX_EDUS_PER_TRANSACTION + and self._pending_edus_keyed + ): + _, val = self._pending_edus_keyed.popitem() + pending_edus.append(val) + + if pending_pdus: + logger.debug( + "TX [%s] len(pending_pdus_by_dest[dest]) = %d", + self._destination, + len(pending_pdus), ) - self._last_device_stream_id = device_stream_id - self._last_device_list_stream_id = dev_list_id - else: - break + if not pending_pdus and not pending_edus: + logger.debug("TX [%s] Nothing to send", self._destination) + self._last_device_stream_id = device_stream_id + return + + # if we've decided to send a transaction anyway, and we have room, we + # may as well send any pending RRs + if len(pending_edus) < MAX_EDUS_PER_TRANSACTION: + pending_edus.extend(self._get_rr_edus(force_flush=True)) + + # END CRITICAL SECTION + + success = yield self._transaction_manager.send_new_transaction( + self._destination, pending_pdus, pending_edus + ) + if success: + sent_transactions_counter.inc() + sent_edus_counter.inc(len(pending_edus)) + for edu in pending_edus: + sent_edus_by_type.labels(edu.edu_type).inc() + # Remove the acknowledged device messages from the database + # Only bother if we actually sent some device messages + if to_device_edus: + yield self._store.delete_device_msgs_for_remote( + self._destination, device_stream_id + ) + + # also mark the device updates as sent + if device_update_edus: + logger.info( + "Marking as sent %r %r", self._destination, dev_list_id + ) + yield self._store.mark_as_sent_devices_by_remote( + self._destination, dev_list_id + ) + + self._last_device_stream_id = device_stream_id + self._last_device_list_stream_id = dev_list_id + else: + break except NotRetryingDestination as e: logger.debug( "TX [%s] not ready for retry yet (next retry at %s) - " diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index 6463d900cdf0..bed564e87bd8 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -15,12 +15,14 @@ import logging +from canonicaljson import json + from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.api.errors import SynapseError from synapse.types import UserID, get_domain_from_id from synapse.util.stringutils import random_string -import synapse.logging.opentracing as opentracing logger = logging.getLogger(__name__) @@ -102,14 +104,22 @@ def send_device_message(self, sender_user_id, message_type, messages): message_id = random_string(16) + context = {"opentracing": {}} + opentracing.inject_active_span_text_map(context["opentracing"]) + remote_edu_contents = {} for destination, messages in remote_messages.items(): - remote_edu_contents[destination] = { - "messages": messages, - "sender": sender_user_id, - "type": message_type, - "message_id": message_id, - } + with opentracing.start_active_span("to_device_for_user"): + opentracing.set_tag("destination", destination) + remote_edu_contents[destination] = { + "messages": messages, + "sender": sender_user_id, + "type": message_type, + "message_id": message_id, + "context": json.dumps(context) + if opentracing.whitelisted_homeserver(destination) + else "", + } opentracing.log_kv(local_messages) stream_id = yield self.store.add_messages_to_device_inbox( diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index e88e3f69baba..f89cbad2efc6 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -74,6 
+74,7 @@ def get_devices_by_user(self, user_id): return {d["device_id"]: d for d in devices} + @opentracing.trace_defered_function @defer.inlineCallbacks def get_devices_by_remote(self, destination, from_stream_id, limit): """Get stream of updates to send to remote servers @@ -128,8 +129,10 @@ def get_devices_by_remote(self, destination, from_stream_id, limit): # (user_id, device_id) entries into a map, with the value being # the max stream_id across each set of duplicate entries # - # maps (user_id, device_id) -> stream_id + # maps (user_id, device_id) -> (stream_id, context) # as long as their stream_id does not match that of the last row + # where context is any metadata about the message's context such as + # opentracing data query_map = {} for update in updates: if stream_id_cutoff is not None and update[2] >= stream_id_cutoff: @@ -137,7 +140,7 @@ def get_devices_by_remote(self, destination, from_stream_id, limit): break key = (update[0], update[1]) - query_map[key] = max(query_map.get(key, 0), update[2]) + query_map[key] = (max(query_map.get(key, 0), update[2]), update[3]) # If we didn't find any updates with a stream_id lower than the cutoff, it # means that there are more than limit updates all of which have the same @@ -172,7 +175,7 @@ def _get_devices_by_remote_txn( List: List of device updates """ sql = """ - SELECT user_id, device_id, stream_id FROM device_lists_outbound_pokes + SELECT user_id, device_id, stream_id, context FROM device_lists_outbound_pokes WHERE destination = ? AND ? < stream_id AND stream_id <= ? AND sent = ? ORDER BY stream_id LIMIT ? @@ -211,12 +214,15 @@ def _get_device_update_edus_by_remote(self, destination, from_stream_id, query_m destination, user_id, from_stream_id ) for device_id, device in iteritems(user_devices): - stream_id = query_map[(user_id, device_id)] + stream_id = query_map[(user_id, device_id)][0] result = { "user_id": user_id, "device_id": device_id, "prev_id": [prev_id] if prev_id else [], "stream_id": stream_id, + "context": query_map[(user_id, device_id)][1] + if opentracing.whitelisted_homeserver(destination) + else "", } prev_id = stream_id @@ -819,6 +825,9 @@ def _add_device_change_txn(self, txn, user_id, device_ids, hosts, stream_id): ], ) + context = {"opentracing": {}} + opentracing.inject_active_span_text_map(context["opentracing"]) + self._simple_insert_many_txn( txn, table="device_lists_outbound_pokes", @@ -830,6 +839,9 @@ def _add_device_change_txn(self, txn, user_id, device_ids, hosts, stream_id): "device_id": device_id, "sent": False, "ts": now, + "context": json.dumps(context) + if opentracing.whitelisted_homeserver(destination) + else "", } for destination in hosts for device_id in device_ids diff --git a/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql b/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql new file mode 100644 index 000000000000..a4c6b917f777 --- /dev/null +++ b/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql @@ -0,0 +1,16 @@ +/* Copyright 2019 The Matrix.org Foundation C.I.C.d + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +ALTER TABLE device_lists_outbound_pokes ADD context TEXT; \ No newline at end of file From 1ed790d67a2751feaf5da368ff3d31077219e39d Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 8 Jul 2019 14:00:36 +0100 Subject: [PATCH 14/46] Some tracing --- synapse/handlers/device.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 746e2179e14a..dc8938e5108d 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -397,6 +397,7 @@ def update_device(self, user_id, device_id, content): else: raise + @opentracing.trace_defered_function @measure_func("notify_device_update") @defer.inlineCallbacks def notify_device_update(self, user_id, device_ids): @@ -412,6 +413,8 @@ def notify_device_update(self, user_id, device_ids): hosts.update(get_domain_from_id(u) for u in users_who_share_room) hosts.discard(self.server_name) + opentracing.set_tag("hosts to update", hosts) + position = yield self.store.add_device_change_to_streams( user_id, device_ids, list(hosts) ) @@ -431,6 +434,9 @@ def notify_device_update(self, user_id, device_ids): ) for host in hosts: self.federation_sender.send_device_messages(host) + opentracing.log_kv( + {"message": "sent device update to host", "host": host} + ) @defer.inlineCallbacks def on_federation_query_user_devices(self, user_id): From 01229a4af99f2b614e43923dc36266e6299161ed Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 8 Jul 2019 14:32:00 +0100 Subject: [PATCH 15/46] Clean up room key tracing --- synapse/handlers/e2e_room_keys.py | 21 ++++++++++++++++++--- synapse/storage/e2e_room_keys.py | 23 ++++++++++++++--------- 2 files changed, 32 insertions(+), 12 deletions(-) diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 0459d52a3c81..1c34ff712fa0 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -179,7 +179,6 @@ def upload_room_keys(self, user_id, version, room_keys): user_id, version, room_id, session_id, session ) - @opentracing.trace_defered_function @defer.inlineCallbacks def _upload_room_key(self, user_id, version, room_id, session_id, room_key): """Upload a given room_key for a given room and session into a given @@ -192,7 +191,14 @@ def _upload_room_key(self, user_id, version, room_id, session_id, room_key): session_id(str): the session whose room_key we're setting room_key(dict): the room_key being set """ - + opentracing.log_kv( + { + "message": "Trying to upload room key", + "room_id": room_id, + "session_id": session_id, + "user_id": user_id, + } + ) # get the room_key for this particular row current_room_key = None try: @@ -201,14 +207,23 @@ def _upload_room_key(self, user_id, version, room_id, session_id, room_key): ) except StoreError as e: if e.code == 404: - pass + opentracing.log_kv( + { + "message": "Room key not found.", + "room_id": room_id, + "user_id": user_id, + } + ) else: raise if self._should_replace_room_key(current_room_key, room_key): + opentracing.log_kv({"message": "Replacing room key."}) yield self.store.set_e2e_room_key( user_id, version, room_id, session_id, room_key ) + else: + opentracing.log_kv({"message": "Not replacing room_key."}) @staticmethod def _should_replace_room_key(current_room_key, room_key): diff --git a/synapse/storage/e2e_room_keys.py b/synapse/storage/e2e_room_keys.py index f18605dfc488..bfd8b51362e4 100644 --- 
a/synapse/storage/e2e_room_keys.py +++ b/synapse/storage/e2e_room_keys.py @@ -18,13 +18,12 @@ from twisted.internet import defer from synapse.api.errors import StoreError -from synapse.logging.opentracing import trace_defered_function +import synapse.logging.opentracing as opentracing from ._base import SQLBaseStore class EndToEndRoomKeyStore(SQLBaseStore): - @trace_defered_function @defer.inlineCallbacks def get_e2e_room_key(self, user_id, version, room_id, session_id): """Get the encrypted E2E room key for a given session from a given @@ -65,7 +64,6 @@ def get_e2e_room_key(self, user_id, version, room_id, session_id): return row - @trace_defered_function @defer.inlineCallbacks def set_e2e_room_key(self, user_id, version, room_id, session_id, room_key): """Replaces or inserts the encrypted E2E room key for a given session in @@ -97,8 +95,16 @@ def set_e2e_room_key(self, user_id, version, room_id, session_id, room_key): }, lock=False, ) + opentracing.log_kv( + { + "message": "Set room key", + "room_id": room_id, + "session_id": session_id, + "room_key": room_key, + } + ) - @trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def get_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk get the E2E room keys for a given backup, optionally filtered to a given @@ -157,7 +163,7 @@ def get_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): return sessions - @trace_defered_function + @opentracing.trace_defered_function @defer.inlineCallbacks def delete_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk delete the E2E room keys for a given backup, optionally filtered to a given @@ -199,7 +205,6 @@ def _get_current_version(txn, user_id): raise StoreError(404, "No current backup version") return row[0] - @trace_defered_function def get_e2e_room_keys_version_info(self, user_id, version=None): """Get info metadata about a version of our room_keys backup. @@ -242,7 +247,7 @@ def _get_e2e_room_keys_version_info_txn(txn): "get_e2e_room_keys_version_info", _get_e2e_room_keys_version_info_txn ) - @trace_defered_function + @opentracing.trace_defered_function def create_e2e_room_keys_version(self, user_id, info): """Atomically creates a new version of this user's e2e_room_keys store with the given version info. @@ -283,7 +288,7 @@ def _create_e2e_room_keys_version_txn(txn): "create_e2e_room_keys_version_txn", _create_e2e_room_keys_version_txn ) - @trace_defered_function + @opentracing.trace_defered_function def update_e2e_room_keys_version(self, user_id, version, info): """Update a given backup version @@ -300,7 +305,7 @@ def update_e2e_room_keys_version(self, user_id, version, info): desc="update_e2e_room_keys_version", ) - @trace_defered_function + @opentracing.trace_defered_function def delete_e2e_room_keys_version(self, user_id, version=None): """Delete a given backup version of the user's room keys. Doesn't delete their actual key data. 
From 794c9e2a75f90458db3d7b7d436a3bbd9ee082ef Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 8 Jul 2019 14:58:02 +0100 Subject: [PATCH 16/46] Cleanup key upload tracing --- synapse/handlers/e2e_keys.py | 28 +++++++++++++++++++++++----- synapse/storage/end_to_end_keys.py | 18 +++++++----------- 2 files changed, 30 insertions(+), 16 deletions(-) diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 01d5d383bfa5..a4b2f18e6b23 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -295,12 +295,10 @@ def claim_client_keys(destination): opentracing.log_kv({"one_time_keys": json_result, "failures": failures}) return {"one_time_keys": json_result, "failures": failures} - @opentracing.trace_defered_function @defer.inlineCallbacks def upload_keys_for_user(self, user_id, device_id, keys): opentracing.set_tag("user_id", user_id) opentracing.set_tag("device_id", device_id) - opentracing.set_tag("keys", keys) time_now = self.clock.time_msec() @@ -314,6 +312,13 @@ def upload_keys_for_user(self, user_id, device_id, keys): user_id, time_now, ) + opentracing.log_kv( + { + "message": "Updating device_keys for user.", + "user_id": user_id, + "device_id": device_id, + } + ) # TODO: Sign the JSON with the server key changed = yield self.store.set_e2e_device_keys( user_id, device_id, time_now, device_keys @@ -321,15 +326,26 @@ def upload_keys_for_user(self, user_id, device_id, keys): if changed: # Only notify about device updates *if* the keys actually changed yield self.device_handler.notify_device_update(user_id, [device_id]) - + else: + opentracing.log_kv( + {"message": "Not updating device_keys for user", "user_id": user_id} + ) one_time_keys = keys.get("one_time_keys", None) + opentracing.set_tag("one_time_keys", one_time_keys) if one_time_keys: + opentracing.log_kv( + { + "message": "Updating one_time_keys for device.", + "user_id": user_id, + "device_id": device_id, + } + ) yield self._upload_one_time_keys_for_user( user_id, device_id, time_now, one_time_keys ) else: opentracing.log_kv( - {"event": "did not upload one_time_keys", "reason": "no keys given"} + {"message": "Did not update one_time_keys", "reason": "no keys given"} ) # the device should have been registered already, but it may have been @@ -344,7 +360,6 @@ def upload_keys_for_user(self, user_id, device_id, keys): opentracing.set_tag("one_time_key_counts", result) return {"one_time_key_counts": result} - @opentracing.trace_defered_function @defer.inlineCallbacks def _upload_one_time_keys_for_user( self, user_id, device_id, time_now, one_time_keys @@ -386,6 +401,9 @@ def _upload_one_time_keys_for_user( (algorithm, key_id, encode_canonical_json(key).decode("ascii")) ) + opentracing.log_kv( + {"message": "Inserting new one_time_keys.", "keys": new_keys} + ) yield self.store.add_e2e_one_time_keys(user_id, device_id, time_now, new_keys) diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 4fa694e06a0d..29d91dfa6b32 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -115,7 +115,6 @@ def _get_e2e_device_keys_txn( opentracing.log_kv(result) return result - @opentracing.trace_defered_function @defer.inlineCallbacks def get_e2e_one_time_keys(self, user_id, device_id, key_ids): """Retrieve a number of one-time keys for a user @@ -131,10 +130,6 @@ def get_e2e_one_time_keys(self, user_id, device_id, key_ids): key_id) to json string for key """ - opentracing.set_tag("user_id", user_id) - 
opentracing.set_tag("device_id", device_id) - opentracing.set_tag("key_ids", key_ids) - rows = yield self._simple_select_many_batch( table="e2e_one_time_keys_json", column="key_id", @@ -143,8 +138,11 @@ def get_e2e_one_time_keys(self, user_id, device_id, key_ids): keyvalues={"user_id": user_id, "device_id": device_id}, desc="add_e2e_one_time_keys_check", ) - - return {(row["algorithm"], row["key_id"]): row["key_json"] for row in rows} + result = {(row["algorithm"], row["key_id"]): row["key_json"] for row in rows} + opentracing.log_kv( + {"message": "Fetched one time keys for user", "one_time_keys": result} + ) + return result @defer.inlineCallbacks def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys): @@ -159,7 +157,6 @@ def add_e2e_one_time_keys(self, user_id, device_id, time_now, new_keys): (algorithm, key_id, key json) """ - @opentracing.trace_function def _add_e2e_one_time_keys(txn): opentracing.set_tag("user_id", user_id) opentracing.set_tag("device_id", device_id) @@ -219,7 +216,6 @@ def set_e2e_device_keys(self, user_id, device_id, time_now, device_keys): or the keys were already in the database. """ - @opentracing.trace_function def _set_e2e_device_keys_txn(txn): opentracing.set_tag("user_id", user_id) opentracing.set_tag("device_id", device_id) @@ -239,7 +235,7 @@ def _set_e2e_device_keys_txn(txn): new_key_json = encode_canonical_json(device_keys).decode("utf-8") if old_key_json == new_key_json: - opentracing.log_kv({"event", "key already stored"}) + opentracing.log_kv({"Message", "Device key already stored."}) return False self._simple_upsert_txn( @@ -248,7 +244,7 @@ def _set_e2e_device_keys_txn(txn): keyvalues={"user_id": user_id, "device_id": device_id}, values={"ts_added_ms": time_now, "key_json": new_key_json}, ) - + opentracing.log_kv({"message": "Device keys stored."}) return True return self.runInteraction("set_e2e_device_keys", _set_e2e_device_keys_txn) From d4bdc2ba807b9a1083bda7b335acf5d4ae932c2f Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 8 Jul 2019 15:12:05 +0100 Subject: [PATCH 17/46] Trace key claiming --- synapse/federation/federation_server.py | 4 ++++ synapse/storage/end_to_end_keys.py | 4 +++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 1095483a66a6..af7bbecc4fde 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -509,6 +509,7 @@ def on_query_client_keys(self, origin, content): def on_query_user_devices(self, origin, user_id): return self.on_query_request("user_devices", user_id) + @opentracing.trace_defered_function @defer.inlineCallbacks @log_function def on_claim_client_keys(self, origin, content): @@ -517,6 +518,9 @@ def on_claim_client_keys(self, origin, content): for device_id, algorithm in device_keys.items(): query.append((user_id, device_id, algorithm)) + opentracing.log_kv( + {"message": "Claiming one time keys.", "user, device pairs": query} + ) results = yield self.store.claim_e2e_one_time_keys(query) json_result = {} diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 29d91dfa6b32..e82c32f0225b 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -275,7 +275,9 @@ def _claim_e2e_one_time_keys(txn): ) for user_id, device_id, algorithm, key_id in delete: opentracing.log_kv( - {"message": "executing claim transaction on database"} + { + "message": "Executing claim e2e_one_time_keys 
transaction on database." + } ) txn.execute(sql, (user_id, device_id, algorithm, key_id)) opentracing.log_kv( From 2fd49cedbbba3616de27bb9f6c466b8dc6e4180c Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Thu, 11 Jul 2019 11:55:14 +0100 Subject: [PATCH 18/46] Nicer tracing --- synapse/handlers/device.py | 19 ++++++++++++++++--- synapse/handlers/devicemessage.py | 8 ++++---- synapse/rest/client/v2_alpha/keys.py | 6 +++++- synapse/rest/client/v2_alpha/sendtodevice.py | 2 +- synapse/storage/end_to_end_keys.py | 12 ++++++++---- 5 files changed, 34 insertions(+), 13 deletions(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index dc8938e5108d..d1163c163a94 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -306,7 +306,9 @@ def delete_device(self, user_id, device_id): if e.code == 404: # no match opentracing.set_tag("error", True) - opentracing.set_tag("reason", "User doesn't have that device id.") + opentracing.log_kv( + {"reason": "User doesn't have device id.", "device_id": device_id} + ) pass else: raise @@ -506,6 +508,15 @@ def incoming_device_list_update(self, origin, edu_content): device_id, origin, ) + + opentracing.set_tag("error", True) + opentracing.log_kv( + { + "message": "Got a device list update edu from a user and device which does not match the origin of the request.", + "user_id": user_id, + "device_id": device_id, + } + ) return room_ids = yield self.store.get_rooms_for_user(user_id) @@ -515,8 +526,9 @@ def incoming_device_list_update(self, origin, edu_content): opentracing.set_tag("error", True) opentracing.log_kv( { - "message": "Got an update from a user which " - + "doesn't share a room with the current user." + "message": "Got an update from a user for which " + + "we don't share any rooms", + "other user_id": user_id, } ) logger.warning( @@ -561,6 +573,7 @@ def _handle_device_updates(self, user_id): logger.debug("Need to re-sync devices for %r? %r", user_id, resync) if resync: + opentracing.log_kv({"message": "Doing resync to update device list."}) # Fetch all devices for the user. origin = get_domain_from_id(user_id) try: diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index bed564e87bd8..5963df4c1ce1 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -79,10 +79,10 @@ def on_direct_to_device_edu(self, origin, content): "to_device_key", stream_id, users=local_messages.keys() ) - @opentracing.trace_defered_function @defer.inlineCallbacks def send_device_message(self, sender_user_id, message_type, messages): - + opentracing.set_tag("number of messages", len(messages)) + opentracing.set_tag("sender", sender_user_id) local_messages = {} remote_messages = {} for user_id, by_device in messages.items(): @@ -121,7 +121,7 @@ def send_device_message(self, sender_user_id, message_type, messages): else "", } - opentracing.log_kv(local_messages) + opentracing.log_kv({"local_messages": local_messages}) stream_id = yield self.store.add_messages_to_device_inbox( local_messages, remote_edu_contents ) @@ -130,7 +130,7 @@ def send_device_message(self, sender_user_id, message_type, messages): "to_device_key", stream_id, users=local_messages.keys() ) - opentracing.log_kv(remote_messages) + opentracing.log_kv({"remote_messages": remote_messages}) for destination in remote_messages.keys(): # Enqueue a new federation transaction to send the new # device messages to each remote destination. 
diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 00260cde7351..381c1e6da775 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -182,20 +182,24 @@ def __init__(self, hs): self.auth = hs.get_auth() self.device_handler = hs.get_device_handler() + @opentracing.trace_defered_function @defer.inlineCallbacks def on_GET(self, request): requester = yield self.auth.get_user_by_req(request, allow_guest=True) from_token_string = parse_string(request, "from") + opentracing.set_tag("from", from_token_string) # We want to enforce they do pass us one, but we ignore it and return # changes after the "to" as well as before. - parse_string(request, "to") + opentracing.set_tag("to", parse_string(request, "to")) from_token = StreamToken.from_string(from_token_string) user_id = requester.user.to_string() + opentracing.set_tag("user_id", user_id) + results = yield self.device_handler.get_user_ids_changed(user_id, from_token) return (200, results) diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index 68bbcf4a48c2..1385f80e2719 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -43,7 +43,7 @@ def __init__(self, hs): self.txns = HttpTransactionCache(hs) self.device_message_handler = hs.get_device_message_handler() - @opentracing.trace_function_using_operation_name("sendToDevice") + @opentracing.trace_defered_function_using_operation_name("sendToDevice") def on_PUT(self, request, message_type, txn_id): opentracing.set_tag("message_type", message_type) opentracing.set_tag("txn_id", txn_id) diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index e82c32f0225b..ea5ed2737be0 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -235,7 +235,7 @@ def _set_e2e_device_keys_txn(txn): new_key_json = encode_canonical_json(device_keys).decode("utf-8") if old_key_json == new_key_json: - opentracing.log_kv({"Message", "Device key already stored."}) + opentracing.log_kv({"Message": "Device key already stored."}) return False self._simple_upsert_txn( @@ -291,10 +291,14 @@ def _claim_e2e_one_time_keys(txn): return self.runInteraction("claim_e2e_one_time_keys", _claim_e2e_one_time_keys) def delete_e2e_keys_by_device(self, user_id, device_id): - @opentracing.trace_function def delete_e2e_keys_by_device_txn(txn): - opentracing.set_tag("user_id", user_id) - opentracing.set_tag("device_id", device_id) + opentracing.log_kv( + { + "message": "Deleting keys for device", + "device_id": device_id, + "user_id": user_id, + } + ) self._simple_delete_txn( txn, table="e2e_device_keys_json", From d44f303fe0800869992b446e141da2b97f445a0f Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 17 Jul 2019 13:58:12 +0100 Subject: [PATCH 19/46] Isort of ran out of puns for this one. 
--- synapse/federation/federation_server.py | 3 +-- synapse/handlers/device.py | 2 +- synapse/handlers/e2e_keys.py | 2 +- synapse/handlers/e2e_room_keys.py | 2 +- synapse/rest/client/v2_alpha/keys.py | 3 ++- synapse/rest/client/v2_alpha/room_keys.py | 2 +- synapse/rest/client/v2_alpha/sendtodevice.py | 2 +- synapse/storage/deviceinbox.py | 2 +- synapse/storage/devices.py | 2 +- synapse/storage/e2e_room_keys.py | 2 +- synapse/storage/end_to_end_keys.py | 3 +-- 11 files changed, 12 insertions(+), 13 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index af7bbecc4fde..fb7bcb23f38d 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -25,6 +25,7 @@ from twisted.internet.abstract import isIPAddress from twisted.python import failure +import synapse.logging.opentracing as opentracing from synapse.api.constants import EventTypes, Membership from synapse.api.errors import ( AuthError, @@ -53,8 +54,6 @@ from synapse.util.async_helpers import Linearizer, concurrently_execute from synapse.util.caches.response_cache import ResponseCache -import synapse.logging.opentracing as opentracing - # when processing incoming transactions, we try to handle multiple rooms in # parallel, up to this limit. TRANSACTION_CONCURRENCY_LIMIT = 10 diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index d1163c163a94..cd5ab70d567d 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -18,6 +18,7 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.api import errors from synapse.api.constants import EventTypes from synapse.api.errors import ( @@ -31,7 +32,6 @@ from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.metrics import measure_func from synapse.util.retryutils import NotRetryingDestination -import synapse.logging.opentracing as opentracing from ._base import BaseHandler diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index a4b2f18e6b23..08882ba3a96c 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -15,7 +15,6 @@ # limitations under the License. 
import logging -import synapse.logging.opentracing as opentracing from six import iteritems @@ -23,6 +22,7 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.api.errors import CodeMessageException, SynapseError from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.types import UserID, get_domain_from_id diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 1c34ff712fa0..2c7cf71b0ec2 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -19,6 +19,7 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.api.errors import ( Codes, NotFoundError, @@ -27,7 +28,6 @@ SynapseError, ) from synapse.util.async_helpers import Linearizer -import synapse.logging.opentracing as opentracing logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 381c1e6da775..1f63f0c589b6 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -17,6 +17,7 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.api.errors import SynapseError from synapse.http.servlet import ( RestServlet, @@ -25,7 +26,7 @@ parse_string, ) from synapse.types import StreamToken -import synapse.logging.opentracing as opentracing + from ._base import client_patterns logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 51bd4f12fff0..2d27e2c91410 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -17,13 +17,13 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.http.servlet import ( RestServlet, parse_json_object_from_request, parse_string, ) -import synapse.logging.opentracing as opentracing from ._base import client_patterns diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index 1385f80e2719..8fa190fb13fd 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -17,10 +17,10 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.http import servlet from synapse.http.servlet import parse_json_object_from_request from synapse.rest.client.transactions import HttpTransactionCache -import synapse.logging.opentracing as opentracing from ._base import client_patterns diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 91c6cd4cf60c..1df361bc513b 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -19,10 +19,10 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.storage._base import SQLBaseStore from synapse.storage.background_updates import BackgroundUpdateStore from synapse.util.caches.expiringcache import ExpiringCache -import synapse.logging.opentracing as opentracing logger = logging.getLogger(__name__) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index f89cbad2efc6..c81b3b8fc80b 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # 
limitations under the License. import logging -import synapse.logging.opentracing as opentracing from six import iteritems @@ -21,6 +20,7 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.api.errors import StoreError from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage._base import Cache, SQLBaseStore, db_to_json diff --git a/synapse/storage/e2e_room_keys.py b/synapse/storage/e2e_room_keys.py index bfd8b51362e4..8f77db127f47 100644 --- a/synapse/storage/e2e_room_keys.py +++ b/synapse/storage/e2e_room_keys.py @@ -17,8 +17,8 @@ from twisted.internet import defer -from synapse.api.errors import StoreError import synapse.logging.opentracing as opentracing +from synapse.api.errors import StoreError from ._base import SQLBaseStore diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index ea5ed2737be0..18a514f9ca90 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -18,12 +18,11 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing from synapse.util.caches.descriptors import cached from ._base import SQLBaseStore, db_to_json -import synapse.logging.opentracing as opentracing - class EndToEndKeyWorkerStore(SQLBaseStore): @opentracing.trace_defered_function From a293759d8c0cfd93578cea17e5a74ae828530087 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 17 Jul 2019 14:00:22 +0100 Subject: [PATCH 20/46] =?UTF-8?q?Though=20style=20is=20subjective=20it=20d?= =?UTF-8?q?epends=20on=20a=20ruthless=20objectivity:=20you=20either=20have?= =?UTF-8?q?=20it,=20or=20you=20don't=C2=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- synapse/federation/sender/per_destination_queue.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index 734a908ca2f8..f0b910b446e0 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -232,7 +232,7 @@ def _transaction_transmission_loop(self): # If there is no span context then we are either blacklisting # this destination or we are not tracing if not span_context == {}: - if not "references" in span_context: + if "references" not in span_context: span_context["references"] = [ opentracing.active_span_context_as_string() ] From ab191f99f9ab3a4308054a29a8693f42c9a3f3cc Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 15 Jul 2019 15:23:33 +0100 Subject: [PATCH 21/46] A little extra device_list tracing --- synapse/handlers/device.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index cd5ab70d567d..87fb302bf12e 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -594,16 +594,25 @@ def _handle_device_updates(self, user_id): # eventually become consistent. 
return except FederationDeniedError as e: + opentracing.set_tag("error", True) + opentracing.log_kv({"reason": "FederationDeniedError"}) logger.info(e) return - except Exception: + except Exception as e: # TODO: Remember that we are now out of sync and try again # later + opentracing.set_tag("error", True) + opentracing.log_kv( + { + "message": "Exception raised by federation request", + "exception": e, + } + ) logger.exception( "Failed to handle device list update for %s", user_id ) return - + opentracing.log_kv({"result": result}) stream_id = result["stream_id"] devices = result["devices"] From e49487fc919367b6c831b1bcf33546f3b36220fd Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Tue, 16 Jul 2019 11:20:20 +0100 Subject: [PATCH 22/46] Better args wrapper --- synapse/handlers/e2e_keys.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 08882ba3a96c..f0077f32d483 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -296,9 +296,8 @@ def claim_client_keys(destination): return {"one_time_keys": json_result, "failures": failures} @defer.inlineCallbacks + @opentracing.tag_args def upload_keys_for_user(self, user_id, device_id, keys): - opentracing.set_tag("user_id", user_id) - opentracing.set_tag("device_id", device_id) time_now = self.clock.time_msec() From 4824e30810729712a80968bb1a1b5044b0dad1c1 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Tue, 16 Jul 2019 18:43:11 +0100 Subject: [PATCH 23/46] Add user _id --- synapse/api/auth.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 351790cca495..39882bc30ad2 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -22,6 +22,7 @@ from twisted.internet import defer +import synapse.logging.opentracing as opentracing import synapse.types from synapse import event_auth from synapse.api.constants import EventTypes, JoinRules, Membership @@ -209,6 +210,10 @@ def get_user_by_req( user_id, app_service = yield self._get_appservice_user_id(request) if user_id: request.authenticated_entity = user_id + opentracing.set_tag("authenticated_entity", user_id) + # there is at least one other place where authenticated entity is + # set. user_id is tagged incase authenticated_entity is clobbered + opentracing.set_tag("user_id", user_id) if ip_addr and self.hs.config.track_appservice_user_ips: yield self.store.insert_client_ip( @@ -260,6 +265,11 @@ def get_user_by_req( request.authenticated_entity = user.to_string() + opentracing.set_tag("authenticated_entity", user.to_string()) + # there is at least one other place where authenticated entity is + # set. user_id is tagged incase authenticated_entity is clobbered + opentracing.set_tag("user_id", user.to_string()) + return synapse.types.create_requester( user, token_id, is_guest, device_id, app_service=app_service ) From 30738e978e21263b8ab5000db9b25e0f5ea976a2 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 17 Jul 2019 10:01:10 +0100 Subject: [PATCH 24/46] newsfile --- changelog.d/5687.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5687.misc diff --git a/changelog.d/5687.misc b/changelog.d/5687.misc new file mode 100644 index 000000000000..3d379753a73e --- /dev/null +++ b/changelog.d/5687.misc @@ -0,0 +1 @@ +Opentrace e2e code paths. 
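The patches above and below carry tracing context across processes by serialising the active span into a JSON carrier (the new context column on device_lists_outbound_pokes and the context field on to-device EDUs). The sketch below shows that pattern using the plain opentracing package; the helper names are illustrative only and are not the synapse.logging.opentracing API, and it assumes the opentracing package is installed.

    import json

    import opentracing
    from opentracing.propagation import Format


    def serialise_active_span_context(tracer=None):
        # Inject the active span's context into a dict carrier and serialise
        # it, e.g. for storage in a TEXT column or a federation EDU field.
        tracer = tracer or opentracing.global_tracer()
        carrier = {}
        if tracer.active_span is not None:
            tracer.inject(tracer.active_span.context, Format.TEXT_MAP, carrier)
        return json.dumps({"opentracing": carrier})


    def span_from_serialised_context(operation_name, serialised, tracer=None):
        # Deserialise the carrier and start a child span so the receiving
        # side continues the original trace.
        tracer = tracer or opentracing.global_tracer()
        carrier = json.loads(serialised or "{}").get("opentracing", {})
        parent = tracer.extract(Format.TEXT_MAP, carrier) if carrier else None
        return tracer.start_active_span(operation_name, child_of=parent)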
From 6944c994a5d2de5ac4f6b3c5c3efb57103433bbf Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 17 Jul 2019 11:19:13 +0100 Subject: [PATCH 25/46] Make sure there is an active span here. --- synapse/api/auth.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 39882bc30ad2..c75052e6298c 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -179,6 +179,7 @@ def can_federate(self, event, auth_events): def get_public_keys(self, invite_event): return event_auth.get_public_keys(invite_event) + @opentracing.trace_deferred @defer.inlineCallbacks def get_user_by_req( self, request, allow_guest=False, rights="access", allow_expired=False From d26cbb4e236a2544a8d9d0980b2be0309356039a Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 17 Jul 2019 11:52:27 +0100 Subject: [PATCH 26/46] Unbreak json parsing. --- synapse/handlers/devicemessage.py | 2 +- synapse/storage/devices.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index 5963df4c1ce1..625b1b70af16 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -118,7 +118,7 @@ def send_device_message(self, sender_user_id, message_type, messages): "message_id": message_id, "context": json.dumps(context) if opentracing.whitelisted_homeserver(destination) - else "", + else "{}", } opentracing.log_kv({"local_messages": local_messages}) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index c81b3b8fc80b..671b80624b56 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -222,7 +222,7 @@ def _get_device_update_edus_by_remote(self, destination, from_stream_id, query_m "stream_id": stream_id, "context": query_map[(user_id, device_id)][1] if opentracing.whitelisted_homeserver(destination) - else "", + else "{}", } prev_id = stream_id @@ -841,7 +841,7 @@ def _add_device_change_txn(self, txn, user_id, device_ids, hosts, stream_id): "ts": now, "context": json.dumps(context) if opentracing.whitelisted_homeserver(destination) - else "", + else "{}", } for destination in hosts for device_id in device_ids From cffba280de117ea2710263a809087f13fd959850 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 17 Jul 2019 14:39:47 +0100 Subject: [PATCH 27/46] Trailing .d --- synapse/storage/schema/delta/55/add_spans_to_device_lists.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql b/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql index a4c6b917f777..d4c09005ab88 100644 --- a/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql +++ b/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql @@ -1,4 +1,4 @@ -/* Copyright 2019 The Matrix.org Foundation C.I.C.d +/* Copyright 2019 The Matrix.org Foundation C.I.C * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. From d395650bcf567f41415c67671d6b840588912005 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 17 Jul 2019 13:56:29 +0100 Subject: [PATCH 28/46] Use better decorator names. 
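In rough terms, a deferred-aware trace decorator of the kind being renamed here starts a span named after the wrapped function, tags it on error, and closes the span when the Deferred resolves. The sketch below is an illustrative stand-in only, not the implementation in synapse/logging/opentracing.py, and it ignores the logcontext handling that the real helpers have to deal with.

    import functools

    import opentracing
    from twisted.internet import defer


    def trace_deferred_sketch(func):
        # Illustrative stand-in: wrap an inlineCallbacks-style function in a span.
        @functools.wraps(func)
        @defer.inlineCallbacks
        def _wrapped(*args, **kwargs):
            scope = opentracing.global_tracer().start_active_span(func.__name__)
            try:
                result = yield defer.maybeDeferred(func, *args, **kwargs)
                return result
            except Exception:
                scope.span.set_tag("error", True)
                raise
            finally:
                scope.close()

        return _wrapped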
--- synapse/federation/federation_server.py | 2 +- synapse/handlers/device.py | 14 +++++++------- synapse/handlers/e2e_keys.py | 10 +++++----- synapse/handlers/e2e_room_keys.py | 12 ++++++------ synapse/rest/client/v2_alpha/keys.py | 4 ++-- synapse/rest/client/v2_alpha/room_keys.py | 2 +- synapse/rest/client/v2_alpha/sendtodevice.py | 2 +- synapse/storage/deviceinbox.py | 10 +++++----- synapse/storage/devices.py | 4 ++-- synapse/storage/e2e_room_keys.py | 10 +++++----- synapse/storage/end_to_end_keys.py | 6 +++--- 11 files changed, 38 insertions(+), 38 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index fb7bcb23f38d..eaaa20152959 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -508,7 +508,7 @@ def on_query_client_keys(self, origin, content): def on_query_user_devices(self, origin, user_id): return self.on_query_request("user_devices", user_id) - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks @log_function def on_claim_client_keys(self, origin, content): diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 87fb302bf12e..d46952205127 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -46,7 +46,7 @@ def __init__(self, hs): self.state = hs.get_state_handler() self._auth_handler = hs.get_auth_handler() - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def get_devices_by_user(self, user_id): """ @@ -70,7 +70,7 @@ def get_devices_by_user(self, user_id): opentracing.log_kv(device_map) return devices - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def get_device(self, user_id, device_id): """ Retrieve the given device @@ -97,7 +97,7 @@ def get_device(self, user_id, device_id): return device @measure_func("device.get_user_ids_changed") - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def get_user_ids_changed(self, user_id, from_token): """Get list of users that have had the devices updated, or have newly @@ -287,7 +287,7 @@ def check_device_registered( raise errors.StoreError(500, "Couldn't generate a device ID.") - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def delete_device(self, user_id, device_id): """ Delete the given device @@ -321,7 +321,7 @@ def delete_device(self, user_id, device_id): yield self.notify_device_update(user_id, [device_id]) - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def delete_all_devices_for_user(self, user_id, except_device_id=None): """Delete all of the user's devices @@ -399,7 +399,7 @@ def update_device(self, user_id, device_id, content): else: raise - @opentracing.trace_defered_function + @opentracing.trace_deferred @measure_func("notify_device_update") @defer.inlineCallbacks def notify_device_update(self, user_id, device_ids): @@ -485,7 +485,7 @@ def __init__(self, hs, device_handler): iterable=True, ) - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def incoming_device_list_update(self, origin, edu_content): """Called on incoming device list update from federation. 
Responsible diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index f0077f32d483..0b1523fca9b7 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -46,7 +46,7 @@ def __init__(self, hs): "client_keys", self.on_federation_query_client_keys ) - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def query_devices(self, query_body, timeout): """ Handle a device key query from a client @@ -125,7 +125,7 @@ def query_devices(self, query_body, timeout): r[user_id] = remote_queries[user_id] # Now fetch any devices that we don't have in our cache - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def do_remote_query(destination): destination_query = remote_queries_not_in_cache[destination] @@ -158,7 +158,7 @@ def do_remote_query(destination): return {"device_keys": results, "failures": failures} - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def query_local_devices(self, query): """Get E2E device keys for local users @@ -222,7 +222,7 @@ def on_federation_query_client_keys(self, query_body): res = yield self.query_local_devices(device_keys_query) return {"device_keys": res} - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def claim_one_time_keys(self, query, timeout): local_query = [] @@ -251,7 +251,7 @@ def claim_one_time_keys(self, query, timeout): key_id: json.loads(json_bytes) } - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def claim_client_keys(destination): opentracing.set_tag("destination", destination) diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 2c7cf71b0ec2..ea291d79bc66 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -50,7 +50,7 @@ def __init__(self, hs): # changed. self._upload_linearizer = Linearizer("upload_room_keys_lock") - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def get_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk get the E2E room keys for a given backup, optionally filtered to a given @@ -89,7 +89,7 @@ def get_room_keys(self, user_id, version, room_id=None, session_id=None): opentracing.log_kv(results) return results - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def delete_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk delete the E2E room keys for a given backup, optionally filtered to a given @@ -111,7 +111,7 @@ def delete_room_keys(self, user_id, version, room_id=None, session_id=None): with (yield self._upload_linearizer.queue(user_id)): yield self.store.delete_e2e_room_keys(user_id, version, room_id, session_id) - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def upload_room_keys(self, user_id, version, room_keys): """Bulk upload a list of room keys into a given backup version, asserting @@ -257,7 +257,7 @@ def _should_replace_room_key(current_room_key, room_key): return False return True - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def create_version(self, user_id, version_info): """Create a new backup version. 
This automatically becomes the new @@ -316,7 +316,7 @@ def get_version_info(self, user_id, version=None): raise return res - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def delete_version(self, user_id, version=None): """Deletes a given version of the user's e2e_room_keys backup @@ -337,7 +337,7 @@ def delete_version(self, user_id, version=None): else: raise - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def update_version(self, user_id, version, version_info): """Update the info about a given version of the user's backup diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 1f63f0c589b6..880cf9d6e770 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -69,7 +69,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - @opentracing.trace_defered_function_using_operation_name("upload_keys") + @opentracing.trace_deferred_using_operation_name("upload_keys") @defer.inlineCallbacks def on_POST(self, request, device_id): requester = yield self.auth.get_user_by_req(request, allow_guest=True) @@ -183,7 +183,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.device_handler = hs.get_device_handler() - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def on_GET(self, request): requester = yield self.auth.get_user_by_req(request, allow_guest=True) diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 2d27e2c91410..a9e3875a464d 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -312,7 +312,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_room_keys_handler = hs.get_e2e_room_keys_handler() - @opentracing.trace_defered_function_using_operation_name("get_room_keys_version") + @opentracing.trace_deferred_using_operation_name("get_room_keys_version") @defer.inlineCallbacks def on_GET(self, request, version): """ diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index 8fa190fb13fd..7245299691f1 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -43,7 +43,7 @@ def __init__(self, hs): self.txns = HttpTransactionCache(hs) self.device_message_handler = hs.get_device_message_handler() - @opentracing.trace_defered_function_using_operation_name("sendToDevice") + @opentracing.trace_deferred_using_operation_name("sendToDevice") def on_PUT(self, request, message_type, txn_id): opentracing.set_tag("message_type", message_type) opentracing.set_tag("txn_id", txn_id) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 1df361bc513b..4eb35d13db94 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -73,7 +73,7 @@ def get_new_messages_for_device_txn(txn): "get_new_messages_for_device", get_new_messages_for_device_txn ) - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def delete_messages_for_device(self, user_id, device_id, up_to_stream_id): """ @@ -127,7 +127,7 @@ def delete_messages_for_device_txn(txn): return count - @opentracing.trace_function + @opentracing.trace def get_new_device_msgs_for_remote( self, destination, last_stream_id, current_stream_id, limit ): @@ -159,7 +159,7 @@ def get_new_device_msgs_for_remote( # 
This can happen if we run out of room for EDUs in the transaction. return defer.succeed(([], last_stream_id)) - @opentracing.trace_function + @opentracing.trace def get_new_messages_for_remote_destination_txn(txn): sql = ( "SELECT stream_id, messages_json FROM device_federation_outbox" @@ -185,7 +185,7 @@ def get_new_messages_for_remote_destination_txn(txn): get_new_messages_for_remote_destination_txn, ) - @opentracing.trace_defered_function + @opentracing.trace_deferred def delete_device_msgs_for_remote(self, destination, up_to_stream_id): """Used to delete messages when the remote destination acknowledges their receipt. @@ -236,7 +236,7 @@ def __init__(self, db_conn, hs): expiry_ms=30 * 60 * 1000, ) - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def add_messages_to_device_inbox( self, local_messages_by_user_then_device, remote_messages_by_destination diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 671b80624b56..96746f6eee6b 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -74,7 +74,7 @@ def get_devices_by_user(self, user_id): return {d["device_id"]: d for d in devices} - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def get_devices_by_remote(self, destination, from_stream_id, limit): """Get stream of updates to send to remote servers @@ -306,7 +306,7 @@ def _mark_as_sent_devices_by_remote_txn(self, txn, destination, stream_id): def get_device_stream_token(self): return self._device_list_id_gen.get_current_token() - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def get_user_devices_from_cache(self, query_list): """Get the devices (and keys if any) for remote users from the cache. diff --git a/synapse/storage/e2e_room_keys.py b/synapse/storage/e2e_room_keys.py index 8f77db127f47..594ea97d9fe6 100644 --- a/synapse/storage/e2e_room_keys.py +++ b/synapse/storage/e2e_room_keys.py @@ -104,7 +104,7 @@ def set_e2e_room_key(self, user_id, version, room_id, session_id, room_key): } ) - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def get_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk get the E2E room keys for a given backup, optionally filtered to a given @@ -163,7 +163,7 @@ def get_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): return sessions - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def delete_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk delete the E2E room keys for a given backup, optionally filtered to a given @@ -247,7 +247,7 @@ def _get_e2e_room_keys_version_info_txn(txn): "get_e2e_room_keys_version_info", _get_e2e_room_keys_version_info_txn ) - @opentracing.trace_defered_function + @opentracing.trace_deferred def create_e2e_room_keys_version(self, user_id, info): """Atomically creates a new version of this user's e2e_room_keys store with the given version info. 
@@ -288,7 +288,7 @@ def _create_e2e_room_keys_version_txn(txn): "create_e2e_room_keys_version_txn", _create_e2e_room_keys_version_txn ) - @opentracing.trace_defered_function + @opentracing.trace_deferred def update_e2e_room_keys_version(self, user_id, version, info): """Update a given backup version @@ -305,7 +305,7 @@ def update_e2e_room_keys_version(self, user_id, version, info): desc="update_e2e_room_keys_version", ) - @opentracing.trace_defered_function + @opentracing.trace_deferred def delete_e2e_room_keys_version(self, user_id, version=None): """Delete a given backup version of the user's room keys. Doesn't delete their actual key data. diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 18a514f9ca90..73c51f009178 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -25,7 +25,7 @@ class EndToEndKeyWorkerStore(SQLBaseStore): - @opentracing.trace_defered_function + @opentracing.trace_deferred @defer.inlineCallbacks def get_e2e_device_keys( self, query_list, include_all_devices=False, include_deleted_devices=False @@ -60,7 +60,7 @@ def get_e2e_device_keys( return results - @opentracing.trace_function + @opentracing.trace def _get_e2e_device_keys_txn( self, txn, query_list, include_all_devices=False, include_deleted_devices=False ): @@ -251,7 +251,7 @@ def _set_e2e_device_keys_txn(txn): def claim_e2e_one_time_keys(self, query_list): """Take a list of one time keys out of the database""" - @opentracing.trace_function + @opentracing.trace def _claim_e2e_one_time_keys(txn): sql = ( "SELECT key_id, key_json FROM e2e_one_time_keys_json" From beea2e31b948f4808ceeef631d2a49569850dfbb Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Wed, 17 Jul 2019 16:53:08 +0100 Subject: [PATCH 29/46] I wish python had a good type system. 
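Background for the change below: once query_map values became (stream_id, context) tuples, the old int default in query_map.get(key, 0) no longer matches the stored type, so the second update for the same key hands max() a tuple and an int and raises TypeError on Python 3. A minimal illustration with made-up values:

    query_map = {}
    key = ("@alice:example.com", "ADEVICEID")

    # Values are now (stream_id, context) tuples rather than bare stream ids.
    query_map[key] = (3, "{}")

    # max(query_map.get(key, 0), 5) would compare (3, "{}") with 5 -> TypeError,
    # hence the tuple-shaped default and the explicit [0] index:
    stream_id = max(query_map.get(key, (0, None))[0], 5)
    query_map[key] = (stream_id, "{}")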
--- synapse/storage/devices.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 96746f6eee6b..4f9d440d3d2a 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -140,7 +140,17 @@ def get_devices_by_remote(self, destination, from_stream_id, limit): break key = (update[0], update[1]) - query_map[key] = (max(query_map.get(key, 0), update[2]), update[3]) + logger.info("++++++++++++++++++++++++ update-0 %s", update[0]) + logger.info("++++++++++++++++++++++++ update-1 %s", update[1]) + logger.info("++++++++++++++++++++++++ update-2 %s", update[2]) + logger.info("++++++++++++++++++++++++ update-3 %s", update[3]) + logger.info( + "+++++++++++++++++++++++++++ %s", (query_map.get(key, 0), update[2]) + ) + query_map[key] = ( + max(query_map.get(key, (0, None))[0], update[2]), + update[3], + ) # If we didn't find any updates with a stream_id lower than the cutoff, it # means that there are more than limit updates all of which have the same From 1801578795bf4cdb92426791898f0e4da941ce71 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 22 Jul 2019 13:38:47 +0100 Subject: [PATCH 30/46] Use unified trace method --- synapse/api/auth.py | 2 +- synapse/federation/federation_server.py | 2 +- synapse/handlers/device.py | 14 +++++++------- synapse/handlers/e2e_keys.py | 10 +++++----- synapse/handlers/e2e_room_keys.py | 12 ++++++------ synapse/logging/opentracing.py | 2 +- synapse/rest/client/v2_alpha/keys.py | 4 ++-- synapse/rest/client/v2_alpha/room_keys.py | 2 +- synapse/rest/client/v2_alpha/sendtodevice.py | 2 +- synapse/storage/deviceinbox.py | 6 +++--- synapse/storage/devices.py | 4 ++-- synapse/storage/e2e_room_keys.py | 10 +++++----- synapse/storage/end_to_end_keys.py | 2 +- 13 files changed, 36 insertions(+), 36 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index c75052e6298c..fef1ee4040b5 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -179,7 +179,7 @@ def can_federate(self, event, auth_events): def get_public_keys(self, invite_event): return event_auth.get_public_keys(invite_event) - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def get_user_by_req( self, request, allow_guest=False, rights="access", allow_expired=False diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index eaaa20152959..0da39dcd78b3 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -508,7 +508,7 @@ def on_query_client_keys(self, origin, content): def on_query_user_devices(self, origin, user_id): return self.on_query_request("user_devices", user_id) - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks @log_function def on_claim_client_keys(self, origin, content): diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index d46952205127..4fef2aef0c22 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -46,7 +46,7 @@ def __init__(self, hs): self.state = hs.get_state_handler() self._auth_handler = hs.get_auth_handler() - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def get_devices_by_user(self, user_id): """ @@ -70,7 +70,7 @@ def get_devices_by_user(self, user_id): opentracing.log_kv(device_map) return devices - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def get_device(self, user_id, device_id): """ Retrieve the given device @@ -97,7 +97,7 @@ def 
get_device(self, user_id, device_id): return device @measure_func("device.get_user_ids_changed") - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def get_user_ids_changed(self, user_id, from_token): """Get list of users that have had the devices updated, or have newly @@ -287,7 +287,7 @@ def check_device_registered( raise errors.StoreError(500, "Couldn't generate a device ID.") - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def delete_device(self, user_id, device_id): """ Delete the given device @@ -321,7 +321,7 @@ def delete_device(self, user_id, device_id): yield self.notify_device_update(user_id, [device_id]) - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def delete_all_devices_for_user(self, user_id, except_device_id=None): """Delete all of the user's devices @@ -399,7 +399,7 @@ def update_device(self, user_id, device_id, content): else: raise - @opentracing.trace_deferred + @opentracing.trace @measure_func("notify_device_update") @defer.inlineCallbacks def notify_device_update(self, user_id, device_ids): @@ -485,7 +485,7 @@ def __init__(self, hs, device_handler): iterable=True, ) - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def incoming_device_list_update(self, origin, edu_content): """Called on incoming device list update from federation. Responsible diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 0b1523fca9b7..d6f894dd722f 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -46,7 +46,7 @@ def __init__(self, hs): "client_keys", self.on_federation_query_client_keys ) - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def query_devices(self, query_body, timeout): """ Handle a device key query from a client @@ -125,7 +125,7 @@ def query_devices(self, query_body, timeout): r[user_id] = remote_queries[user_id] # Now fetch any devices that we don't have in our cache - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def do_remote_query(destination): destination_query = remote_queries_not_in_cache[destination] @@ -158,7 +158,7 @@ def do_remote_query(destination): return {"device_keys": results, "failures": failures} - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def query_local_devices(self, query): """Get E2E device keys for local users @@ -222,7 +222,7 @@ def on_federation_query_client_keys(self, query_body): res = yield self.query_local_devices(device_keys_query) return {"device_keys": res} - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def claim_one_time_keys(self, query, timeout): local_query = [] @@ -251,7 +251,7 @@ def claim_one_time_keys(self, query, timeout): key_id: json.loads(json_bytes) } - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def claim_client_keys(destination): opentracing.set_tag("destination", destination) diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index ea291d79bc66..f6a95bce18d7 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -50,7 +50,7 @@ def __init__(self, hs): # changed. 
self._upload_linearizer = Linearizer("upload_room_keys_lock") - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def get_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk get the E2E room keys for a given backup, optionally filtered to a given @@ -89,7 +89,7 @@ def get_room_keys(self, user_id, version, room_id=None, session_id=None): opentracing.log_kv(results) return results - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def delete_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk delete the E2E room keys for a given backup, optionally filtered to a given @@ -111,7 +111,7 @@ def delete_room_keys(self, user_id, version, room_id=None, session_id=None): with (yield self._upload_linearizer.queue(user_id)): yield self.store.delete_e2e_room_keys(user_id, version, room_id, session_id) - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def upload_room_keys(self, user_id, version, room_keys): """Bulk upload a list of room keys into a given backup version, asserting @@ -257,7 +257,7 @@ def _should_replace_room_key(current_room_key, room_key): return False return True - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def create_version(self, user_id, version_info): """Create a new backup version. This automatically becomes the new @@ -316,7 +316,7 @@ def get_version_info(self, user_id, version=None): raise return res - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def delete_version(self, user_id, version=None): """Deletes a given version of the user's e2e_room_keys backup @@ -337,7 +337,7 @@ def delete_version(self, user_id, version=None): else: raise - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def update_version(self, user_id, version, version_info): """Update the info about a given version of the user's backup diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index a9b808854636..d2c209c471fa 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -157,7 +157,7 @@ def deferred_function(*args, **kwargs): from canonicaljson import json -from twisted.internet import defer, +from twisted.internet import defer from synapse.config import ConfigError diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index 880cf9d6e770..41cdab9532df 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -69,7 +69,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - @opentracing.trace_deferred_using_operation_name("upload_keys") + @opentracing.trace_using_operation_name("upload_keys") @defer.inlineCallbacks def on_POST(self, request, device_id): requester = yield self.auth.get_user_by_req(request, allow_guest=True) @@ -183,7 +183,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.device_handler = hs.get_device_handler() - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def on_GET(self, request): requester = yield self.auth.get_user_by_req(request, allow_guest=True) diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index a9e3875a464d..6b60aa525f6d 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -312,7 +312,7 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_room_keys_handler = 
hs.get_e2e_room_keys_handler() - @opentracing.trace_deferred_using_operation_name("get_room_keys_version") + @opentracing.trace_using_operation_name("get_room_keys_version") @defer.inlineCallbacks def on_GET(self, request, version): """ diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py index 7245299691f1..db8103bd5dba 100644 --- a/synapse/rest/client/v2_alpha/sendtodevice.py +++ b/synapse/rest/client/v2_alpha/sendtodevice.py @@ -43,7 +43,7 @@ def __init__(self, hs): self.txns = HttpTransactionCache(hs) self.device_message_handler = hs.get_device_message_handler() - @opentracing.trace_deferred_using_operation_name("sendToDevice") + @opentracing.trace_using_operation_name("sendToDevice") def on_PUT(self, request, message_type, txn_id): opentracing.set_tag("message_type", message_type) opentracing.set_tag("txn_id", txn_id) diff --git a/synapse/storage/deviceinbox.py b/synapse/storage/deviceinbox.py index 4eb35d13db94..5f09b86bf0ac 100644 --- a/synapse/storage/deviceinbox.py +++ b/synapse/storage/deviceinbox.py @@ -73,7 +73,7 @@ def get_new_messages_for_device_txn(txn): "get_new_messages_for_device", get_new_messages_for_device_txn ) - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def delete_messages_for_device(self, user_id, device_id, up_to_stream_id): """ @@ -185,7 +185,7 @@ def get_new_messages_for_remote_destination_txn(txn): get_new_messages_for_remote_destination_txn, ) - @opentracing.trace_deferred + @opentracing.trace def delete_device_msgs_for_remote(self, destination, up_to_stream_id): """Used to delete messages when the remote destination acknowledges their receipt. @@ -236,7 +236,7 @@ def __init__(self, db_conn, hs): expiry_ms=30 * 60 * 1000, ) - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def add_messages_to_device_inbox( self, local_messages_by_user_then_device, remote_messages_by_destination diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 4f9d440d3d2a..04310abcba71 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -74,7 +74,7 @@ def get_devices_by_user(self, user_id): return {d["device_id"]: d for d in devices} - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def get_devices_by_remote(self, destination, from_stream_id, limit): """Get stream of updates to send to remote servers @@ -316,7 +316,7 @@ def _mark_as_sent_devices_by_remote_txn(self, txn, destination, stream_id): def get_device_stream_token(self): return self._device_list_id_gen.get_current_token() - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def get_user_devices_from_cache(self, query_list): """Get the devices (and keys if any) for remote users from the cache. 
diff --git a/synapse/storage/e2e_room_keys.py b/synapse/storage/e2e_room_keys.py index 594ea97d9fe6..4a6b2be6f617 100644 --- a/synapse/storage/e2e_room_keys.py +++ b/synapse/storage/e2e_room_keys.py @@ -104,7 +104,7 @@ def set_e2e_room_key(self, user_id, version, room_id, session_id, room_key): } ) - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def get_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk get the E2E room keys for a given backup, optionally filtered to a given @@ -163,7 +163,7 @@ def get_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): return sessions - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def delete_e2e_room_keys(self, user_id, version, room_id=None, session_id=None): """Bulk delete the E2E room keys for a given backup, optionally filtered to a given @@ -247,7 +247,7 @@ def _get_e2e_room_keys_version_info_txn(txn): "get_e2e_room_keys_version_info", _get_e2e_room_keys_version_info_txn ) - @opentracing.trace_deferred + @opentracing.trace def create_e2e_room_keys_version(self, user_id, info): """Atomically creates a new version of this user's e2e_room_keys store with the given version info. @@ -288,7 +288,7 @@ def _create_e2e_room_keys_version_txn(txn): "create_e2e_room_keys_version_txn", _create_e2e_room_keys_version_txn ) - @opentracing.trace_deferred + @opentracing.trace def update_e2e_room_keys_version(self, user_id, version, info): """Update a given backup version @@ -305,7 +305,7 @@ def update_e2e_room_keys_version(self, user_id, version, info): desc="update_e2e_room_keys_version", ) - @opentracing.trace_deferred + @opentracing.trace def delete_e2e_room_keys_version(self, user_id, version=None): """Delete a given backup version of the user's room keys. Doesn't delete their actual key data. diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 73c51f009178..7ee973a8bb3c 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -25,7 +25,7 @@ class EndToEndKeyWorkerStore(SQLBaseStore): - @opentracing.trace_deferred + @opentracing.trace @defer.inlineCallbacks def get_e2e_device_keys( self, query_list, include_all_devices=False, include_deleted_devices=False From 08aaad0756c8864f9ca044cf37a4e3836c83ea8b Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 5 Aug 2019 11:30:18 +0100 Subject: [PATCH 31/46] Nicer changelog Co-Authored-By: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> --- changelog.d/5687.misc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/5687.misc b/changelog.d/5687.misc index 3d379753a73e..9d98c4abb157 100644 --- a/changelog.d/5687.misc +++ b/changelog.d/5687.misc @@ -1 +1 @@ -Opentrace e2e code paths. +Add OpenTracing instrumentation for e2e code paths. From 8b53f8e9bda8a6eae353dab96fc6d639504393ac Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 5 Aug 2019 11:36:50 +0100 Subject: [PATCH 32/46] Typo Co-Authored-By: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> --- synapse/api/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index fef1ee4040b5..1102d002656e 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -213,7 +213,7 @@ def get_user_by_req( request.authenticated_entity = user_id opentracing.set_tag("authenticated_entity", user_id) # there is at least one other place where authenticated entity is - # set. 
user_id is tagged incase authenticated_entity is clobbered
+        # set. user_id is tagged in case authenticated_entity is clobbered
         opentracing.set_tag("user_id", user_id)
 
         if ip_addr and self.hs.config.track_appservice_user_ips:

From 35eb018c02652b3786bcfdab44961ed8dbb29417 Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 5 Aug 2019 11:39:58 +0100
Subject: [PATCH 33/46] Double negatives do not make code that isn't unclear..

Co-Authored-By: Richard van der Hoff <1389908+richvdh@users.noreply.github.com>
---
 synapse/federation/sender/per_destination_queue.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py
index f0b910b446e0..2cba9d191ed4 100644
--- a/synapse/federation/sender/per_destination_queue.py
+++ b/synapse/federation/sender/per_destination_queue.py
@@ -231,7 +231,7 @@ def _transaction_transmission_loop(self):
                 ).get("opentracing", {})
                 # If there is no span context then we are either blacklisting
                 # this destination or we are not tracing
-                if not span_context == {}:
+                if span_context:
                     if "references" not in span_context:
                         span_context["references"] = [
                             opentracing.active_span_context_as_string()

From 46e41ee08d40750f6992b9479be18006162bee32 Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 5 Aug 2019 11:48:10 +0100
Subject: [PATCH 34/46] Comment for 'context' column in 'device_lists_outbound_pokes'

---
 .../storage/schema/delta/55/add_spans_to_device_lists.sql | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql b/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql
index d4c09005ab88..bbd5d58486e5 100644
--- a/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql
+++ b/synapse/storage/schema/delta/55/add_spans_to_device_lists.sql
@@ -13,4 +13,11 @@
  * limitations under the License.
  */
 
+/*
+ * Opentracing needs to inject a span_context into a data item which leaves the
+ * current execution context. Since device list updates are dumped in here
+ * and then processed later, they need to include the span context for opentracing.
+ * Since we may also decide later to include other tracking information, the column
+ * has just been called "context"; the structure of the data within it may change.
+ */
 ALTER TABLE device_lists_outbound_pokes ADD context TEXT;
\ No newline at end of file

From d859c34fb9b6387f773380ba1d58c5d449b912f4 Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 5 Aug 2019 12:16:10 +0100
Subject: [PATCH 35/46] Nicer use of dict update methods.
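
dict.setdefault returns the existing list when the key is already present and
otherwise inserts and returns the default, so the if/else around "references"
collapses into a single call. A minimal sketch of the idiom with placeholder
values (not real span references):

    span_context = {}
    # First append: "references" is missing, so an empty list is inserted first.
    span_context.setdefault("references", []).append("span-ref-1")
    # Second append: the existing list is returned and extended.
    span_context.setdefault("references", []).append("span-ref-2")
    assert span_context == {"references": ["span-ref-1", "span-ref-2"]}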
---
 synapse/federation/sender/per_destination_queue.py | 11 +++--------
 1 file changed, 3 insertions(+), 8 deletions(-)

diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py
index 2cba9d191ed4..0d7246a613da 100644
--- a/synapse/federation/sender/per_destination_queue.py
+++ b/synapse/federation/sender/per_destination_queue.py
@@ -232,14 +232,9 @@ def _transaction_transmission_loop(self):
                 # If there is no span context then we are either blacklisting
                 # this destination or we are not tracing
                 if span_context:
-                    if "references" not in span_context:
-                        span_context["references"] = [
-                            opentracing.active_span_context_as_string()
-                        ]
-                    else:
-                        span_context["references"].append(
-                            opentracing.active_span_context_as_string()
-                        )
+                    span_context.setdefault("references", []).append(
+                        opentracing.active_span_context_as_string()
+                    )
                     edu_dict["content"]["context"] = json.dumps(
                         {"opentracing": span_context}
                     )

From e7f42857bbb191140bd9985dd50ce76ad7fd7e3a Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 5 Aug 2019 13:48:09 +0100
Subject: [PATCH 36/46] Refactor return value so we don't create identical lists each time.

---
 synapse/handlers/device.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index 4fef2aef0c22..ff198d0a3e35 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -216,11 +216,11 @@ def get_user_ids_changed(self, user_id, from_token):
             possibly_joined = []
             possibly_left = []
 
-        opentracing.log_kv(
-            {"changed": list(possibly_joined), "left": list(possibly_left)}
-        )
+        result = {"changed": list(possibly_joined), "left": list(possibly_left)}
+
+        opentracing.log_kv(result)
 
-        return {"changed": list(possibly_joined), "left": list(possibly_left)}
+        return result
 
 
 class DeviceHandler(DeviceWorkerHandler):

From 50964d2341d658df8ffa05880e0a1e1c0cc07e65 Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 5 Aug 2019 13:56:04 +0100
Subject: [PATCH 37/46] String concatenation without the '+'

---
 synapse/handlers/device.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index ff198d0a3e35..f658d4baf6e9 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -512,7 +512,8 @@ def incoming_device_list_update(self, origin, edu_content):
             opentracing.set_tag("error", True)
             opentracing.log_kv(
                 {
-                    "message": "Got a device list update edu from a user and device which does not match the origin of the request.",
+                    "message": "Got a device list update edu from a user and "
+                    "device which does not match the origin of the request.",
                     "user_id": user_id,
                     "device_id": device_id,
                 }
@@ -527,7 +528,7 @@ def incoming_device_list_update(self, origin, edu_content):
             opentracing.log_kv(
                 {
                     "message": "Got an update from a user for which "
-                    + "we don't share any rooms",
+                    "we don't share any rooms",
                     "other user_id": user_id,
                 }
             )

From 03958693ce05bcbfad4857469eb3926cddb56871 Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 5 Aug 2019 13:59:04 +0100
Subject: [PATCH 38/46] Use underscores.
---
 synapse/handlers/devicemessage.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py
index 625b1b70af16..424965368e55 100644
--- a/synapse/handlers/devicemessage.py
+++ b/synapse/handlers/devicemessage.py
@@ -81,7 +81,7 @@ def on_direct_to_device_edu(self, origin, content):
 
     @defer.inlineCallbacks
     def send_device_message(self, sender_user_id, message_type, messages):
-        opentracing.set_tag("number of messages", len(messages))
+        opentracing.set_tag("number_of_messages", len(messages))
         opentracing.set_tag("sender", sender_user_id)
         local_messages = {}
         remote_messages = {}

From 7ab2088ddc5604940f8755bc8f621c6a55a26072 Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 5 Aug 2019 14:04:07 +0100
Subject: [PATCH 39/46] Remove redundant tagging.

---
 synapse/handlers/e2e_keys.py         | 2 --
 synapse/rest/client/v2_alpha/keys.py | 2 --
 2 files changed, 4 deletions(-)

diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index d6f894dd722f..43492add6fbe 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -303,7 +303,6 @@ def upload_keys_for_user(self, user_id, device_id, keys):
 
         # TODO: Validate the JSON to make sure it has the right keys.
         device_keys = keys.get("device_keys", None)
-        opentracing.set_tag("device_keys", device_keys)
         if device_keys:
             logger.info(
                 "Updating device_keys for device %r for user %s at %d",
@@ -330,7 +329,6 @@ def upload_keys_for_user(self, user_id, device_id, keys):
                 {"message": "Not updating device_keys for user", "user_id": user_id}
             )
         one_time_keys = keys.get("one_time_keys", None)
-        opentracing.set_tag("one_time_keys", one_time_keys)
         if one_time_keys:
             opentracing.log_kv(
                 {
diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py
index 41cdab9532df..f2722be18127 100644
--- a/synapse/rest/client/v2_alpha/keys.py
+++ b/synapse/rest/client/v2_alpha/keys.py
@@ -199,8 +199,6 @@ def on_GET(self, request):
 
         user_id = requester.user.to_string()
 
-        opentracing.set_tag("user_id", user_id)
-
         results = yield self.device_handler.get_user_ids_changed(user_id, from_token)
 
         return (200, results)

From f76b071c99ec1165d34e889a48f814f12389e87d Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 5 Aug 2019 14:20:39 +0100
Subject: [PATCH 40/46] Remove redundant spans.
--- synapse/rest/client/v2_alpha/keys.py | 1 - synapse/rest/client/v2_alpha/room_keys.py | 1 - 2 files changed, 2 deletions(-) diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py index f2722be18127..3282b5589291 100644 --- a/synapse/rest/client/v2_alpha/keys.py +++ b/synapse/rest/client/v2_alpha/keys.py @@ -183,7 +183,6 @@ def __init__(self, hs): self.auth = hs.get_auth() self.device_handler = hs.get_device_handler() - @opentracing.trace @defer.inlineCallbacks def on_GET(self, request): requester = yield self.auth.get_user_by_req(request, allow_guest=True) diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 6b60aa525f6d..1571730b4631 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -312,7 +312,6 @@ def __init__(self, hs): self.auth = hs.get_auth() self.e2e_room_keys_handler = hs.get_e2e_room_keys_handler() - @opentracing.trace_using_operation_name("get_room_keys_version") @defer.inlineCallbacks def on_GET(self, request, version): """ From 22b7e6a01156bdeab8fac420807a0abbff4eaf27 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 5 Aug 2019 14:23:11 +0100 Subject: [PATCH 41/46] Debug comments gone rampant. --- synapse/storage/devices.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 04310abcba71..0f00d9a4c5d9 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -140,13 +140,6 @@ def get_devices_by_remote(self, destination, from_stream_id, limit): break key = (update[0], update[1]) - logger.info("++++++++++++++++++++++++ update-0 %s", update[0]) - logger.info("++++++++++++++++++++++++ update-1 %s", update[1]) - logger.info("++++++++++++++++++++++++ update-2 %s", update[2]) - logger.info("++++++++++++++++++++++++ update-3 %s", update[3]) - logger.info( - "+++++++++++++++++++++++++++ %s", (query_map.get(key, 0), update[2]) - ) query_map[key] = ( max(query_map.get(key, (0, None))[0], update[2]), update[3], From 6a355ca20ab6d11f205eeb5b45b7d3c9f9d198d3 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 5 Aug 2019 14:26:30 +0100 Subject: [PATCH 42/46] Refactor for clarity. --- synapse/storage/devices.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 0f00d9a4c5d9..046c000c4085 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -140,10 +140,10 @@ def get_devices_by_remote(self, destination, from_stream_id, limit): break key = (update[0], update[1]) - query_map[key] = ( - max(query_map.get(key, (0, None))[0], update[2]), - update[3], - ) + context = update[3] + stream_id = max(query_map.get(key, (0, None))[0], update[2]) + + query_map[key] = (stream_id, context) # If we didn't find any updates with a stream_id lower than the cutoff, it # means that there are more than limit updates all of which have the same From bd0ed7b0c42a22eb4cdd149700034fda6a7e1dd9 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 5 Aug 2019 14:44:19 +0100 Subject: [PATCH 43/46] Docstrings shouldn't lie. 
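
After the previous refactor, each query_map value is a (stream_id, opentracing
context) pair rather than a bare stream_id, so the docstring is brought in line
with the code. A purely hypothetical entry, for illustration only (the user id,
device id and context string below are made up):

    query_map = {
        ("@alice:example.com", "ABCDEFGHIJ"): (
            123,                        # stream_id of the queued update
            '{"opentracing": {...}}',   # serialised span context, may be None
        )
    }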
---
 synapse/storage/devices.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py
index 046c000c4085..3b2e9c669911 100644
--- a/synapse/storage/devices.py
+++ b/synapse/storage/devices.py
@@ -194,8 +194,8 @@ def _get_device_update_edus_by_remote(self, destination, from_stream_id, query_m
         Args:
             destination (str): The host the device updates are intended for
             from_stream_id (int): The minimum stream_id to filter updates by, exclusive
-            query_map (Dict[(str, str): int]): Dictionary mapping
-                user_id/device_id to update stream_id
+            query_map (Dict[(str, str): (int, str)]): Dictionary mapping
+                user_id/device_id to update stream_id and the relevant opentracing context
 
         Returns:
             List[Dict]: List of objects representing an device update EDU
@@ -217,7 +217,7 @@ def _get_device_update_edus_by_remote(self, destination, from_stream_id, query_m
                 destination, user_id, from_stream_id
             )
             for device_id, device in iteritems(user_devices):
-                stream_id = query_map[(user_id, device_id)][0]
+                stream_id, _ = query_map[(user_id, device_id)]
                 result = {
                     "user_id": user_id,
                     "device_id": device_id,

From 4f36a2dfeca8222898b9b1f5dbb12935a85e234c Mon Sep 17 00:00:00 2001
From: Jorik Schellekens
Date: Mon, 5 Aug 2019 15:02:29 +0100
Subject: [PATCH 44/46] Create and use a method to get the span context as a dict.

---
 synapse/logging/opentracing.py | 25 +++++++++++++++++++++++++
 synapse/storage/devices.py     |  3 +--
 2 files changed, 26 insertions(+), 2 deletions(-)

diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index d2c209c471fa..bbc609a56973 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -527,6 +527,31 @@ def inject_active_span_text_map(carrier, destination=None):
     )
 
 
+def get_active_span_text_map(destination=None):
+    """
+    Gets a span context as a dict. This can be used instead of injecting a span
+    into an empty carrier.
+
+    Args:
+        destination (str): the name of the remote server. The dict will only
+            contain a span context if the destination matches the homeserver_whitelist
+            or if destination is None.
+
+    Returns:
+        A dict containing the span context.
+ """ + + if not opentracing or (destination and not whitelisted_homeserver(destination)): + return {} + + carrier = {} + opentracing.tracer.inject( + opentracing.tracer.active_span, opentracing.Format.TEXT_MAP, carrier + ) + + return carrier + + def active_span_context_as_string(): """ Returns: diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 3b2e9c669911..724c5b76222e 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -828,8 +828,7 @@ def _add_device_change_txn(self, txn, user_id, device_ids, hosts, stream_id): ], ) - context = {"opentracing": {}} - opentracing.inject_active_span_text_map(context["opentracing"]) + context = {"opentracing": opentracing.get_active_span_text_map()} self._simple_insert_many_txn( txn, From 82d6eb186ce739dac3a10163e242ec7dad6b7722 Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 5 Aug 2019 15:12:17 +0100 Subject: [PATCH 45/46] Remove unused import --- synapse/rest/client/v2_alpha/room_keys.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py index 1571730b4631..10dec96208b6 100644 --- a/synapse/rest/client/v2_alpha/room_keys.py +++ b/synapse/rest/client/v2_alpha/room_keys.py @@ -17,7 +17,6 @@ from twisted.internet import defer -import synapse.logging.opentracing as opentracing from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.http.servlet import ( RestServlet, From eee4eff050a7708d38e95cbc6533d44e4ef5b6cd Mon Sep 17 00:00:00 2001 From: Jorik Schellekens Date: Mon, 5 Aug 2019 16:11:54 +0100 Subject: [PATCH 46/46] Bind exception to name --- synapse/handlers/device.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 486d52beadea..d7e1935d0f11 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -654,7 +654,7 @@ def user_device_resync(self, user_id): opentracing.log_kv({"reason": "FederationDeniedError"}) logger.info(e) return - except Exception: + except Exception as e: # TODO: Remember that we are now out of sync and try again # later opentracing.set_tag("error", True)