
Merge commit '5bf8e5f55' into anoa/dinsic_release_1_21_x
* commit '5bf8e5f55':
  Convert the well known resolver to async (#8214)
  Convert additional databases to async/await part 2 (#8200)
  Make MultiWriterIDGenerator work for streams that use negative stream IDs (#8203)
  Do not install setuptools 50.0. (#8212)
  Move and rename `get_devices_with_keys_by_user` (#8204)
  Rename `get_e2e_device_keys` to better reflect its purpose (#8205)
  Add a comment about _LimitedHostnameResolver
anoadragon453 committed Oct 20, 2020
2 parents d49dd2f + 5bf8e5f commit 059e0fd
Showing 36 changed files with 392 additions and 195 deletions.
2 changes: 1 addition & 1 deletion INSTALL.md
@@ -73,7 +73,7 @@ mkdir -p ~/synapse
virtualenv -p python3 ~/synapse/env
source ~/synapse/env/bin/activate
pip install --upgrade pip
-pip install --upgrade setuptools
+pip install --upgrade setuptools!=50.0 # setuptools==50.0 fails on some older Python versions
pip install matrix-synapse
```

1 change: 1 addition & 0 deletions changelog.d/8200.misc
@@ -0,0 +1 @@
+Convert various parts of the codebase to async/await.
1 change: 1 addition & 0 deletions changelog.d/8203.misc
@@ -0,0 +1 @@
+Make `MultiWriterIDGenerator` work for streams that use negative values.
1 change: 1 addition & 0 deletions changelog.d/8204.misc
@@ -0,0 +1 @@
+Refactor queries for device keys and cross-signatures.
1 change: 1 addition & 0 deletions changelog.d/8205.misc
@@ -0,0 +1 @@
+Refactor queries for device keys and cross-signatures.
1 change: 1 addition & 0 deletions changelog.d/8212.bugfix
@@ -0,0 +1 @@
+Do not install setuptools 50.0. It can lead to a broken configuration on some older Python versions.
1 change: 1 addition & 0 deletions changelog.d/8214.misc
@@ -0,0 +1 @@
+Convert various parts of the codebase to async/await.
1 change: 1 addition & 0 deletions mypy.ini
@@ -28,6 +28,7 @@ files =
synapse/handlers/saml_handler.py,
synapse/handlers/sync.py,
synapse/handlers/ui_auth,
+synapse/http/federation/well_known_resolver.py,
synapse/http/server.py,
synapse/http/site.py,
synapse/logging/,
7 changes: 7 additions & 0 deletions synapse/app/_base.py
@@ -334,6 +334,13 @@ def install_dns_limiter(reactor, max_dns_requests_in_flight=100):
This is to work around https://twistedmatrix.com/trac/ticket/9620, where we
can run out of file descriptors and loop infinitely if we attempt to do too
many DNS queries at once
+XXX: I'm confused by this. reactor.nameResolver does not use twisted.names unless
+you explicitly install twisted.names as the resolver; rather it uses a GAIResolver
+backed by the reactor's default threadpool (which is limited to 10 threads). So
+(a) I don't understand why twisted ticket 9620 is relevant, and (b) I don't
+understand why we would run out of FDs if we did too many lookups at once.
+-- richvdh 2020/08/29
"""
new_resolver = _LimitedHostnameResolver(
reactor.nameResolver, max_dns_requests_in_flight
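The idea behind `_LimitedHostnameResolver` is to cap how many lookups are in flight at once and queue the rest. A minimal sketch of that pattern using Twisted's `DeferredSemaphore` (the `LimitedLookups` wrapper and its names are hypothetical, not the actual Synapse class):

```python
from twisted.internet import defer


class LimitedLookups:
    """Caps the number of in-flight name lookups at `limit`."""

    def __init__(self, resolve, limit=100):
        # `resolve` is any callable that returns a Deferred, e.g. the
        # lookup method of a real resolver (a stand-in here).
        self._resolve = resolve
        self._sem = defer.DeferredSemaphore(limit)

    def lookup(self, hostname):
        # DeferredSemaphore.run acquires a slot, invokes the callable, and
        # releases the slot when the returned Deferred fires, so at most
        # `limit` lookups run concurrently and the rest wait in a queue.
        return self._sem.run(self._resolve, hostname)
```

Excess lookups wait on the semaphore instead of each opening new sockets, which is the file-descriptor exhaustion the docstring describes.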
19 changes: 12 additions & 7 deletions synapse/events/builder.py
@@ -12,7 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Optional
+from typing import Any, Dict, List, Optional, Tuple, Union

import attr
from nacl.signing import SigningKey
@@ -97,14 +97,14 @@ def state_key(self):
def is_state(self):
return self._state_key is not None

-async def build(self, prev_event_ids):
+async def build(self, prev_event_ids: List[str]) -> EventBase:
"""Transform into a fully signed and hashed event
Args:
-prev_event_ids (list[str]): The event IDs to use as the prev events
+prev_event_ids: The event IDs to use as the prev events
Returns:
-FrozenEvent
+The signed and hashed event.
"""

state_ids = await self._state.get_current_state_ids(
@@ -114,8 +114,13 @@ async def build(self, prev_event_ids):

format_version = self.room_version.event_format
if format_version == EventFormatVersions.V1:
-auth_events = await self._store.add_event_hashes(auth_ids)
-prev_events = await self._store.add_event_hashes(prev_event_ids)
+# The types of auth/prev events changes between event versions.
+auth_events = await self._store.add_event_hashes(
+auth_ids
+) # type: Union[List[str], List[Tuple[str, Dict[str, str]]]]
+prev_events = await self._store.add_event_hashes(
+prev_event_ids
+) # type: Union[List[str], List[Tuple[str, Dict[str, str]]]]
else:
auth_events = auth_ids
prev_events = prev_event_ids
@@ -138,7 +143,7 @@ async def build(self, prev_event_ids):
"unsigned": self.unsigned,
"depth": depth,
"prev_state": [],
-}
+} # type: Dict[str, Any]

if self.is_state():
event_dict["state_key"] = self._state_key
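The new `Union` annotation reflects a real difference between event format versions: in v1-format rooms `add_event_hashes` yields (event ID, hash dict) pairs, while later formats reference bare event IDs. An illustrative comparison, with made-up values:

```python
# Room version 1 event format: prev_events are (event_id, hashes) pairs.
prev_events_v1 = [
    ("$1536160867001:example.com", {"sha256": "aBase64EncodedSha256Hash"}),
]

# Later event formats: prev_events are bare event IDs.
prev_events_later = ["$aBase64EncodedSha256Hash"]
```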
4 changes: 3 additions & 1 deletion synapse/handlers/device.py
@@ -234,7 +234,9 @@ async def get_user_ids_changed(self, user_id, from_token):
return result

async def on_federation_query_user_devices(self, user_id):
-stream_id, devices = await self.store.get_devices_with_keys_by_user(user_id)
+stream_id, devices = await self.store.get_e2e_device_keys_for_federation_query(
+user_id
+)
master_key = await self.store.get_e2e_cross_signing_key(user_id, "master")
self_signing_key = await self.store.get_e2e_cross_signing_key(
user_id, "self_signing"
4 changes: 2 additions & 2 deletions synapse/handlers/e2e_keys.py
@@ -353,7 +353,7 @@ async def query_local_devices(
# make sure that each queried user appears in the result dict
result_dict[user_id] = {}

-results = await self.store.get_e2e_device_keys(local_query)
+results = await self.store.get_e2e_device_keys_for_cs_api(local_query)

# Build the result structure
for user_id, device_keys in results.items():
@@ -734,7 +734,7 @@ async def _process_self_signatures(self, user_id, signatures):
# fetch our stored devices. This is used to 1. verify
# signatures on the master key, and 2. to compare with what
# was sent if the device was signed
-devices = await self.store.get_e2e_device_keys([(user_id, None)])
+devices = await self.store.get_e2e_device_keys_for_cs_api([(user_id, None)])

if user_id not in devices:
raise NotFoundError("No device keys found")
13 changes: 3 additions & 10 deletions synapse/handlers/message.py
@@ -49,14 +49,7 @@
from synapse.replication.http.send_event import ReplicationSendEventRestServlet
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.state import StateFilter
-from synapse.types import (
-Collection,
-Requester,
-RoomAlias,
-StreamToken,
-UserID,
-create_requester,
-)
+from synapse.types import Requester, RoomAlias, StreamToken, UserID, create_requester
from synapse.util import json_decoder
from synapse.util.async_helpers import Linearizer
from synapse.util.frozenutils import frozendict_json_encoder
@@ -449,7 +442,7 @@ async def create_event(
event_dict: dict,
token_id: Optional[str] = None,
txn_id: Optional[str] = None,
-prev_event_ids: Optional[Collection[str]] = None,
+prev_event_ids: Optional[List[str]] = None,
require_consent: bool = True,
) -> Tuple[EventBase, EventContext]:
"""
@@ -789,7 +782,7 @@ async def create_new_client_event(
self,
builder: EventBuilder,
requester: Optional[Requester] = None,
-prev_event_ids: Optional[Collection[str]] = None,
+prev_event_ids: Optional[List[str]] = None,
) -> Tuple[EventBase, EventContext]:
"""Create a new event for a local client
12 changes: 2 additions & 10 deletions synapse/handlers/room_member.py
@@ -38,15 +38,7 @@
from synapse.events.snapshot import EventContext
from synapse.events.validator import EventValidator
from synapse.storage.roommember import RoomsForUser
-from synapse.types import (
-Collection,
-JsonDict,
-Requester,
-RoomAlias,
-RoomID,
-StateMap,
-UserID,
-)
+from synapse.types import JsonDict, Requester, RoomAlias, RoomID, StateMap, UserID
from synapse.util.async_helpers import Linearizer
from synapse.util.distributor import user_joined_room, user_left_room

@@ -185,7 +177,7 @@ async def _local_membership_update(
target: UserID,
room_id: str,
membership: str,
-prev_event_ids: Collection[str],
+prev_event_ids: List[str],
txn_id: Optional[str] = None,
ratelimit: bool = True,
content: Optional[dict] = None,
4 changes: 2 additions & 2 deletions synapse/http/federation/matrix_federation_agent.py
@@ -134,8 +134,8 @@ def request(self, method, uri, headers=None, bodyProducer=None):
and not _is_ip_literal(parsed_uri.hostname)
and not parsed_uri.port
):
-well_known_result = yield self._well_known_resolver.get_well_known(
-parsed_uri.hostname
+well_known_result = yield defer.ensureDeferred(
+self._well_known_resolver.get_well_known(parsed_uri.hostname)
)
delegated_server = well_known_result.delegated_server

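`defer.ensureDeferred` is the standard bridge for calling an `async def` function from `@defer.inlineCallbacks` code: the generator can only yield Deferreds, so the coroutine has to be wrapped first. A self-contained sketch of the pattern (the `fetch_delegation` coroutine is made up):

```python
from twisted.internet import defer


async def fetch_delegation(hostname: bytes) -> bytes:
    # Stand-in for an async lookup such as get_well_known().
    return b"matrix." + hostname


@defer.inlineCallbacks
def request(hostname):
    # A coroutine object is not a Deferred, so wrap it before yielding.
    result = yield defer.ensureDeferred(fetch_delegation(hostname))
    return result
```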
57 changes: 31 additions & 26 deletions synapse/http/federation/well_known_resolver.py
@@ -16,13 +16,15 @@
import logging
import random
import time
+from typing import Callable, Dict, Optional, Tuple

import attr

from twisted.internet import defer
from twisted.web.client import RedirectAgent, readBody
from twisted.web.http import stringToDatetime
from twisted.web.http_headers import Headers
+from twisted.web.iweb import IResponse

from synapse.logging.context import make_deferred_yieldable
from synapse.util import Clock, json_decoder
@@ -99,15 +101,14 @@ def __init__(
self._well_known_agent = RedirectAgent(agent)
self.user_agent = user_agent

-@defer.inlineCallbacks
-def get_well_known(self, server_name):
+async def get_well_known(self, server_name: bytes) -> WellKnownLookupResult:
"""Attempt to fetch and parse a .well-known file for the given server
Args:
-server_name (bytes): name of the server, from the requested url
+server_name: name of the server, from the requested url
Returns:
-Deferred[WellKnownLookupResult]: The result of the lookup
+The result of the lookup
"""
try:
prev_result, expiry, ttl = self._well_known_cache.get_with_expiry(
@@ -124,7 +125,9 @@ def get_well_known(self, server_name):
# requests for the same server in parallel?
try:
with Measure(self._clock, "get_well_known"):
-result, cache_period = yield self._fetch_well_known(server_name)
+result, cache_period = await self._fetch_well_known(
+server_name
+) # type: Tuple[Optional[bytes], float]

except _FetchWellKnownFailure as e:
if prev_result and e.temporary:
@@ -153,26 +156,25 @@ def get_well_known(self, server_name):

return WellKnownLookupResult(delegated_server=result)

-@defer.inlineCallbacks
-def _fetch_well_known(self, server_name):
+async def _fetch_well_known(self, server_name: bytes) -> Tuple[bytes, float]:
"""Actually fetch and parse a .well-known, without checking the cache
Args:
-server_name (bytes): name of the server, from the requested url
+server_name: name of the server, from the requested url
Raises:
_FetchWellKnownFailure if we fail to lookup a result
Returns:
-Deferred[Tuple[bytes,int]]: The lookup result and cache period.
+The lookup result and cache period.
"""

had_valid_well_known = self._had_valid_well_known_cache.get(server_name, False)

# We do this in two steps to differentiate between possibly transient
# errors (e.g. can't connect to host, 503 response) and more permanent
# errors (such as getting a 404 response).
-response, body = yield self._make_well_known_request(
+response, body = await self._make_well_known_request(
server_name, retry=had_valid_well_known
)

@@ -215,20 +217,20 @@ def _fetch_well_known(self, server_name):

return result, cache_period

-@defer.inlineCallbacks
-def _make_well_known_request(self, server_name, retry):
+async def _make_well_known_request(
+self, server_name: bytes, retry: bool
+) -> Tuple[IResponse, bytes]:
"""Make the well known request.
This will retry the request if requested and it fails (e.g. with a
connection error or a 5xx response).
Args:
-server_name (bytes)
-retry (bool): Whether to retry the request if it fails.
+server_name: name of the server, from the requested url
+retry: Whether to retry the request if it fails.
Returns:
-Deferred[tuple[IResponse, bytes]] Returns the response object and
-body. Response may be a non-200 response.
+Returns the response object and body. Response may be a non-200 response.
"""
uri = b"https://%s/.well-known/matrix/server" % (server_name,)
uri_str = uri.decode("ascii")
@@ -243,12 +245,12 @@ def _make_well_known_request(self, server_name, retry):

logger.info("Fetching %s", uri_str)
try:
-response = yield make_deferred_yieldable(
+response = await make_deferred_yieldable(
self._well_known_agent.request(
b"GET", uri, headers=Headers(headers)
)
)
-body = yield make_deferred_yieldable(readBody(response))
+body = await make_deferred_yieldable(readBody(response))

if 500 <= response.code < 600:
raise Exception("Non-200 response %s" % (response.code,))
@@ -265,21 +267,24 @@ def _make_well_known_request(self, server_name, retry):
logger.info("Error fetching %s: %s. Retrying", uri_str, e)

# Sleep briefly in the hopes that they come back up
-yield self._clock.sleep(0.5)
+await self._clock.sleep(0.5)


-def _cache_period_from_headers(headers, time_now=time.time):
+def _cache_period_from_headers(
+headers: Headers, time_now: Callable[[], float] = time.time
+) -> Optional[float]:
cache_controls = _parse_cache_control(headers)

if b"no-store" in cache_controls:
return 0

if b"max-age" in cache_controls:
-try:
-max_age = int(cache_controls[b"max-age"])
-return max_age
-except ValueError:
-pass
+max_age = cache_controls[b"max-age"]
+if max_age:
+try:
+return int(max_age)
+except ValueError:
+pass

expires = headers.getRawHeaders(b"expires")
if expires is not None:
@@ -295,7 +300,7 @@ def _cache_period_from_headers(headers, time_now=time.time):
return None


-def _parse_cache_control(headers):
+def _parse_cache_control(headers: Headers) -> Dict[bytes, Optional[bytes]]:
cache_controls = {}
for hdr in headers.getRawHeaders(b"cache-control", []):
for directive in hdr.split(b","):
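For reference, here is how the two header helpers at the bottom of this file fit together; a hypothetical usage, not part of the diff:

```python
from twisted.web.http_headers import Headers

# A response that may be cached for an hour.
headers = Headers({b"Cache-Control": [b"max-age=3600"]})
assert _cache_period_from_headers(headers) == 3600

# "no-store" forbids caching, so the period is zero.
headers = Headers({b"Cache-Control": [b"no-store"]})
assert _cache_period_from_headers(headers) == 0
```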
4 changes: 4 additions & 0 deletions synapse/python_dependencies.py
@@ -74,6 +74,10 @@
"Jinja2>=2.9",
"bleach>=1.4.3",
"typing-extensions>=3.7.4",
+# setuptools is required by a variety of dependencies, unfortunately version
+# 50.0 is incompatible with older Python versions, see
+# https://github.com/pypa/setuptools/issues/2352
+"setuptools!=50.0",
]

CONDITIONAL_REQUIREMENTS = {
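The `!=50.0` exclusion pin accepts every setuptools release except the broken one. One way to sanity-check the specifier, using the `packaging` library that pip itself builds on (illustrative only, not part of this commit):

```python
from packaging.specifiers import SpecifierSet

spec = SpecifierSet("!=50.0")
print("50.0" in spec)    # False: exactly the broken release is excluded
print("49.6.0" in spec)  # True
print("50.1.0" in spec)  # True
```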
3 changes: 3 additions & 0 deletions synapse/replication/slave/storage/devices.py
@@ -48,6 +48,9 @@ def __init__(self, database: DatabasePool, db_conn, hs):
"DeviceListFederationStreamChangeCache", device_list_max
)

+def get_device_stream_token(self) -> int:
+return self._device_list_id_gen.get_current_token()

def process_replication_rows(self, stream_name, instance_name, token, rows):
if stream_name == DeviceListsStream.NAME:
self._device_list_id_gen.advance(instance_name, token)
3 changes: 3 additions & 0 deletions synapse/storage/databases/main/__init__.py
@@ -264,6 +264,9 @@ def __init__(self, database: DatabasePool, db_conn, hs):
# Used in _generate_user_daily_visits to keep track of progress
self._last_user_visit_update = self._get_start_of_day()

+def get_device_stream_token(self) -> int:
+return self._device_list_id_gen.get_current_token()

def take_presence_startup_info(self):
active_on_startup = self._presence_on_startup
self._presence_on_startup = None
