author     Andrej Shadura <andrewsh@debian.org>  2021-04-07 13:45:54 +0200
committer  Andrej Shadura <andrewsh@debian.org>  2021-04-07 13:45:54 +0200
commit     55e3da6770137d9858028903525a99764b39e8b3 (patch)
tree       da29719a78b70733aaf791d9aaf0e0a42a4e83ba
parent     219ec5d1cff9f388d0be41e02d8984e733fa99d0 (diff)
New upstream version 1.31.0
-rwxr-xr-x  .buildkite/scripts/test_old_deps.sh | 2
-rwxr-xr-x  .buildkite/scripts/test_synapse_port_db.sh | 2
-rw-r--r--  CHANGES.md | 115
-rw-r--r--  INSTALL.md | 42
-rw-r--r--  README.rst | 11
-rw-r--r--  UPGRADE.rst | 9
-rw-r--r--  contrib/purge_api/purge_history.sh | 2
-rw-r--r--  contrib/purge_api/purge_remote_media.sh | 2
-rw-r--r--  debian/changelog | 12
-rwxr-xr-x  demo/clean.sh | 2
-rwxr-xr-x  demo/start.sh | 2
-rwxr-xr-x  demo/stop.sh | 2
-rw-r--r--  docker/Dockerfile | 50
-rw-r--r--  docker/build_debian.sh | 2
-rwxr-xr-x  docker/run_pg_tests.sh | 2
-rw-r--r--  docs/deprecation_policy.md | 33
-rw-r--r--  docs/reverse_proxy.md | 7
-rw-r--r--  docs/sample_config.yaml | 56
-rw-r--r--  docs/workers.md | 3
-rw-r--r--  mypy.ini | 8
-rwxr-xr-x  scripts-dev/check-newsfragment | 2
-rwxr-xr-x  scripts-dev/config-lint.sh | 2
-rwxr-xr-x  scripts-dev/federation_client.py | 75
-rwxr-xr-x  scripts-dev/generate_sample_config | 2
-rwxr-xr-x  scripts-dev/lint.sh | 2
-rwxr-xr-x  scripts-dev/make_full_schema.sh | 2
-rwxr-xr-x  scripts-dev/next_github_number.sh | 4
-rwxr-xr-x  scripts/move_remote_media_to_new_store.py | 2
-rw-r--r--  setup.cfg | 3
-rwxr-xr-x  setup.py | 3
-rw-r--r--  synapse/__init__.py | 2
-rw-r--r--  synapse/api/auth.py | 5
-rw-r--r--  synapse/api/constants.py | 7
-rw-r--r--  synapse/app/__init__.py | 4
-rw-r--r--  synapse/app/_base.py | 21
-rw-r--r--  synapse/app/generic_worker.py | 24
-rw-r--r--  synapse/app/homeserver.py | 9
-rw-r--r--  synapse/config/cache.py | 6
-rw-r--r--  synapse/config/experimental.py | 4
-rw-r--r--  synapse/config/key.py | 6
-rw-r--r--  synapse/config/metrics.py | 4
-rw-r--r--  synapse/config/oidc_config.py | 72
-rw-r--r--  synapse/config/ratelimiting.py | 8
-rw-r--r--  synapse/config/repository.py | 4
-rw-r--r--  synapse/config/saml2_config.py | 4
-rw-r--r--  synapse/config/tracer.py | 4
-rw-r--r--  synapse/crypto/context_factory.py | 4
-rw-r--r--  synapse/crypto/keyring.py | 2
-rw-r--r--  synapse/events/__init__.py | 9
-rw-r--r--  synapse/events/third_party_rules.py | 15
-rw-r--r--  synapse/events/utils.py | 16
-rw-r--r--  synapse/federation/federation_client.py | 182
-rw-r--r--  synapse/federation/federation_server.py | 25
-rw-r--r--  synapse/federation/send_queue.py | 88
-rw-r--r--  synapse/federation/sender/__init__.py | 116
-rw-r--r--  synapse/federation/sender/per_destination_queue.py | 106
-rw-r--r--  synapse/federation/transport/client.py | 35
-rw-r--r--  synapse/federation/transport/server.py | 67
-rw-r--r--  synapse/groups/attestations.py | 2
-rw-r--r--  synapse/groups/groups_server.py | 2
-rw-r--r--  synapse/handlers/_base.py | 2
-rw-r--r--  synapse/handlers/account_data.py | 2
-rw-r--r--  synapse/handlers/account_validity.py | 2
-rw-r--r--  synapse/handlers/acme.py | 2
-rw-r--r--  synapse/handlers/admin.py | 2
-rw-r--r--  synapse/handlers/appservice.py | 2
-rw-r--r--  synapse/handlers/auth.py | 15
-rw-r--r--  synapse/handlers/cas_handler.py | 2
-rw-r--r--  synapse/handlers/deactivate_account.py | 2
-rw-r--r--  synapse/handlers/device.py | 26
-rw-r--r--  synapse/handlers/devicemessage.py | 2
-rw-r--r--  synapse/handlers/e2e_keys.py | 2
-rw-r--r--  synapse/handlers/e2e_room_keys.py | 2
-rw-r--r--  synapse/handlers/groups_local.py | 2
-rw-r--r--  synapse/handlers/oidc_handler.py | 16
-rw-r--r--  synapse/handlers/password_policy.py | 2
-rw-r--r--  synapse/handlers/presence.py | 12
-rw-r--r--  synapse/handlers/profile.py | 2
-rw-r--r--  synapse/handlers/read_marker.py | 2
-rw-r--r--  synapse/handlers/receipts.py | 2
-rw-r--r--  synapse/handlers/register.py | 6
-rw-r--r--  synapse/handlers/room_list.py | 2
-rw-r--r--  synapse/handlers/room_member.py | 4
-rw-r--r--  synapse/handlers/room_member_worker.py | 10
-rw-r--r--  synapse/handlers/search.py | 2
-rw-r--r--  synapse/handlers/set_password.py | 4
-rw-r--r--  synapse/handlers/space_summary.py | 395
-rw-r--r--  synapse/handlers/state_deltas.py | 2
-rw-r--r--  synapse/handlers/stats.py | 2
-rw-r--r--  synapse/handlers/sync.py | 12
-rw-r--r--  synapse/handlers/user_directory.py | 2
-rw-r--r--  synapse/http/client.py | 2
-rw-r--r--  synapse/http/connectproxyclient.py | 96
-rw-r--r--  synapse/http/federation/well_known_resolver.py | 10
-rw-r--r--  synapse/http/proxyagent.py | 81
-rw-r--r--  synapse/logging/context.py | 2
-rw-r--r--  synapse/logging/opentracing.py | 4
-rw-r--r--  synapse/push/__init__.py | 2
-rw-r--r--  synapse/push/action_generator.py | 2
-rw-r--r--  synapse/push/bulk_push_rule_evaluator.py | 2
-rw-r--r--  synapse/push/emailpusher.py | 2
-rw-r--r--  synapse/push/httppusher.py | 4
-rw-r--r--  synapse/push/mailer.py | 2
-rw-r--r--  synapse/push/pusher.py | 2
-rw-r--r--  synapse/python_dependencies.py | 20
-rw-r--r--  synapse/replication/http/federation.py | 3
-rw-r--r--  synapse/replication/http/send_event.py | 4
-rw-r--r--  synapse/replication/slave/storage/pushers.py | 2
-rw-r--r--  synapse/replication/tcp/commands.py | 6
-rw-r--r--  synapse/replication/tcp/protocol.py | 2
-rw-r--r--  synapse/replication/tcp/streams/_base.py | 17
-rw-r--r--  synapse/replication/tcp/streams/federation.py | 14
-rw-r--r--  synapse/rest/admin/media.py | 2
-rw-r--r--  synapse/rest/admin/rooms.py | 3
-rw-r--r--  synapse/rest/admin/users.py | 5
-rw-r--r--  synapse/rest/client/v1/room.py | 84
-rw-r--r--  synapse/rest/client/v2_alpha/account.py | 2
-rw-r--r--  synapse/rest/client/v2_alpha/capabilities.py | 23
-rw-r--r--  synapse/rest/client/v2_alpha/groups.py | 2
-rw-r--r--  synapse/rest/client/v2_alpha/sync.py | 14
-rw-r--r--  synapse/rest/client/versions.py | 2
-rw-r--r--  synapse/rest/media/v1/config_resource.py | 2
-rw-r--r--  synapse/rest/media/v1/download_resource.py | 2
-rw-r--r--  synapse/rest/media/v1/media_repository.py | 2
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py | 4
-rw-r--r--  synapse/rest/media/v1/storage_provider.py | 2
-rw-r--r--  synapse/rest/media/v1/thumbnail_resource.py | 2
-rw-r--r--  synapse/rest/media/v1/upload_resource.py | 2
-rw-r--r--  synapse/rest/synapse/client/pick_username.py | 3
-rw-r--r--  synapse/secrets.py | 8
-rw-r--r--  synapse/server.py | 28
-rw-r--r--  synapse/server_notices/consent_server_notices.py | 18
-rw-r--r--  synapse/server_notices/resource_limits_server_notices.py | 11
-rw-r--r--  synapse/server_notices/server_notices_manager.py | 2
-rw-r--r--  synapse/server_notices/server_notices_sender.py | 18
-rw-r--r--  synapse/server_notices/worker_server_notices_sender.py | 11
-rw-r--r--  synapse/storage/__init__.py | 2
-rw-r--r--  synapse/storage/_base.py | 2
-rw-r--r--  synapse/storage/background_updates.py | 2
-rw-r--r--  synapse/storage/database.py | 13
-rw-r--r--  synapse/storage/databases/main/appservice.py | 2
-rw-r--r--  synapse/storage/databases/main/deviceinbox.py | 6
-rw-r--r--  synapse/storage/databases/main/event_federation.py | 2
-rw-r--r--  synapse/storage/databases/main/events.py | 19
-rw-r--r--  synapse/storage/databases/main/events_worker.py | 5
-rw-r--r--  synapse/storage/databases/main/monthly_active_users.py | 4
-rw-r--r--  synapse/storage/databases/main/presence.py | 60
-rw-r--r--  synapse/storage/databases/main/pusher.py | 2
-rw-r--r--  synapse/storage/databases/main/registration.py | 1
-rw-r--r--  synapse/storage/databases/main/transactions.py | 45
-rw-r--r--  synapse/storage/databases/state/store.py | 9
-rw-r--r--  synapse/storage/purge_events.py | 2
-rw-r--r--  synapse/storage/state.py | 6
-rw-r--r--  synapse/util/async_helpers.py | 2
-rw-r--r--  synapse/util/caches/__init__.py | 6
-rw-r--r--  synapse/util/caches/dictionary_cache.py | 64
-rw-r--r--  synapse/util/caches/ttlcache.py | 53
-rw-r--r--  synapse/util/frozenutils.py | 2
-rw-r--r--  synapse/visibility.py | 78
-rwxr-xr-x  test_postgresql.sh | 2
-rw-r--r--  tests/federation/test_federation_catch_up.py | 49
-rw-r--r--  tests/handlers/test_oidc.py | 132
-rw-r--r--  tests/handlers/test_presence.py | 20
-rw-r--r--  tests/http/test_proxyagent.py | 40
-rw-r--r--  tests/replication/_base.py | 2
-rw-r--r--  tests/replication/tcp/streams/test_typing.py | 1
-rw-r--r--  tests/replication/test_multi_media_repo.py | 4
-rw-r--r--  tests/rest/admin/test_user.py | 173
-rw-r--r--  tests/rest/client/test_third_party_rules.py | 62
-rw-r--r--  tests/rest/client/v2_alpha/test_capabilities.py | 36
-rw-r--r--  tests/rest/client/v2_alpha/test_relations.py | 62
-rw-r--r--  tests/server.py | 30
-rw-r--r--  tests/storage/test_state.py | 22
-rw-r--r--  tests/unittest.py | 12
-rw-r--r--  tests/util/test_dict_cache.py | 4
175 files changed, 2667 insertions, 758 deletions
diff --git a/.buildkite/scripts/test_old_deps.sh b/.buildkite/scripts/test_old_deps.sh
index 28e6694b..9fe5b696 100755
--- a/.buildkite/scripts/test_old_deps.sh
+++ b/.buildkite/scripts/test_old_deps.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# this script is run by buildkite in a plain `xenial` container; it installs the
# minimal requirements for tox and hands over to the py35-old tox environment.
diff --git a/.buildkite/scripts/test_synapse_port_db.sh b/.buildkite/scripts/test_synapse_port_db.sh
index 9ed21776..8914319e 100755
--- a/.buildkite/scripts/test_synapse_port_db.sh
+++ b/.buildkite/scripts/test_synapse_port_db.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
#
# Test script for 'synapse_port_db', which creates a virtualenv, installs Synapse along
# with additional dependencies needed for the test (such as coverage or the PostgreSQL
diff --git a/CHANGES.md b/CHANGES.md
index 1bf9514a..27483532 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,118 @@
+Synapse 1.31.0 (2021-04-06)
+===========================
+
+**Note:** As announced in v1.25.0, and in line with the deprecation policy for platform dependencies, this is the last release to support Python 3.5 and PostgreSQL 9.5. Future versions of Synapse will require Python 3.6+ and PostgreSQL 9.6+, as per our [deprecation policy](docs/deprecation_policy.md).
+
+This is also the last release that the Synapse team will be publishing packages for Debian Stretch and Ubuntu Xenial.
+
+
+Improved Documentation
+----------------------
+
+- Add a document describing the deprecation policy for platform dependencies. ([\#9723](https://github.com/matrix-org/synapse/issues/9723))
+
+
+Internal Changes
+----------------
+
+- Revert using `dmypy run` in lint script. ([\#9720](https://github.com/matrix-org/synapse/issues/9720))
+- Pin flake8-bugbear's version. ([\#9734](https://github.com/matrix-org/synapse/issues/9734))
+
+
+Synapse 1.31.0rc1 (2021-03-30)
+==============================
+
+Features
+--------
+
+- Add support to OpenID Connect login for requiring attributes on the `userinfo` response. Contributed by Hubbe King. ([\#9609](https://github.com/matrix-org/synapse/issues/9609))
+- Add initial experimental support for a "space summary" API. ([\#9643](https://github.com/matrix-org/synapse/issues/9643), [\#9652](https://github.com/matrix-org/synapse/issues/9652), [\#9653](https://github.com/matrix-org/synapse/issues/9653))
+- Add support for the busy presence state as described in [MSC3026](https://github.com/matrix-org/matrix-doc/pull/3026). ([\#9644](https://github.com/matrix-org/synapse/issues/9644))
+- Add support for credentials for proxy authentication in the `HTTPS_PROXY` environment variable. ([\#9657](https://github.com/matrix-org/synapse/issues/9657))
+
+
+Bugfixes
+--------
+
+- Fix a longstanding bug that could cause issues when editing a reply to a message. ([\#9585](https://github.com/matrix-org/synapse/issues/9585))
+- Fix the `/capabilities` endpoint to return `m.change_password` as disabled if the local password database is not used for authentication. Contributed by @dklimpel. ([\#9588](https://github.com/matrix-org/synapse/issues/9588))
+- Check if local passwords are enabled before setting them for the user. ([\#9636](https://github.com/matrix-org/synapse/issues/9636))
+- Fix a bug where federation sending can stall due to `concurrent access` database exceptions when it falls behind. ([\#9639](https://github.com/matrix-org/synapse/issues/9639))
+- Fix a bug introduced in Synapse 1.30.1 which meant the suggested `pip` incantation to install an updated `cryptography` was incorrect. ([\#9699](https://github.com/matrix-org/synapse/issues/9699))
+
+
+Updates to the Docker image
+---------------------------
+
+- Speed up Docker builds and make it nicer to test against Complement while developing (install all dependencies before copying the project). ([\#9610](https://github.com/matrix-org/synapse/issues/9610))
+- Include [opencontainers labels](https://github.com/opencontainers/image-spec/blob/master/annotations.md#pre-defined-annotation-keys) in the Docker image. ([\#9612](https://github.com/matrix-org/synapse/issues/9612))
+
+
+Improved Documentation
+----------------------
+
+- Clarify that `register_new_matrix_user` is present also when installed via non-pip package. ([\#9074](https://github.com/matrix-org/synapse/issues/9074))
+- Update source install documentation to mention platform prerequisites before the source install steps. ([\#9667](https://github.com/matrix-org/synapse/issues/9667))
+- Improve worker documentation for fallback/web auth endpoints. ([\#9679](https://github.com/matrix-org/synapse/issues/9679))
+- Update the sample configuration for OIDC authentication. ([\#9695](https://github.com/matrix-org/synapse/issues/9695))
+
+
+Internal Changes
+----------------
+
+- Preparatory steps for removing redundant `outlier` data from `event_json.internal_metadata` column. ([\#9411](https://github.com/matrix-org/synapse/issues/9411))
+- Add type hints to the caching module. ([\#9442](https://github.com/matrix-org/synapse/issues/9442))
+- Introduce flake8-bugbear to the test suite and fix some of its lint violations. ([\#9499](https://github.com/matrix-org/synapse/issues/9499), [\#9659](https://github.com/matrix-org/synapse/issues/9659))
+- Add additional type hints to the Homeserver object. ([\#9631](https://github.com/matrix-org/synapse/issues/9631), [\#9638](https://github.com/matrix-org/synapse/issues/9638), [\#9675](https://github.com/matrix-org/synapse/issues/9675), [\#9681](https://github.com/matrix-org/synapse/issues/9681))
+- Only save remote cross-signing and device keys if they're different from the current ones. ([\#9634](https://github.com/matrix-org/synapse/issues/9634))
+- Rename storage function to fix spelling and not conflict with another function's name. ([\#9637](https://github.com/matrix-org/synapse/issues/9637))
+- Improve performance of federation catch up by sending the latest events in the room to the remote, rather than just the last event sent by the local server. ([\#9640](https://github.com/matrix-org/synapse/issues/9640), [\#9664](https://github.com/matrix-org/synapse/issues/9664))
+- In the `federation_client` commandline client, stop automatically adding the URL prefix, so that servlets on other prefixes can be tested. ([\#9645](https://github.com/matrix-org/synapse/issues/9645))
+- In the `federation_client` commandline client, handle inline `signing_key`s in `homeserver.yaml`. ([\#9647](https://github.com/matrix-org/synapse/issues/9647))
+- Fixed some antipattern issues to improve code quality. ([\#9649](https://github.com/matrix-org/synapse/issues/9649))
+- Add a storage method for pulling all current user presence state from the database. ([\#9650](https://github.com/matrix-org/synapse/issues/9650))
+- Import `HomeServer` from the proper module. ([\#9665](https://github.com/matrix-org/synapse/issues/9665))
+- Increase default join ratelimiting burst rate. ([\#9674](https://github.com/matrix-org/synapse/issues/9674))
+- Add type hints to third party event rules and visibility modules. ([\#9676](https://github.com/matrix-org/synapse/issues/9676))
+- Bump mypy-zope to 0.2.13 to fix "Cannot determine consistent method resolution order (MRO)" errors when running mypy a second time. ([\#9678](https://github.com/matrix-org/synapse/issues/9678))
+- Use interpreter from `$PATH` via `/usr/bin/env` instead of absolute paths in various scripts. ([\#9689](https://github.com/matrix-org/synapse/issues/9689))
+- Make it possible to use `dmypy`. ([\#9692](https://github.com/matrix-org/synapse/issues/9692))
+- Suppress "CryptographyDeprecationWarning: int_from_bytes is deprecated". ([\#9698](https://github.com/matrix-org/synapse/issues/9698))
+- Use `dmypy run` in lint script for improved performance in type-checking while developing. ([\#9701](https://github.com/matrix-org/synapse/issues/9701))
+- Fix undetected mypy error when using Python 3.6. ([\#9703](https://github.com/matrix-org/synapse/issues/9703))
+- Fix type-checking CI on develop. ([\#9709](https://github.com/matrix-org/synapse/issues/9709))
+
+
+Synapse 1.30.1 (2021-03-26)
+===========================
+
+This release is identical to Synapse 1.30.0, with the exception of explicitly
+setting a minimum version of Python's Cryptography library to ensure that users
+of Synapse are protected from the recent [OpenSSL security advisories](https://mta.openssl.org/pipermail/openssl-announce/2021-March/000198.html),
+especially CVE-2021-3449.
+
+Note that Cryptography defaults to bundling its own statically linked copy of
+OpenSSL, which means that you may not be protected by your operating system's
+security updates.
+
+It's also worth noting that Cryptography no longer supports Python 3.5, so
+admins deploying to older environments may not be protected against this or
+future vulnerabilities. Synapse will be dropping support for Python 3.5 at the
+end of March.
+
+
+Updates to the Docker image
+---------------------------
+
+- Ensure that the docker container has up to date versions of openssl. ([\#9697](https://github.com/matrix-org/synapse/issues/9697))
+
+
+Internal Changes
+----------------
+
+- Enforce that `cryptography` dependency is up to date to ensure it has the most recent openssl patches. ([\#9697](https://github.com/matrix-org/synapse/issues/9697))
+
+
Synapse 1.30.0 (2021-03-22)
===========================
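
The proxy-credentials change (#9657) noted above means `HTTPS_PROXY` may now carry a username and password. A minimal sketch of the expected URL shape; the proxy host and credentials below are made up:

```python
import os
from urllib.parse import urlparse

# Hypothetical values: the proxy host and credentials are illustrative only.
os.environ["HTTPS_PROXY"] = "http://user:secret@proxy.example.com:8080"

parsed = urlparse(os.environ["HTTPS_PROXY"])
print(parsed.username, parsed.password, parsed.hostname, parsed.port)
# -> user secret proxy.example.com 8080
```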
diff --git a/INSTALL.md b/INSTALL.md
index b9e3f613..7b406892 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -6,7 +6,7 @@ There are 3 steps to follow under **Installation Instructions**.
- [Choosing your server name](#choosing-your-server-name)
- [Installing Synapse](#installing-synapse)
- [Installing from source](#installing-from-source)
- - [Platform-Specific Instructions](#platform-specific-instructions)
+ - [Platform-specific prerequisites](#platform-specific-prerequisites)
- [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
- [ArchLinux](#archlinux)
- [CentOS/Fedora](#centosfedora)
@@ -38,6 +38,7 @@ There are 3 steps to follow under **Installation Instructions**.
- [URL previews](#url-previews)
- [Troubleshooting Installation](#troubleshooting-installation)
+
## Choosing your server name
It is important to choose the name for your server before you install Synapse,
@@ -60,17 +61,14 @@ that your email address is probably `user@example.com` rather than
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
+When installing from source please make sure that the [Platform-specific prerequisites](#platform-specific-prerequisites) are already installed.
+
System requirements:
- POSIX-compliant system (tested on Linux & OS X)
- Python 3.5.2 or later, up to Python 3.9.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
-Synapse is written in Python but some of the libraries it uses are written in
-C. So before we can install Synapse itself we need a working C compiler and the
-header files for Python C extensions. See [Platform-Specific
-Instructions](#platform-specific-instructions) for information on installing
-these on various platforms.
To install the Synapse homeserver run:
@@ -128,7 +126,11 @@ source env/bin/activate
synctl start
```
-#### Platform-Specific Instructions
+#### Platform-specific prerequisites
+
+Synapse is written in Python but some of the libraries it uses are written in
+C. So before we can install Synapse itself we need a working C compiler and the
+header files for Python C extensions.
##### Debian/Ubuntu/Raspbian
@@ -526,14 +528,24 @@ email will be disabled.
The easiest way to create a new user is to do so from a client like [Element](https://element.io/).
-Alternatively you can do so from the command line if you have installed via pip.
-
-This can be done as follows:
-
-```sh
-$ source ~/synapse/env/bin/activate
-$ synctl start # if not already running
-$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
+Alternatively, you can do so from the command line. This can be done as follows:
+
+ 1. If Synapse was installed via pip, activate the virtualenv as follows (if Synapse was
+ installed via a prebuilt package, `register_new_matrix_user` should already be
+ on the search path):
+ ```sh
+ cd ~/synapse
+ source env/bin/activate
+ synctl start # if not already running
+ ```
+ 2. Run the following command:
+ ```sh
+ register_new_matrix_user -c homeserver.yaml http://localhost:8008
+ ```
+
+This will prompt you to add details for the new user, and will then connect to
+the running Synapse to create the new user. For example:
+```
New user localpart: erikj
Password:
Confirm password:
diff --git a/README.rst b/README.rst
index 6a1e7135..655a2bf3 100644
--- a/README.rst
+++ b/README.rst
@@ -314,6 +314,15 @@ Testing with SyTest is recommended for verifying that changes related to the
Client-Server API are functioning correctly. See the `installation instructions
<https://github.com/matrix-org/sytest#installing>`_ for details.
+
+Platform dependencies
+=====================
+
+Synapse uses a number of platform dependencies such as Python and PostgreSQL,
+and aims to follow supported upstream versions. See the
+`<docs/deprecation_policy.md>`_ document for more details.
+
+
Troubleshooting
===============
@@ -389,7 +398,7 @@ likely cause. The misbehavior can be worked around by setting
People can't accept room invitations from me
--------------------------------------------
-The typical failure mode here is that you send an invitation to someone
+The typical failure mode here is that you send an invitation to someone
to join a room or direct chat, but when they go to accept it, they get an
error (typically along the lines of "Invalid signature"). They might see
something like the following in their logs::
diff --git a/UPGRADE.rst b/UPGRADE.rst
index 8bc2ff91..ba488e10 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -98,9 +98,12 @@ will log a warning on each received request.
To avoid the warning, administrators using a reverse proxy should ensure that
the reverse proxy sets `X-Forwarded-Proto` header to `https` or `http` to
-indicate the protocol used by the client. See the `reverse proxy documentation
-<docs/reverse_proxy.md>`_, where the example configurations have been updated to
-show how to set this header.
+indicate the protocol used by the client.
+
+Synapse also requires the `Host` header to be preserved.
+
+See the `reverse proxy documentation <docs/reverse_proxy.md>`_, where the
+example configurations have been updated to show how to set these headers.
(Users of `Caddy <https://caddyserver.com/>`_ are unaffected, since we believe it
sets `X-Forwarded-Proto` by default.)
diff --git a/contrib/purge_api/purge_history.sh b/contrib/purge_api/purge_history.sh
index e7dd5d64..c45136ff 100644
--- a/contrib/purge_api/purge_history.sh
+++ b/contrib/purge_api/purge_history.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# this script will use the api:
# https://github.com/matrix-org/synapse/blob/master/docs/admin_api/purge_history_api.rst
diff --git a/contrib/purge_api/purge_remote_media.sh b/contrib/purge_api/purge_remote_media.sh
index 77220d3b..4930d952 100644
--- a/contrib/purge_api/purge_remote_media.sh
+++ b/contrib/purge_api/purge_remote_media.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
DOMAIN=yourserver.tld
# add this user as admin in your home server:
diff --git a/debian/changelog b/debian/changelog
index e6b2122d..09602ff5 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,15 @@
+matrix-synapse-py3 (1.31.0) stable; urgency=medium
+
+ * New synapse release 1.31.0.
+
+ -- Synapse Packaging team <packages@matrix.org> Tue, 06 Apr 2021 13:08:29 +0100
+
+matrix-synapse-py3 (1.30.1) stable; urgency=medium
+
+ * New synapse release 1.30.1.
+
+ -- Synapse Packaging team <packages@matrix.org> Fri, 26 Mar 2021 12:01:28 +0000
+
matrix-synapse-py3 (1.30.0) stable; urgency=medium
* New synapse release 1.30.0.
diff --git a/demo/clean.sh b/demo/clean.sh
index 418ca945..6b809f6e 100755
--- a/demo/clean.sh
+++ b/demo/clean.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
set -e
diff --git a/demo/start.sh b/demo/start.sh
index f6b5ea13..621a5698 100755
--- a/demo/start.sh
+++ b/demo/start.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
DIR="$( cd "$( dirname "$0" )" && pwd )"
diff --git a/demo/stop.sh b/demo/stop.sh
index 85a1d2c1..f9dddc59 100755
--- a/demo/stop.sh
+++ b/demo/stop.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
DIR="$( cd "$( dirname "$0" )" && pwd )"
diff --git a/docker/Dockerfile b/docker/Dockerfile
index def45015..5b7bf027 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -18,6 +18,11 @@ ARG PYTHON_VERSION=3.8
###
FROM docker.io/python:${PYTHON_VERSION}-slim as builder
+LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
+LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
+LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git'
+LABEL org.opencontainers.image.licenses='Apache-2.0'
+
# install the OS build deps
RUN apt-get update && apt-get install -y \
build-essential \
@@ -28,33 +33,32 @@ RUN apt-get update && apt-get install -y \
libwebp-dev \
libxml++2.6-dev \
libxslt1-dev \
+ openssl \
rustc \
zlib1g-dev \
- && rm -rf /var/lib/apt/lists/*
+ && rm -rf /var/lib/apt/lists/*
-# Build dependencies that are not available as wheels, to speed up rebuilds
-RUN pip install --prefix="/install" --no-warn-script-location \
- cryptography \
- frozendict \
- jaeger-client \
- opentracing \
- # Match the version constraints of Synapse
- "prometheus_client>=0.4.0" \
- psycopg2 \
- pycparser \
- pyrsistent \
- pyyaml \
- simplejson \
- threadloop \
- thrift
-
-# now install synapse and all of the python deps to /install.
-COPY synapse /synapse/synapse/
+# Copy just what we need to pip install
COPY scripts /synapse/scripts/
COPY MANIFEST.in README.rst setup.py synctl /synapse/
+COPY synapse/__init__.py /synapse/synapse/__init__.py
+COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py
+# To speed up rebuilds, install all of the dependencies before we copy over
+# the whole synapse project, so that this layer in the Docker cache can be
+# reused while you develop on the source.
+#
+# This aims to install the `install_requires` and `extras_require` from `setup.py`.
RUN pip install --prefix="/install" --no-warn-script-location \
- /synapse[all]
+ /synapse[all]
+
+# Copy over the rest of the project
+COPY synapse /synapse/synapse/
+
+# Install the synapse package itself and all of its children packages.
+#
+# This aims to install only the `packages=find_packages(...)` from `setup.py`.
+RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
###
### Stage 1: runtime
@@ -70,7 +74,9 @@ RUN apt-get update && apt-get install -y \
libwebp6 \
xmlsec1 \
libjemalloc2 \
- && rm -rf /var/lib/apt/lists/*
+ libssl-dev \
+ openssl \
+ && rm -rf /var/lib/apt/lists/*
COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py
@@ -83,4 +89,4 @@ EXPOSE 8008/tcp 8009/tcp 8448/tcp
ENTRYPOINT ["/start.py"]
HEALTHCHECK --interval=1m --timeout=5s \
- CMD curl -fSs http://localhost:8008/health || exit 1
+ CMD curl -fSs http://localhost:8008/health || exit 1
diff --git a/docker/build_debian.sh b/docker/build_debian.sh
index f312f071..f426d2b7 100644
--- a/docker/build_debian.sh
+++ b/docker/build_debian.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# The script to build the Debian package, as ran inside the Docker image.
diff --git a/docker/run_pg_tests.sh b/docker/run_pg_tests.sh
index d18d1e4c..1fd08cb6 100755
--- a/docker/run_pg_tests.sh
+++ b/docker/run_pg_tests.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# This script runs the PostgreSQL tests inside a Docker container. It expects
# the relevant source files to be mounted into /src (done automatically by the
diff --git a/docs/deprecation_policy.md b/docs/deprecation_policy.md
new file mode 100644
index 00000000..06ea3405
--- /dev/null
+++ b/docs/deprecation_policy.md
@@ -0,0 +1,33 @@
+Deprecation Policy for Platform Dependencies
+============================================
+
+Synapse has a number of platform dependencies, including Python and PostgreSQL.
+This document outlines the policy towards which versions we support, and when we
+drop support for versions in the future.
+
+
+Policy
+------
+
+Synapse follows the upstream support life cycles for Python and PostgreSQL,
+i.e. when a version reaches End of Life Synapse will withdraw support for that
+version in future releases.
+
+Details on the upstream support life cycles for Python and PostgreSQL are
+documented at https://endoflife.date/python and
+https://endoflife.date/postgresql.
+
+
+Context
+-------
+
+It is important for system admins to have a clear understanding of the platform
+requirements of Synapse and its deprecation policies so that they can
+effectively plan upgrading their infrastructure ahead of time. This is
+especially important in contexts where upgrading the infrastructure requires
+auditing and approval from a security team, or where otherwise upgrading is a
+long process.
+
+By following the upstream support life cycles Synapse can ensure that its
+dependencies continue to get security patches, while not requiring system admins
+to constantly update their platform dependencies to the latest versions.
diff --git a/docs/reverse_proxy.md b/docs/reverse_proxy.md
index 860afd5a..cf1b835b 100644
--- a/docs/reverse_proxy.md
+++ b/docs/reverse_proxy.md
@@ -104,10 +104,11 @@ example.com:8448 {
```
<VirtualHost *:443>
SSLEngine on
- ServerName matrix.example.com;
+ ServerName matrix.example.com
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
AllowEncodedSlashes NoDecode
+ ProxyPreserveHost on
ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon
ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix
ProxyPass /_synapse/client http://127.0.0.1:8008/_synapse/client nocanon
@@ -116,7 +117,7 @@ example.com:8448 {
<VirtualHost *:8448>
SSLEngine on
- ServerName example.com;
+ ServerName example.com
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
AllowEncodedSlashes NoDecode
@@ -135,6 +136,8 @@ example.com:8448 {
</IfModule>
```
+**NOTE 3**: A missing `ProxyPreserveHost on` can lead to a redirect loop.
+
### HAProxy
```
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index 7de000f4..17cda71a 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -869,10 +869,10 @@ log_config: "CONFDIR/SERVERNAME.log.config"
#rc_joins:
# local:
# per_second: 0.1
-# burst_count: 3
+# burst_count: 10
# remote:
# per_second: 0.01
-# burst_count: 3
+# burst_count: 10
#
#rc_3pid_validation:
# per_second: 0.003
@@ -1758,6 +1758,9 @@ saml2_config:
# Note that, if this is changed, users authenticating via that provider
# will no longer be recognised as the same user!
#
+# (Use "oidc" here if you are migrating from an old "oidc_config"
+# configuration.)
+#
# idp_name: A user-facing name for this identity provider, which is used to
# offer the user a choice of login mechanisms.
#
@@ -1873,6 +1876,24 @@ saml2_config:
# which is set to the claims returned by the UserInfo Endpoint and/or
# in the ID Token.
#
+# It is possible to configure Synapse to only allow logins if certain attributes
+# match particular values in the OIDC userinfo. The requirements can be listed under
+# `attribute_requirements` as shown below. All of the listed attributes must
+# match for the login to be permitted. Additional attributes can be added to
+# userinfo by expanding the `scopes` section of the OIDC config to retrieve
+# additional information from the OIDC provider.
+#
+# If the OIDC claim is a list, then the attribute must match any value in the list.
+# Otherwise, it must exactly match the value of the claim. Using the example
+# below, the `family_name` claim MUST be "Stephensson", but the `groups`
+# claim MUST contain "admin".
+#
+# attribute_requirements:
+# - attribute: family_name
+# value: "Stephensson"
+# - attribute: groups
+# value: "admin"
+#
# See https://github.com/matrix-org/synapse/blob/master/docs/openid.md
# for information on how to configure these options.
#
@@ -1905,34 +1926,9 @@ oidc_providers:
# localpart_template: "{{ user.login }}"
# display_name_template: "{{ user.name }}"
# email_template: "{{ user.email }}"
-
- # For use with Keycloak
- #
- #- idp_id: keycloak
- # idp_name: Keycloak
- # issuer: "https://127.0.0.1:8443/auth/realms/my_realm_name"
- # client_id: "synapse"
- # client_secret: "copy secret generated in Keycloak UI"
- # scopes: ["openid", "profile"]
-
- # For use with Github
- #
- #- idp_id: github
- # idp_name: Github
- # idp_brand: github
- # discover: false
- # issuer: "https://github.com/"
- # client_id: "your-client-id" # TO BE FILLED
- # client_secret: "your-client-secret" # TO BE FILLED
- # authorization_endpoint: "https://github.com/login/oauth/authorize"
- # token_endpoint: "https://github.com/login/oauth/access_token"
- # userinfo_endpoint: "https://api.github.com/user"
- # scopes: ["read:user"]
- # user_mapping_provider:
- # config:
- # subject_claim: "id"
- # localpart_template: "{{ user.login }}"
- # display_name_template: "{{ user.name }}"
+ # attribute_requirements:
+ # - attribute: userGroup
+ # value: "synapseUsers"
# Enable Central Authentication Service (CAS) for registration and login.
diff --git a/docs/workers.md b/docs/workers.md
index e7bf9b8c..c6282165 100644
--- a/docs/workers.md
+++ b/docs/workers.md
@@ -232,7 +232,6 @@ expressions:
# Registration/login requests
^/_matrix/client/(api/v1|r0|unstable)/login$
^/_matrix/client/(r0|unstable)/register$
- ^/_matrix/client/(r0|unstable)/auth/.*/fallback/web$
# Event sending requests
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/redact
@@ -276,7 +275,7 @@ using):
Ensure that all SSO logins go to a single process.
For multiple workers not handling the SSO endpoints properly, see
-[#7530](https://github.com/matrix-org/synapse/issues/7530) and
+[#7530](https://github.com/matrix-org/synapse/issues/7530) and
[#9427](https://github.com/matrix-org/synapse/issues/9427).
Note that a HTTP listener with `client` and `federation` resources must be
diff --git a/mypy.ini b/mypy.ini
index e0685e09..3ae5d457 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,12 +1,13 @@
[mypy]
namespace_packages = True
plugins = mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
-follow_imports = silent
+follow_imports = normal
check_untyped_defs = True
show_error_codes = True
show_traceback = True
mypy_path = stubs
warn_unreachable = True
+local_partial_types = True
# To find all folders that pass mypy you run:
#
@@ -20,8 +21,9 @@ files =
synapse/crypto,
synapse/event_auth.py,
synapse/events/builder.py,
- synapse/events/validator.py,
synapse/events/spamcheck.py,
+ synapse/events/third_party_rules.py,
+ synapse/events/validator.py,
synapse/federation,
synapse/groups,
synapse/handlers,
@@ -38,6 +40,7 @@ files =
synapse/push,
synapse/replication,
synapse/rest,
+ synapse/secrets.py,
synapse/server.py,
synapse/server_notices,
synapse/spam_checker_api,
@@ -71,6 +74,7 @@ files =
synapse/util/metrics.py,
synapse/util/macaroons.py,
synapse/util/stringutils.py,
+ synapse/visibility.py,
tests/replication,
tests/test_utils,
tests/handlers/test_password_providers.py,
diff --git a/scripts-dev/check-newsfragment b/scripts-dev/check-newsfragment
index 448cadb8..af6d32e3 100755
--- a/scripts-dev/check-newsfragment
+++ b/scripts-dev/check-newsfragment
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
#
# A script which checks that an appropriate news file has been added on this
# branch.
diff --git a/scripts-dev/config-lint.sh b/scripts-dev/config-lint.sh
index 91321604..8c6323e5 100755
--- a/scripts-dev/config-lint.sh
+++ b/scripts-dev/config-lint.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# Find linting errors in Synapse's default config file.
# Exits with 0 if there are no problems, or another code otherwise.
diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py
index abcec48c..6f76c08f 100755
--- a/scripts-dev/federation_client.py
+++ b/scripts-dev/federation_client.py
@@ -22,8 +22,8 @@ import sys
from typing import Any, Optional
from urllib import parse as urlparse
-import nacl.signing
import requests
+import signedjson.key
import signedjson.types
import srvlookup
import yaml
@@ -44,18 +44,6 @@ def encode_base64(input_bytes):
return output_string
-def decode_base64(input_string):
- """Decode a base64 string to bytes inferring padding from the length of the
- string."""
-
- input_bytes = input_string.encode("ascii")
- input_len = len(input_bytes)
- padding = b"=" * (3 - ((input_len + 3) % 4))
- output_len = 3 * ((input_len + 2) // 4) + (input_len + 2) % 4 - 2
- output_bytes = base64.b64decode(input_bytes + padding)
- return output_bytes[:output_len]
-
-
def encode_canonical_json(value):
return json.dumps(
value,
@@ -88,42 +76,6 @@ def sign_json(
return json_object
-NACL_ED25519 = "ed25519"
-
-
-def decode_signing_key_base64(algorithm, version, key_base64):
- """Decode a base64 encoded signing key
- Args:
- algorithm (str): The algorithm the key is for (currently "ed25519").
- version (str): Identifies this key out of the keys for this entity.
- key_base64 (str): Base64 encoded bytes of the key.
- Returns:
- A SigningKey object.
- """
- if algorithm == NACL_ED25519:
- key_bytes = decode_base64(key_base64)
- key = nacl.signing.SigningKey(key_bytes)
- key.version = version
- key.alg = NACL_ED25519
- return key
- else:
- raise ValueError("Unsupported algorithm %s" % (algorithm,))
-
-
-def read_signing_keys(stream):
- """Reads a list of keys from a stream
- Args:
- stream : A stream to iterate for keys.
- Returns:
- list of SigningKey objects.
- """
- keys = []
- for line in stream:
- algorithm, version, key_base64 = line.split()
- keys.append(decode_signing_key_base64(algorithm, version, key_base64))
- return keys
-
-
def request(
method: Optional[str],
origin_name: str,
@@ -223,23 +175,28 @@ def main():
parser.add_argument("--body", help="Data to send as the body of the HTTP request")
parser.add_argument(
- "path", help="request path. We will add '/_matrix/federation/v1/' to this."
+ "path", help="request path, including the '/_matrix/federation/...' prefix."
)
args = parser.parse_args()
- if not args.server_name or not args.signing_key_path:
+ args.signing_key = None
+ if args.signing_key_path:
+ with open(args.signing_key_path) as f:
+ args.signing_key = f.readline()
+
+ if not args.server_name or not args.signing_key:
read_args_from_config(args)
- with open(args.signing_key_path) as f:
- key = read_signing_keys(f)[0]
+ algorithm, version, key_base64 = args.signing_key.split()
+ key = signedjson.key.decode_signing_key_base64(algorithm, version, key_base64)
result = request(
args.method,
args.server_name,
key,
args.destination,
- "/_matrix/federation/v1/" + args.path,
+ args.path,
content=args.body,
)
@@ -255,10 +212,16 @@ def main():
def read_args_from_config(args):
with open(args.config, "r") as fh:
config = yaml.safe_load(fh)
+
if not args.server_name:
args.server_name = config["server_name"]
- if not args.signing_key_path:
- args.signing_key_path = config["signing_key_path"]
+
+ if not args.signing_key:
+ if "signing_key" in config:
+ args.signing_key = config["signing_key"]
+ else:
+ with open(config["signing_key_path"]) as f:
+ args.signing_key = f.readline()
class MatrixConnectionAdapter(HTTPAdapter):
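
The change above lets `federation_client.py` consume a signing key given inline in `homeserver.yaml` as well as via `signing_key_path`. A minimal sketch of the one-line key format and the `signedjson` decode path now used; the key is generated on the fly rather than read from a config, so all values are illustrative:

```python
import signedjson.key
import signedjson.sign

# A Synapse signing key is a single line: "<algorithm> <version> <base64 key>".
key = signedjson.key.generate_signing_key("a_test")
line = "ed25519 a_test " + signedjson.key.encode_signing_key_base64(key)

# Mirrors the new code path in main(): split the line and decode it.
algorithm, version, key_base64 = line.split()
signing_key = signedjson.key.decode_signing_key_base64(algorithm, version, key_base64)

signed = signedjson.sign.sign_json({"foo": "bar"}, "example.com", signing_key)
print(signed["signatures"]["example.com"])
```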
diff --git a/scripts-dev/generate_sample_config b/scripts-dev/generate_sample_config
index 9cb4630a..02739894 100755
--- a/scripts-dev/generate_sample_config
+++ b/scripts-dev/generate_sample_config
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
#
# Update/check the docs/sample_config.yaml
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index fe2965cd..9761e975 100755
--- a/scripts-dev/lint.sh
+++ b/scripts-dev/lint.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
#
# Runs linting scripts over the local Synapse checkout
# isort - sorts import statements
diff --git a/scripts-dev/make_full_schema.sh b/scripts-dev/make_full_schema.sh
index b8d1e636..bc8f9786 100755
--- a/scripts-dev/make_full_schema.sh
+++ b/scripts-dev/make_full_schema.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
#
# This script generates SQL files for creating a brand new Synapse DB with the latest
# schema, on both SQLite3 and Postgres.
diff --git a/scripts-dev/next_github_number.sh b/scripts-dev/next_github_number.sh
index 37628002..00e9b145 100755
--- a/scripts-dev/next_github_number.sh
+++ b/scripts-dev/next_github_number.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
set -e
@@ -6,4 +6,4 @@ set -e
# next PR number.
CURRENT_NUMBER=`curl -s "https://api.github.com/repos/matrix-org/synapse/issues?state=all&per_page=1" | jq -r ".[0].number"`
CURRENT_NUMBER=$((CURRENT_NUMBER+1))
-echo $CURRENT_NUMBER
\ No newline at end of file
+echo $CURRENT_NUMBER
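
For reference, a Python sketch of what `next_github_number.sh` does: fetch the most recent issue/PR number from the GitHub API and add one.

```python
import json
from urllib.request import urlopen

# The same query the shell script makes with curl + jq.
URL = "https://api.github.com/repos/matrix-org/synapse/issues?state=all&per_page=1"
with urlopen(URL) as resp:
    latest = json.load(resp)[0]["number"]
print(latest + 1)  # the next PR/issue number
```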
diff --git a/scripts/move_remote_media_to_new_store.py b/scripts/move_remote_media_to_new_store.py
index ab2e7633..8477955a 100755
--- a/scripts/move_remote_media_to_new_store.py
+++ b/scripts/move_remote_media_to_new_store.py
@@ -51,7 +51,7 @@ def main(src_repo, dest_repo):
parts = line.split("|")
if len(parts) != 2:
print("Unable to parse input line %s" % line, file=sys.stderr)
- exit(1)
+ sys.exit(1)
move_media(parts[0], parts[1], src_paths, dest_paths)
diff --git a/setup.cfg b/setup.cfg
index 5e301c2c..7329eed2 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -18,7 +18,8 @@ ignore =
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
-ignore=W503,W504,E203,E731,E501
+# B00*: Subsection of the bugbear suite (TODO: add in remaining fixes)
+ignore=W503,W504,E203,E731,E501,B006,B007,B008
[isort]
line_length = 88
diff --git a/setup.py b/setup.py
index bbd9e786..29e9971d 100755
--- a/setup.py
+++ b/setup.py
@@ -99,10 +99,11 @@ CONDITIONAL_REQUIREMENTS["lint"] = [
"isort==5.7.0",
"black==20.8b1",
"flake8-comprehensions",
+ "flake8-bugbear==21.3.2",
"flake8",
]
-CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.11"]
+CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
# Dependencies which are exclusively required by unit test code. This is
# NOT a list of all modules that are necessary to run the unit tests.
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 8e57739c..1d2883ac 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -48,7 +48,7 @@ try:
except ImportError:
pass
-__version__ = "1.30.0"
+__version__ = "1.31.0"
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
# We import here so that we don't have to install a bunch of deps when
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index e10e33fd..7d9930ae 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -558,6 +558,9 @@ class Auth:
Returns:
bool: False if no access_token was given, True otherwise.
"""
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
+
query_params = request.args.get(b"access_token")
auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
return bool(query_params) or bool(auth_headers)
@@ -574,6 +577,8 @@ class Auth:
MissingClientTokenError: If there isn't a single access_token in the
request
"""
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
query_params = request.args.get(b"access_token")
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 691f8f9a..8f37d2cf 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -51,6 +51,7 @@ class PresenceState:
OFFLINE = "offline"
UNAVAILABLE = "unavailable"
ONLINE = "online"
+ BUSY = "org.matrix.msc3026.busy"
class JoinRules:
@@ -100,6 +101,9 @@ class EventTypes:
Dummy = "org.matrix.dummy_event"
+ MSC1772_SPACE_CHILD = "org.matrix.msc1772.space.child"
+ MSC1772_SPACE_PARENT = "org.matrix.msc1772.space.parent"
+
class EduTypes:
Presence = "m.presence"
@@ -160,6 +164,9 @@ class EventContentFields:
# cf https://github.com/matrix-org/matrix-doc/pull/2228
SELF_DESTRUCT_AFTER = "org.matrix.self_destruct_after"
+ # cf https://github.com/matrix-org/matrix-doc/pull/1772
+ MSC1772_ROOM_TYPE = "org.matrix.msc1772.type"
+
class RoomEncryptionAlgorithms:
MEGOLM_V1_AES_SHA2 = "m.megolm.v1.aes-sha2"
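
The new constants are the unstable (MSC1772) identifiers for spaces. For illustration, the rough shape of a space-child state event using those prefixes, per MSC1772; the room IDs are made up:

```python
# The state_key names the child room; "via" lists servers through which
# it can be joined. Room IDs here are hypothetical.
space_child_event = {
    "type": "org.matrix.msc1772.space.child",
    "state_key": "!room:example.com",
    "room_id": "!space:example.com",
    "content": {"via": ["example.com"]},
}
```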
diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py
index 4a9b0129..d1a2cd5e 100644
--- a/synapse/app/__init__.py
+++ b/synapse/app/__init__.py
@@ -22,7 +22,9 @@ logger = logging.getLogger(__name__)
try:
python_dependencies.check_requirements()
except python_dependencies.DependencyException as e:
- sys.stderr.writelines(e.message)
+ sys.stderr.writelines(
+ e.message # noqa: B306, DependencyException.message is a property
+ )
sys.exit(1)
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 43b1f1e9..3912c899 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -21,8 +21,10 @@ import signal
import socket
import sys
import traceback
+import warnings
from typing import Awaitable, Callable, Iterable
+from cryptography.utils import CryptographyDeprecationWarning
from typing_extensions import NoReturn
from twisted.internet import defer, error, reactor
@@ -195,6 +197,25 @@ def listen_metrics(bind_addresses, port):
start_http_server(port, addr=host, registry=RegistryProxy)
+def listen_manhole(bind_addresses: Iterable[str], port: int, manhole_globals: dict):
+ # twisted.conch.manhole 21.1.0 uses "int_from_bytes", which produces a confusing
+ # warning. It's fixed by https://github.com/twisted/twisted/pull/1522, so
+ # suppress the warning for now.
+ warnings.filterwarnings(
+ action="ignore",
+ category=CryptographyDeprecationWarning,
+ message="int_from_bytes is deprecated",
+ )
+
+ from synapse.util.manhole import manhole
+
+ listen_tcp(
+ bind_addresses,
+ port,
+ manhole(username="matrix", password="rabbithole", globals=manhole_globals),
+ )
+
+
def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
"""
Create a TCP socket for a port and several addresses
diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index 274d582d..3df2aa5c 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -147,7 +147,6 @@ from synapse.storage.databases.main.user_directory import UserDirectoryStore
from synapse.types import ReadReceipt
from synapse.util.async_helpers import Linearizer
from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.manhole import manhole
from synapse.util.versionstring import get_version_string
logger = logging.getLogger("synapse.app.generic_worker")
@@ -302,6 +301,8 @@ class GenericWorkerPresence(BasePresenceHandler):
self.send_stop_syncing, UPDATE_SYNCING_USERS_MS
)
+ self._busy_presence_enabled = hs.config.experimental.msc3026_enabled
+
hs.get_reactor().addSystemEventTrigger(
"before",
"shutdown",
@@ -439,8 +440,12 @@ class GenericWorkerPresence(BasePresenceHandler):
PresenceState.ONLINE,
PresenceState.UNAVAILABLE,
PresenceState.OFFLINE,
+ PresenceState.BUSY,
)
- if presence not in valid_presence:
+
+ if presence not in valid_presence or (
+ presence == PresenceState.BUSY and not self._busy_presence_enabled
+ ):
raise SynapseError(400, "Invalid presence state")
user_id = target_user.to_string()
@@ -634,12 +639,8 @@ class GenericWorkerServer(HomeServer):
if listener.type == "http":
self._listen_http(listener)
elif listener.type == "manhole":
- _base.listen_tcp(
- listener.bind_addresses,
- listener.port,
- manhole(
- username="matrix", password="rabbithole", globals={"hs": self}
- ),
+ _base.listen_manhole(
+ listener.bind_addresses, listener.port, manhole_globals={"hs": self}
)
elif listener.type == "metrics":
if not self.get_config().enable_metrics:
@@ -786,13 +787,6 @@ class FederationSenderHandler:
self._fed_position_linearizer = Linearizer(name="_fed_position_linearizer")
- def on_start(self):
- # There may be some events that are persisted but haven't been sent,
- # so send them now.
- self.federation_sender.notify_new_events(
- self.store.get_room_max_stream_ordering()
- )
-
def wake_destination(self, server: str):
self.federation_sender.wake_destination(server)
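
The presence change above only accepts `BUSY` when the experimental MSC3026 flag is on. A standalone sketch of that validation rule (not Synapse's actual handler):

```python
VALID_PRESENCE = ("online", "unavailable", "offline", "org.matrix.msc3026.busy")

def check_presence(presence: str, busy_enabled: bool) -> None:
    # BUSY is rejected unless the msc3026_enabled experimental flag is set.
    if presence not in VALID_PRESENCE or (
        presence == "org.matrix.msc3026.busy" and not busy_enabled
    ):
        raise ValueError("Invalid presence state")

check_presence("org.matrix.msc3026.busy", busy_enabled=True)   # accepted
# check_presence("org.matrix.msc3026.busy", busy_enabled=False)  # would raise
```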
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 244657cb..3bfe9d50 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -67,7 +67,6 @@ from synapse.storage import DataStore
from synapse.storage.engines import IncorrectDatabaseSetup
from synapse.storage.prepare_database import UpgradeDatabaseException
from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.manhole import manhole
from synapse.util.module_loader import load_module
from synapse.util.versionstring import get_version_string
@@ -288,12 +287,8 @@ class SynapseHomeServer(HomeServer):
if listener.type == "http":
self._listening_services.extend(self._listener_http(config, listener))
elif listener.type == "manhole":
- listen_tcp(
- listener.bind_addresses,
- listener.port,
- manhole(
- username="matrix", password="rabbithole", globals={"hs": self}
- ),
+ _base.listen_manhole(
+ listener.bind_addresses, listener.port, manhole_globals={"hs": self}
)
elif listener.type == "replication":
services = listen_tcp(
diff --git a/synapse/config/cache.py b/synapse/config/cache.py
index 8e03f140..4e8abbf8 100644
--- a/synapse/config/cache.py
+++ b/synapse/config/cache.py
@@ -24,7 +24,7 @@ from ._base import Config, ConfigError
_CACHE_PREFIX = "SYNAPSE_CACHE_FACTOR"
# Map from canonicalised cache name to cache.
-_CACHES = {}
+_CACHES = {} # type: Dict[str, Callable[[float], None]]
# a lock on the contents of _CACHES
_CACHES_LOCK = threading.Lock()
@@ -59,7 +59,9 @@ def _canonicalise_cache_name(cache_name: str) -> str:
return cache_name.lower()
-def add_resizable_cache(cache_name: str, cache_resize_callback: Callable):
+def add_resizable_cache(
+ cache_name: str, cache_resize_callback: Callable[[float], None]
+):
"""Register a cache that's size can dynamically change
Args:
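
The tightened hint above, `Callable[[float], None]`, means a resize callback takes the new cache factor and returns nothing. A standalone sketch of the registration pattern (not Synapse's actual module):

```python
from typing import Callable, Dict

_CACHES: Dict[str, Callable[[float], None]] = {}

def add_resizable_cache(name: str, resize: Callable[[float], None]) -> None:
    # Canonicalise the name, as the real module does, then register.
    _CACHES[name.lower()] = resize

def on_resize(new_factor: float) -> None:
    print(f"resizing to factor {new_factor}")

add_resizable_cache("state_cache", on_resize)
_CACHES["state_cache"](0.5)  # -> resizing to factor 0.5
```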
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
index b1c1c51e..86f4d9af 100644
--- a/synapse/config/experimental.py
+++ b/synapse/config/experimental.py
@@ -27,3 +27,7 @@ class ExperimentalConfig(Config):
# MSC2858 (multiple SSO identity providers)
self.msc2858_enabled = experimental.get("msc2858_enabled", False) # type: bool
+ # Spaces (MSC1772, MSC2946, etc)
+ self.spaces_enabled = experimental.get("spaces_enabled", False) # type: bool
+ # MSC3026 (busy presence state)
+ self.msc3026_enabled = experimental.get("msc3026_enabled", False) # type: bool
diff --git a/synapse/config/key.py b/synapse/config/key.py
index de964dff..350ff1d6 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -404,7 +404,11 @@ def _parse_key_servers(key_servers, federation_verify_certificates):
try:
jsonschema.validate(key_servers, TRUSTED_KEY_SERVERS_SCHEMA)
except jsonschema.ValidationError as e:
- raise ConfigError("Unable to parse 'trusted_key_servers': " + e.message)
+ raise ConfigError(
+ "Unable to parse 'trusted_key_servers': {}".format(
+ e.message # noqa: B306, jsonschema.ValidationError.message is a valid attribute
+ )
+ )
for server in key_servers:
server_name = server["server_name"]
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index dfd27e15..2b289f42 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -56,7 +56,9 @@ class MetricsConfig(Config):
try:
check_requirements("sentry")
except DependencyException as e:
- raise ConfigError(e.message)
+ raise ConfigError(
+ e.message # noqa: B306, DependencyException.message is a property
+ )
self.sentry_dsn = config["sentry"].get("dsn")
if not self.sentry_dsn:
diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc_config.py
index 2bfb537c..05733ec4 100644
--- a/synapse/config/oidc_config.py
+++ b/synapse/config/oidc_config.py
@@ -15,11 +15,12 @@
# limitations under the License.
from collections import Counter
-from typing import Iterable, Mapping, Optional, Tuple, Type
+from typing import Iterable, List, Mapping, Optional, Tuple, Type
import attr
from synapse.config._util import validate_config
+from synapse.config.sso import SsoAttributeRequirement
from synapse.python_dependencies import DependencyException, check_requirements
from synapse.types import Collection, JsonDict
from synapse.util.module_loader import load_module
@@ -41,7 +42,9 @@ class OIDCConfig(Config):
try:
check_requirements("oidc")
except DependencyException as e:
- raise ConfigError(e.message) from e
+ raise ConfigError(
+ e.message # noqa: B306, DependencyException.message is a property
+ ) from e
# check we don't have any duplicate idp_ids now. (The SSO handler will also
# check for duplicates when the REST listeners get registered, but that happens
@@ -76,6 +79,9 @@ class OIDCConfig(Config):
# Note that, if this is changed, users authenticating via that provider
# will no longer be recognised as the same user!
#
+ # (Use "oidc" here if you are migrating from an old "oidc_config"
+ # configuration.)
+ #
# idp_name: A user-facing name for this identity provider, which is used to
# offer the user a choice of login mechanisms.
#
@@ -191,6 +197,24 @@ class OIDCConfig(Config):
# which is set to the claims returned by the UserInfo Endpoint and/or
# in the ID Token.
#
+ # It is possible to configure Synapse to only allow logins if certain attributes
+ # match particular values in the OIDC userinfo. The requirements can be listed under
+ # `attribute_requirements` as shown below. All of the listed attributes must
+ # match for the login to be permitted. Additional attributes can be added to
+ # userinfo by expanding the `scopes` section of the OIDC config to retrieve
+ # additional information from the OIDC provider.
+ #
+ # If the OIDC claim is a list, then the attribute must match any value in the list.
+ # Otherwise, it must exactly match the value of the claim. Using the example
+ # below, the `family_name` claim MUST be "Stephensson", but the `groups`
+ # claim MUST contain "admin".
+ #
+ # attribute_requirements:
+ # - attribute: family_name
+ # value: "Stephensson"
+ # - attribute: groups
+ # value: "admin"
+ #
# See https://github.com/matrix-org/synapse/blob/master/docs/openid.md
# for information on how to configure these options.
#
@@ -223,34 +247,9 @@ class OIDCConfig(Config):
# localpart_template: "{{{{ user.login }}}}"
# display_name_template: "{{{{ user.name }}}}"
# email_template: "{{{{ user.email }}}}"
-
- # For use with Keycloak
- #
- #- idp_id: keycloak
- # idp_name: Keycloak
- # issuer: "https://127.0.0.1:8443/auth/realms/my_realm_name"
- # client_id: "synapse"
- # client_secret: "copy secret generated in Keycloak UI"
- # scopes: ["openid", "profile"]
-
- # For use with Github
- #
- #- idp_id: github
- # idp_name: Github
- # idp_brand: github
- # discover: false
- # issuer: "https://github.com/"
- # client_id: "your-client-id" # TO BE FILLED
- # client_secret: "your-client-secret" # TO BE FILLED
- # authorization_endpoint: "https://github.com/login/oauth/authorize"
- # token_endpoint: "https://github.com/login/oauth/access_token"
- # userinfo_endpoint: "https://api.github.com/user"
- # scopes: ["read:user"]
- # user_mapping_provider:
- # config:
- # subject_claim: "id"
- # localpart_template: "{{{{ user.login }}}}"
- # display_name_template: "{{{{ user.name }}}}"
+ # attribute_requirements:
+ # - attribute: userGroup
+ # value: "synapseUsers"
""".format(
mapping_provider=DEFAULT_USER_MAPPING_PROVIDER
)
@@ -329,6 +328,10 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
},
"allow_existing_users": {"type": "boolean"},
"user_mapping_provider": {"type": ["object", "null"]},
+ "attribute_requirements": {
+ "type": "array",
+ "items": SsoAttributeRequirement.JSON_SCHEMA,
+ },
},
}
@@ -465,6 +468,11 @@ def _parse_oidc_config_dict(
jwt_header=client_secret_jwt_key_config["jwt_header"],
jwt_payload=client_secret_jwt_key_config.get("jwt_payload", {}),
)
+ # parse attribute_requirements from config (list of dicts) into a list of SsoAttributeRequirement
+ attribute_requirements = [
+ SsoAttributeRequirement(**x)
+ for x in oidc_config.get("attribute_requirements", [])
+ ]
return OidcProviderConfig(
idp_id=idp_id,
@@ -488,6 +496,7 @@ def _parse_oidc_config_dict(
allow_existing_users=oidc_config.get("allow_existing_users", False),
user_mapping_provider_class=user_mapping_provider_class,
user_mapping_provider_config=user_mapping_provider_config,
+ attribute_requirements=attribute_requirements,
)
@@ -577,3 +586,6 @@ class OidcProviderConfig:
# the config of the user mapping provider
user_mapping_provider_config = attr.ib()
+
+ # attribute requirements that userinfo must satisfy to allow login/registration
+ attribute_requirements = attr.ib(type=List[SsoAttributeRequirement])
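Putting the pieces of this file together: the schema accepts a list of `{attribute, value}` mappings, `_parse_oidc_config_dict` turns each one into an `SsoAttributeRequirement`, and the result is stored on `OidcProviderConfig`. A rough sketch of the parsing step, assuming a minimal attrs class of the same shape as the real one:

    import attr

    @attr.s(frozen=True, slots=True)
    class SsoAttributeRequirement:
        attribute = attr.ib(type=str)
        value = attr.ib(type=str)

    oidc_config = {
        "attribute_requirements": [
            {"attribute": "userGroup", "value": "synapseUsers"},
        ],
    }

    attribute_requirements = [
        SsoAttributeRequirement(**x)
        for x in oidc_config.get("attribute_requirements", [])
    ]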
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 847d2512..3f3997f4 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -95,11 +95,11 @@ class RatelimitConfig(Config):
self.rc_joins_local = RateLimitConfig(
config.get("rc_joins", {}).get("local", {}),
- defaults={"per_second": 0.1, "burst_count": 3},
+ defaults={"per_second": 0.1, "burst_count": 10},
)
self.rc_joins_remote = RateLimitConfig(
config.get("rc_joins", {}).get("remote", {}),
- defaults={"per_second": 0.01, "burst_count": 3},
+ defaults={"per_second": 0.01, "burst_count": 10},
)
# Ratelimit cross-user key requests:
@@ -187,10 +187,10 @@ class RatelimitConfig(Config):
#rc_joins:
# local:
# per_second: 0.1
- # burst_count: 3
+ # burst_count: 10
# remote:
# per_second: 0.01
- # burst_count: 3
+ # burst_count: 10
#
#rc_3pid_validation:
# per_second: 0.003
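To make the new `rc_joins` defaults concrete: `burst_count` is the number of actions that may happen back to back before `per_second` becomes the limiting factor, so `per_second: 0.1, burst_count: 10` now allows ten immediate local joins and then roughly one every ten seconds. A minimal token-bucket sketch of that behaviour (illustrative only, not Synapse's actual `Ratelimiter`):

    import time

    class TokenBucket:
        def __init__(self, per_second: float, burst_count: int):
            self.rate = per_second
            self.capacity = burst_count
            self.tokens = float(burst_count)
            self.last = time.monotonic()

        def allow(self) -> bool:
            now = time.monotonic()
            # Refill at `per_second` tokens/sec, capped at `burst_count`.
            self.tokens = min(
                self.capacity, self.tokens + (now - self.last) * self.rate
            )
            self.last = now
            if self.tokens >= 1:
                self.tokens -= 1
                return True
            return False

    local_joins = TokenBucket(per_second=0.1, burst_count=10)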
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 69d9de5a..061c4ec8 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -176,7 +176,9 @@ class ContentRepositoryConfig(Config):
check_requirements("url_preview")
except DependencyException as e:
- raise ConfigError(e.message)
+ raise ConfigError(
+ e.message # noqa: B306, DependencyException.message is a property
+ )
if "url_preview_ip_range_blacklist" not in config:
raise ConfigError(
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py
index 4b494f21..6db9cb5c 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2_config.py
@@ -76,7 +76,9 @@ class SAML2Config(Config):
try:
check_requirements("saml2")
except DependencyException as e:
- raise ConfigError(e.message)
+ raise ConfigError(
+ e.message # noqa: B306, DependencyException.message is a property
+ )
self.saml2_enabled = True
diff --git a/synapse/config/tracer.py b/synapse/config/tracer.py
index 0c1a854f..727a1e70 100644
--- a/synapse/config/tracer.py
+++ b/synapse/config/tracer.py
@@ -39,7 +39,9 @@ class TracerConfig(Config):
try:
check_requirements("opentracing")
except DependencyException as e:
- raise ConfigError(e.message)
+ raise ConfigError(
+ e.message # noqa: B306, DependencyException.message is a property
+ )
# The tracer is enabled so sanitize the config
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py
index 14b21796..c644b4df 100644
--- a/synapse/crypto/context_factory.py
+++ b/synapse/crypto/context_factory.py
@@ -191,7 +191,7 @@ def _context_info_cb(ssl_connection, where, ret):
# ... we further assume that SSLClientConnectionCreator has set the
# '_synapse_tls_verifier' attribute to a ConnectionVerifier object.
tls_protocol._synapse_tls_verifier.verify_context_info_cb(ssl_connection, where)
- except: # noqa: E722, taken from the twisted implementation
+ except BaseException: # taken from the twisted implementation
logger.exception("Error during info_callback")
f = Failure()
tls_protocol.failVerification(f)
@@ -219,7 +219,7 @@ class SSLClientConnectionCreator:
# ... and we also gut-wrench a '_synapse_tls_verifier' attribute into the
# tls_protocol so that the SSL context's info callback has something to
# call to do the cert verification.
- setattr(tls_protocol, "_synapse_tls_verifier", self._verifier)
+ tls_protocol._synapse_tls_verifier = self._verifier
return connection
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 902128a2..d5fb5151 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -57,7 +57,7 @@ from synapse.util.metrics import Measure
from synapse.util.retryutils import NotRetryingDestination
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 3ec4120f..8f6b955d 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -98,7 +98,7 @@ class DefaultDictProperty(DictProperty):
class _EventInternalMetadata:
- __slots__ = ["_dict", "stream_ordering"]
+ __slots__ = ["_dict", "stream_ordering", "outlier"]
def __init__(self, internal_metadata_dict: JsonDict):
# we have to copy the dict, because it turns out that the same dict is
@@ -108,7 +108,10 @@ class _EventInternalMetadata:
# the stream ordering of this event. None, until it has been persisted.
self.stream_ordering = None # type: Optional[int]
- outlier = DictProperty("outlier") # type: bool
+ # whether this event is an outlier (ie, whether we do *not* have the state
+ # at that point in the DAG)
+ self.outlier = False
+
out_of_band_membership = DictProperty("out_of_band_membership") # type: bool
send_on_behalf_of = DictProperty("send_on_behalf_of") # type: str
recheck_redaction = DictProperty("recheck_redaction") # type: bool
@@ -129,7 +132,7 @@ class _EventInternalMetadata:
return dict(self._dict)
def is_outlier(self) -> bool:
- return self._dict.get("outlier", False)
+ return self.outlier
def is_out_of_band_membership(self) -> bool:
"""Whether this is an out of band membership, like an invite or an invite
diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py
index 02bce8b5..9767d239 100644
--- a/synapse/events/third_party_rules.py
+++ b/synapse/events/third_party_rules.py
@@ -13,12 +13,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Callable, Union
+from typing import TYPE_CHECKING, Union
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.types import Requester, StateMap
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
class ThirdPartyEventRules:
"""Allows server admins to provide a Python module implementing an extra
@@ -28,7 +31,7 @@ class ThirdPartyEventRules:
behaviours.
"""
- def __init__(self, hs):
+ def __init__(self, hs: "HomeServer"):
self.third_party_rules = None
self.store = hs.get_datastore()
@@ -95,10 +98,9 @@ class ThirdPartyEventRules:
if self.third_party_rules is None:
return True
- ret = await self.third_party_rules.on_create_room(
+ return await self.third_party_rules.on_create_room(
requester, config, is_requester_admin
)
- return ret
async def check_threepid_can_be_invited(
self, medium: str, address: str, room_id: str
@@ -119,10 +121,9 @@ class ThirdPartyEventRules:
state_events = await self._get_state_map_for_room(room_id)
- ret = await self.third_party_rules.check_threepid_can_be_invited(
+ return await self.third_party_rules.check_threepid_can_be_invited(
medium, address, state_events
)
- return ret
async def check_visibility_can_be_modified(
self, room_id: str, new_visibility: str
@@ -143,7 +144,7 @@ class ThirdPartyEventRules:
check_func = getattr(
self.third_party_rules, "check_visibility_can_be_modified", None
)
- if not check_func or not isinstance(check_func, Callable):
+ if not check_func or not callable(check_func):
return True
state_events = await self._get_state_map_for_room(room_id)
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 7ca5c994..0f8a3b5a 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -22,6 +22,7 @@ from synapse.api.constants import EventTypes, RelationTypes
from synapse.api.errors import Codes, SynapseError
from synapse.api.room_versions import RoomVersion
from synapse.util.async_helpers import yieldable_gather_results
+from synapse.util.frozenutils import unfreeze
from . import EventBase
@@ -54,6 +55,8 @@ def prune_event(event: EventBase) -> EventBase:
event.internal_metadata.stream_ordering
)
+ pruned_event.internal_metadata.outlier = event.internal_metadata.outlier
+
# Mark the event as redacted
pruned_event.internal_metadata.redacted = True
@@ -400,10 +403,19 @@ class EventClientSerializer:
# If there is an edit replace the content, preserving existing
# relations.
+ # Ensure we take copies of the edit content, otherwise we risk modifying
+ # the original event.
+ edit_content = edit.content.copy()
+
+ # Unfreeze the event content if necessary, so that we may modify it below
+ edit_content = unfreeze(edit_content)
+ serialized_event["content"] = edit_content.get("m.new_content", {})
+
+ # Check for existing relations
relations = event.content.get("m.relates_to")
- serialized_event["content"] = edit.content.get("m.new_content", {})
if relations:
- serialized_event["content"]["m.relates_to"] = relations
+ # Keep the relations, ensuring we use a dict copy of the original
+ serialized_event["content"]["m.relates_to"] = relations.copy()
else:
serialized_event["content"].pop("m.relates_to", None)
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index bee81fc0..afdb5bf2 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -27,11 +27,13 @@ from typing import (
List,
Mapping,
Optional,
+ Sequence,
Tuple,
TypeVar,
Union,
)
+import attr
from prometheus_client import Counter
from twisted.internet import defer
@@ -62,7 +64,7 @@ from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.retryutils import NotRetryingDestination
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -455,6 +457,7 @@ class FederationClient(FederationBase):
description: str,
destinations: Iterable[str],
callback: Callable[[str], Awaitable[T]],
+ failover_on_unknown_endpoint: bool = False,
) -> T:
"""Try an operation on a series of servers, until it succeeds
@@ -474,6 +477,10 @@ class FederationClient(FederationBase):
next server tried. Normally the stacktrace is logged but this is
suppressed if the exception is an InvalidResponseError.
+ failover_on_unknown_endpoint: if True, we will try other servers if it looks
+ like a server doesn't support the endpoint. This is typically useful
+ if the endpoint in question is new or experimental.
+
Returns:
The result of callback, if it succeeds
@@ -493,16 +500,31 @@ class FederationClient(FederationBase):
except UnsupportedRoomVersionError:
raise
except HttpResponseException as e:
- if not 500 <= e.code < 600:
- raise e.to_synapse_error()
- else:
- logger.warning(
- "Failed to %s via %s: %i %s",
- description,
- destination,
- e.code,
- e.args[0],
- )
+ synapse_error = e.to_synapse_error()
+ failover = False
+
+ if 500 <= e.code < 600:
+ failover = True
+
+ elif failover_on_unknown_endpoint:
+ # there is no good way to detect an "unknown" endpoint. Dendrite
+ # returns a 404 (with no body); synapse returns a 400
+ # with M_UNRECOGNIZED.
+ if e.code == 404 or (
+ e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED
+ ):
+ failover = True
+
+ if not failover:
+ raise synapse_error from e
+
+ logger.warning(
+ "Failed to %s via %s: %i %s",
+ description,
+ destination,
+ e.code,
+ e.args[0],
+ )
except Exception:
logger.warning(
"Failed to %s via %s", description, destination, exc_info=True
@@ -1042,3 +1064,141 @@ class FederationClient(FederationBase):
# If we don't manage to find it, return None. It's not an error if a
# server doesn't give it to us.
return None
+
+ async def get_space_summary(
+ self,
+ destinations: Iterable[str],
+ room_id: str,
+ suggested_only: bool,
+ max_rooms_per_space: Optional[int],
+ exclude_rooms: List[str],
+ ) -> "FederationSpaceSummaryResult":
+ """
+ Call other servers to get a summary of the given space
+
+ Args:
+ destinations: The remote servers. We will try them in turn, omitting any
+ that have been blacklisted.
+
+ room_id: ID of the space to be queried
+
+ suggested_only: If true, ask the remote server to only return children
+ with the "suggested" flag set
+
+ max_rooms_per_space: A limit on the number of children to return for each
+ space
+
+ exclude_rooms: A list of room IDs to tell the remote server to skip
+
+ Returns:
+ a parsed FederationSpaceSummaryResult
+
+ Raises:
+ SynapseError if we were unable to get a valid summary from any of the
+ remote servers
+ """
+
+ async def send_request(destination: str) -> FederationSpaceSummaryResult:
+ res = await self.transport_layer.get_space_summary(
+ destination=destination,
+ room_id=room_id,
+ suggested_only=suggested_only,
+ max_rooms_per_space=max_rooms_per_space,
+ exclude_rooms=exclude_rooms,
+ )
+
+ try:
+ return FederationSpaceSummaryResult.from_json_dict(res)
+ except ValueError as e:
+ raise InvalidResponseError(str(e))
+
+ return await self._try_destination_list(
+ "fetch space summary",
+ destinations,
+ send_request,
+ failover_on_unknown_endpoint=True,
+ )
+
+
+@attr.s(frozen=True, slots=True)
+class FederationSpaceSummaryEventResult:
+ """Represents a single event in the result of a successful get_space_summary call.
+
+ It's essentially just a serialised event object, but we do a bit of parsing and
+ validation in `from_json_dict` and store some of the validated properties in
+ object attributes.
+ """
+
+ event_type = attr.ib(type=str)
+ state_key = attr.ib(type=str)
+ via = attr.ib(type=Sequence[str])
+
+ # the raw data, including the above keys
+ data = attr.ib(type=JsonDict)
+
+ @classmethod
+ def from_json_dict(cls, d: JsonDict) -> "FederationSpaceSummaryEventResult":
+ """Parse an event within the result of a /spaces/ request
+
+ Args:
+ d: json object to be parsed
+
+ Raises:
+ ValueError if d is not a valid event
+ """
+
+ event_type = d.get("type")
+ if not isinstance(event_type, str):
+ raise ValueError("Invalid event: 'event_type' must be a str")
+
+ state_key = d.get("state_key")
+ if not isinstance(state_key, str):
+ raise ValueError("Invalid event: 'state_key' must be a str")
+
+ content = d.get("content")
+ if not isinstance(content, dict):
+ raise ValueError("Invalid event: 'content' must be a dict")
+
+ via = content.get("via")
+ if not isinstance(via, Sequence):
+ raise ValueError("Invalid event: 'via' must be a list")
+ if any(not isinstance(v, str) for v in via):
+ raise ValueError("Invalid event: 'via' must be a list of strings")
+
+ return cls(event_type, state_key, via, d)
+
+
+@attr.s(frozen=True, slots=True)
+class FederationSpaceSummaryResult:
+ """Represents the data returned by a successful get_space_summary call."""
+
+ rooms = attr.ib(type=Sequence[JsonDict])
+ events = attr.ib(type=Sequence[FederationSpaceSummaryEventResult])
+
+ @classmethod
+ def from_json_dict(cls, d: JsonDict) -> "FederationSpaceSummaryResult":
+ """Parse the result of a /spaces/ request
+
+ Args:
+ d: json object to be parsed
+
+ Raises:
+ ValueError if d is not a valid /spaces/ response
+ """
+ rooms = d.get("rooms")
+ if not isinstance(rooms, Sequence):
+ raise ValueError("'rooms' must be a list")
+ if any(not isinstance(r, dict) for r in rooms):
+ raise ValueError("Invalid room in 'rooms' list")
+
+ events = d.get("events")
+ if not isinstance(events, Sequence):
+ raise ValueError("'events' must be a list")
+ if any(not isinstance(e, dict) for e in events):
+ raise ValueError("Invalid event in 'events' list")
+ parsed_events = [
+ FederationSpaceSummaryEventResult.from_json_dict(e) for e in events
+ ]
+
+ return cls(rooms, parsed_events)
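As a usage sketch, a well-formed MSC2946 `/spaces` response parses cleanly, while a malformed one raises `ValueError` (which `get_space_summary` converts into an `InvalidResponseError`). Room and server names below are made up:

    response = {
        "rooms": [{"room_id": "!space:example.org", "name": "My Space"}],
        "events": [
            {
                "type": "m.space.child",
                "state_key": "!room:example.org",
                "content": {"via": ["example.org"]},
            }
        ],
    }

    summary = FederationSpaceSummaryResult.from_json_dict(response)
    assert summary.events[0].via == ["example.org"]

    FederationSpaceSummaryResult.from_json_dict({})  # ValueError: 'rooms' must be a list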
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 9839d3d0..d84e3620 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -35,7 +35,7 @@ from twisted.internet import defer
from twisted.internet.abstract import isIPAddress
from twisted.python import failure
-from synapse.api.constants import EduTypes, EventTypes, Membership
+from synapse.api.constants import EduTypes, EventTypes
from synapse.api.errors import (
AuthError,
Codes,
@@ -63,7 +63,7 @@ from synapse.replication.http.federation import (
ReplicationFederationSendEduRestServlet,
ReplicationGetQueryRestServlet,
)
-from synapse.types import JsonDict, get_domain_from_id
+from synapse.types import JsonDict
from synapse.util import glob_to_regex, json_decoder, unwrapFirstError
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.caches.response_cache import ResponseCache
@@ -727,27 +727,6 @@ class FederationServer(FederationBase):
if the event was unacceptable for any other reason (eg, too large,
too many prev_events, couldn't find the prev_events)
"""
- # check that it's actually being sent from a valid destination to
- # workaround bug #1753 in 0.18.5 and 0.18.6
- if origin != get_domain_from_id(pdu.sender):
- # We continue to accept join events from any server; this is
- # necessary for the federation join dance to work correctly.
- # (When we join over federation, the "helper" server is
- # responsible for sending out the join event, rather than the
- # origin. See bug #1893. This is also true for some third party
- # invites).
- if not (
- pdu.type == "m.room.member"
- and pdu.content
- and pdu.content.get("membership", None)
- in (Membership.JOIN, Membership.INVITE)
- ):
- logger.info(
- "Discarding PDU %s from invalid origin %s", pdu.event_id, origin
- )
- return
- else:
- logger.info("Accepting join PDU %s from %s", pdu.event_id, origin)
# We've already checked that we know the room version by this point
room_version = await self.store.get_room_version(pdu.room_id)
diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py
index 3e993b42..0c18c49a 100644
--- a/synapse/federation/send_queue.py
+++ b/synapse/federation/send_queue.py
@@ -31,25 +31,39 @@ Events are replicated via a separate events stream.
import logging
from collections import namedtuple
-from typing import Dict, List, Tuple, Type
+from typing import (
+ TYPE_CHECKING,
+ Dict,
+ Hashable,
+ Iterable,
+ List,
+ Optional,
+ Sized,
+ Tuple,
+ Type,
+)
from sortedcontainers import SortedDict
-from twisted.internet import defer
-
from synapse.api.presence import UserPresenceState
+from synapse.federation.sender import AbstractFederationSender, FederationSender
from synapse.metrics import LaterGauge
+from synapse.replication.tcp.streams.federation import FederationStream
+from synapse.types import JsonDict, ReadReceipt, RoomStreamToken
from synapse.util.metrics import Measure
from .units import Edu
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
logger = logging.getLogger(__name__)
-class FederationRemoteSendQueue:
+class FederationRemoteSendQueue(AbstractFederationSender):
"""A drop in replacement for FederationSender"""
- def __init__(self, hs):
+ def __init__(self, hs: "HomeServer"):
self.server_name = hs.hostname
self.clock = hs.get_clock()
self.notifier = hs.get_notifier()
@@ -58,7 +72,7 @@ class FederationRemoteSendQueue:
# We may have multiple federation sender instances, so we need to track
# their positions separately.
self._sender_instances = hs.config.worker.federation_shard_config.instances
- self._sender_positions = {}
+ self._sender_positions = {} # type: Dict[str, int]
# Pending presence map user_id -> UserPresenceState
self.presence_map = {} # type: Dict[str, UserPresenceState]
@@ -71,7 +85,7 @@ class FederationRemoteSendQueue:
# Stream position -> (user_id, destinations)
self.presence_destinations = (
SortedDict()
- ) # type: SortedDict[int, Tuple[str, List[str]]]
+ ) # type: SortedDict[int, Tuple[str, Iterable[str]]]
# (destination, key) -> EDU
self.keyed_edu = {} # type: Dict[Tuple[str, tuple], Edu]
@@ -94,7 +108,7 @@ class FederationRemoteSendQueue:
# we need to make a new function so that the inner lambda binds to the
# queue itself, rather than to the name of the queue, which changes. ARGH.
- def register(name, queue):
+ def register(name: str, queue: Sized) -> None:
LaterGauge(
"synapse_federation_send_queue_%s_size" % (queue_name,),
"",
@@ -115,13 +129,13 @@ class FederationRemoteSendQueue:
self.clock.looping_call(self._clear_queue, 30 * 1000)
- def _next_pos(self):
+ def _next_pos(self) -> int:
pos = self.pos
self.pos += 1
self.pos_time[self.clock.time_msec()] = pos
return pos
- def _clear_queue(self):
+ def _clear_queue(self) -> None:
"""Clear the queues for anything older than N minutes"""
FIVE_MINUTES_AGO = 5 * 60 * 1000
@@ -138,7 +152,7 @@ class FederationRemoteSendQueue:
self._clear_queue_before_pos(position_to_delete)
- def _clear_queue_before_pos(self, position_to_delete):
+ def _clear_queue_before_pos(self, position_to_delete: int) -> None:
"""Clear all the queues from before a given position"""
with Measure(self.clock, "send_queue._clear"):
# Delete things out of presence maps
@@ -188,13 +202,18 @@ class FederationRemoteSendQueue:
for key in keys[:i]:
del self.edus[key]
- def notify_new_events(self, max_token):
+ def notify_new_events(self, max_token: RoomStreamToken) -> None:
"""As per FederationSender"""
- # We don't need to replicate this as it gets sent down a different
- # stream.
- pass
+ # This should never get called.
+ raise NotImplementedError()
- def build_and_send_edu(self, destination, edu_type, content, key=None):
+ def build_and_send_edu(
+ self,
+ destination: str,
+ edu_type: str,
+ content: JsonDict,
+ key: Optional[Hashable] = None,
+ ) -> None:
"""As per FederationSender"""
if destination == self.server_name:
logger.info("Not sending EDU to ourselves")
@@ -218,38 +237,39 @@ class FederationRemoteSendQueue:
self.notifier.on_new_replication_data()
- def send_read_receipt(self, receipt):
+ async def send_read_receipt(self, receipt: ReadReceipt) -> None:
"""As per FederationSender
Args:
- receipt (synapse.types.ReadReceipt):
+ receipt:
"""
# nothing to do here: the replication listener will handle it.
- return defer.succeed(None)
- def send_presence(self, states):
+ def send_presence(self, states: List[UserPresenceState]) -> None:
"""As per FederationSender
Args:
- states (list(UserPresenceState))
+ states
"""
pos = self._next_pos()
# We only want to send presence for our own users, so let's always
# filter here just in case.
- local_states = list(filter(lambda s: self.is_mine_id(s.user_id), states))
+ local_states = [s for s in states if self.is_mine_id(s.user_id)]
self.presence_map.update({state.user_id: state for state in local_states})
self.presence_changed[pos] = [state.user_id for state in local_states]
self.notifier.on_new_replication_data()
- def send_presence_to_destinations(self, states, destinations):
+ def send_presence_to_destinations(
+ self, states: Iterable[UserPresenceState], destinations: Iterable[str]
+ ) -> None:
"""As per FederationSender
Args:
- states (list[UserPresenceState])
- destinations (list[str])
+ states
+ destinations
"""
for state in states:
pos = self._next_pos()
@@ -258,15 +278,18 @@ class FederationRemoteSendQueue:
self.notifier.on_new_replication_data()
- def send_device_messages(self, destination):
+ def send_device_messages(self, destination: str) -> None:
"""As per FederationSender"""
# We don't need to replicate this as it gets sent down a different
# stream.
- def get_current_token(self):
+ def wake_destination(self, server: str) -> None:
+ pass
+
+ def get_current_token(self) -> int:
return self.pos - 1
- def federation_ack(self, instance_name, token):
+ def federation_ack(self, instance_name: str, token: int) -> None:
if self._sender_instances:
# If we have configured multiple federation sender instances we need
# to track their positions separately, and only clear the queue up
@@ -504,13 +527,16 @@ ParsedFederationStreamData = namedtuple(
)
-def process_rows_for_federation(transaction_queue, rows):
+def process_rows_for_federation(
+ transaction_queue: FederationSender,
+ rows: List[FederationStream.FederationStreamRow],
+) -> None:
"""Parse a list of rows from the federation stream and put them in the
transaction queue ready for sending to the relevant homeservers.
Args:
- transaction_queue (FederationSender)
- rows (list(synapse.replication.tcp.streams.federation.FederationStream.FederationStreamRow))
+ transaction_queue
+ rows
"""
# The federation stream contains a bunch of different types of
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py
index 24ebc4b8..8babb1eb 100644
--- a/synapse/federation/sender/__init__.py
+++ b/synapse/federation/sender/__init__.py
@@ -13,14 +13,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import abc
import logging
-from typing import Dict, Hashable, Iterable, List, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Dict, Hashable, Iterable, List, Optional, Set, Tuple
from prometheus_client import Counter
from twisted.internet import defer
-import synapse
import synapse.metrics
from synapse.api.presence import UserPresenceState
from synapse.events import EventBase
@@ -40,9 +40,12 @@ from synapse.metrics import (
events_processed_counter,
)
from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.types import ReadReceipt, RoomStreamToken
+from synapse.types import JsonDict, ReadReceipt, RoomStreamToken
from synapse.util.metrics import Measure, measure_func
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
logger = logging.getLogger(__name__)
sent_pdus_destination_dist_count = Counter(
@@ -65,8 +68,91 @@ CATCH_UP_STARTUP_DELAY_SEC = 15
CATCH_UP_STARTUP_INTERVAL_SEC = 5
-class FederationSender:
- def __init__(self, hs: "synapse.server.HomeServer"):
+class AbstractFederationSender(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def notify_new_events(self, max_token: RoomStreamToken) -> None:
+ """This gets called when we have some new events we might want to
+ send out to other servers.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ async def send_read_receipt(self, receipt: ReadReceipt) -> None:
+ """Send a RR to any other servers in the room
+
+ Args:
+ receipt: receipt to be sent
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def send_presence(self, states: List[UserPresenceState]) -> None:
+ """Send the new presence states to the appropriate destinations.
+
+ This actually queues up the presence states ready for sending and
+ triggers a background task to process them and send out the transactions.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def send_presence_to_destinations(
+ self, states: Iterable[UserPresenceState], destinations: Iterable[str]
+ ) -> None:
+ """Send the given presence states to the given destinations.
+
+ Args:
+ states: the presence states to send
+ destinations: the servers to send them to
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def build_and_send_edu(
+ self,
+ destination: str,
+ edu_type: str,
+ content: JsonDict,
+ key: Optional[Hashable] = None,
+ ) -> None:
+ """Construct an Edu object, and queue it for sending
+
+ Args:
+ destination: name of server to send to
+ edu_type: type of EDU to send
+ content: content of EDU
+ key: clobbering key for this edu
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def send_device_messages(self, destination: str) -> None:
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def wake_destination(self, destination: str) -> None:
+ """Called when we want to retry sending transactions to a remote.
+
+ This is mainly useful if the remote server has been down and we think it
+ might have come back.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def get_current_token(self) -> int:
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def federation_ack(self, instance_name: str, token: int) -> None:
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ async def get_replication_rows(
+ self, instance_name: str, from_token: int, to_token: int, target_row_count: int
+ ) -> Tuple[List[Tuple[int, Tuple]], int, bool]:
+ raise NotImplementedError()
+
+
+class FederationSender(AbstractFederationSender):
+ def __init__(self, hs: "HomeServer"):
self.hs = hs
self.server_name = hs.hostname
@@ -432,7 +518,7 @@ class FederationSender:
queue.flush_read_receipts_for_room(room_id)
@preserve_fn # the caller should not yield on this
- async def send_presence(self, states: List[UserPresenceState]):
+ async def send_presence(self, states: List[UserPresenceState]) -> None:
"""Send the new presence states to the appropriate destinations.
This actually queues up the presence states ready for sending and
@@ -494,7 +580,7 @@ class FederationSender:
self._get_per_destination_queue(destination).send_presence(states)
@measure_func("txnqueue._process_presence")
- async def _process_presence_inner(self, states: List[UserPresenceState]):
+ async def _process_presence_inner(self, states: List[UserPresenceState]) -> None:
"""Given a list of states populate self.pending_presence_by_dest and
poke to send a new transaction to each destination
"""
@@ -516,9 +602,9 @@ class FederationSender:
self,
destination: str,
edu_type: str,
- content: dict,
+ content: JsonDict,
key: Optional[Hashable] = None,
- ):
+ ) -> None:
"""Construct an Edu object, and queue it for sending
Args:
@@ -545,7 +631,7 @@ class FederationSender:
self.send_edu(edu, key)
- def send_edu(self, edu: Edu, key: Optional[Hashable]):
+ def send_edu(self, edu: Edu, key: Optional[Hashable]) -> None:
"""Queue an EDU for sending
Args:
@@ -563,7 +649,7 @@ class FederationSender:
else:
queue.send_edu(edu)
- def send_device_messages(self, destination: str):
+ def send_device_messages(self, destination: str) -> None:
if destination == self.server_name:
logger.warning("Not sending device update to ourselves")
return
@@ -575,7 +661,7 @@ class FederationSender:
self._get_per_destination_queue(destination).attempt_new_transaction()
- def wake_destination(self, destination: str):
+ def wake_destination(self, destination: str) -> None:
"""Called when we want to retry sending transactions to a remote.
This is mainly useful if the remote server has been down and we think it
@@ -599,6 +685,10 @@ class FederationSender:
# to a worker.
return 0
+ def federation_ack(self, instance_name: str, token: int) -> None:
+ # It is not expected that this gets called on FederationSender.
+ raise NotImplementedError()
+
@staticmethod
async def get_replication_rows(
instance_name: str, from_token: int, to_token: int, target_row_count: int
@@ -607,7 +697,7 @@ class FederationSender:
# to a worker.
return [], 0, False
- async def _wake_destinations_needing_catchup(self):
+ async def _wake_destinations_needing_catchup(self) -> None:
"""
Wakes up destinations that need catch-up and are not currently being
backed off from.
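Both classes now implement `AbstractFederationSender`, so call sites can depend on the interface alone. Roughly how a homeserver picks an implementation (the real wiring lives in `synapse.server`; this is a sketch):

    def build_federation_sender(hs) -> AbstractFederationSender:
        if hs.should_send_federation():
            # This process sends transactions to remote servers itself.
            return FederationSender(hs)
        # Otherwise, queue everything up for a dedicated federation sender
        # worker to pick up off the replication stream.
        return FederationRemoteSendQueue(hs)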
diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py
index cc0d765e..89df9a61 100644
--- a/synapse/federation/sender/per_destination_queue.py
+++ b/synapse/federation/sender/per_destination_queue.py
@@ -15,7 +15,7 @@
# limitations under the License.
import datetime
import logging
-from typing import TYPE_CHECKING, Dict, Hashable, Iterable, List, Optional, Tuple, cast
+from typing import TYPE_CHECKING, Dict, Hashable, Iterable, List, Optional, Tuple
import attr
from prometheus_client import Counter
@@ -77,6 +77,7 @@ class PerDestinationQueue:
self._transaction_manager = transaction_manager
self._instance_name = hs.get_instance_name()
self._federation_shard_config = hs.config.worker.federation_shard_config
+ self._state = hs.get_state_handler()
self._should_send_on_this_instance = True
if not self._federation_shard_config.should_handle(
@@ -415,22 +416,97 @@ class PerDestinationQueue:
"This should not happen." % event_ids
)
- if logger.isEnabledFor(logging.INFO):
- rooms = [p.room_id for p in catchup_pdus]
- logger.info("Catching up rooms to %s: %r", self._destination, rooms)
+ # We send transactions with events from one room only, as it's likely
+ # that the remote will have to do additional processing, which may
+ # take some time. It's better to give it small amounts of work
+ # rather than risk the request timing out and repeatedly being
+ # retried, and not making any progress.
+ #
+ # Note: `catchup_pdus` will have exactly one PDU per room.
+ for pdu in catchup_pdus:
+ # The PDU from the DB will be the last PDU in the room from
+ # *this server* that wasn't sent to the remote. However, other
+ # servers may have sent lots of events since then, and we want
+ # to try and tell the remote only about the *latest* events in
+ # the room. This is so that it doesn't get inundated by events
+ # from various parts of the DAG, which all need to be processed.
+ #
+ # Note: this does mean that in large rooms a server coming back
+ # online will get sent the same events from all the different
+ # servers, but the remote will correctly deduplicate them and
+ # handle it only once.
+
+ # Step 1, fetch the current extremities
+ extrems = await self._store.get_prev_events_for_room(pdu.room_id)
+
+ if pdu.event_id in extrems:
+ # If the event is in the extremities, then great! We can just
+ # use that without having to do further checks.
+ room_catchup_pdus = [pdu]
+ else:
+ # If not, fetch the extremities and figure out which we can
+ # send.
+ extrem_events = await self._store.get_events_as_list(extrems)
+
+ new_pdus = []
+ for p in extrem_events:
+ # We pulled this from the DB, so it'll be non-null
+ assert p.internal_metadata.stream_ordering
+
+ # Filter out events that happened before the remote went
+ # offline
+ if (
+ p.internal_metadata.stream_ordering
+ < self._last_successful_stream_ordering
+ ):
+ continue
- await self._transaction_manager.send_new_transaction(
- self._destination, catchup_pdus, []
- )
+ # Filter out events where the server is not in the room,
+ # e.g. it may have left/been kicked. *Ideally* we'd pull
+ # out the kick and send that, but it's a rare edge case
+ # so we don't bother for now (the server that sent the
+ # kick should send it out if it's online).
+ hosts = await self._state.get_hosts_in_room_at_events(
+ p.room_id, [p.event_id]
+ )
+ if self._destination not in hosts:
+ continue
- sent_transactions_counter.inc()
- final_pdu = catchup_pdus[-1]
- self._last_successful_stream_ordering = cast(
- int, final_pdu.internal_metadata.stream_ordering
- )
- await self._store.set_destination_last_successful_stream_ordering(
- self._destination, self._last_successful_stream_ordering
- )
+ new_pdus.append(p)
+
+ # If we've filtered out all the extremities, fall back to
+ # sending the original event. This should ensure that the
+ # server gets at least some of the missed events (especially if
+ # the other sending servers are up).
+ if new_pdus:
+ room_catchup_pdus = new_pdus
+ else:
+ room_catchup_pdus = [pdu]
+
+ logger.info(
+ "Catching up rooms to %s: %r", self._destination, pdu.room_id
+ )
+
+ await self._transaction_manager.send_new_transaction(
+ self._destination, room_catchup_pdus, []
+ )
+
+ sent_transactions_counter.inc()
+
+ # We pulled this from the DB, so it'll be non-null
+ assert pdu.internal_metadata.stream_ordering
+
+ # Note that we mark the last successful stream ordering as that
+ # from the *original* PDU, rather than the PDU(s) we actually
+ # send. This is because we use it to mark our position in the
+ # queue of missed PDUs to process.
+ self._last_successful_stream_ordering = (
+ pdu.internal_metadata.stream_ordering
+ )
+
+ await self._store.set_destination_last_successful_stream_ordering(
+ self._destination, self._last_successful_stream_ordering
+ )
def _get_rr_edus(self, force_flush: bool) -> Iterable[Edu]:
if not self._pending_rrs:
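Condensed, the per-room catch-up selection above is: prefer current forward extremities that post-date the destination's last successful send and that the destination can actually see; if none survive, fall back to the stale PDU. A simplified sketch, where `hosts_by_event` stands in for the `get_hosts_in_room_at_events` lookups:

    def select_room_catchup_pdus(
        pdu, extrem_events, last_successful_stream_ordering,
        hosts_by_event, destination,
    ):
        new_pdus = [
            p
            for p in extrem_events
            # Skip events from before the remote went offline ...
            if p.internal_metadata.stream_ordering >= last_successful_stream_ordering
            # ... and events in parts of the room the remote is not in.
            and destination in hosts_by_event[p.event_id]
        ]
        # If everything was filtered out, send the original stale PDU so the
        # remote at least gets something.
        return new_pdus or [pdu]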
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 10c4747f..6aee47c4 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -16,7 +16,7 @@
import logging
import urllib
-from typing import Any, Dict, Optional
+from typing import Any, Dict, List, Optional
from synapse.api.constants import Membership
from synapse.api.errors import Codes, HttpResponseException, SynapseError
@@ -26,6 +26,7 @@ from synapse.api.urls import (
FEDERATION_V2_PREFIX,
)
from synapse.logging.utils import log_function
+from synapse.types import JsonDict
logger = logging.getLogger(__name__)
@@ -978,6 +979,38 @@ class TransportLayerClient:
return self.client.get_json(destination=destination, path=path)
+ async def get_space_summary(
+ self,
+ destination: str,
+ room_id: str,
+ suggested_only: bool,
+ max_rooms_per_space: Optional[int],
+ exclude_rooms: List[str],
+ ) -> JsonDict:
+ """
+ Args:
+ destination: The remote server
+ room_id: The room ID to ask about.
+ suggested_only: if True, only suggested rooms will be returned
+ max_rooms_per_space: an optional limit to the number of children to be
+ returned per space
+ exclude_rooms: a list of any rooms we can skip
+ """
+ path = _create_path(
+ FEDERATION_UNSTABLE_PREFIX, "/org.matrix.msc2946/spaces/%s", room_id
+ )
+
+ params = {
+ "suggested_only": suggested_only,
+ "exclude_rooms": exclude_rooms,
+ }
+ if max_rooms_per_space is not None:
+ params["max_rooms_per_space"] = max_rooms_per_space
+
+ return await self.client.post_json(
+ destination=destination, path=path, data=params
+ )
+
def _create_path(federation_prefix, path, *args):
"""
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 2cf935f3..84e39c5a 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -18,7 +18,7 @@
import functools
import logging
import re
-from typing import Optional, Tuple, Type
+from typing import Container, Mapping, Optional, Sequence, Tuple, Type
import synapse
from synapse.api.constants import MAX_GROUP_CATEGORYID_LENGTH, MAX_GROUP_ROLEID_LENGTH
@@ -29,7 +29,7 @@ from synapse.api.urls import (
FEDERATION_V1_PREFIX,
FEDERATION_V2_PREFIX,
)
-from synapse.http.server import JsonResource
+from synapse.http.server import HttpServer, JsonResource
from synapse.http.servlet import (
parse_boolean_from_args,
parse_integer_from_args,
@@ -44,7 +44,8 @@ from synapse.logging.opentracing import (
whitelisted_homeserver,
)
from synapse.server import HomeServer
-from synapse.types import ThirdPartyInstanceID, get_domain_from_id
+from synapse.types import JsonDict, ThirdPartyInstanceID, get_domain_from_id
+from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import parse_and_validate_server_name
from synapse.util.versionstring import get_version_string
@@ -1376,6 +1377,40 @@ class FederationGroupsSettingJoinPolicyServlet(BaseFederationServlet):
return 200, new_content
+class FederationSpaceSummaryServlet(BaseFederationServlet):
+ PREFIX = FEDERATION_UNSTABLE_PREFIX + "/org.matrix.msc2946"
+ PATH = "/spaces/(?P<room_id>[^/]*)"
+
+ async def on_POST(
+ self,
+ origin: str,
+ content: JsonDict,
+ query: Mapping[bytes, Sequence[bytes]],
+ room_id: str,
+ ) -> Tuple[int, JsonDict]:
+ suggested_only = content.get("suggested_only", False)
+ if not isinstance(suggested_only, bool):
+ raise SynapseError(
+ 400, "'suggested_only' must be a boolean", Codes.BAD_JSON
+ )
+
+ exclude_rooms = content.get("exclude_rooms", [])
+ if not isinstance(exclude_rooms, list) or any(
+ not isinstance(x, str) for x in exclude_rooms
+ ):
+ raise SynapseError(400, "bad value for 'exclude_rooms'", Codes.BAD_JSON)
+
+ max_rooms_per_space = content.get("max_rooms_per_space")
+ if max_rooms_per_space is not None and not isinstance(max_rooms_per_space, int):
+ raise SynapseError(
+ 400, "bad value for 'max_rooms_per_space'", Codes.BAD_JSON
+ )
+
+ return 200, await self.handler.federation_space_summary(
+ room_id, suggested_only, max_rooms_per_space, exclude_rooms
+ )
+
+
class RoomComplexityServlet(BaseFederationServlet):
"""
Indicates to other servers how complex (and therefore likely
@@ -1474,18 +1509,24 @@ DEFAULT_SERVLET_GROUPS = (
)
-def register_servlets(hs, resource, authenticator, ratelimiter, servlet_groups=None):
+def register_servlets(
+ hs: HomeServer,
+ resource: HttpServer,
+ authenticator: Authenticator,
+ ratelimiter: FederationRateLimiter,
+ servlet_groups: Optional[Container[str]] = None,
+):
"""Initialize and register servlet classes.
Will by default register all servlets. For custom behaviour, pass in
a list of servlet_groups to register.
Args:
- hs (synapse.server.HomeServer): homeserver
- resource (JsonResource): resource class to register to
- authenticator (Authenticator): authenticator to use
- ratelimiter (util.ratelimitutils.FederationRateLimiter): ratelimiter to use
- servlet_groups (list[str], optional): List of servlet groups to register.
+ hs: homeserver
+ resource: resource class to register to
+ authenticator: authenticator to use
+ ratelimiter: ratelimiter to use
+ servlet_groups: List of servlet groups to register.
Defaults to ``DEFAULT_SERVLET_GROUPS``.
"""
if not servlet_groups:
@@ -1500,6 +1541,14 @@ def register_servlets(hs, resource, authenticator, ratelimiter, servlet_groups=N
server_name=hs.hostname,
).register(resource)
+ if hs.config.experimental.spaces_enabled:
+ FederationSpaceSummaryServlet(
+ handler=hs.get_space_summary_handler(),
+ authenticator=authenticator,
+ ratelimiter=ratelimiter,
+ server_name=hs.hostname,
+ ).register(resource)
+
if "openid" in servlet_groups:
for servletclass in OPENID_SERVLET_CLASSES:
servletclass(
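For reference, a request body the new `FederationSpaceSummaryServlet` accepts, mirroring the validation in its `on_POST` (room and server names are hypothetical):

    # POST /_matrix/federation/unstable/org.matrix.msc2946/spaces/!space:example.org
    body = {
        "suggested_only": True,                   # must be a boolean
        "max_rooms_per_space": 10,                # optional; must be an int if present
        "exclude_rooms": ["!seen:example.org"],   # must be a list of strings
    }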
diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py
index a3f8d92d..368c4470 100644
--- a/synapse/groups/attestations.py
+++ b/synapse/groups/attestations.py
@@ -46,7 +46,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import JsonDict, get_domain_from_id
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index f9a0f402..4b16a4ac 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -25,7 +25,7 @@ from synapse.types import GroupID, JsonDict, RoomID, UserID, get_domain_from_id
from synapse.util.async_helpers import concurrently_execute
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index d29b066a..aade2c4a 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -24,7 +24,7 @@ from synapse.api.ratelimiting import Ratelimiter
from synapse.types import UserID
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py
index b1a5df96..1ce6d697 100644
--- a/synapse/handlers/account_data.py
+++ b/synapse/handlers/account_data.py
@@ -25,7 +25,7 @@ from synapse.replication.http.account_data import (
from synapse.types import JsonDict, UserID
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
class AccountDataHandler:
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index 664d09da..d781bb25 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -27,7 +27,7 @@ from synapse.types import UserID
from synapse.util import stringutils
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py
index 132be238..2a25af62 100644
--- a/synapse/handlers/acme.py
+++ b/synapse/handlers/acme.py
@@ -24,7 +24,7 @@ from twisted.web.resource import Resource
from synapse.app import check_bind_error
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py
index db68c94c..c494de49 100644
--- a/synapse/handlers/admin.py
+++ b/synapse/handlers/admin.py
@@ -25,7 +25,7 @@ from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index deab8ff2..996f9e5d 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -38,7 +38,7 @@ from synapse.types import Collection, JsonDict, RoomAlias, RoomStreamToken, User
from synapse.util.metrics import Measure
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index fb5f8118..d537ea81 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -70,7 +70,7 @@ from synapse.util.msisdn import phone_number_to_msisdn
from synapse.util.threepids import canonicalise_email
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -886,6 +886,19 @@ class AuthHandler(BaseHandler):
)
return result
+ def can_change_password(self) -> bool:
+ """Get whether users on this server are allowed to change or set a password.
+
+ Both `config.password_enabled` and `config.password_localdb_enabled` must be true.
+
+ Note that any account (even an SSO account) is allowed to add a password if
+ the above is true.
+
+ Returns:
+ Whether users on this server are allowed to change or set a password
+ """
+ return self._password_enabled and self._password_localdb_enabled
+
def get_supported_login_types(self) -> Iterable[str]:
"""Get a the login types supported for the /login API
diff --git a/synapse/handlers/cas_handler.py b/synapse/handlers/cas_handler.py
index cb67589f..5060936f 100644
--- a/synapse/handlers/cas_handler.py
+++ b/synapse/handlers/cas_handler.py
@@ -27,7 +27,7 @@ from synapse.http.site import SynapseRequest
from synapse.types import UserID, map_username_to_mxid_localpart
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index 3886d312..2bcd8f54 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -23,7 +23,7 @@ from synapse.types import Requester, UserID, create_requester
from ._base import BaseHandler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index df3cdc8f..54293d0b 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -45,7 +45,7 @@ from synapse.util.retryutils import NotRetryingDestination
from ._base import BaseHandler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -166,7 +166,7 @@ class DeviceWorkerHandler(BaseHandler):
# Fetch the current state at the time.
try:
- event_ids = await self.store.get_forward_extremeties_for_room(
+ event_ids = await self.store.get_forward_extremities_for_room_at_stream_ordering(
room_id, stream_ordering=stream_ordering
)
except errors.StoreError:
@@ -907,6 +907,7 @@ class DeviceListUpdater:
master_key = result.get("master_key")
self_signing_key = result.get("self_signing_key")
+ ignore_devices = False
# If the remote server has more than ~1000 devices for this user
# we assume that something is going horribly wrong (e.g. a bot
# that logs in and creates a new device every time it tries to
@@ -925,6 +926,12 @@ class DeviceListUpdater:
len(devices),
)
devices = []
+ ignore_devices = True
+ else:
+ cached_devices = await self.store.get_cached_devices_for_user(user_id)
+ if cached_devices == {d["device_id"]: d for d in devices}:
+ devices = []
+ ignore_devices = True
for device in devices:
logger.debug(
@@ -934,7 +941,10 @@ class DeviceListUpdater:
stream_id,
)
- await self.store.update_remote_device_list_cache(user_id, devices, stream_id)
+ if not ignore_devices:
+ await self.store.update_remote_device_list_cache(
+ user_id, devices, stream_id
+ )
device_ids = [device["device_id"] for device in devices]
# Handle cross-signing keys.
@@ -945,7 +955,8 @@ class DeviceListUpdater:
)
device_ids = device_ids + cross_signing_device_ids
- await self.device_handler.notify_device_update(user_id, device_ids)
+ if device_ids:
+ await self.device_handler.notify_device_update(user_id, device_ids)
# We clobber the seen updates since we've re-synced from a given
# point.
@@ -973,14 +984,17 @@ class DeviceListUpdater:
"""
device_ids = []
- if master_key:
+ current_keys_map = await self.store.get_e2e_cross_signing_keys_bulk([user_id])
+ current_keys = current_keys_map.get(user_id) or {}
+
+ if master_key and master_key != current_keys.get("master"):
await self.store.set_e2e_cross_signing_key(user_id, "master", master_key)
_, verify_key = get_verify_key_from_cross_signing_key(master_key)
# verify_key is a VerifyKey from signedjson, which uses
# .version to denote the portion of the key ID after the
# algorithm and colon, which is the device ID
device_ids.append(verify_key.version)
- if self_signing_key:
+ if self_signing_key and self_signing_key != current_keys.get("self_signing"):
await self.store.set_e2e_cross_signing_key(
user_id, "self_signing", self_signing_key
)
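The intent of the `ignore_devices` changes above: when a full resync returns exactly the devices we already have cached, skip the cache rewrite and the device-list notification. Schematically (a sketch; the dicts are illustrative):

    def devices_unchanged(cached: dict, devices: list) -> bool:
        # True when the resync returned exactly the devices already cached,
        # keyed by device_id, in which case both the cache write and the
        # device-list update notification can be skipped.
        return cached == {d["device_id"]: d for d in devices}

    devices_unchanged(
        {"DEV1": {"device_id": "DEV1", "display_name": "phone"}},
        [{"device_id": "DEV1", "display_name": "phone"}],
    )  # -> True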
diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py
index 7db4f489..eb547743 100644
--- a/synapse/handlers/devicemessage.py
+++ b/synapse/handlers/devicemessage.py
@@ -32,7 +32,7 @@ from synapse.util import json_encoder
from synapse.util.stringutils import random_string
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 9a946a3c..2ad9b6d9 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -42,7 +42,7 @@ from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.retryutils import NotRetryingDestination
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py
index 622cae23..a910d246 100644
--- a/synapse/handlers/e2e_room_keys.py
+++ b/synapse/handlers/e2e_room_keys.py
@@ -29,7 +29,7 @@ from synapse.types import JsonDict
from synapse.util.async_helpers import Linearizer
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index bfb95e3e..a41ca5df 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -21,7 +21,7 @@ from synapse.api.errors import HttpResponseException, RequestSendFailed, Synapse
from synapse.types import GroupID, JsonDict, get_domain_from_id
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/oidc_handler.py b/synapse/handlers/oidc_handler.py
index 6d8551a6..6624212d 100644
--- a/synapse/handlers/oidc_handler.py
+++ b/synapse/handlers/oidc_handler.py
@@ -149,6 +149,9 @@ class OidcHandler:
Args:
request: the incoming request from the browser.
"""
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
+
# The provider might redirect with an error.
# In that case, just display it as-is.
if b"error" in request.args:
@@ -280,6 +283,7 @@ class OidcProvider:
self._config = provider
self._callback_url = hs.config.oidc_callback_url # type: str
+ self._oidc_attribute_requirements = provider.attribute_requirements
self._scopes = provider.scopes
self._user_profile_method = provider.user_profile_method
@@ -859,6 +863,18 @@ class OidcProvider:
)
# otherwise, it's a login
+ logger.debug("Userinfo for OIDC login: %s", userinfo)
+
+ # Ensure that the attributes of the logged-in user meet the required
+ # attributes by checking the userinfo against attribute_requirements.
+ # In order to deal with the fact that OIDC userinfo can contain many
+ # types of data, we wrap non-list values in lists.
+ if not self._sso_handler.check_required_attributes(
+ request,
+ {k: v if isinstance(v, list) else [v] for k, v in userinfo.items()},
+ self._oidc_attribute_requirements,
+ ):
+ return
# Call the mapper to register/login the user
try:
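To illustrate the list-wrapping above: scalar claims are turned into single-element lists so that `check_required_attributes` can treat every claim uniformly as "required value must appear in the list". With a made-up userinfo payload:

    userinfo = {"family_name": "Stephensson", "groups": ["admin", "user"]}

    wrapped = {k: v if isinstance(v, list) else [v] for k, v in userinfo.items()}
    # -> {"family_name": ["Stephensson"], "groups": ["admin", "user"]}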
diff --git a/synapse/handlers/password_policy.py b/synapse/handlers/password_policy.py
index 6c635cc3..92cefa11 100644
--- a/synapse/handlers/password_policy.py
+++ b/synapse/handlers/password_policy.py
@@ -21,7 +21,7 @@ from typing import TYPE_CHECKING
from synapse.api.errors import Codes, PasswordRefusedError
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 54631b4e..da92feac 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -104,6 +104,8 @@ class BasePresenceHandler(abc.ABC):
self.clock = hs.get_clock()
self.store = hs.get_datastore()
+ self._busy_presence_enabled = hs.config.experimental.msc3026_enabled
+
active_presence = self.store.take_presence_startup_info()
self.user_to_current_state = {state.user_id: state for state in active_presence}
@@ -730,8 +732,12 @@ class PresenceHandler(BasePresenceHandler):
PresenceState.ONLINE,
PresenceState.UNAVAILABLE,
PresenceState.OFFLINE,
+ PresenceState.BUSY,
)
- if presence not in valid_presence:
+
+ if presence not in valid_presence or (
+ presence == PresenceState.BUSY and not self._busy_presence_enabled
+ ):
raise SynapseError(400, "Invalid presence state")
user_id = target_user.to_string()
@@ -744,7 +750,9 @@ class PresenceHandler(BasePresenceHandler):
msg = status_msg if presence != PresenceState.OFFLINE else None
new_fields["status_msg"] = msg
- if presence == PresenceState.ONLINE:
+ if presence == PresenceState.ONLINE or (
+ presence == PresenceState.BUSY and self._busy_presence_enabled
+ ):
new_fields["last_active_ts"] = self.clock.time_msec()
await self._update_states([prev_state.copy_and_replace(**new_fields)])
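The effect of the presence changes, roughly: with the experimental MSC3026 flag left at its default of false, requesting busy presence is rejected; with it enabled, busy behaves like online and refreshes `last_active_ts`. A sketch of the calling side (the handler and user objects are assumed to exist):

    from synapse.api.constants import PresenceState

    async def set_busy(presence_handler, target_user) -> None:
        # With msc3026_enabled false (the default), this raises
        # SynapseError(400, "Invalid presence state"). With it enabled,
        # busy is accepted and, like ONLINE, refreshes last_active_ts.
        await presence_handler.set_state(target_user, {"presence": PresenceState.BUSY})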
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index dd59392b..a755363c 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -36,7 +36,7 @@ from synapse.types import (
from ._base import BaseHandler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py
index 6bb2fd93..a54fe196 100644
--- a/synapse/handlers/read_marker.py
+++ b/synapse/handlers/read_marker.py
@@ -21,7 +21,7 @@ from synapse.util.async_helpers import Linearizer
from ._base import BaseHandler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index 6a6c5288..dbfe9bfa 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -20,7 +20,7 @@ from synapse.handlers._base import BaseHandler
from synapse.types import JsonDict, ReadReceipt, get_domain_from_id
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 1abc8875..0fc2bf15 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -38,7 +38,7 @@ from synapse.types import RoomAlias, UserID, create_requester
from ._base import BaseHandler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -437,10 +437,10 @@ class RegistrationHandler(BaseHandler):
if RoomAlias.is_valid(r):
(
- room_id,
+ room,
remote_room_hosts,
) = await room_member_handler.lookup_room_alias(room_alias)
- room_id = room_id.to_string()
+ room_id = room.to_string()
else:
raise SynapseError(
400, "%s was not legal room ID or room alias" % (r,)
diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py
index 8bfc46c6..924b81db 100644
--- a/synapse/handlers/room_list.py
+++ b/synapse/handlers/room_list.py
@@ -29,7 +29,7 @@ from synapse.util.caches.response_cache import ResponseCache
from ._base import BaseHandler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 16609213..4d20ed83 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -155,6 +155,10 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
"""
raise NotImplementedError()
+ @abc.abstractmethod
+ async def forget(self, user: UserID, room_id: str) -> None:
+ raise NotImplementedError()
+
def ratelimit_invite(self, room_id: Optional[str], invitee_user_id: str):
"""Ratelimit invites by room and by target user.
diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py
index 108730a7..3a90fc0c 100644
--- a/synapse/handlers/room_member_worker.py
+++ b/synapse/handlers/room_member_worker.py
@@ -14,7 +14,7 @@
# limitations under the License.
import logging
-from typing import List, Optional, Tuple
+from typing import TYPE_CHECKING, List, Optional, Tuple
from synapse.api.errors import SynapseError
from synapse.handlers.room_member import RoomMemberHandler
@@ -25,11 +25,14 @@ from synapse.replication.http.membership import (
)
from synapse.types import Requester, UserID
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
logger = logging.getLogger(__name__)
class RoomMemberWorkerHandler(RoomMemberHandler):
- def __init__(self, hs):
+ def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self._remote_join_client = ReplRemoteJoin.make_client(hs)
@@ -83,3 +86,6 @@ class RoomMemberWorkerHandler(RoomMemberHandler):
await self._notify_change_client(
user_id=target.to_string(), room_id=room_id, change="left"
)
+
+ async def forget(self, target: UserID, room_id: str) -> None:
+ raise RuntimeError("Cannot forget rooms on workers.")
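
Declaring forget() abstract on the base class forces every concrete RoomMemberHandler to take a position, and the worker variant fails loudly rather than silently dropping the request, since forgetting a room is only supported on the main process. A minimal sketch of the pattern, with illustrative names rather than Synapse's:

    import abc

    class BaseHandler(abc.ABC):
        # Illustrative stand-in for RoomMemberHandler's abstract method.
        @abc.abstractmethod
        async def forget(self, user_id: str, room_id: str) -> None:
            raise NotImplementedError()

    class WorkerHandler(BaseHandler):
        async def forget(self, user_id: str, room_id: str) -> None:
            # Fail loudly rather than silently ignoring the request.
            raise RuntimeError("Cannot forget rooms on workers.")
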
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index 94062e79..d742dfbd 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -30,7 +30,7 @@ from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py
index 84af2dde..f98a338e 100644
--- a/synapse/handlers/set_password.py
+++ b/synapse/handlers/set_password.py
@@ -21,7 +21,7 @@ from synapse.types import Requester
from ._base import BaseHandler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -41,7 +41,7 @@ class SetPasswordHandler(BaseHandler):
logout_devices: bool,
requester: Optional[Requester] = None,
) -> None:
- if not self.hs.config.password_localdb_enabled:
+ if not self._auth_handler.can_change_password():
raise SynapseError(403, "Password change disabled", errcode=Codes.FORBIDDEN)
try:
diff --git a/synapse/handlers/space_summary.py b/synapse/handlers/space_summary.py
new file mode 100644
index 00000000..5d941896
--- /dev/null
+++ b/synapse/handlers/space_summary.py
@@ -0,0 +1,395 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import logging
+from collections import deque
+from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence, Set, Tuple, cast
+
+import attr
+
+from synapse.api.constants import EventContentFields, EventTypes, HistoryVisibility
+from synapse.api.errors import AuthError
+from synapse.events import EventBase
+from synapse.events.utils import format_event_for_client_v2
+from synapse.types import JsonDict
+
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
+logger = logging.getLogger(__name__)
+
+# number of rooms to return. We'll stop once we hit this limit.
+# TODO: allow clients to reduce this with a request param.
+MAX_ROOMS = 50
+
+# max number of events to return per room.
+MAX_ROOMS_PER_SPACE = 50
+
+# max number of federation servers to hit per room
+MAX_SERVERS_PER_SPACE = 3
+
+
+class SpaceSummaryHandler:
+ def __init__(self, hs: "HomeServer"):
+ self._clock = hs.get_clock()
+ self._auth = hs.get_auth()
+ self._room_list_handler = hs.get_room_list_handler()
+ self._state_handler = hs.get_state_handler()
+ self._store = hs.get_datastore()
+ self._event_serializer = hs.get_event_client_serializer()
+ self._server_name = hs.hostname
+ self._federation_client = hs.get_federation_client()
+
+ async def get_space_summary(
+ self,
+ requester: str,
+ room_id: str,
+ suggested_only: bool = False,
+ max_rooms_per_space: Optional[int] = None,
+ ) -> JsonDict:
+ """
+ Implementation of the space summary C-S API
+
+ Args:
+ requester: user id of the user making this request
+
+ room_id: room id to start the summary at
+
+ suggested_only: whether we should only return children with the "suggested"
+ flag set.
+
+ max_rooms_per_space: an optional limit on the number of child rooms we will
+ return. This does not apply to the root room (ie, room_id), and
+ is capped at MAX_ROOMS_PER_SPACE.
+
+ Returns:
+ summary dict to return
+ """
+ # first of all, check that the user is in the room in question (or it's
+ # world-readable)
+ await self._auth.check_user_in_room_or_world_readable(room_id, requester)
+
+ # the queue of rooms to process
+ room_queue = deque((_RoomQueueEntry(room_id, ()),))
+
+ # rooms we have already processed
+ processed_rooms = set() # type: Set[str]
+
+ # events we have already processed. We don't necessarily have their event ids,
+ # so instead we key on (room id, state key)
+ processed_events = set() # type: Set[Tuple[str, str]]
+
+ rooms_result = [] # type: List[JsonDict]
+ events_result = [] # type: List[JsonDict]
+
+ while room_queue and len(rooms_result) < MAX_ROOMS:
+ queue_entry = room_queue.popleft()
+ room_id = queue_entry.room_id
+ if room_id in processed_rooms:
+ # already done this room
+ continue
+
+ logger.debug("Processing room %s", room_id)
+
+ is_in_room = await self._store.is_host_joined(room_id, self._server_name)
+
+ # The client-specified max_rooms_per_space limit doesn't apply to the
+ # room_id specified in the request, so we ignore it if this is the
+ # first room we are processing.
+ max_children = max_rooms_per_space if processed_rooms else None
+
+ if is_in_room:
+ rooms, events = await self._summarize_local_room(
+ requester, room_id, suggested_only, max_children
+ )
+ else:
+ rooms, events = await self._summarize_remote_room(
+ queue_entry,
+ suggested_only,
+ max_children,
+ exclude_rooms=processed_rooms,
+ )
+
+ logger.debug(
+ "Query of %s returned rooms %s, events %s",
+ queue_entry.room_id,
+ [room.get("room_id") for room in rooms],
+ ["%s->%s" % (ev["room_id"], ev["state_key"]) for ev in events],
+ )
+
+ rooms_result.extend(rooms)
+
+ # any rooms returned don't need visiting again
+ processed_rooms.update(cast(str, room.get("room_id")) for room in rooms)
+
+ # the room we queried may or may not have been returned, but don't process
+ # it again, anyway.
+ processed_rooms.add(room_id)
+
+ # XXX: is it ok that we blindly iterate through any events returned by
+ # a remote server, whether or not they actually link to any rooms in our
+ # tree?
+ for ev in events:
+ # remote servers might return events we have already processed
+ # (eg, Dendrite returns inward pointers as well as outward ones), so
+ # we need to filter them out, to avoid returning duplicate links to the
+ # client.
+ ev_key = (ev["room_id"], ev["state_key"])
+ if ev_key in processed_events:
+ continue
+ events_result.append(ev)
+
+ # add the child to the queue. we have already validated
+ # that the vias are a list of server names.
+ room_queue.append(
+ _RoomQueueEntry(ev["state_key"], ev["content"]["via"])
+ )
+ processed_events.add(ev_key)
+
+ return {"rooms": rooms_result, "events": events_result}
+
+ async def federation_space_summary(
+ self,
+ room_id: str,
+ suggested_only: bool,
+ max_rooms_per_space: Optional[int],
+ exclude_rooms: Iterable[str],
+ ) -> JsonDict:
+ """
+ Implementation of the space summary Federation API
+
+ Args:
+ room_id: room id to start the summary at
+
+ suggested_only: whether we should only return children with the "suggested"
+ flag set.
+
+ max_rooms_per_space: an optional limit on the number of child rooms we will
+ return. Unlike the C-S API, this applies to the root room (room_id).
+ It is clipped to MAX_ROOMS_PER_SPACE.
+
+ exclude_rooms: a list of rooms to skip over (presumably because the
+ calling server has already seen them).
+
+ Returns:
+ summary dict to return
+ """
+ # the queue of rooms to process
+ room_queue = deque((room_id,))
+
+ # the set of rooms that we should not walk further. Initialise it with the
+ # excluded-rooms list; we will add other rooms as we process them so that
+ # we do not loop.
+ processed_rooms = set(exclude_rooms) # type: Set[str]
+
+ rooms_result = [] # type: List[JsonDict]
+ events_result = [] # type: List[JsonDict]
+
+ while room_queue and len(rooms_result) < MAX_ROOMS:
+ room_id = room_queue.popleft()
+ if room_id in processed_rooms:
+ # already done this room
+ continue
+
+ logger.debug("Processing room %s", room_id)
+
+ rooms, events = await self._summarize_local_room(
+ None, room_id, suggested_only, max_rooms_per_space
+ )
+
+ processed_rooms.add(room_id)
+
+ rooms_result.extend(rooms)
+ events_result.extend(events)
+
+ # add any children to the queue
+ room_queue.extend(edge_event["state_key"] for edge_event in events)
+
+ return {"rooms": rooms_result, "events": events_result}
+
+ async def _summarize_local_room(
+ self,
+ requester: Optional[str],
+ room_id: str,
+ suggested_only: bool,
+ max_children: Optional[int],
+ ) -> Tuple[Sequence[JsonDict], Sequence[JsonDict]]:
+ if not await self._is_room_accessible(room_id, requester):
+ return (), ()
+
+ room_entry = await self._build_room_entry(room_id)
+
+ # look for child rooms/spaces.
+ child_events = await self._get_child_events(room_id)
+
+ if suggested_only:
+ # we only care about suggested children
+ child_events = filter(_is_suggested_child_event, child_events)
+
+ if max_children is None or max_children > MAX_ROOMS_PER_SPACE:
+ max_children = MAX_ROOMS_PER_SPACE
+
+ now = self._clock.time_msec()
+ events_result = [] # type: List[JsonDict]
+ for edge_event in itertools.islice(child_events, max_children):
+ events_result.append(
+ await self._event_serializer.serialize_event(
+ edge_event,
+ time_now=now,
+ event_format=format_event_for_client_v2,
+ )
+ )
+ return (room_entry,), events_result
+
+ async def _summarize_remote_room(
+ self,
+ room: "_RoomQueueEntry",
+ suggested_only: bool,
+ max_children: Optional[int],
+ exclude_rooms: Iterable[str],
+ ) -> Tuple[Sequence[JsonDict], Sequence[JsonDict]]:
+ room_id = room.room_id
+ logger.info("Requesting summary for %s via %s", room_id, room.via)
+
+ # we need to make the exclusion list json-serialisable
+ exclude_rooms = list(exclude_rooms)
+
+ via = itertools.islice(room.via, MAX_SERVERS_PER_SPACE)
+ try:
+ res = await self._federation_client.get_space_summary(
+ via,
+ room_id,
+ suggested_only=suggested_only,
+ max_rooms_per_space=max_children,
+ exclude_rooms=exclude_rooms,
+ )
+ except Exception as e:
+ logger.warning(
+ "Unable to get summary of %s via federation: %s",
+ room_id,
+ e,
+ exc_info=logger.isEnabledFor(logging.DEBUG),
+ )
+ return (), ()
+
+ return res.rooms, tuple(
+ ev.data
+ for ev in res.events
+ if ev.event_type == EventTypes.MSC1772_SPACE_CHILD
+ )
+
+ async def _is_room_accessible(self, room_id: str, requester: Optional[str]) -> bool:
+ # if we have an authenticated requesting user, first check if they are in the
+ # room
+ if requester:
+ try:
+ await self._auth.check_user_in_room(room_id, requester)
+ return True
+ except AuthError:
+ pass
+
+ # otherwise, check if the room is peekable
+ hist_vis_ev = await self._state_handler.get_current_state(
+ room_id, EventTypes.RoomHistoryVisibility, ""
+ )
+ if hist_vis_ev:
+ hist_vis = hist_vis_ev.content.get("history_visibility")
+ if hist_vis == HistoryVisibility.WORLD_READABLE:
+ return True
+
+ logger.info(
+ "room %s is unpeekable and user %s is not a member, omitting from summary",
+ room_id,
+ requester,
+ )
+ return False
+
+ async def _build_room_entry(self, room_id: str) -> JsonDict:
+ """Generate en entry suitable for the 'rooms' list in the summary response"""
+ stats = await self._store.get_room_with_stats(room_id)
+
+ # currently this should be impossible because we call
+ # check_user_in_room_or_world_readable on the room before we get here, so
+ # there should always be an entry
+ assert stats is not None, "unable to retrieve stats for %s" % (room_id,)
+
+ current_state_ids = await self._store.get_current_state_ids(room_id)
+ create_event = await self._store.get_event(
+ current_state_ids[(EventTypes.Create, "")]
+ )
+
+ # TODO: update once MSC1772 lands
+ room_type = create_event.content.get(EventContentFields.MSC1772_ROOM_TYPE)
+
+ entry = {
+ "room_id": stats["room_id"],
+ "name": stats["name"],
+ "topic": stats["topic"],
+ "canonical_alias": stats["canonical_alias"],
+ "num_joined_members": stats["joined_members"],
+ "avatar_url": stats["avatar"],
+ "world_readable": (
+ stats["history_visibility"] == HistoryVisibility.WORLD_READABLE
+ ),
+ "guest_can_join": stats["guest_access"] == "can_join",
+ "room_type": room_type,
+ }
+
+ # Filter out Nones; omit those fields from the entry altogether
+ room_entry = {k: v for k, v in entry.items() if v is not None}
+
+ return room_entry
+
+ async def _get_child_events(self, room_id: str) -> Iterable[EventBase]:
+ # look for child rooms/spaces.
+ current_state_ids = await self._store.get_current_state_ids(room_id)
+
+ events = await self._store.get_events_as_list(
+ [
+ event_id
+ for key, event_id in current_state_ids.items()
+ # TODO: update once MSC1772 lands
+ if key[0] == EventTypes.MSC1772_SPACE_CHILD
+ ]
+ )
+
+ # filter out any events without a "via" (which implies they have been redacted)
+ return (e for e in events if _has_valid_via(e))
+
+
+@attr.s(frozen=True, slots=True)
+class _RoomQueueEntry:
+ room_id = attr.ib(type=str)
+ via = attr.ib(type=Sequence[str])
+
+
+def _has_valid_via(e: EventBase) -> bool:
+ via = e.content.get("via")
+ if not via or not isinstance(via, Sequence):
+ return False
+ for v in via:
+ if not isinstance(v, str):
+ logger.debug("Ignoring edge event %s with invalid via entry", e.event_id)
+ return False
+ return True
+
+
+def _is_suggested_child_event(edge_event: EventBase) -> bool:
+ suggested = edge_event.content.get("suggested")
+ if isinstance(suggested, bool) and suggested:
+ return True
+ logger.debug("Ignorning not-suggested child %s", edge_event.state_key)
+ return False
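
The client-server summary above is a breadth-first walk: start at the requested room, emit its child edges, and enqueue each child (with the "via" servers from the edge content) until MAX_ROOMS entries have been collected, skipping rooms already seen. A standalone sketch of the traversal and its duplicate suppression, with a plain dict standing in for the state lookups:

    from collections import deque
    from typing import Dict, List

    def walk_space(
        root: str, children: Dict[str, List[str]], max_rooms: int = 50
    ) -> List[str]:
        """Breadth-first walk with the same dedup rules as get_space_summary."""
        room_queue = deque([root])
        processed, result = set(), []
        while room_queue and len(result) < max_rooms:
            room_id = room_queue.popleft()
            if room_id in processed:
                continue  # a room may be linked from several parents
            processed.add(room_id)
            result.append(room_id)
            room_queue.extend(children.get(room_id, []))
        return result

    # A sub-room reachable via two parents is returned exactly once:
    tree = {"!space": ["!a", "!b"], "!a": ["!shared"], "!b": ["!shared"]}
    assert walk_space("!space", tree) == ["!space", "!a", "!b", "!shared"]
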
diff --git a/synapse/handlers/state_deltas.py b/synapse/handlers/state_deltas.py
index b3f98753..ee8f87e5 100644
--- a/synapse/handlers/state_deltas.py
+++ b/synapse/handlers/state_deltas.py
@@ -17,7 +17,7 @@ import logging
from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py
index 92428114..8730f99d 100644
--- a/synapse/handlers/stats.py
+++ b/synapse/handlers/stats.py
@@ -24,7 +24,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import JsonDict
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index f50257cd..ee607e6e 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -80,7 +80,7 @@ class SyncConfig:
filter_collection = attr.ib(type=FilterCollection)
is_guest = attr.ib(type=bool)
request_key = attr.ib(type=Tuple[Any, ...])
- device_id = attr.ib(type=str)
+ device_id = attr.ib(type=Optional[str])
@attr.s(slots=True, frozen=True)
@@ -723,7 +723,9 @@ class SyncHandler:
return summary
- def get_lazy_loaded_members_cache(self, cache_key: Tuple[str, str]) -> LruCache:
+ def get_lazy_loaded_members_cache(
+ self, cache_key: Tuple[str, Optional[str]]
+ ) -> LruCache:
cache = self.lazy_loaded_members_cache.get(cache_key)
if cache is None:
logger.debug("creating LruCache for %r", cache_key)
@@ -1979,8 +1981,10 @@ class SyncHandler:
logger.info("User joined room after current token: %s", room_id)
- extrems = await self.store.get_forward_extremeties_for_room(
- room_id, event_pos.stream
+ extrems = (
+ await self.store.get_forward_extremities_for_room_at_stream_ordering(
+ room_id, event_pos.stream
+ )
)
users_in_room = await self.state.get_current_users_in_room(room_id, extrems)
if user_id in users_in_room:
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index 1a834000..b121286d 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -25,7 +25,7 @@ from synapse.types import JsonDict
from synapse.util.metrics import Measure
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 1e01e0a9..a0caba84 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -77,7 +77,7 @@ from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/http/connectproxyclient.py b/synapse/http/connectproxyclient.py
index 856e2845..b797e3ce 100644
--- a/synapse/http/connectproxyclient.py
+++ b/synapse/http/connectproxyclient.py
@@ -19,9 +19,10 @@ from zope.interface import implementer
from twisted.internet import defer, protocol
from twisted.internet.error import ConnectError
-from twisted.internet.interfaces import IStreamClientEndpoint
-from twisted.internet.protocol import connectionDone
+from twisted.internet.interfaces import IReactorCore, IStreamClientEndpoint
+from twisted.internet.protocol import ClientFactory, Protocol, connectionDone
from twisted.web import http
+from twisted.web.http_headers import Headers
logger = logging.getLogger(__name__)
@@ -43,23 +44,33 @@ class HTTPConnectProxyEndpoint:
Args:
reactor: the Twisted reactor to use for the connection
- proxy_endpoint (IStreamClientEndpoint): the endpoint to use to connect to the
- proxy
- host (bytes): hostname that we want to CONNECT to
- port (int): port that we want to connect to
+ proxy_endpoint: the endpoint to use to connect to the proxy
+ host: hostname that we want to CONNECT to
+ port: port that we want to connect to
+ headers: Extra HTTP headers to include in the CONNECT request
"""
- def __init__(self, reactor, proxy_endpoint, host, port):
+ def __init__(
+ self,
+ reactor: IReactorCore,
+ proxy_endpoint: IStreamClientEndpoint,
+ host: bytes,
+ port: int,
+ headers: Headers,
+ ):
self._reactor = reactor
self._proxy_endpoint = proxy_endpoint
self._host = host
self._port = port
+ self._headers = headers
def __repr__(self):
return "<HTTPConnectProxyEndpoint %s>" % (self._proxy_endpoint,)
- def connect(self, protocolFactory):
- f = HTTPProxiedClientFactory(self._host, self._port, protocolFactory)
+ def connect(self, protocolFactory: ClientFactory):
+ f = HTTPProxiedClientFactory(
+ self._host, self._port, protocolFactory, self._headers
+ )
d = self._proxy_endpoint.connect(f)
# once the tcp socket connects successfully, we need to wait for the
# CONNECT to complete.
@@ -74,15 +85,23 @@ class HTTPProxiedClientFactory(protocol.ClientFactory):
HTTP Protocol object and run the rest of the connection.
Args:
- dst_host (bytes): hostname that we want to CONNECT to
- dst_port (int): port that we want to connect to
- wrapped_factory (protocol.ClientFactory): The original Factory
+ dst_host: hostname that we want to CONNECT to
+ dst_port: port that we want to connect to
+ wrapped_factory: The original Factory
+ headers: Extra HTTP headers to include in the CONNECT request
"""
- def __init__(self, dst_host, dst_port, wrapped_factory):
+ def __init__(
+ self,
+ dst_host: bytes,
+ dst_port: int,
+ wrapped_factory: ClientFactory,
+ headers: Headers,
+ ):
self.dst_host = dst_host
self.dst_port = dst_port
self.wrapped_factory = wrapped_factory
+ self.headers = headers
self.on_connection = defer.Deferred()
def startedConnecting(self, connector):
@@ -92,7 +111,11 @@ class HTTPProxiedClientFactory(protocol.ClientFactory):
wrapped_protocol = self.wrapped_factory.buildProtocol(addr)
return HTTPConnectProtocol(
- self.dst_host, self.dst_port, wrapped_protocol, self.on_connection
+ self.dst_host,
+ self.dst_port,
+ wrapped_protocol,
+ self.on_connection,
+ self.headers,
)
def clientConnectionFailed(self, connector, reason):
@@ -112,24 +135,37 @@ class HTTPConnectProtocol(protocol.Protocol):
"""Protocol that wraps an existing Protocol to do a CONNECT handshake at connect
Args:
- host (bytes): The original HTTP(s) hostname or IPv4 or IPv6 address literal
+ host: The original HTTP(s) hostname or IPv4 or IPv6 address literal
to put in the CONNECT request
- port (int): The original HTTP(s) port to put in the CONNECT request
+ port: The original HTTP(s) port to put in the CONNECT request
- wrapped_protocol (interfaces.IProtocol): the original protocol (probably
- HTTPChannel or TLSMemoryBIOProtocol, but could be anything really)
+ wrapped_protocol: the original protocol (probably HTTPChannel or
+ TLSMemoryBIOProtocol, but could be anything really)
- connected_deferred (Deferred): a Deferred which will be callbacked with
+ connected_deferred: a Deferred which will be callbacked with
wrapped_protocol when the CONNECT completes
+
+ headers: Extra HTTP headers to include in the CONNECT request
"""
- def __init__(self, host, port, wrapped_protocol, connected_deferred):
+ def __init__(
+ self,
+ host: bytes,
+ port: int,
+ wrapped_protocol: Protocol,
+ connected_deferred: defer.Deferred,
+ headers: Headers,
+ ):
self.host = host
self.port = port
self.wrapped_protocol = wrapped_protocol
self.connected_deferred = connected_deferred
- self.http_setup_client = HTTPConnectSetupClient(self.host, self.port)
+ self.headers = headers
+
+ self.http_setup_client = HTTPConnectSetupClient(
+ self.host, self.port, self.headers
+ )
self.http_setup_client.on_connected.addCallback(self.proxyConnected)
def connectionMade(self):
@@ -154,7 +190,7 @@ class HTTPConnectProtocol(protocol.Protocol):
if buf:
self.wrapped_protocol.dataReceived(buf)
- def dataReceived(self, data):
+ def dataReceived(self, data: bytes):
# if we've set up the HTTP protocol, we can send the data there
if self.wrapped_protocol.connected:
return self.wrapped_protocol.dataReceived(data)
@@ -168,21 +204,29 @@ class HTTPConnectSetupClient(http.HTTPClient):
"""HTTPClient protocol to send a CONNECT message for proxies and read the response.
Args:
- host (bytes): The hostname to send in the CONNECT message
- port (int): The port to send in the CONNECT message
+ host: The hostname to send in the CONNECT message
+ port: The port to send in the CONNECT message
+ headers: Extra headers to send with the CONNECT message
"""
- def __init__(self, host, port):
+ def __init__(self, host: bytes, port: int, headers: Headers):
self.host = host
self.port = port
+ self.headers = headers
self.on_connected = defer.Deferred()
def connectionMade(self):
logger.debug("Connected to proxy, sending CONNECT")
self.sendCommand(b"CONNECT", b"%s:%d" % (self.host, self.port))
+
+ # Send any additional specified headers
+ for name, values in self.headers.getAllRawHeaders():
+ for value in values:
+ self.sendHeader(name, value)
+
self.endHeaders()
- def handleStatus(self, version, status, message):
+ def handleStatus(self, version: bytes, status: bytes, message: bytes):
logger.debug("Got Status: %s %s %s", status, message, version)
if status != b"200":
raise ProxyConnectError("Unexpected status on CONNECT: %s" % status)
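
With headers threaded through to HTTPConnectSetupClient, the CONNECT preamble can now carry a Proxy-Authorization line. A small sketch of building and iterating the extra headers the way the new code does; the credential value is illustrative, not derived from any real config:

    from twisted.web.http_headers import Headers

    # Build the extra CONNECT headers the way ProxyAgent now does.
    connect_headers = Headers()
    connect_headers.addRawHeader(b"Proxy-Authorization", b"Basic dXNlcjpwYXNz")

    # HTTPConnectSetupClient.connectionMade() sends each of these after the
    # "CONNECT host:port HTTP/1.0" request line, then ends the headers.
    for name, values in connect_headers.getAllRawHeaders():
        for value in values:
            print(name.decode(), value.decode())
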
diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py
index ecd63e65..ce4079f1 100644
--- a/synapse/http/federation/well_known_resolver.py
+++ b/synapse/http/federation/well_known_resolver.py
@@ -71,8 +71,10 @@ WELL_KNOWN_RETRY_ATTEMPTS = 3
logger = logging.getLogger(__name__)
-_well_known_cache = TTLCache("well-known")
-_had_valid_well_known_cache = TTLCache("had-valid-well-known")
+_well_known_cache = TTLCache("well-known") # type: TTLCache[bytes, Optional[bytes]]
+_had_valid_well_known_cache = TTLCache(
+ "had-valid-well-known"
+) # type: TTLCache[bytes, bool]
@attr.s(slots=True, frozen=True)
@@ -88,8 +90,8 @@ class WellKnownResolver:
reactor: IReactorTime,
agent: IAgent,
user_agent: bytes,
- well_known_cache: Optional[TTLCache] = None,
- had_well_known_cache: Optional[TTLCache] = None,
+ well_known_cache: Optional[TTLCache[bytes, Optional[bytes]]] = None,
+ had_well_known_cache: Optional[TTLCache[bytes, bool]] = None,
):
self._reactor = reactor
self._clock = Clock(reactor)
diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py
index 3d553ae2..16ec8500 100644
--- a/synapse/http/proxyagent.py
+++ b/synapse/http/proxyagent.py
@@ -12,10 +12,13 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import base64
import logging
import re
+from typing import Optional, Tuple
from urllib.request import getproxies_environment, proxy_bypass_environment
+import attr
from zope.interface import implementer
from twisted.internet import defer
@@ -23,6 +26,7 @@ from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS
from twisted.python.failure import Failure
from twisted.web.client import URI, BrowserLikePolicyForHTTPS, _AgentBase
from twisted.web.error import SchemeNotSupported
+from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent
from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint
@@ -32,6 +36,22 @@ logger = logging.getLogger(__name__)
_VALID_URI = re.compile(br"\A[\x21-\x7e]+\Z")
+@attr.s
+class ProxyCredentials:
+ username_password = attr.ib(type=bytes)
+
+ def as_proxy_authorization_value(self) -> bytes:
+ """
+ Return the value for a Proxy-Authorization header (i.e. 'Basic abdef==').
+
+ Returns:
+ A transformation of the authentication string: the encoded value for
+ a Proxy-Authorization header.
+ """
+ # Encode as base64 and prepend the authorization type
+ return b"Basic " + base64.encodebytes(self.username_password)
+
+
@implementer(IAgent)
class ProxyAgent(_AgentBase):
"""An Agent implementation which will use an HTTP proxy if one was requested
@@ -96,6 +116,9 @@ class ProxyAgent(_AgentBase):
https_proxy = proxies["https"].encode() if "https" in proxies else None
no_proxy = proxies["no"] if "no" in proxies else None
+ # Parse credentials from https proxy connection string if present
+ self.https_proxy_creds, https_proxy = parse_username_password(https_proxy)
+
self.http_proxy_endpoint = _http_proxy_endpoint(
http_proxy, self.proxy_reactor, **self._endpoint_kwargs
)
@@ -175,11 +198,22 @@ class ProxyAgent(_AgentBase):
and self.https_proxy_endpoint
and not should_skip_proxy
):
+ connect_headers = Headers()
+
+ # Determine whether we need to set Proxy-Authorization headers
+ if self.https_proxy_creds:
+ # Set a Proxy-Authorization header
+ connect_headers.addRawHeader(
+ b"Proxy-Authorization",
+ self.https_proxy_creds.as_proxy_authorization_value(),
+ )
+
endpoint = HTTPConnectProxyEndpoint(
self.proxy_reactor,
self.https_proxy_endpoint,
parsed_uri.host,
parsed_uri.port,
+ headers=connect_headers,
)
else:
# not using a proxy
@@ -208,12 +242,16 @@ class ProxyAgent(_AgentBase):
)
-def _http_proxy_endpoint(proxy, reactor, **kwargs):
+def _http_proxy_endpoint(proxy: Optional[bytes], reactor, **kwargs):
"""Parses an http proxy setting and returns an endpoint for the proxy
Args:
- proxy (bytes|None): the proxy setting
+ proxy: the proxy setting in the form: [<username>:<password>@]<host>[:<port>]
+ Note that compared to other apps, this function currently lacks support
+ for specifying a protocol scheme (i.e. protocol://...).
+
reactor: reactor to be used to connect to the proxy
+
kwargs: other args to be passed to HostnameEndpoint
Returns:
@@ -223,16 +261,43 @@ def _http_proxy_endpoint(proxy, reactor, **kwargs):
if proxy is None:
return None
- # currently we only support hostname:port. Some apps also support
- # protocol://<host>[:port], which allows a way of requiring a TLS connection to the
- # proxy.
-
+ # Parse the connection string
host, port = parse_host_port(proxy, default_port=1080)
return HostnameEndpoint(reactor, host, port, **kwargs)
-def parse_host_port(hostport, default_port=None):
- # could have sworn we had one of these somewhere else...
+def parse_username_password(proxy: bytes) -> Tuple[Optional[ProxyCredentials], bytes]:
+ """
+ Parses the username and password from a proxy declaration, e.g.
+ username:password@hostname:port.
+
+ Args:
+ proxy: The proxy connection string.
+
+ Returns:
+ An instance of ProxyCredentials and the proxy connection string with any
+ credentials stripped, i.e. u:p@host:port -> host:port. If no credentials
+ were found, None is returned in place of the ProxyCredentials instance.
+ """
+ if proxy and b"@" in proxy:
+ # We use rsplit here as the password could contain an @ character
+ credentials, proxy_without_credentials = proxy.rsplit(b"@", 1)
+ return ProxyCredentials(credentials), proxy_without_credentials
+
+ return None, proxy
+
+
+def parse_host_port(hostport: bytes, default_port: Optional[int] = None) -> Tuple[bytes, int]:
+ """
+ Parse the hostname and port from a proxy connection byte string.
+
+ Args:
+ hostport: The proxy connection string. Must be in the form 'host[:port]'.
+ default_port: The default port to return if one is not found in `hostport`.
+
+ Returns:
+ A tuple containing the hostname and port. Uses `default_port` if one was not found.
+ """
if b":" in hostport:
host, port = hostport.rsplit(b":", 1)
try:
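
Putting the two parsers together: credentials are split off with rsplit (a password may itself contain an '@') and then base64-encoded for the header. A standalone sketch with illustrative values; note that base64.encodebytes() appends a trailing newline, unlike base64.b64encode():

    import base64
    from typing import Optional, Tuple

    def split_credentials(proxy: bytes) -> Tuple[Optional[bytes], bytes]:
        """Standalone version of parse_username_password above."""
        if proxy and b"@" in proxy:
            # rsplit: the password itself may contain an '@'
            creds, rest = proxy.rsplit(b"@", 1)
            return creds, rest
        return None, proxy

    creds, hostport = split_credentials(b"user:p@ss@proxy.example.com:8888")
    assert creds == b"user:p@ss" and hostport == b"proxy.example.com:8888"

    # The header value ProxyCredentials would produce for these credentials;
    # encodebytes() leaves a trailing newline on the result.
    print(b"Basic " + base64.encodebytes(creds))
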
diff --git a/synapse/logging/context.py b/synapse/logging/context.py
index 1a7ea4fa..03cf3c2b 100644
--- a/synapse/logging/context.py
+++ b/synapse/logging/context.py
@@ -689,7 +689,7 @@ def run_in_background(f, *args, **kwargs) -> defer.Deferred:
current = current_context()
try:
res = f(*args, **kwargs)
- except: # noqa: E722
+ except Exception:
# the assumption here is that the caller doesn't want to be disturbed
# by synchronous exceptions, so let's turn them into Failures.
return defer.fail()
diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index 10bd4a14..aa146e8b 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -169,7 +169,7 @@ import inspect
import logging
import re
from functools import wraps
-from typing import TYPE_CHECKING, Dict, Optional, Type
+from typing import TYPE_CHECKING, Dict, Optional, Pattern, Type
import attr
@@ -262,7 +262,7 @@ logger = logging.getLogger(__name__)
# Block everything by default
# A regex which matches the server_names to expose traces for.
# None means 'block everything'.
-_homeserver_whitelist = None
+_homeserver_whitelist = None # type: Optional[Pattern[str]]
# Util methods
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
index f4f7ec96..9fc3da49 100644
--- a/synapse/push/__init__.py
+++ b/synapse/push/__init__.py
@@ -21,7 +21,7 @@ import attr
from synapse.types import JsonDict, RoomStreamToken
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
@attr.s(slots=True)
diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py
index aaed2865..38a47a60 100644
--- a/synapse/push/action_generator.py
+++ b/synapse/push/action_generator.py
@@ -22,7 +22,7 @@ from synapse.push.bulk_push_rule_evaluator import BulkPushRuleEvaluator
from synapse.util.metrics import Measure
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index c016a839..1897f591 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -33,7 +33,7 @@ from synapse.util.caches.lrucache import LruCache
from .push_rule_evaluator import PushRuleEvaluatorForEvent
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py
index 3dc06a79..c0968dc7 100644
--- a/synapse/push/emailpusher.py
+++ b/synapse/push/emailpusher.py
@@ -24,7 +24,7 @@ from synapse.push import Pusher, PusherConfig, ThrottleParams
from synapse.push.mailer import Mailer
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index eb6de8ba..26af5309 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -31,7 +31,7 @@ from synapse.push import Pusher, PusherConfig, PusherConfigException
from . import push_rule_evaluator, push_tools
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -290,7 +290,7 @@ class HttpPusher(Pusher):
if rejected is False:
return False
- if isinstance(rejected, list) or isinstance(rejected, tuple):
+ if isinstance(rejected, (list, tuple)):
for pk in rejected:
if pk != self.pushkey:
# for sanity, we only remove the pushkey if it
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index d10201b6..2e5161de 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -40,7 +40,7 @@ from synapse.util.async_helpers import concurrently_execute
from synapse.visibility import filter_events_for_client
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py
index 2aa7918f..cb941278 100644
--- a/synapse/push/pusher.py
+++ b/synapse/push/pusher.py
@@ -22,7 +22,7 @@ from synapse.push.httppusher import HttpPusher
from synapse.push.mailer import Mailer
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 321a3338..2a1c925e 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import itertools
import logging
from typing import List, Set
@@ -82,6 +83,9 @@ REQUIREMENTS = [
"Jinja2>=2.9",
"bleach>=1.4.3",
"typing-extensions>=3.7.4",
+ # We enforce that we have a `cryptography` version that bundles an `openssl`
+ # with the latest security patches.
+ "cryptography>=3.4.7;python_version>='3.6'",
]
CONDITIONAL_REQUIREMENTS = {
@@ -98,7 +102,7 @@ CONDITIONAL_REQUIREMENTS = {
"txacme>=0.9.2",
# txacme depends on eliot. Eliot 1.8.0 is incompatible with
# python 3.5.2, as per https://github.com/itamarst/eliot/issues/418
- 'eliot<1.8.0;python_version<"3.5.3"',
+ "eliot<1.8.0;python_version<'3.5.3'",
],
"saml2": [
# pysaml2 6.4.0 is incompatible with Python 3.5 (see https://github.com/IdentityPython/pysaml2/issues/749)
@@ -128,6 +132,18 @@ for name, optional_deps in CONDITIONAL_REQUIREMENTS.items():
ALL_OPTIONAL_REQUIREMENTS = set(optional_deps) | ALL_OPTIONAL_REQUIREMENTS
+# ensure there are no double-quote characters in any of the deps (otherwise the
+# 'pip install' incantation in DependencyException will break)
+for dep in itertools.chain(
+ REQUIREMENTS,
+ *CONDITIONAL_REQUIREMENTS.values(),
+):
+ if '"' in dep:
+ raise Exception(
+ "Dependency `%s` contains double-quote; use single-quotes instead" % (dep,)
+ )
+
+
def list_requirements():
return list(set(REQUIREMENTS) | ALL_OPTIONAL_REQUIREMENTS)
@@ -147,7 +163,7 @@ class DependencyException(Exception):
@property
def dependencies(self):
for i in self.args[0]:
- yield "'" + i + "'"
+ yield '"' + i + '"'
def check_requirements(for_feature=None):
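
The new guard exists because DependencyException now interpolates each dependency into a double-quoted "pip install" hint, so a marker such as python_version<"3.5.3" would break the quoting (hence the eliot pin switching to single quotes above). A quick sketch of the failure mode:

    # How the "pip install" hint is assembled (cf. DependencyException.dependencies),
    # and why a dependency string must not itself contain double quotes:
    deps = ['eliot<1.8.0;python_version<"3.5.3"']  # the old, double-quoted pin
    print("pip install %s" % " ".join('"' + d + '"' for d in deps))
    # -> pip install "eliot<1.8.0;python_version<"3.5.3""   (quoting is broken)
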
diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py
index 8af53b4f..82ea3b89 100644
--- a/synapse/replication/http/federation.py
+++ b/synapse/replication/http/federation.py
@@ -40,6 +40,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
// containing the event
"event_format_version": .., // 1,2,3 etc: the event format version
"internal_metadata": { .. serialized internal_metadata .. },
+ "outlier": true|false,
"rejected_reason": .., // The event.rejected_reason field
"context": { .. serialized event context .. },
}],
@@ -84,6 +85,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
"room_version": event.room_version.identifier,
"event_format_version": event.format_version,
"internal_metadata": event.internal_metadata.get_dict(),
+ "outlier": event.internal_metadata.is_outlier(),
"rejected_reason": event.rejected_reason,
"context": serialized_context,
}
@@ -116,6 +118,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
event = make_event_from_dict(
event_dict, room_ver, internal_metadata, rejected_reason
)
+ event.internal_metadata.outlier = event_payload["outlier"]
context = EventContext.deserialize(
self.storage, event_payload["context"]
diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py
index 8fa104c8..a4c5b442 100644
--- a/synapse/replication/http/send_event.py
+++ b/synapse/replication/http/send_event.py
@@ -40,6 +40,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint):
// containing the event
"event_format_version": .., // 1,2,3 etc: the event format version
"internal_metadata": { .. serialized internal_metadata .. },
+ "outlier": true|false,
"rejected_reason": .., // The event.rejected_reason field
"context": { .. serialized event context .. },
"requester": { .. serialized requester .. },
@@ -79,7 +80,6 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint):
ratelimit (bool)
extra_users (list(UserID)): Any extra users to notify about event
"""
-
serialized_context = await context.serialize(event, store)
payload = {
@@ -87,6 +87,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint):
"room_version": event.room_version.identifier,
"event_format_version": event.format_version,
"internal_metadata": event.internal_metadata.get_dict(),
+ "outlier": event.internal_metadata.is_outlier(),
"rejected_reason": event.rejected_reason,
"context": serialized_context,
"requester": requester.serialize(),
@@ -108,6 +109,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint):
event = make_event_from_dict(
event_dict, room_ver, internal_metadata, rejected_reason
)
+ event.internal_metadata.outlier = content["outlier"]
requester = Requester.deserialize(self.store, content["requester"])
context = EventContext.deserialize(self.storage, content["context"])
diff --git a/synapse/replication/slave/storage/pushers.py b/synapse/replication/slave/storage/pushers.py
index 045bd014..93161c3d 100644
--- a/synapse/replication/slave/storage/pushers.py
+++ b/synapse/replication/slave/storage/pushers.py
@@ -24,7 +24,7 @@ from ._base import BaseSlavedStore
from ._slaved_id_tracker import SlavedIdTracker
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
class SlavedPusherStore(PusherWorkerStore, BaseSlavedStore):
diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py
index bb447f75..8abed1f5 100644
--- a/synapse/replication/tcp/commands.py
+++ b/synapse/replication/tcp/commands.py
@@ -312,16 +312,16 @@ class FederationAckCommand(Command):
NAME = "FEDERATION_ACK"
- def __init__(self, instance_name, token):
+ def __init__(self, instance_name: str, token: int):
self.instance_name = instance_name
self.token = token
@classmethod
- def from_line(cls, line):
+ def from_line(cls, line: str) -> "FederationAckCommand":
instance_name, token = line.split(" ")
return cls(instance_name, int(token))
- def to_line(self):
+ def to_line(self) -> str:
return "%s %s" % (self.instance_name, self.token)
diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py
index 825900f6..e829add2 100644
--- a/synapse/replication/tcp/protocol.py
+++ b/synapse/replication/tcp/protocol.py
@@ -104,7 +104,7 @@ tcp_outbound_commands_counter = Counter(
# A list of all connected protocols. This allows us to send metrics about the
# connections.
-connected_connections = []
+connected_connections = [] # type: List[BaseReplicationStreamProtocol]
logger = logging.getLogger(__name__)
diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py
index f45e7a8c..3dfee767 100644
--- a/synapse/replication/tcp/streams/_base.py
+++ b/synapse/replication/tcp/streams/_base.py
@@ -33,7 +33,7 @@ import attr
from synapse.replication.http.streams import ReplicationGetStreamUpdates
if TYPE_CHECKING:
- import synapse.server
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -299,20 +299,23 @@ class TypingStream(Stream):
NAME = "typing"
ROW_TYPE = TypingStreamRow
- def __init__(self, hs):
- typing_handler = hs.get_typing_handler()
-
+ def __init__(self, hs: "HomeServer"):
writer_instance = hs.config.worker.writers.typing
if writer_instance == hs.get_instance_name():
# On the writer, query the typing handler
- update_function = typing_handler.get_all_typing_updates
+ typing_writer_handler = hs.get_typing_writer_handler()
+ update_function = (
+ typing_writer_handler.get_all_typing_updates
+ ) # type: Callable[[str, int, int, int], Awaitable[Tuple[List[Tuple[int, Any]], int, bool]]]
+ current_token_function = typing_writer_handler.get_current_token
else:
# Query the typing writer process
update_function = make_http_update_function(hs, self.NAME)
+ current_token_function = hs.get_typing_handler().get_current_token
super().__init__(
hs.get_instance_name(),
- current_token_without_instance(typing_handler.get_current_token),
+ current_token_without_instance(current_token_function),
update_function,
)
@@ -509,7 +512,7 @@ class AccountDataStream(Stream):
NAME = "account_data"
ROW_TYPE = AccountDataStreamRow
- def __init__(self, hs: "synapse.server.HomeServer"):
+ def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastore()
super().__init__(
hs.get_instance_name(),
diff --git a/synapse/replication/tcp/streams/federation.py b/synapse/replication/tcp/streams/federation.py
index 9bcd13b0..9bb8e9e1 100644
--- a/synapse/replication/tcp/streams/federation.py
+++ b/synapse/replication/tcp/streams/federation.py
@@ -14,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
+from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Tuple
from synapse.replication.tcp.streams._base import (
Stream,
@@ -21,6 +22,9 @@ from synapse.replication.tcp.streams._base import (
make_http_update_function,
)
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
class FederationStream(Stream):
"""Data to be sent over federation. Only available when master has federation
@@ -38,7 +42,7 @@ class FederationStream(Stream):
NAME = "federation"
ROW_TYPE = FederationStreamRow
- def __init__(self, hs):
+ def __init__(self, hs: "HomeServer"):
if hs.config.worker_app is None:
# master process: get updates from the FederationRemoteSendQueue.
# (if the master is configured to send federation itself, federation_sender
@@ -48,7 +52,9 @@ class FederationStream(Stream):
current_token = current_token_without_instance(
federation_sender.get_current_token
)
- update_function = federation_sender.get_replication_rows
+ update_function = (
+ federation_sender.get_replication_rows
+ ) # type: Callable[[str, int, int, int], Awaitable[Tuple[List[Tuple[int, Any]], int, bool]]]
elif hs.should_send_federation():
# federation sender: Query master process
@@ -69,5 +75,7 @@ class FederationStream(Stream):
return 0
@staticmethod
- async def _stub_update_function(instance_name, from_token, upto_token, limit):
+ async def _stub_update_function(
+ instance_name: str, from_token: int, upto_token: int, limit: int
+ ) -> Tuple[list, int, bool]:
return [], upto_token, False
diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py
index 7fcc48a9..40646ef2 100644
--- a/synapse/rest/admin/media.py
+++ b/synapse/rest/admin/media.py
@@ -28,7 +28,7 @@ from synapse.rest.admin._base import (
from synapse.types import JsonDict
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py
index 263d8ec0..cfe1bebb 100644
--- a/synapse/rest/admin/rooms.py
+++ b/synapse/rest/admin/rooms.py
@@ -390,6 +390,9 @@ class JoinRoomAliasServlet(ResolveRoomIdMixin, RestServlet):
async def on_POST(
self, request: SynapseRequest, room_identifier: str
) -> Tuple[int, JsonDict]:
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
+
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index 2c89b62e..309bd277 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -271,7 +271,7 @@ class UserRestServletV2(RestServlet):
elif not deactivate and user["deactivated"]:
if (
"password" not in body
- and self.hs.config.password_localdb_enabled
+ and self.auth_handler.can_change_password()
):
raise SynapseError(
400, "Must provide a password to re-activate an account."
@@ -833,6 +833,9 @@ class UserMediaRestServlet(RestServlet):
async def on_GET(
self, request: SynapseRequest, user_id: str
) -> Tuple[int, JsonDict]:
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
+
await assert_requester_is_admin(self.auth, request)
if not self.is_mine(UserID.from_string(user_id)):
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 5884daea..525efdf2 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -18,7 +18,7 @@
import logging
import re
-from typing import TYPE_CHECKING, List, Optional
+from typing import TYPE_CHECKING, List, Optional, Tuple
from urllib import parse as urlparse
from synapse.api.constants import EventTypes, Membership
@@ -35,21 +35,30 @@ from synapse.events.utils import format_event_for_client_v2
from synapse.http.servlet import (
RestServlet,
assert_params_in_dict,
+ parse_boolean,
parse_integer,
parse_json_object_from_request,
parse_string,
)
+from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import set_tag
from synapse.rest.client.transactions import HttpTransactionCache
from synapse.rest.client.v2_alpha._base import client_patterns
from synapse.storage.state import StateFilter
from synapse.streams.config import PaginationConfig
-from synapse.types import RoomAlias, RoomID, StreamToken, ThirdPartyInstanceID, UserID
+from synapse.types import (
+ JsonDict,
+ RoomAlias,
+ RoomID,
+ StreamToken,
+ ThirdPartyInstanceID,
+ UserID,
+)
from synapse.util import json_decoder
from synapse.util.stringutils import parse_and_validate_server_name, random_string
if TYPE_CHECKING:
- import synapse.server
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -846,10 +855,10 @@ class RoomTypingRestServlet(RestServlet):
"/rooms/(?P<room_id>[^/]*)/typing/(?P<user_id>[^/]*)$", v1=True
)
- def __init__(self, hs):
+ def __init__(self, hs: "HomeServer"):
super().__init__()
+ self.hs = hs
self.presence_handler = hs.get_presence_handler()
- self.typing_handler = hs.get_typing_handler()
self.auth = hs.get_auth()
# If we're not on the typing writer instance we should scream if we get
@@ -874,16 +883,19 @@ class RoomTypingRestServlet(RestServlet):
# Limit timeout to stop people from setting silly typing timeouts.
timeout = min(content.get("timeout", 30000), 120000)
+ # Defer getting the typing handler since it will raise on workers.
+ typing_handler = self.hs.get_typing_writer_handler()
+
try:
if content["typing"]:
- await self.typing_handler.started_typing(
+ await typing_handler.started_typing(
target_user=target_user,
requester=requester,
room_id=room_id,
timeout=timeout,
)
else:
- await self.typing_handler.stopped_typing(
+ await typing_handler.stopped_typing(
target_user=target_user, requester=requester, room_id=room_id
)
except ShadowBanError:
@@ -901,7 +913,7 @@ class RoomAliasListServlet(RestServlet):
),
]
- def __init__(self, hs: "synapse.server.HomeServer"):
+ def __init__(self, hs: "HomeServer"):
super().__init__()
self.auth = hs.get_auth()
self.directory_handler = hs.get_directory_handler()
@@ -984,7 +996,58 @@ def register_txn_path(servlet, regex_string, http_server, with_get=False):
)
-def register_servlets(hs, http_server, is_worker=False):
+class RoomSpaceSummaryRestServlet(RestServlet):
+ PATTERNS = (
+ re.compile(
+ "^/_matrix/client/unstable/org.matrix.msc2946"
+ "/rooms/(?P<room_id>[^/]*)/spaces$"
+ ),
+ )
+
+ def __init__(self, hs: "HomeServer"):
+ super().__init__()
+ self._auth = hs.get_auth()
+ self._space_summary_handler = hs.get_space_summary_handler()
+
+ async def on_GET(
+ self, request: SynapseRequest, room_id: str
+ ) -> Tuple[int, JsonDict]:
+ requester = await self._auth.get_user_by_req(request, allow_guest=True)
+
+ return 200, await self._space_summary_handler.get_space_summary(
+ requester.user.to_string(),
+ room_id,
+ suggested_only=parse_boolean(request, "suggested_only", default=False),
+ max_rooms_per_space=parse_integer(request, "max_rooms_per_space"),
+ )
+
+ async def on_POST(
+ self, request: SynapseRequest, room_id: str
+ ) -> Tuple[int, JsonDict]:
+ requester = await self._auth.get_user_by_req(request, allow_guest=True)
+ content = parse_json_object_from_request(request)
+
+ suggested_only = content.get("suggested_only", False)
+ if not isinstance(suggested_only, bool):
+ raise SynapseError(
+ 400, "'suggested_only' must be a boolean", Codes.BAD_JSON
+ )
+
+ max_rooms_per_space = content.get("max_rooms_per_space")
+ if max_rooms_per_space is not None and not isinstance(max_rooms_per_space, int):
+ raise SynapseError(
+ 400, "'max_rooms_per_space' must be an integer", Codes.BAD_JSON
+ )
+
+ return 200, await self._space_summary_handler.get_space_summary(
+ requester.user.to_string(),
+ room_id,
+ suggested_only=suggested_only,
+ max_rooms_per_space=max_rooms_per_space,
+ )
+
+
+def register_servlets(hs: "HomeServer", http_server, is_worker=False):
RoomStateEventRestServlet(hs).register(http_server)
RoomMemberListRestServlet(hs).register(http_server)
JoinedRoomMemberListRestServlet(hs).register(http_server)
@@ -998,6 +1061,9 @@ def register_servlets(hs, http_server, is_worker=False):
RoomTypingRestServlet(hs).register(http_server)
RoomEventContextServlet(hs).register(http_server)
+ if hs.config.experimental.spaces_enabled:
+ RoomSpaceSummaryRestServlet(hs).register(http_server)
+
# Some servlets only get registered for the main process.
if not is_worker:
RoomCreateRestServlet(hs).register(http_server)
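
Once experimental.spaces_enabled is set, the summary is served on the unstable MSC2946 prefix registered above. A sketch of calling it from a client, assuming the requests package; the hostname, room id, and access token are placeholders:

    import requests

    # GET variant; suggested_only and max_rooms_per_space are query params.
    resp = requests.get(
        "https://matrix.example.com/_matrix/client/unstable/org.matrix.msc2946"
        "/rooms/!space:example.com/spaces",
        params={"suggested_only": "false", "max_rooms_per_space": 10},
        headers={"Authorization": "Bearer <access_token>"},
    )
    print(resp.json())  # {"rooms": [...], "events": [...]}
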
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index adf1d397..c2ba790b 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -45,7 +45,7 @@ from synapse.util.threepids import canonicalise_email, check_3pid_allowed
from ._base import client_patterns, interactive_auth_handler
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/rest/client/v2_alpha/capabilities.py b/synapse/rest/client/v2_alpha/capabilities.py
index 76879ac5..44ccf10e 100644
--- a/synapse/rest/client/v2_alpha/capabilities.py
+++ b/synapse/rest/client/v2_alpha/capabilities.py
@@ -13,12 +13,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
+from typing import TYPE_CHECKING, Tuple
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.http.servlet import RestServlet
+from synapse.http.site import SynapseRequest
+from synapse.types import JsonDict
from ._base import client_patterns
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
logger = logging.getLogger(__name__)
@@ -27,21 +33,16 @@ class CapabilitiesRestServlet(RestServlet):
PATTERNS = client_patterns("/capabilities$")
- def __init__(self, hs):
- """
- Args:
- hs (synapse.server.HomeServer): server
- """
+ def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.config = hs.config
self.auth = hs.get_auth()
- self.store = hs.get_datastore()
+ self.auth_handler = hs.get_auth_handler()
- async def on_GET(self, request):
- requester = await self.auth.get_user_by_req(request, allow_guest=True)
- user = await self.store.get_user_by_id(requester.user.to_string())
- change_password = bool(user["password_hash"])
+ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
+ await self.auth.get_user_by_req(request, allow_guest=True)
+ change_password = self.auth_handler.can_change_password()
response = {
"capabilities": {
@@ -58,5 +59,5 @@ class CapabilitiesRestServlet(RestServlet):
return 200, response
-def register_servlets(hs, http_server):
+def register_servlets(hs: "HomeServer", http_server):
CapabilitiesRestServlet(hs).register(http_server)
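
The capability is now derived from AuthHandler.can_change_password() instead of inspecting the stored password hash, which also covers deployments where passwords are managed by an external provider. A sketch of the response shape clients see (values depend on server config):

    # Shape of the /capabilities response after this change; the flag
    # now reflects AuthHandler.can_change_password().
    capabilities_response = {
        "capabilities": {
            # False when password auth is disabled or handled externally
            "m.change_password": {"enabled": True},
        }
    }
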
diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py
index 5901432f..08fb6b2b 100644
--- a/synapse/rest/client/v2_alpha/groups.py
+++ b/synapse/rest/client/v2_alpha/groups.py
@@ -38,7 +38,7 @@ from synapse.types import GroupID, JsonDict
from ._base import client_patterns
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 8e52e4cc..3481770c 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -15,6 +15,7 @@
import itertools
import logging
+from typing import TYPE_CHECKING, Tuple
from synapse.api.constants import PresenceState
from synapse.api.errors import Codes, StoreError, SynapseError
@@ -26,11 +27,15 @@ from synapse.events.utils import (
from synapse.handlers.presence import format_user_presence_state
from synapse.handlers.sync import SyncConfig
from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string
-from synapse.types import StreamToken
+from synapse.http.site import SynapseRequest
+from synapse.types import JsonDict, StreamToken
from synapse.util import json_decoder
from ._base import client_patterns, set_timeline_upper_limit
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
logger = logging.getLogger(__name__)
@@ -73,7 +78,7 @@ class SyncRestServlet(RestServlet):
PATTERNS = client_patterns("/sync$")
ALLOWED_PRESENCE = {"online", "offline", "unavailable"}
- def __init__(self, hs):
+ def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
self.auth = hs.get_auth()
@@ -85,7 +90,10 @@ class SyncRestServlet(RestServlet):
self._server_notices_sender = hs.get_server_notices_sender()
self._event_serializer = hs.get_event_client_serializer()
- async def on_GET(self, request):
+ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
+
if b"from" in request.args:
# /events used to use 'from', but /sync uses 'since'.
# Let's be helpful and whine if we see a 'from'.
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py
index d24a1993..3e3d8839 100644
--- a/synapse/rest/client/versions.py
+++ b/synapse/rest/client/versions.py
@@ -81,6 +81,8 @@ class VersionsRestServlet(RestServlet):
"io.element.e2ee_forced.public": self.e2ee_forced_public,
"io.element.e2ee_forced.private": self.e2ee_forced_private,
"io.element.e2ee_forced.trusted_private": self.e2ee_forced_trusted_private,
+ # Supports the busy presence state described in MSC3026.
+ "org.matrix.msc3026.busy_presence": self.config.experimental.msc3026_enabled,
},
},
)
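A client probing the versions endpoint now learns about MSC3026 support from the unstable feature map. A hedged sketch of the relevant response fragment, assuming `msc3026_enabled: true` in the experimental config:

# Hypothetical fragment of the /_matrix/client/versions response body:
unstable_features = {
    "io.element.e2ee_forced.public": False,
    "io.element.e2ee_forced.private": False,
    "io.element.e2ee_forced.trusted_private": False,
    "org.matrix.msc3026.busy_presence": True,  # new in this change
}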
diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py
index 1eff98ef..c41a7ab4 100644
--- a/synapse/rest/media/v1/config_resource.py
+++ b/synapse/rest/media/v1/config_resource.py
@@ -23,7 +23,7 @@ from synapse.http.server import DirectServeJsonResource, respond_with_json
from synapse.http.site import SynapseRequest
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
class MediaConfigResource(DirectServeJsonResource):
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index 8a43581f..5dadaeaf 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -24,8 +24,8 @@ from synapse.http.servlet import parse_boolean
from ._base import parse_media_id, respond_404
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
from synapse.rest.media.v1.media_repository import MediaRepository
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 8b4841ed..0c041b54 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -58,7 +58,7 @@ from .thumbnailer import Thumbnailer, ThumbnailError
from .upload_resource import UploadResource
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index b8895aea..c4ed9dfd 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -54,8 +54,8 @@ from ._base import FileInfo
if TYPE_CHECKING:
from lxml import etree
- from synapse.app.homeserver import HomeServer
from synapse.rest.media.v1.media_repository import MediaRepository
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -187,6 +187,8 @@ class PreviewUrlResource(DirectServeJsonResource):
respond_with_json(request, 200, {}, send_cors=True)
async def _async_render_GET(self, request: SynapseRequest) -> None:
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
# XXX: if get_user_by_req fails, what should we do in an async render?
requester = await self.auth.get_user_by_req(request)
diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py
index e92006fa..03194755 100644
--- a/synapse/rest/media/v1/storage_provider.py
+++ b/synapse/rest/media/v1/storage_provider.py
@@ -29,7 +29,7 @@ from .media_storage import FileResponder
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
class StorageProvider(metaclass=abc.ABCMeta):
diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py
index fbcd50f1..af802bc0 100644
--- a/synapse/rest/media/v1/thumbnail_resource.py
+++ b/synapse/rest/media/v1/thumbnail_resource.py
@@ -34,8 +34,8 @@ from ._base import (
)
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
from synapse.rest.media.v1.media_repository import MediaRepository
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index ae5aef2f..0138b2e2 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -26,8 +26,8 @@ from synapse.http.site import SynapseRequest
from synapse.rest.media.v1.media_storage import SpamMediaException
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
from synapse.rest.media.v1.media_repository import MediaRepository
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/rest/synapse/client/pick_username.py b/synapse/rest/synapse/client/pick_username.py
index 51acaa9a..d9ffe844 100644
--- a/synapse/rest/synapse/client/pick_username.py
+++ b/synapse/rest/synapse/client/pick_username.py
@@ -104,6 +104,9 @@ class AccountDetailsResource(DirectServeHtmlResource):
respond_with_html(request, 200, html)
async def _async_render_POST(self, request: SynapseRequest):
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
+
try:
session_id = get_username_mapping_session_cookie_from_request(request)
except SynapseError as e:
diff --git a/synapse/secrets.py b/synapse/secrets.py
index fb6d90a3..7939db75 100644
--- a/synapse/secrets.py
+++ b/synapse/secrets.py
@@ -26,10 +26,10 @@ if sys.version_info[0:2] >= (3, 6):
import secrets
class Secrets:
- def token_bytes(self, nbytes=32):
+ def token_bytes(self, nbytes: int = 32) -> bytes:
return secrets.token_bytes(nbytes)
- def token_hex(self, nbytes=32):
+ def token_hex(self, nbytes: int = 32) -> str:
return secrets.token_hex(nbytes)
@@ -38,8 +38,8 @@ else:
import os
class Secrets:
- def token_bytes(self, nbytes=32):
+ def token_bytes(self, nbytes: int = 32) -> bytes:
return os.urandom(nbytes)
- def token_hex(self, nbytes=32):
+ def token_hex(self, nbytes: int = 32) -> str:
return binascii.hexlify(self.token_bytes(nbytes)).decode("ascii")
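Both branches expose the same interface, so the annotations simply document what callers already rely on. A quick usage sketch:

secrets_source = Secrets()
token = secrets_source.token_hex(16)   # 32 hex characters, as a str
raw = secrets_source.token_bytes(32)   # 32 random bytes, as bytes
assert isinstance(token, str) and isinstance(raw, bytes)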
diff --git a/synapse/server.py b/synapse/server.py
index 48ac87a1..e85b9391 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -60,7 +60,7 @@ from synapse.federation.federation_server import (
FederationServer,
)
from synapse.federation.send_queue import FederationRemoteSendQueue
-from synapse.federation.sender import FederationSender
+from synapse.federation.sender import AbstractFederationSender, FederationSender
from synapse.federation.transport.client import TransportLayerClient
from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
@@ -96,10 +96,11 @@ from synapse.handlers.room import (
RoomShutdownHandler,
)
from synapse.handlers.room_list import RoomListHandler
-from synapse.handlers.room_member import RoomMemberMasterHandler
+from synapse.handlers.room_member import RoomMemberHandler, RoomMemberMasterHandler
from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
from synapse.handlers.search import SearchHandler
from synapse.handlers.set_password import SetPasswordHandler
+from synapse.handlers.space_summary import SpaceSummaryHandler
from synapse.handlers.sso import SsoHandler
from synapse.handlers.stats import StatsHandler
from synapse.handlers.sync import SyncHandler
@@ -417,10 +418,19 @@ class HomeServer(metaclass=abc.ABCMeta):
return PresenceHandler(self)
@cache_in_self
- def get_typing_handler(self):
+ def get_typing_writer_handler(self) -> TypingWriterHandler:
if self.config.worker.writers.typing == self.get_instance_name():
return TypingWriterHandler(self)
else:
+ raise Exception("Workers cannot write typing")
+
+ @cache_in_self
+ def get_typing_handler(self) -> FollowerTypingHandler:
+ if self.config.worker.writers.typing == self.get_instance_name():
+ # Use get_typing_writer_handler to ensure that we use the same
+ # cached version.
+ return self.get_typing_writer_handler()
+ else:
return FollowerTypingHandler(self)
@cache_in_self
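Only the instance configured as the typing writer may construct a TypingWriterHandler; every other worker gets the read-only follower, and the writer path is memoised so both getters return the same object. A simplified sketch of the dispatch (illustrative stand-ins only; the real code uses @cache_in_self):

class FollowerTypingHandlerSketch:
    """Read-only view of typing notifications (stand-in)."""

class TypingWriterHandlerSketch(FollowerTypingHandlerSketch):
    """Can additionally write typing notifications (stand-in)."""

def pick_typing_handler(
    instance_name: str, typing_writer: str
) -> FollowerTypingHandlerSketch:
    # Mirrors the dispatch above: only the designated writer instance
    # gets the writer class; everyone else follows replication.
    if instance_name == typing_writer:
        return TypingWriterHandlerSketch()
    return FollowerTypingHandlerSketch()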
@@ -561,7 +571,7 @@ class HomeServer(metaclass=abc.ABCMeta):
return TransportLayerClient(self)
@cache_in_self
- def get_federation_sender(self):
+ def get_federation_sender(self) -> AbstractFederationSender:
if self.should_send_federation():
return FederationSender(self)
elif not self.config.worker_app:
@@ -630,7 +640,7 @@ class HomeServer(metaclass=abc.ABCMeta):
return ThirdPartyEventRules(self)
@cache_in_self
- def get_room_member_handler(self):
+ def get_room_member_handler(self) -> RoomMemberHandler:
if self.config.worker_app:
return RoomMemberWorkerHandler(self)
return RoomMemberMasterHandler(self)
@@ -640,13 +650,13 @@ class HomeServer(metaclass=abc.ABCMeta):
return FederationHandlerRegistry(self)
@cache_in_self
- def get_server_notices_manager(self):
+ def get_server_notices_manager(self) -> ServerNoticesManager:
if self.config.worker_app:
raise Exception("Workers cannot send server notices")
return ServerNoticesManager(self)
@cache_in_self
- def get_server_notices_sender(self):
+ def get_server_notices_sender(self) -> WorkerServerNoticesSender:
if self.config.worker_app:
return WorkerServerNoticesSender(self)
return ServerNoticesSender(self)
@@ -724,6 +734,10 @@ class HomeServer(metaclass=abc.ABCMeta):
return AccountDataHandler(self)
@cache_in_self
+ def get_space_summary_handler(self) -> SpaceSummaryHandler:
+ return SpaceSummaryHandler(self)
+
+ @cache_in_self
def get_external_cache(self) -> ExternalCache:
return ExternalCache(self)
diff --git a/synapse/server_notices/consent_server_notices.py b/synapse/server_notices/consent_server_notices.py
index 9137c4ed..a9349bf9 100644
--- a/synapse/server_notices/consent_server_notices.py
+++ b/synapse/server_notices/consent_server_notices.py
@@ -13,13 +13,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import Any
+from typing import TYPE_CHECKING, Any, Set
from synapse.api.errors import SynapseError
from synapse.api.urls import ConsentURIBuilder
from synapse.config import ConfigError
from synapse.types import get_localpart_from_id
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
logger = logging.getLogger(__name__)
@@ -28,16 +31,11 @@ class ConsentServerNotices:
privacy policy consent, and sends one if we do.
"""
- def __init__(self, hs):
- """
-
- Args:
- hs (synapse.server.HomeServer):
- """
+ def __init__(self, hs: "HomeServer"):
self._server_notices_manager = hs.get_server_notices_manager()
self._store = hs.get_datastore()
- self._users_in_progress = set()
+ self._users_in_progress = set() # type: Set[str]
self._current_consent_version = hs.config.user_consent_version
self._server_notice_content = hs.config.user_consent_server_notice_content
@@ -73,6 +71,10 @@ class ConsentServerNotices:
try:
u = await self._store.get_user_by_id(user_id)
+ # The user doesn't exist.
+ if u is None:
+ return
+
if u["is_guest"] and not self._send_to_guests:
# don't send to guests
return
diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py
index 66524513..a18a2e76 100644
--- a/synapse/server_notices/resource_limits_server_notices.py
+++ b/synapse/server_notices/resource_limits_server_notices.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import List, Tuple
+from typing import TYPE_CHECKING, List, Tuple
from synapse.api.constants import (
EventTypes,
@@ -24,6 +24,9 @@ from synapse.api.constants import (
from synapse.api.errors import AuthError, ResourceLimitError, SynapseError
from synapse.server_notices.server_notices_manager import SERVER_NOTICE_ROOM_TAG
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
logger = logging.getLogger(__name__)
@@ -32,11 +35,7 @@ class ResourceLimitsServerNotices:
ensures that the client is kept up to date.
"""
- def __init__(self, hs):
- """
- Args:
- hs (synapse.server.HomeServer):
- """
+ def __init__(self, hs: "HomeServer"):
self._server_notices_manager = hs.get_server_notices_manager()
self._store = hs.get_datastore()
self._auth = hs.get_auth()
diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py
index c46b2f04..144e1da7 100644
--- a/synapse/server_notices/server_notices_manager.py
+++ b/synapse/server_notices/server_notices_manager.py
@@ -58,7 +58,7 @@ class ServerNoticesManager:
user_id: str,
event_content: dict,
type: str = EventTypes.Message,
- state_key: Optional[bool] = None,
+ state_key: Optional[str] = None,
) -> EventBase:
"""Send a notice to the given user
diff --git a/synapse/server_notices/server_notices_sender.py b/synapse/server_notices/server_notices_sender.py
index 6870b67c..965c6458 100644
--- a/synapse/server_notices/server_notices_sender.py
+++ b/synapse/server_notices/server_notices_sender.py
@@ -12,25 +12,27 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Iterable, Union
+from typing import TYPE_CHECKING, Iterable, Union
from synapse.server_notices.consent_server_notices import ConsentServerNotices
from synapse.server_notices.resource_limits_server_notices import (
ResourceLimitsServerNotices,
)
+from synapse.server_notices.worker_server_notices_sender import (
+ WorkerServerNoticesSender,
+)
+
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
-class ServerNoticesSender:
+class ServerNoticesSender(WorkerServerNoticesSender):
"""A centralised place which sends server notices automatically when
    certain events take place
"""
- def __init__(self, hs):
- """
-
- Args:
- hs (synapse.server.HomeServer):
- """
+ def __init__(self, hs: "HomeServer"):
+ super().__init__(hs)
self._server_notices = (
ConsentServerNotices(hs),
ResourceLimitsServerNotices(hs),
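Making ServerNoticesSender subclass the worker stub means both share one interface, so call sites can be typed against WorkerServerNoticesSender (as get_server_notices_sender now is). An illustrative sketch of the relationship, with bodies elided and names invented:

class WorkerStubSketch:
    async def on_user_syncing(self, user_id: str) -> None:
        return None  # workers never send notices

class MasterSenderSketch(WorkerStubSketch):
    async def on_user_syncing(self, user_id: str) -> None:
        ...  # would dispatch to the consent/resource-limit notices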
diff --git a/synapse/server_notices/worker_server_notices_sender.py b/synapse/server_notices/worker_server_notices_sender.py
index 9273e618..c76bd574 100644
--- a/synapse/server_notices/worker_server_notices_sender.py
+++ b/synapse/server_notices/worker_server_notices_sender.py
@@ -12,16 +12,17 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
class WorkerServerNoticesSender:
"""Stub impl of ServerNoticesSender which does nothing"""
- def __init__(self, hs):
- """
- Args:
- hs (synapse.server.HomeServer):
- """
+ def __init__(self, hs: "HomeServer"):
+ pass
async def on_user_syncing(self, user_id: str) -> None:
"""Called when the user performs a sync operation.
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index a3c52695..0b9007e5 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -36,7 +36,7 @@ from synapse.storage.purge_events import PurgeEventsStorage
from synapse.storage.state import StateGroupStorage
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
__all__ = ["Databases", "DataStore"]
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index a25c4093..24090532 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -27,7 +27,7 @@ from synapse.types import Collection, StreamToken, get_domain_from_id
from synapse.util import json_decoder
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
index 329660cf..ccb06aab 100644
--- a/synapse/storage/background_updates.py
+++ b/synapse/storage/background_updates.py
@@ -23,7 +23,7 @@ from synapse.util import json_encoder
from . import engines
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
from synapse.storage.database import DatabasePool, LoggingTransaction
logger = logging.getLogger(__name__)
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index f1ba529a..94590e7b 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -670,7 +670,7 @@ class DatabasePool:
for after_callback, after_args, after_kwargs in after_callbacks:
after_callback(*after_args, **after_kwargs)
- except: # noqa: E722, as we reraise the exception this is fine.
+ except Exception:
for after_callback, after_args, after_kwargs in exception_callbacks:
after_callback(*after_args, **after_kwargs)
raise
@@ -1906,6 +1906,7 @@ class DatabasePool:
retcols: Iterable[str],
filters: Optional[Dict[str, Any]] = None,
keyvalues: Optional[Dict[str, Any]] = None,
+ exclude_keyvalues: Optional[Dict[str, Any]] = None,
order_direction: str = "ASC",
) -> List[Dict[str, Any]]:
"""
@@ -1929,7 +1930,10 @@ class DatabasePool:
apply a WHERE ? LIKE ? clause.
keyvalues:
column names and values to select the rows with, or None to not
- apply a WHERE clause.
+ apply a WHERE key = value clause.
+ exclude_keyvalues:
+ column names and values to exclude rows with, or None to not
+ apply a WHERE key != value clause.
order_direction: Whether the results should be ordered "ASC" or "DESC".
Returns:
@@ -1938,7 +1942,7 @@ class DatabasePool:
if order_direction not in ["ASC", "DESC"]:
raise ValueError("order_direction must be one of 'ASC' or 'DESC'.")
- where_clause = "WHERE " if filters or keyvalues else ""
+ where_clause = "WHERE " if filters or keyvalues or exclude_keyvalues else ""
arg_list = [] # type: List[Any]
if filters:
where_clause += " AND ".join("%s LIKE ?" % (k,) for k in filters)
@@ -1947,6 +1951,9 @@ class DatabasePool:
if keyvalues:
where_clause += " AND ".join("%s = ?" % (k,) for k in keyvalues)
arg_list += list(keyvalues.values())
+ if exclude_keyvalues:
+ where_clause += " AND ".join("%s != ?" % (k,) for k in exclude_keyvalues)
+ arg_list += list(exclude_keyvalues.values())
sql = "SELECT %s FROM %s %s ORDER BY %s %s LIMIT ? OFFSET ?" % (
", ".join(retcols),
diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py
index 03a38422..85bb853d 100644
--- a/synapse/storage/databases/main/appservice.py
+++ b/synapse/storage/databases/main/appservice.py
@@ -32,7 +32,7 @@ from synapse.types import JsonDict
from synapse.util import json_encoder
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py
index 45ca6620..691080ce 100644
--- a/synapse/storage/databases/main/deviceinbox.py
+++ b/synapse/storage/databases/main/deviceinbox.py
@@ -14,7 +14,7 @@
# limitations under the License.
import logging
-from typing import List, Tuple
+from typing import List, Optional, Tuple
from synapse.logging.opentracing import log_kv, set_tag, trace
from synapse.replication.tcp.streams import ToDeviceStream
@@ -115,7 +115,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
async def get_new_messages_for_device(
self,
user_id: str,
- device_id: str,
+ device_id: Optional[str],
last_stream_id: int,
current_stream_id: int,
limit: int = 100,
@@ -163,7 +163,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
@trace
async def delete_messages_for_device(
- self, user_id: str, device_id: str, up_to_stream_id: int
+ self, user_id: str, device_id: Optional[str], up_to_stream_id: int
) -> int:
"""
Args:
diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py
index 332193ad..a956be49 100644
--- a/synapse/storage/databases/main/event_federation.py
+++ b/synapse/storage/databases/main/event_federation.py
@@ -793,7 +793,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
return int(min_depth) if min_depth is not None else None
- async def get_forward_extremeties_for_room(
+ async def get_forward_extremities_for_room_at_stream_ordering(
self, room_id: str, stream_ordering: int
) -> List[str]:
"""For a given room_id and stream_ordering, return the forward
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index cd1ceac5..98dac19a 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -1270,8 +1270,10 @@ class PersistEventsStore:
logger.exception("")
raise
+        # update the stored internal_metadata so that it reflects the "outlier" flag.
+ # TODO: This is unused as of Synapse 1.31. Remove it once we are happy
+ # to drop backwards-compatibility with 1.30.
metadata_json = json_encoder.encode(event.internal_metadata.get_dict())
-
sql = "UPDATE event_json SET internal_metadata = ? WHERE event_id = ?"
txn.execute(sql, (metadata_json, event.event_id))
@@ -1319,6 +1321,19 @@ class PersistEventsStore:
d.pop("redacted_because", None)
return d
+ def get_internal_metadata(event):
+ im = event.internal_metadata.get_dict()
+
+ # temporary hack for database compatibility with Synapse 1.30 and earlier:
+ # store the `outlier` flag inside the internal_metadata json as well as in
+ # the `events` table, so that if anyone rolls back to an older Synapse,
+ # things keep working. This can be removed once we are happy to drop support
+            # for those older versions.
+ if event.internal_metadata.is_outlier():
+ im["outlier"] = True
+
+ return im
+
self.db_pool.simple_insert_many_txn(
txn,
table="event_json",
@@ -1327,7 +1342,7 @@ class PersistEventsStore:
"event_id": event.event_id,
"room_id": event.room_id,
"internal_metadata": json_encoder.encode(
- event.internal_metadata.get_dict()
+ get_internal_metadata(event)
),
"json": json_encoder.encode(event_dict(event)),
"format_version": event.format_version,
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index c04e162c..952d4969 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -799,6 +799,7 @@ class EventsWorkerStore(SQLBaseStore):
rejected_reason=rejected_reason,
)
original_ev.internal_metadata.stream_ordering = row["stream_ordering"]
+ original_ev.internal_metadata.outlier = row["outlier"]
event_map[event_id] = original_ev
@@ -905,7 +906,8 @@ class EventsWorkerStore(SQLBaseStore):
ej.json,
ej.format_version,
r.room_version,
- rej.reason
+ rej.reason,
+ e.outlier
FROM events AS e
JOIN event_json AS ej USING (event_id)
LEFT JOIN rooms r ON r.room_id = e.room_id
@@ -929,6 +931,7 @@ class EventsWorkerStore(SQLBaseStore):
"room_version_id": row[5],
"rejected_reason": row[6],
"redactions": [],
+ "outlier": row[7],
}
# check for redactions
diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py
index d788dc0f..757da3d5 100644
--- a/synapse/storage/databases/main/monthly_active_users.py
+++ b/synapse/storage/databases/main/monthly_active_users.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import Dict, List
+from typing import Dict, List, Optional
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.storage._base import SQLBaseStore
@@ -109,7 +109,7 @@ class MonthlyActiveUsersWorkerStore(SQLBaseStore):
return users
@cached(num_args=1)
- async def user_last_seen_monthly_active(self, user_id: str) -> int:
+ async def user_last_seen_monthly_active(self, user_id: str) -> Optional[int]:
"""
Checks if a given user is part of the monthly active user group
diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py
index 29edab34..0ff693a3 100644
--- a/synapse/storage/databases/main/presence.py
+++ b/synapse/storage/databases/main/presence.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import List, Tuple
+from typing import Dict, List, Tuple
from synapse.api.presence import UserPresenceState
from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
@@ -157,5 +157,63 @@ class PresenceStore(SQLBaseStore):
return {row["user_id"]: UserPresenceState(**row) for row in rows}
+ async def get_presence_for_all_users(
+ self,
+ include_offline: bool = True,
+ ) -> Dict[str, UserPresenceState]:
+ """Retrieve the current presence state for all users.
+
+ Note that the presence_stream table is culled frequently, so it should only
+ contain the latest presence state for each user.
+
+ Args:
+ include_offline: Whether to include offline presence states
+
+ Returns:
+ A dict of user IDs to their current UserPresenceState.
+ """
+ users_to_state = {}
+
+ exclude_keyvalues = None
+ if not include_offline:
+ # Exclude offline presence state
+ exclude_keyvalues = {"state": "offline"}
+
+ # This may be a very heavy database query.
+ # We paginate in order to not block a database connection.
+ limit = 100
+ offset = 0
+ while True:
+ rows = await self.db_pool.runInteraction(
+ "get_presence_for_all_users",
+ self.db_pool.simple_select_list_paginate_txn,
+ "presence_stream",
+ orderby="stream_id",
+ start=offset,
+ limit=limit,
+ exclude_keyvalues=exclude_keyvalues,
+ retcols=(
+ "user_id",
+ "state",
+ "last_active_ts",
+ "last_federation_update_ts",
+ "last_user_sync_ts",
+ "status_msg",
+ "currently_active",
+ ),
+ order_direction="ASC",
+ )
+
+ for row in rows:
+ users_to_state[row["user_id"]] = UserPresenceState(**row)
+
+ # We've run out of updates to query
+ if len(rows) < limit:
+ break
+
+ offset += limit
+
+ return users_to_state
+
def get_current_presence_token(self):
return self._presence_id_gen.get_current_token()
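A hedged sketch of calling the new method from elsewhere in the codebase (the surrounding names are assumptions):

async def prime_presence_cache(presence_store) -> None:
    # Fetch everyone who is not offline; this paginates internally in
    # batches of 100 so it never holds a connection for one huge query.
    states = await presence_store.get_presence_for_all_users(
        include_offline=False
    )
    for user_id, state in states.items():
        print(user_id, state.state)  # e.g. "@alice:example.org online"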
diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py
index 85f1ebac..c65558c2 100644
--- a/synapse/storage/databases/main/pusher.py
+++ b/synapse/storage/databases/main/pusher.py
@@ -27,7 +27,7 @@ from synapse.util import json_encoder
from synapse.util.caches.descriptors import cached, cachedList
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index eba66ff3..90a8f664 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -1210,6 +1210,7 @@ class RegistrationBackgroundUpdateStore(RegistrationWorkerStore):
self._invalidate_cache_and_stream(
txn, self.get_user_deactivated_status, (user_id,)
)
+ self._invalidate_cache_and_stream(txn, self.get_user_by_id, (user_id,))
txn.call_after(self.is_guest.invalidate, (user_id,))
@cached()
diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py
index 03096618..b7072f1f 100644
--- a/synapse/storage/databases/main/transactions.py
+++ b/synapse/storage/databases/main/transactions.py
@@ -22,7 +22,6 @@ from canonicaljson import encode_canonical_json
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.storage.database import DatabasePool, LoggingTransaction
-from synapse.storage.engines import PostgresEngine, Sqlite3Engine
from synapse.types import JsonDict
from synapse.util.caches.expiringcache import ExpiringCache
@@ -312,49 +311,23 @@ class TransactionStore(TransactionWorkerStore):
stream_ordering: the stream_ordering of the event
"""
- return await self.db_pool.runInteraction(
- "store_destination_rooms_entries",
- self._store_destination_rooms_entries_txn,
- destinations,
- room_id,
- stream_ordering,
+ await self.db_pool.simple_upsert_many(
+ table="destinations",
+ key_names=("destination",),
+ key_values=[(d,) for d in destinations],
+ value_names=[],
+ value_values=[],
+ desc="store_destination_rooms_entries_dests",
)
- def _store_destination_rooms_entries_txn(
- self,
- txn: LoggingTransaction,
- destinations: Iterable[str],
- room_id: str,
- stream_ordering: int,
- ) -> None:
-
- # ensure we have a `destinations` row for this destination, as there is
- # a foreign key constraint.
- if isinstance(self.database_engine, PostgresEngine):
- q = """
- INSERT INTO destinations (destination)
- VALUES (?)
- ON CONFLICT DO NOTHING;
- """
- elif isinstance(self.database_engine, Sqlite3Engine):
- q = """
- INSERT OR IGNORE INTO destinations (destination)
- VALUES (?);
- """
- else:
- raise RuntimeError("Unknown database engine")
-
- txn.execute_batch(q, ((destination,) for destination in destinations))
-
rows = [(destination, room_id) for destination in destinations]
-
- self.db_pool.simple_upsert_many_txn(
- txn,
+ await self.db_pool.simple_upsert_many(
table="destination_rooms",
key_names=("destination", "room_id"),
key_values=rows,
value_names=["stream_ordering"],
value_values=[(stream_ordering,)] * len(rows),
+ desc="store_destination_rooms_entries_rooms",
)
async def get_destination_last_successful_stream_ordering(
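The rewrite trades engine-specific SQL for two portable upserts: one that inserts each destination into `destinations` if missing (satisfying the foreign-key constraint), and one that records the stream ordering per (destination, room). A sketch of how the rows are shaped for the second call, with hypothetical values:

destinations = ["remote1.example", "remote2.example"]
room_id = "!room:example.org"
stream_ordering = 42
rows = [(d, room_id) for d in destinations]   # key_values
values = [(stream_ordering,)] * len(rows)     # value_values
assert len(rows) == len(values) == 2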
diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py
index e2240703..97ec65f7 100644
--- a/synapse/storage/databases/state/store.py
+++ b/synapse/storage/databases/state/store.py
@@ -183,12 +183,13 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
requests state from the cache, if False we need to query the DB for the
missing state.
"""
- is_all, known_absent, state_dict_ids = cache.get(group)
+ cache_entry = cache.get(group)
+ state_dict_ids = cache_entry.value
- if is_all or state_filter.is_full():
+ if cache_entry.full or state_filter.is_full():
# Either we have everything or want everything, either way
# `is_all` tells us whether we've gotten everything.
- return state_filter.filter_state(state_dict_ids), is_all
+ return state_filter.filter_state(state_dict_ids), cache_entry.full
# tracks whether any of our requested types are missing from the cache
missing_types = False
@@ -202,7 +203,7 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
# There aren't any wild cards, so `concrete_types()` returns the
# complete list of event types we're wanting.
for key in state_filter.concrete_types():
- if key not in state_dict_ids and key not in known_absent:
+ if key not in state_dict_ids and key not in cache_entry.known_absent:
missing_types = True
break
diff --git a/synapse/storage/purge_events.py b/synapse/storage/purge_events.py
index 4dcd848c..ad954990 100644
--- a/synapse/storage/purge_events.py
+++ b/synapse/storage/purge_events.py
@@ -20,7 +20,7 @@ from typing import TYPE_CHECKING, Set
from synapse.storage.databases import Databases
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
logger = logging.getLogger(__name__)
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index d179a418..2e277a21 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -32,7 +32,7 @@ from synapse.events import EventBase
from synapse.types import MutableStateMap, StateMap
if TYPE_CHECKING:
- from synapse.app.homeserver import HomeServer
+ from synapse.server import HomeServer
from synapse.storage.databases import Databases
logger = logging.getLogger(__name__)
@@ -449,7 +449,7 @@ class StateGroupStorage:
return self.stores.state._get_state_groups_from_groups(groups, state_filter)
async def get_state_for_events(
- self, event_ids: List[str], state_filter: StateFilter = StateFilter.all()
+ self, event_ids: Iterable[str], state_filter: StateFilter = StateFilter.all()
) -> Dict[str, StateMap[EventBase]]:
"""Given a list of event_ids and type tuples, return a list of state
dicts for each event.
@@ -485,7 +485,7 @@ class StateGroupStorage:
return {event: event_to_state[event] for event in event_ids}
async def get_state_ids_for_events(
- self, event_ids: List[str], state_filter: StateFilter = StateFilter.all()
+ self, event_ids: Iterable[str], state_filter: StateFilter = StateFilter.all()
) -> Dict[str, StateMap[str]]:
"""
Get the state dicts corresponding to a list of events, containing the event_ids
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index f33c1158..c3b2d981 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -496,7 +496,7 @@ def timeout_deferred(
try:
deferred.cancel()
- except: # noqa: E722, if we throw any exception it'll break time outs
+ except Exception: # if we throw any exception it'll break time outs
logger.exception("Canceller failed during timeout")
# the cancel() call should have set off a chain of errbacks which
diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py
index e676c2ca..48f64eeb 100644
--- a/synapse/util/caches/__init__.py
+++ b/synapse/util/caches/__init__.py
@@ -25,8 +25,8 @@ from synapse.config.cache import add_resizable_cache
logger = logging.getLogger(__name__)
-caches_by_name = {}
-collectors_by_name = {} # type: Dict
+caches_by_name = {} # type: Dict[str, Sized]
+collectors_by_name = {} # type: Dict[str, CacheMetric]
cache_size = Gauge("synapse_util_caches_cache:size", "", ["name"])
cache_hits = Gauge("synapse_util_caches_cache:hits", "", ["name"])
@@ -116,7 +116,7 @@ def register_cache(
"""
if resizable:
if not resize_callback:
- resize_callback = getattr(cache, "set_cache_factor")
+ resize_callback = cache.set_cache_factor # type: ignore
add_resizable_cache(cache_name, resize_callback)
metric = CacheMetric(cache, cache_type, cache_name, collect_callback)
diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py
index 588d2d49..b3b413b0 100644
--- a/synapse/util/caches/dictionary_cache.py
+++ b/synapse/util/caches/dictionary_cache.py
@@ -15,26 +15,38 @@
import enum
import logging
import threading
-from collections import namedtuple
-from typing import Any
+from typing import Any, Dict, Generic, Iterable, Optional, Set, TypeVar
+
+import attr
from synapse.util.caches.lrucache import LruCache
logger = logging.getLogger(__name__)
-class DictionaryEntry(namedtuple("DictionaryEntry", ("full", "known_absent", "value"))):
+# The type of the cache keys.
+KT = TypeVar("KT")
+# The type of the dictionary keys.
+DKT = TypeVar("DKT")
+
+
+@attr.s(slots=True)
+class DictionaryEntry:
"""Returned when getting an entry from the cache
Attributes:
- full (bool): Whether the cache has the full or dict or just some keys.
+        full: Whether the cache has the full dict or just some keys.
If not full then not all requested keys will necessarily be present
in `value`
- known_absent (set): Keys that were looked up in the dict and were not
+ known_absent: Keys that were looked up in the dict and were not
there.
- value (dict): The full or partial dict value
+ value: The full or partial dict value
"""
+ full = attr.ib(type=bool)
+ known_absent = attr.ib()
+ value = attr.ib()
+
def __len__(self):
return len(self.value)
@@ -45,21 +57,21 @@ class _Sentinel(enum.Enum):
sentinel = object()
-class DictionaryCache:
+class DictionaryCache(Generic[KT, DKT]):
"""Caches key -> dictionary lookups, supporting caching partial dicts, i.e.
fetching a subset of dictionary keys for a particular key.
"""
- def __init__(self, name, max_entries=1000):
+ def __init__(self, name: str, max_entries: int = 1000):
self.cache = LruCache(
max_size=max_entries, cache_name=name, size_callback=len
- ) # type: LruCache[Any, DictionaryEntry]
+ ) # type: LruCache[KT, DictionaryEntry]
self.name = name
self.sequence = 0
- self.thread = None
+ self.thread = None # type: Optional[threading.Thread]
- def check_thread(self):
+ def check_thread(self) -> None:
expected_thread = self.thread
if expected_thread is None:
self.thread = threading.current_thread()
@@ -69,12 +81,14 @@ class DictionaryCache:
"Cache objects can only be accessed from the main thread"
)
- def get(self, key, dict_keys=None):
+ def get(
+ self, key: KT, dict_keys: Optional[Iterable[DKT]] = None
+ ) -> DictionaryEntry:
"""Fetch an entry out of the cache
Args:
key
- dict_key(list): If given a set of keys then return only those keys
+            dict_keys: If given a set of keys, return only those keys
that exist in the cache.
Returns:
@@ -95,7 +109,7 @@ class DictionaryCache:
return DictionaryEntry(False, set(), {})
- def invalidate(self, key):
+ def invalidate(self, key: KT) -> None:
self.check_thread()
# Increment the sequence number so that any SELECT statements that
@@ -103,19 +117,25 @@ class DictionaryCache:
self.sequence += 1
self.cache.pop(key, None)
- def invalidate_all(self):
+ def invalidate_all(self) -> None:
self.check_thread()
self.sequence += 1
self.cache.clear()
- def update(self, sequence, key, value, fetched_keys=None):
+ def update(
+ self,
+ sequence: int,
+ key: KT,
+ value: Dict[DKT, Any],
+ fetched_keys: Optional[Set[DKT]] = None,
+ ) -> None:
"""Updates the entry in the cache
Args:
sequence
- key (K)
- value (dict[X,Y]): The value to update the cache with.
- fetched_keys (None|set[X]): All of the dictionary keys which were
+ key
+ value: The value to update the cache with.
+ fetched_keys: All of the dictionary keys which were
fetched from the database.
If None, this is the complete value for key K. Otherwise, it
@@ -131,7 +151,9 @@ class DictionaryCache:
else:
self._update_or_insert(key, value, fetched_keys)
- def _update_or_insert(self, key, value, known_absent):
+ def _update_or_insert(
+ self, key: KT, value: Dict[DKT, Any], known_absent: Set[DKT]
+ ) -> None:
# We pop and reinsert as we need to tell the cache the size may have
# changed
@@ -140,5 +162,5 @@ class DictionaryCache:
entry.known_absent.update(known_absent)
self.cache[key] = entry
- def _insert(self, key, value, known_absent):
+ def _insert(self, key: KT, value: Dict[DKT, Any], known_absent: Set[DKT]) -> None:
self.cache[key] = DictionaryEntry(True, known_absent, value)
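A usage sketch of the now-generic cache, with string cache keys and (event_type, state_key) tuple dictionary keys (the values here are invented):

cache = DictionaryCache("state_group_cache")  # DictionaryCache[str, tuple]
seq = cache.sequence  # capture before fetching, as real callers do
cache.update(seq, "group1", {("m.room.member", "@a:x"): "$event1"})
entry = cache.get("group1", dict_keys=[("m.room.member", "@a:x")])
assert entry.full
assert entry.value[("m.room.member", "@a:x")] == "$event1"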
diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py
index 6ce2a3d1..96a82749 100644
--- a/synapse/util/caches/ttlcache.py
+++ b/synapse/util/caches/ttlcache.py
@@ -15,6 +15,7 @@
import logging
import time
+from typing import Any, Callable, Dict, Generic, Tuple, TypeVar, Union
import attr
from sortedcontainers import SortedList
@@ -23,15 +24,19 @@ from synapse.util.caches import register_cache
logger = logging.getLogger(__name__)
-SENTINEL = object()
+SENTINEL = object() # type: Any
+T = TypeVar("T")
+KT = TypeVar("KT")
+VT = TypeVar("VT")
-class TTLCache:
+
+class TTLCache(Generic[KT, VT]):
"""A key/value cache implementation where each entry has its own TTL"""
- def __init__(self, cache_name, timer=time.time):
+ def __init__(self, cache_name: str, timer: Callable[[], float] = time.time):
# map from key to _CacheEntry
- self._data = {}
+ self._data = {} # type: Dict[KT, _CacheEntry]
# the _CacheEntries, sorted by expiry time
self._expiry_list = SortedList() # type: SortedList[_CacheEntry]
@@ -40,26 +45,27 @@ class TTLCache:
self._metrics = register_cache("ttl", cache_name, self, resizable=False)
- def set(self, key, value, ttl):
+ def set(self, key: KT, value: VT, ttl: float) -> None:
"""Add/update an entry in the cache
Args:
key: key for this entry
value: value for this entry
- ttl (float): TTL for this entry, in seconds
+ ttl: TTL for this entry, in seconds
"""
expiry = self._timer() + ttl
self.expire()
e = self._data.pop(key, SENTINEL)
- if e != SENTINEL:
+ if e is not SENTINEL:
+ assert isinstance(e, _CacheEntry)
self._expiry_list.remove(e)
entry = _CacheEntry(expiry_time=expiry, ttl=ttl, key=key, value=value)
self._data[key] = entry
self._expiry_list.add(entry)
- def get(self, key, default=SENTINEL):
+ def get(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:
"""Get a value from the cache
Args:
@@ -72,23 +78,23 @@ class TTLCache:
"""
self.expire()
e = self._data.get(key, SENTINEL)
- if e == SENTINEL:
+ if e is SENTINEL:
self._metrics.inc_misses()
- if default == SENTINEL:
+ if default is SENTINEL:
raise KeyError(key)
return default
+ assert isinstance(e, _CacheEntry)
self._metrics.inc_hits()
return e.value
- def get_with_expiry(self, key):
+ def get_with_expiry(self, key: KT) -> Tuple[VT, float, float]:
"""Get a value, and its expiry time, from the cache
Args:
key: key to look up
Returns:
- Tuple[Any, float, float]: the value from the cache, the expiry time
- and the TTL
+ A tuple of the value from the cache, the expiry time and the TTL
Raises:
KeyError if the entry is not found
@@ -102,7 +108,7 @@ class TTLCache:
self._metrics.inc_hits()
return e.value, e.expiry_time, e.ttl
- def pop(self, key, default=SENTINEL):
+ def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]: # type: ignore
"""Remove a value from the cache
If key is in the cache, remove it and return its value, else return default.
@@ -118,29 +124,30 @@ class TTLCache:
"""
self.expire()
e = self._data.pop(key, SENTINEL)
- if e == SENTINEL:
+ if e is SENTINEL:
self._metrics.inc_misses()
- if default == SENTINEL:
+ if default is SENTINEL:
raise KeyError(key)
return default
+ assert isinstance(e, _CacheEntry)
self._expiry_list.remove(e)
self._metrics.inc_hits()
return e.value
- def __getitem__(self, key):
+ def __getitem__(self, key: KT) -> VT:
return self.get(key)
- def __delitem__(self, key):
+ def __delitem__(self, key: KT) -> None:
self.pop(key)
- def __contains__(self, key):
+ def __contains__(self, key: KT) -> bool:
return key in self._data
- def __len__(self):
+ def __len__(self) -> int:
self.expire()
return len(self._data)
- def expire(self):
+ def expire(self) -> None:
"""Run the expiry on the cache. Any entries whose expiry times are due will
be removed
"""
@@ -158,7 +165,7 @@ class _CacheEntry:
"""TTLCache entry"""
# expiry_time is the first attribute, so that entries are sorted by expiry.
- expiry_time = attr.ib()
- ttl = attr.ib()
+ expiry_time = attr.ib(type=float)
+ ttl = attr.ib(type=float)
key = attr.ib()
value = attr.ib()
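A usage sketch of the now-typed cache (the key/value choices are illustrative):

cache = TTLCache("wellknown")  # type: TTLCache[str, bytes]
cache.set("example.com", b'{"m.server": "matrix.example.com:443"}', ttl=3600)
value = cache.get("example.com")              # KeyError once the TTL passes
value, expiry_time, ttl = cache.get_with_expiry("example.com")
present = "example.com" in cache              # note: does not run expiry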
diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py
index 5f7a6dd1..5ca2e71e 100644
--- a/synapse/util/frozenutils.py
+++ b/synapse/util/frozenutils.py
@@ -36,7 +36,7 @@ def freeze(o):
def unfreeze(o):
if isinstance(o, (dict, frozendict)):
- return dict({k: unfreeze(v) for k, v in o.items()})
+ return {k: unfreeze(v) for k, v in o.items()}
if isinstance(o, (bytes, str)):
return o
diff --git a/synapse/visibility.py b/synapse/visibility.py
index e39d0260..ff53a49b 100644
--- a/synapse/visibility.py
+++ b/synapse/visibility.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-import operator
+from typing import Dict, FrozenSet, List, Optional
from synapse.api.constants import (
AccountDataTypes,
@@ -21,10 +21,11 @@ from synapse.api.constants import (
HistoryVisibility,
Membership,
)
+from synapse.events import EventBase
from synapse.events.utils import prune_event
from synapse.storage import Storage
from synapse.storage.state import StateFilter
-from synapse.types import get_domain_from_id
+from synapse.types import StateMap, get_domain_from_id
logger = logging.getLogger(__name__)
@@ -48,32 +49,32 @@ MEMBERSHIP_PRIORITY = (
async def filter_events_for_client(
storage: Storage,
- user_id,
- events,
- is_peeking=False,
- always_include_ids=frozenset(),
- filter_send_to_client=True,
-):
+ user_id: str,
+ events: List[EventBase],
+ is_peeking: bool = False,
+ always_include_ids: FrozenSet[str] = frozenset(),
+ filter_send_to_client: bool = True,
+) -> List[EventBase]:
"""
Check which events a user is allowed to see. If the user can see the event but its
sender asked for their data to be erased, prune the content of the event.
Args:
storage
- user_id(str): user id to be checked
- events(list[synapse.events.EventBase]): sequence of events to be checked
- is_peeking(bool): should be True if:
+ user_id: user id to be checked
+ events: sequence of events to be checked
+ is_peeking: should be True if:
* the user is not currently a member of the room, and:
* the user has not been a member of the room since the given
events
- always_include_ids (set(event_id)): set of event ids to specifically
+ always_include_ids: set of event ids to specifically
include (unless sender is ignored)
- filter_send_to_client (bool): Whether we're checking an event that's going to be
+ filter_send_to_client: Whether we're checking an event that's going to be
sent to a client. This might not always be the case since this function can
also be called to check whether a user can see the state at a given point.
Returns:
- list[synapse.events.EventBase]
+ The filtered events.
"""
# Filter out events that have been soft failed so that we don't relay them
# to clients.
@@ -90,7 +91,7 @@ async def filter_events_for_client(
AccountDataTypes.IGNORED_USER_LIST, user_id
)
- ignore_list = frozenset()
+ ignore_list = frozenset() # type: FrozenSet[str]
if ignore_dict_content:
ignored_users_dict = ignore_dict_content.get("ignored_users", {})
if isinstance(ignored_users_dict, dict):
@@ -107,19 +108,18 @@ async def filter_events_for_client(
room_id
] = await storage.main.get_retention_policy_for_room(room_id)
- def allowed(event):
+ def allowed(event: EventBase) -> Optional[EventBase]:
"""
Args:
- event (synapse.events.EventBase): event to check
+ event: event to check
Returns:
- None|EventBase:
- None if the user cannot see this event at all
+ None if the user cannot see this event at all
- a redacted copy of the event if they can only see a redacted
- version
+ a redacted copy of the event if they can only see a redacted
+ version
- the original event if they can see it as normal.
+ the original event if they can see it as normal.
"""
# Only run some checks if these events aren't about to be sent to clients. This is
# because, if this is not the case, we're probably only checking if the users can
@@ -252,48 +252,46 @@ async def filter_events_for_client(
return event
- # check each event: gives an iterable[None|EventBase]
+ # Check each event: gives an iterable of None or (a potentially modified)
+ # EventBase.
filtered_events = map(allowed, events)
- # remove the None entries
- filtered_events = filter(operator.truth, filtered_events)
-
- # we turn it into a list before returning it.
- return list(filtered_events)
+ # Turn it into a list and remove None entries before returning.
+ return [ev for ev in filtered_events if ev]
async def filter_events_for_server(
storage: Storage,
- server_name,
- events,
- redact=True,
- check_history_visibility_only=False,
-):
+ server_name: str,
+ events: List[EventBase],
+ redact: bool = True,
+ check_history_visibility_only: bool = False,
+) -> List[EventBase]:
"""Filter a list of events based on whether given server is allowed to
see them.
Args:
storage
- server_name (str)
- events (iterable[FrozenEvent])
- redact (bool): Whether to return a redacted version of the event, or
+ server_name
+ events
+ redact: Whether to return a redacted version of the event, or
to filter them out entirely.
- check_history_visibility_only (bool): Whether to only check the
+ check_history_visibility_only: Whether to only check the
history visibility, rather than things like if the sender has been
erased. This is used e.g. during pagination to decide whether to
backfill or not.
Returns
- list[FrozenEvent]
+ The filtered events.
"""
- def is_sender_erased(event, erased_senders):
+ def is_sender_erased(event: EventBase, erased_senders: Dict[str, bool]) -> bool:
if erased_senders and erased_senders[event.sender]:
logger.info("Sender of %s has been erased, redacting", event.event_id)
return True
return False
- def check_event_is_visible(event, state):
+ def check_event_is_visible(event: EventBase, state: StateMap[EventBase]) -> bool:
history = state.get((EventTypes.RoomHistoryVisibility, ""), None)
if history:
visibility = history.content.get(
diff --git a/test_postgresql.sh b/test_postgresql.sh
index 1ffcaabd..c10828fb 100755
--- a/test_postgresql.sh
+++ b/test_postgresql.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# This script builds the Docker image to run the PostgreSQL tests, and then runs
# the tests.
diff --git a/tests/federation/test_federation_catch_up.py b/tests/federation/test_federation_catch_up.py
index 6f96cd79..95eac6a5 100644
--- a/tests/federation/test_federation_catch_up.py
+++ b/tests/federation/test_federation_catch_up.py
@@ -2,6 +2,7 @@ from typing import List, Tuple
from mock import Mock
+from synapse.api.constants import EventTypes
from synapse.events import EventBase
from synapse.federation.sender import PerDestinationQueue, TransactionManager
from synapse.federation.units import Edu
@@ -421,3 +422,51 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
self.assertNotIn("zzzerver", woken)
# - all destinations are woken exactly once; they appear once in woken.
self.assertCountEqual(woken, server_names[:-1])
+
+ @override_config({"send_federation": True})
+ def test_not_latest_event(self):
+ """Test that we send the latest event in the room even if its not ours."""
+
+ per_dest_queue, sent_pdus = self.make_fake_destination_queue()
+
+ # Make a room with a local user, and two servers. One will go offline
+ # and one will send some events.
+ self.register_user("u1", "you the one")
+ u1_token = self.login("u1", "you the one")
+ room_1 = self.helper.create_room_as("u1", tok=u1_token)
+
+ self.get_success(
+ event_injection.inject_member_event(self.hs, room_1, "@user:host2", "join")
+ )
+ event_1 = self.get_success(
+ event_injection.inject_member_event(self.hs, room_1, "@user:host3", "join")
+ )
+
+ # First we send something from the local server, so that we notice the
+ # remote is down and go into catchup mode.
+ self.helper.send(room_1, "you hear me!!", tok=u1_token)
+
+ # Now simulate us receiving an event from the still online remote.
+ event_2 = self.get_success(
+ event_injection.inject_event(
+ self.hs,
+ type=EventTypes.Message,
+ sender="@user:host3",
+ room_id=room_1,
+ content={"msgtype": "m.text", "body": "Hello"},
+ )
+ )
+
+ self.get_success(
+ self.hs.get_datastore().set_destination_last_successful_stream_ordering(
+ "host2", event_1.internal_metadata.stream_ordering
+ )
+ )
+
+ self.get_success(per_dest_queue._catch_up_transmission_loop())
+
+ # We expect only the last message from the remote, event_2, to have been
+ # sent, rather than the last *local* event that was sent.
+ self.assertEqual(len(sent_pdus), 1)
+ self.assertEqual(sent_pdus[0].event_id, event_2.event_id)
+ self.assertFalse(per_dest_queue._catching_up)
diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py
index 5e9c9c2e..c7796fb8 100644
--- a/tests/handlers/test_oidc.py
+++ b/tests/handlers/test_oidc.py
@@ -989,6 +989,138 @@ class OidcHandlerTestCase(HomeserverTestCase):
self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
self.assertRenderedError("mapping_error", "localpart is invalid: ")
+ @override_config(
+ {
+ "oidc_config": {
+ **DEFAULT_CONFIG,
+ "attribute_requirements": [{"attribute": "test", "value": "foobar"}],
+ }
+ }
+ )
+ def test_attribute_requirements(self):
+ """The required attributes must be met from the OIDC userinfo response."""
+ auth_handler = self.hs.get_auth_handler()
+ auth_handler.complete_sso_login = simple_async_mock()
+
+ # userinfo lacking "test": "foobar" attribute should fail.
+ userinfo = {
+ "sub": "tester",
+ "username": "tester",
+ }
+ self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
+ auth_handler.complete_sso_login.assert_not_called()
+
+ # userinfo with "test": "foobar" attribute should succeed.
+ userinfo = {
+ "sub": "tester",
+ "username": "tester",
+ "test": "foobar",
+ }
+ self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
+
+ # check that the auth handler got called as expected
+ auth_handler.complete_sso_login.assert_called_once_with(
+ "@tester:test", "oidc", ANY, ANY, None, new_user=True
+ )
+
+ @override_config(
+ {
+ "oidc_config": {
+ **DEFAULT_CONFIG,
+ "attribute_requirements": [{"attribute": "test", "value": "foobar"}],
+ }
+ }
+ )
+ def test_attribute_requirements_contains(self):
+ """Test that auth succeeds if userinfo attribute CONTAINS required value"""
+ auth_handler = self.hs.get_auth_handler()
+ auth_handler.complete_sso_login = simple_async_mock()
+ # userinfo with "test": ["foobar", "foo", "bar"] attribute should succeed.
+ userinfo = {
+ "sub": "tester",
+ "username": "tester",
+ "test": ["foobar", "foo", "bar"],
+ }
+ self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
+
+ # check that the auth handler got called as expected
+ auth_handler.complete_sso_login.assert_called_once_with(
+ "@tester:test", "oidc", ANY, ANY, None, new_user=True
+ )
+
+ @override_config(
+ {
+ "oidc_config": {
+ **DEFAULT_CONFIG,
+ "attribute_requirements": [{"attribute": "test", "value": "foobar"}],
+ }
+ }
+ )
+ def test_attribute_requirements_mismatch(self):
+ """
+ Test that auth fails if attributes exist but don't match,
+ or are non-string values.
+ """
+ auth_handler = self.hs.get_auth_handler()
+ auth_handler.complete_sso_login = simple_async_mock()
+ # userinfo with "test": "not_foobar" attribute should fail
+ userinfo = {
+ "sub": "tester",
+ "username": "tester",
+ "test": "not_foobar",
+ }
+ self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
+ auth_handler.complete_sso_login.assert_not_called()
+
+ # userinfo with "test": ["foo", "bar"] attribute should fail
+ userinfo = {
+ "sub": "tester",
+ "username": "tester",
+ "test": ["foo", "bar"],
+ }
+ self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
+ auth_handler.complete_sso_login.assert_not_called()
+
+ # userinfo with "test": False attribute should fail
+ # this is largely just to ensure we don't crash here
+ userinfo = {
+ "sub": "tester",
+ "username": "tester",
+ "test": False,
+ }
+ self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
+ auth_handler.complete_sso_login.assert_not_called()
+
+ # userinfo with "test": None attribute should fail
+ # a value of None breaks the OIDC spec, but it's important to not crash here
+ userinfo = {
+ "sub": "tester",
+ "username": "tester",
+ "test": None,
+ }
+ self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
+ auth_handler.complete_sso_login.assert_not_called()
+
+ # userinfo with "test": 1 attribute should fail
+ # this is largely just to ensure we don't crash here
+ userinfo = {
+ "sub": "tester",
+ "username": "tester",
+ "test": 1,
+ }
+ self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
+ auth_handler.complete_sso_login.assert_not_called()
+
+ # userinfo with "test": 3.14 attribute should fail
+ # this is largely just to ensure we don't crash here
+ userinfo = {
+ "sub": "tester",
+ "username": "tester",
+ "test": 3.14,
+ }
+ self.get_success(_make_callback_with_userinfo(self.hs, userinfo))
+ auth_handler.complete_sso_login.assert_not_called()
+
def _generate_oidc_session_token(
self,
state: str,
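The tests above pin down the matching rules for attribute_requirements: a requirement is met when the userinfo value equals the configured value, or is a list containing it; a missing attribute, a mismatch, or a non-string type simply fails without crashing. A hedged sketch of a checker with those semantics (not Synapse's actual code):

def attribute_requirement_met(userinfo: dict, attribute: str, value: str) -> bool:
    if attribute not in userinfo:
        return False  # missing attribute: requirement not met
    actual = userinfo[attribute]
    if actual == value:
        return True   # exact match
    if isinstance(actual, list):
        return value in actual  # e.g. ["foobar", "foo", "bar"]
    return False      # None, bools, numbers, mismatches: fail quietly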
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 996c6141..77330f59 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -310,6 +310,26 @@ class PresenceTimeoutTestCase(unittest.TestCase):
self.assertIsNotNone(new_state)
self.assertEquals(new_state.state, PresenceState.UNAVAILABLE)
+ def test_busy_no_idle(self):
+ """
+        Tests that a user who sets their presence to busy but then idles doesn't have
+        their presence state turned into unavailable.
+ """
+ user_id = "@foo:bar"
+ now = 5000000
+
+ state = UserPresenceState.default(user_id)
+ state = state.copy_and_replace(
+ state=PresenceState.BUSY,
+ last_active_ts=now - IDLE_TIMER - 1,
+ last_user_sync_ts=now,
+ )
+
+ new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now)
+
+ self.assertIsNotNone(new_state)
+ self.assertEquals(new_state.state, PresenceState.BUSY)
+
def test_sync_timeout(self):
user_id = "@foo:bar"
now = 5000000
diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py
index 505ffcd3..3ea8b5be 100644
--- a/tests/http/test_proxyagent.py
+++ b/tests/http/test_proxyagent.py
@@ -12,8 +12,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import base64
import logging
import os
+from typing import Optional
from unittest.mock import patch
import treq
@@ -242,6 +244,21 @@ class MatrixFederationAgentTests(TestCase):
@patch.dict(os.environ, {"https_proxy": "proxy.com", "no_proxy": "unused.com"})
def test_https_request_via_proxy(self):
+ """Tests that TLS-encrypted requests can be made through a proxy"""
+ self._do_https_request_via_proxy(auth_credentials=None)
+
+ @patch.dict(
+ os.environ,
+ {"https_proxy": "bob:pinkponies@proxy.com", "no_proxy": "unused.com"},
+ )
+ def test_https_request_via_proxy_with_auth(self):
+ """Tests that authenticated, TLS-encrypted requests can be made through a proxy"""
+ self._do_https_request_via_proxy(auth_credentials="bob:pinkponies")
+
+ def _do_https_request_via_proxy(
+ self,
+ auth_credentials: Optional[str] = None,
+ ):
agent = ProxyAgent(
self.reactor,
contextFactory=get_test_https_policy(),
@@ -278,6 +295,22 @@ class MatrixFederationAgentTests(TestCase):
self.assertEqual(request.method, b"CONNECT")
self.assertEqual(request.path, b"test.com:443")
+ # Check whether auth credentials have been supplied to the proxy
+ proxy_auth_header_values = request.requestHeaders.getRawHeaders(
+ b"Proxy-Authorization"
+ )
+
+ if auth_credentials is not None:
+ # Compute the correct header value for Proxy-Authorization
+ encoded_credentials = base64.b64encode(auth_credentials.encode("ascii"))
+ expected_header_value = b"Basic " + encoded_credentials
+
+ # Validate the header's value
+ self.assertIn(expected_header_value, proxy_auth_header_values)
+ else:
+ # Check that the Proxy-Authorization header has not been supplied to the proxy
+ self.assertIsNone(proxy_auth_header_values)
+
# tell the proxy server not to close the connection
proxy_server.persistent = True
@@ -312,6 +345,13 @@ class MatrixFederationAgentTests(TestCase):
self.assertEqual(request.method, b"GET")
self.assertEqual(request.path, b"/abc")
self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
+
+ # Check that the destination server DID NOT receive proxy credentials
+ proxy_auth_header_values = request.requestHeaders.getRawHeaders(
+ b"Proxy-Authorization"
+ )
+ self.assertIsNone(proxy_auth_header_values)
+
request.write(b"result")
request.finish()
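The Proxy-Authorization checks above rely on the standard Basic scheme: the "user:password" pair, base64-encoded and prefixed with "Basic ". A self-contained sketch of the header the test expects (not Synapse-specific):

    import base64

    def proxy_auth_header(credentials: str) -> bytes:
        # credentials is the "user:password" pair, e.g. "bob:pinkponies"
        return b"Basic " + base64.b64encode(credentials.encode("ascii"))

    assert proxy_auth_header("bob:pinkponies") == b"Basic Ym9iOnBpbmtwb25pZXM="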
diff --git a/tests/replication/_base.py b/tests/replication/_base.py
index 67b79136..1d4a5928 100644
--- a/tests/replication/_base.py
+++ b/tests/replication/_base.py
@@ -44,7 +44,7 @@ from tests.server import FakeTransport
try:
import hiredis
except ImportError:
- hiredis = None
+ hiredis = None # type: ignore
logger = logging.getLogger(__name__)
diff --git a/tests/replication/tcp/streams/test_typing.py b/tests/replication/tcp/streams/test_typing.py
index 5acfb3e5..ca49d4dd 100644
--- a/tests/replication/tcp/streams/test_typing.py
+++ b/tests/replication/tcp/streams/test_typing.py
@@ -69,6 +69,7 @@ class TypingStreamTestCase(BaseStreamTestCase):
self.assert_request_is_get_repl_stream_updates(request, "typing")
# The from token should be the token from the last RDATA we got.
+ assert request.args is not None
self.assertEqual(int(request.args[b"from_token"][0]), token)
self.test_handler.on_rdata.assert_called_once()
diff --git a/tests/replication/test_multi_media_repo.py b/tests/replication/test_multi_media_repo.py
index 7ff11cde..b0800f98 100644
--- a/tests/replication/test_multi_media_repo.py
+++ b/tests/replication/test_multi_media_repo.py
@@ -15,7 +15,7 @@
import logging
import os
from binascii import unhexlify
-from typing import Tuple
+from typing import Optional, Tuple
from twisted.internet.protocol import Factory
from twisted.protocols.tls import TLSMemoryBIOFactory
@@ -32,7 +32,7 @@ from tests.server import FakeChannel, FakeSite, FakeTransport, make_request
logger = logging.getLogger(__name__)
-test_server_connection_factory = None
+test_server_connection_factory = None # type: Optional[TestServerTLSConnectionFactory]
class MediaRepoShardTestCase(BaseMultiWorkerStreamTestCase):
diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
index e58d5cf0..cf61f284 100644
--- a/tests/rest/admin/test_user.py
+++ b/tests/rest/admin/test_user.py
@@ -1003,12 +1003,23 @@ class UserRestTestCase(unittest.HomeserverTestCase):
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
+ self.auth_handler = hs.get_auth_handler()
+ # Create users and mint access tokens directly, so this works
+ # regardless of whether password login or SSO is enabled.
self.admin_user = self.register_user("admin", "pass", admin=True)
- self.admin_user_tok = self.login("admin", "pass")
+ self.admin_user_tok = self.get_success(
+ self.auth_handler.get_access_token_for_user_id(
+ self.admin_user, device_id=None, valid_until_ms=None
+ )
+ )
self.other_user = self.register_user("user", "pass", displayname="User")
- self.other_user_token = self.login("user", "pass")
+ self.other_user_token = self.get_success(
+ self.auth_handler.get_access_token_for_user_id(
+ self.other_user, device_id=None, valid_until_ms=None
+ )
+ )
self.url_other_user = "/_synapse/admin/v2/users/%s" % urllib.parse.quote(
self.other_user
)
@@ -1081,7 +1092,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual("Bob's name", channel.json_body["displayname"])
self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
- self.assertEqual(True, channel.json_body["admin"])
+ self.assertTrue(channel.json_body["admin"])
self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"])
# Get user
@@ -1096,9 +1107,9 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual("Bob's name", channel.json_body["displayname"])
self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
- self.assertEqual(True, channel.json_body["admin"])
- self.assertEqual(False, channel.json_body["is_guest"])
- self.assertEqual(False, channel.json_body["deactivated"])
+ self.assertTrue(channel.json_body["admin"])
+ self.assertFalse(channel.json_body["is_guest"])
+ self.assertFalse(channel.json_body["deactivated"])
self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"])
def test_create_user(self):
@@ -1130,7 +1141,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual("Bob's name", channel.json_body["displayname"])
self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
- self.assertEqual(False, channel.json_body["admin"])
+ self.assertFalse(channel.json_body["admin"])
self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"])
# Get user
@@ -1145,10 +1156,10 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual("Bob's name", channel.json_body["displayname"])
self.assertEqual("email", channel.json_body["threepids"][0]["medium"])
self.assertEqual("bob@bob.bob", channel.json_body["threepids"][0]["address"])
- self.assertEqual(False, channel.json_body["admin"])
- self.assertEqual(False, channel.json_body["is_guest"])
- self.assertEqual(False, channel.json_body["deactivated"])
- self.assertEqual(False, channel.json_body["shadow_banned"])
+ self.assertFalse(channel.json_body["admin"])
+ self.assertFalse(channel.json_body["is_guest"])
+ self.assertFalse(channel.json_body["deactivated"])
+ self.assertFalse(channel.json_body["shadow_banned"])
self.assertEqual("mxc://fibble/wibble", channel.json_body["avatar_url"])
@override_config(
@@ -1197,7 +1208,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@bob:test", channel.json_body["name"])
- self.assertEqual(False, channel.json_body["admin"])
+ self.assertFalse(channel.json_body["admin"])
@override_config(
{"limit_usage_by_mau": True, "max_mau_value": 2, "mau_trial_days": 0}
@@ -1237,7 +1248,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
# Admin user is not blocked by mau anymore
self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@bob:test", channel.json_body["name"])
- self.assertEqual(False, channel.json_body["admin"])
+ self.assertFalse(channel.json_body["admin"])
@override_config(
{
@@ -1429,24 +1440,23 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
- self.assertEqual(False, channel.json_body["deactivated"])
+ self.assertFalse(channel.json_body["deactivated"])
self.assertEqual("foo@bar.com", channel.json_body["threepids"][0]["address"])
self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
self.assertEqual("User", channel.json_body["displayname"])
# Deactivate user
- body = json.dumps({"deactivated": True})
-
channel = self.make_request(
"PUT",
self.url_other_user,
access_token=self.admin_user_tok,
- content=body.encode(encoding="utf_8"),
+ content={"deactivated": True},
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
- self.assertEqual(True, channel.json_body["deactivated"])
+ self.assertTrue(channel.json_body["deactivated"])
+ self.assertIsNone(channel.json_body["password_hash"])
self.assertEqual(0, len(channel.json_body["threepids"]))
self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
self.assertEqual("User", channel.json_body["displayname"])
@@ -1461,7 +1471,8 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
- self.assertEqual(True, channel.json_body["deactivated"])
+ self.assertTrue(channel.json_body["deactivated"])
+ self.assertIsNone(channel.json_body["password_hash"])
self.assertEqual(0, len(channel.json_body["threepids"]))
self.assertEqual("mxc://servername/mediaid", channel.json_body["avatar_url"])
self.assertEqual("User", channel.json_body["displayname"])
@@ -1478,41 +1489,37 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertTrue(profile["display_name"] == "User")
# Deactivate user
- body = json.dumps({"deactivated": True})
-
channel = self.make_request(
"PUT",
self.url_other_user,
access_token=self.admin_user_tok,
- content=body.encode(encoding="utf_8"),
+ content={"deactivated": True},
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
- self.assertEqual(True, channel.json_body["deactivated"])
+ self.assertTrue(channel.json_body["deactivated"])
# is not in user directory
profile = self.get_success(self.store.get_user_in_directory(self.other_user))
- self.assertTrue(profile is None)
+ self.assertIsNone(profile)
# Set new displayname user
- body = json.dumps({"displayname": "Foobar"})
-
channel = self.make_request(
"PUT",
self.url_other_user,
access_token=self.admin_user_tok,
- content=body.encode(encoding="utf_8"),
+ content={"displayname": "Foobar"},
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
- self.assertEqual(True, channel.json_body["deactivated"])
+ self.assertTrue(channel.json_body["deactivated"])
self.assertEqual("Foobar", channel.json_body["displayname"])
# is not in user directory
profile = self.get_success(self.store.get_user_in_directory(self.other_user))
- self.assertTrue(profile is None)
+ self.assertIsNone(profile)
def test_reactivate_user(self):
"""
@@ -1520,48 +1527,92 @@ class UserRestTestCase(unittest.HomeserverTestCase):
"""
# Deactivate the user.
+ self._deactivate_user("@user:test")
+
+ # Attempt to reactivate the user (without a password).
+ channel = self.make_request(
+ "PUT",
+ self.url_other_user,
+ access_token=self.admin_user_tok,
+ content={"deactivated": False},
+ )
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+
+ # Reactivate the user.
channel = self.make_request(
"PUT",
self.url_other_user,
access_token=self.admin_user_tok,
- content=json.dumps({"deactivated": True}).encode(encoding="utf_8"),
+ content={"deactivated": False, "password": "foo"},
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual("@user:test", channel.json_body["name"])
+ self.assertFalse(channel.json_body["deactivated"])
+ self.assertIsNotNone(channel.json_body["password_hash"])
self._is_erased("@user:test", False)
- d = self.store.mark_user_erased("@user:test")
- self.assertIsNone(self.get_success(d))
- self._is_erased("@user:test", True)
- # Attempt to reactivate the user (without a password).
+ @override_config({"password_config": {"localdb_enabled": False}})
+ def test_reactivate_user_localdb_disabled(self):
+ """
+ Test reactivating another user when the local password database is
+ disabled (SSO-only login).
+ """
+
+ # Deactivate the user.
+ self._deactivate_user("@user:test")
+
+ # Reactivate the user with a password
channel = self.make_request(
"PUT",
self.url_other_user,
access_token=self.admin_user_tok,
- content=json.dumps({"deactivated": False}).encode(encoding="utf_8"),
+ content={"deactivated": False, "password": "foo"},
)
- self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
- # Reactivate the user.
+ # Reactivate the user without a password.
channel = self.make_request(
"PUT",
self.url_other_user,
access_token=self.admin_user_tok,
- content=json.dumps({"deactivated": False, "password": "foo"}).encode(
- encoding="utf_8"
- ),
+ content={"deactivated": False},
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual("@user:test", channel.json_body["name"])
+ self.assertFalse(channel.json_body["deactivated"])
+ self.assertIsNone(channel.json_body["password_hash"])
+ self._is_erased("@user:test", False)
- # Get user
+ @override_config({"password_config": {"enabled": False}})
+ def test_reactivate_user_password_disabled(self):
+ """
+ Test reactivating another user when password login is disabled entirely
+ (SSO-only login).
+ """
+
+ # Deactivate the user.
+ self._deactivate_user("@user:test")
+
+ # Reactivate the user with a password
channel = self.make_request(
- "GET",
+ "PUT",
self.url_other_user,
access_token=self.admin_user_tok,
+ content={"deactivated": False, "password": "foo"},
)
+ self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
+ # Reactivate the user without a password.
+ channel = self.make_request(
+ "PUT",
+ self.url_other_user,
+ access_token=self.admin_user_tok,
+ content={"deactivated": False},
+ )
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
- self.assertEqual(False, channel.json_body["deactivated"])
+ self.assertFalse(channel.json_body["deactivated"])
+ self.assertIsNone(channel.json_body["password_hash"])
self._is_erased("@user:test", False)
def test_set_user_as_admin(self):
@@ -1570,18 +1621,16 @@ class UserRestTestCase(unittest.HomeserverTestCase):
"""
# Set a user as an admin
- body = json.dumps({"admin": True})
-
channel = self.make_request(
"PUT",
self.url_other_user,
access_token=self.admin_user_tok,
- content=body.encode(encoding="utf_8"),
+ content={"admin": True},
)
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
- self.assertEqual(True, channel.json_body["admin"])
+ self.assertTrue(channel.json_body["admin"])
# Get user
channel = self.make_request(
@@ -1592,7 +1641,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual("@user:test", channel.json_body["name"])
- self.assertEqual(True, channel.json_body["admin"])
+ self.assertTrue(channel.json_body["admin"])
def test_accidental_deactivation_prevention(self):
"""
@@ -1602,13 +1651,11 @@ class UserRestTestCase(unittest.HomeserverTestCase):
url = "/_synapse/admin/v2/users/@bob:test"
# Create user
- body = json.dumps({"password": "abc123"})
-
channel = self.make_request(
"PUT",
url,
access_token=self.admin_user_tok,
- content=body.encode(encoding="utf_8"),
+ content={"password": "abc123"},
)
self.assertEqual(201, int(channel.result["code"]), msg=channel.result["body"])
@@ -1628,13 +1675,11 @@ class UserRestTestCase(unittest.HomeserverTestCase):
self.assertEqual(0, channel.json_body["deactivated"])
# Change password (and use a str for deactivate instead of a bool)
- body = json.dumps({"password": "abc123", "deactivated": "false"}) # oops!
-
channel = self.make_request(
"PUT",
url,
access_token=self.admin_user_tok,
- content=body.encode(encoding="utf_8"),
+ content={"password": "abc123", "deactivated": "false"},
)
self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
@@ -1653,7 +1698,7 @@ class UserRestTestCase(unittest.HomeserverTestCase):
# Ensure they're still alive
self.assertEqual(0, channel.json_body["deactivated"])
- def _is_erased(self, user_id, expect):
+ def _is_erased(self, user_id: str, expect: bool) -> None:
"""Assert that the user is erased or not"""
d = self.store.is_user_erased(user_id)
if expect:
@@ -1661,6 +1706,24 @@ class UserRestTestCase(unittest.HomeserverTestCase):
else:
self.assertFalse(self.get_success(d))
+ def _deactivate_user(self, user_id: str) -> None:
+ """Deactivate user and set as erased"""
+
+ # Deactivate the user.
+ channel = self.make_request(
+ "PUT",
+ "/_synapse/admin/v2/users/%s" % urllib.parse.quote(user_id),
+ access_token=self.admin_user_tok,
+ content={"deactivated": True},
+ )
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertTrue(channel.json_body["deactivated"])
+ self.assertIsNone(channel.json_body["password_hash"])
+ self._is_erased(user_id, False)
+ d = self.store.mark_user_erased(user_id)
+ self.assertIsNone(self.get_success(d))
+ self._is_erased(user_id, True)
+
class UserMembershipRestTestCase(unittest.HomeserverTestCase):
diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py
index 227fffab..bf390142 100644
--- a/tests/rest/client/test_third_party_rules.py
+++ b/tests/rest/client/test_third_party_rules.py
@@ -161,6 +161,68 @@ class ThirdPartyRulesTestCase(unittest.HomeserverTestCase):
ev = channel.json_body
self.assertEqual(ev["content"]["x"], "y")
+ def test_message_edit(self):
+ """Ensure that the module doesn't cause issues with edited messages."""
+ # first patch the event checker so that it will modify the event
+ async def check(ev: EventBase, state):
+ d = ev.get_dict()
+ d["content"] = {
+ "msgtype": "m.text",
+ "body": d["content"]["body"].upper(),
+ }
+ return d
+
+ current_rules_module().check_event_allowed = check
+
+ # Send an event, then edit it.
+ channel = self.make_request(
+ "PUT",
+ "/_matrix/client/r0/rooms/%s/send/modifyme/1" % self.room_id,
+ {
+ "msgtype": "m.text",
+ "body": "Original body",
+ },
+ access_token=self.tok,
+ )
+ self.assertEqual(channel.result["code"], b"200", channel.result)
+ orig_event_id = channel.json_body["event_id"]
+
+ channel = self.make_request(
+ "PUT",
+ "/_matrix/client/r0/rooms/%s/send/m.room.message/2" % self.room_id,
+ {
+ "m.new_content": {"msgtype": "m.text", "body": "Edited body"},
+ "m.relates_to": {
+ "rel_type": "m.replace",
+ "event_id": orig_event_id,
+ },
+ "msgtype": "m.text",
+ "body": "Edited body",
+ },
+ access_token=self.tok,
+ )
+ self.assertEqual(channel.result["code"], b"200", channel.result)
+ edited_event_id = channel.json_body["event_id"]
+
+ # ... and check that they both got modified
+ channel = self.make_request(
+ "GET",
+ "/_matrix/client/r0/rooms/%s/event/%s" % (self.room_id, orig_event_id),
+ access_token=self.tok,
+ )
+ self.assertEqual(channel.result["code"], b"200", channel.result)
+ ev = channel.json_body
+ self.assertEqual(ev["content"]["body"], "ORIGINAL BODY")
+
+ channel = self.make_request(
+ "GET",
+ "/_matrix/client/r0/rooms/%s/event/%s" % (self.room_id, edited_event_id),
+ access_token=self.tok,
+ )
+ self.assertEqual(channel.result["code"], b"200", channel.result)
+ ev = channel.json_body
+ self.assertEqual(ev["content"]["body"], "EDITED BODY")
+
def test_send_event(self):
"""Tests that the module can send an event into a room via the module api"""
content = {
diff --git a/tests/rest/client/v2_alpha/test_capabilities.py b/tests/rest/client/v2_alpha/test_capabilities.py
index e808339f..287a1a48 100644
--- a/tests/rest/client/v2_alpha/test_capabilities.py
+++ b/tests/rest/client/v2_alpha/test_capabilities.py
@@ -18,6 +18,7 @@ from synapse.rest.client.v1 import login
from synapse.rest.client.v2_alpha import capabilities
from tests import unittest
+from tests.unittest import override_config
class CapabilitiesTestCase(unittest.HomeserverTestCase):
@@ -33,6 +34,7 @@ class CapabilitiesTestCase(unittest.HomeserverTestCase):
hs = self.setup_test_homeserver()
self.store = hs.get_datastore()
self.config = hs.config
+ self.auth_handler = hs.get_auth_handler()
return hs
def test_check_auth_required(self):
@@ -56,7 +58,7 @@ class CapabilitiesTestCase(unittest.HomeserverTestCase):
capabilities["m.room_versions"]["default"],
)
- def test_get_change_password_capabilities(self):
+ def test_get_change_password_capabilities_password_login(self):
localpart = "user"
password = "pass"
user = self.register_user(localpart, password)
@@ -66,10 +68,36 @@ class CapabilitiesTestCase(unittest.HomeserverTestCase):
capabilities = channel.json_body["capabilities"]
self.assertEqual(channel.code, 200)
-
- # Test case where password is handled outside of Synapse
self.assertTrue(capabilities["m.change_password"]["enabled"])
- self.get_success(self.store.user_set_password_hash(user, None))
+
+ @override_config({"password_config": {"localdb_enabled": False}})
+ def test_get_change_password_capabilities_localdb_disabled(self):
+ localpart = "user"
+ password = "pass"
+ user = self.register_user(localpart, password)
+ access_token = self.get_success(
+ self.auth_handler.get_access_token_for_user_id(
+ user, device_id=None, valid_until_ms=None
+ )
+ )
+
+ channel = self.make_request("GET", self.url, access_token=access_token)
+ capabilities = channel.json_body["capabilities"]
+
+ self.assertEqual(channel.code, 200)
+ self.assertFalse(capabilities["m.change_password"]["enabled"])
+
+ @override_config({"password_config": {"enabled": False}})
+ def test_get_change_password_capabilities_password_disabled(self):
+ localpart = "user"
+ password = "pass"
+ user = self.register_user(localpart, password)
+ access_token = self.get_success(
+ self.auth_handler.get_access_token_for_user_id(
+ user, device_id=None, valid_until_ms=None
+ )
+ )
+
channel = self.make_request("GET", self.url, access_token=access_token)
capabilities = channel.json_body["capabilities"]
diff --git a/tests/rest/client/v2_alpha/test_relations.py b/tests/rest/client/v2_alpha/test_relations.py
index 7c457754..e7bb5583 100644
--- a/tests/rest/client/v2_alpha/test_relations.py
+++ b/tests/rest/client/v2_alpha/test_relations.py
@@ -39,6 +39,11 @@ class RelationsTestCase(unittest.HomeserverTestCase):
# We need to enable msc1849 support for aggregations
config = self.default_config()
config["experimental_msc1849_support_enabled"] = True
+
+ # We enable frozen dicts because relations/edits change event contents, so
+ # we want to check that the events in the caches are not mutated.
+ config["use_frozen_dicts"] = True
+
return self.setup_test_homeserver(config=config)
def prepare(self, reactor, clock, hs):
@@ -518,6 +523,63 @@ class RelationsTestCase(unittest.HomeserverTestCase):
{"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict
)
+ def test_edit_reply(self):
+ """Test that editing a reply works."""
+
+ # Create a reply to edit.
+ channel = self._send_relation(
+ RelationTypes.REFERENCE,
+ "m.room.message",
+ content={"msgtype": "m.text", "body": "A reply!"},
+ )
+ self.assertEquals(200, channel.code, channel.json_body)
+ reply = channel.json_body["event_id"]
+
+ new_body = {"msgtype": "m.text", "body": "I've been edited!"}
+ channel = self._send_relation(
+ RelationTypes.REPLACE,
+ "m.room.message",
+ content={"msgtype": "m.text", "body": "foo", "m.new_content": new_body},
+ parent_id=reply,
+ )
+ self.assertEquals(200, channel.code, channel.json_body)
+
+ edit_event_id = channel.json_body["event_id"]
+
+ channel = self.make_request(
+ "GET",
+ "/rooms/%s/event/%s" % (self.room, reply),
+ access_token=self.user_token,
+ )
+ self.assertEquals(200, channel.code, channel.json_body)
+
+ # We expect to see the new body in the dict, as well as the reference
+ # metadata still intact.
+ self.assertDictContainsSubset(new_body, channel.json_body["content"])
+ self.assertDictContainsSubset(
+ {
+ "m.relates_to": {
+ "event_id": self.parent_id,
+ "key": None,
+ "rel_type": "m.reference",
+ }
+ },
+ channel.json_body["content"],
+ )
+
+ # We expect that the edit relation appears in the unsigned relations
+ # section.
+ relations_dict = channel.json_body["unsigned"].get("m.relations")
+ self.assertIn(RelationTypes.REPLACE, relations_dict)
+
+ m_replace_dict = relations_dict[RelationTypes.REPLACE]
+ for key in ["event_id", "sender", "origin_server_ts"]:
+ self.assertIn(key, m_replace_dict)
+
+ self.assert_dict(
+ {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict
+ )
+
def test_relations_redaction_redacts_edits(self):
"""Test that edits of an event are redacted when the original event
is redacted.
diff --git a/tests/server.py b/tests/server.py
index 2287d200..b535a5d8 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -2,7 +2,7 @@ import json
import logging
from collections import deque
from io import SEEK_END, BytesIO
-from typing import Callable, Iterable, MutableMapping, Optional, Tuple, Union
+from typing import Callable, Dict, Iterable, MutableMapping, Optional, Tuple, Union
import attr
from typing_extensions import Deque
@@ -13,8 +13,11 @@ from twisted.internet._resolver import SimpleResolverComplexifier
from twisted.internet.defer import Deferred, fail, succeed
from twisted.internet.error import DNSLookupError
from twisted.internet.interfaces import (
+ IHostnameResolver,
+ IProtocol,
+ IPullProducer,
+ IPushProducer,
IReactorPluggableNameResolver,
- IReactorTCP,
IResolverSimple,
ITransport,
)
@@ -45,11 +48,11 @@ class FakeChannel:
wire).
"""
- site = attr.ib(type=Site)
+ site = attr.ib(type=Union[Site, "FakeSite"])
_reactor = attr.ib()
result = attr.ib(type=dict, default=attr.Factory(dict))
_ip = attr.ib(type=str, default="127.0.0.1")
- _producer = None
+ _producer = None # type: Optional[Union[IPullProducer, IPushProducer]]
@property
def json_body(self):
@@ -159,7 +162,11 @@ class FakeChannel:
Any cookies found are added to the given dict
"""
- for h in self.headers.getRawHeaders("Set-Cookie"):
+ headers = self.headers.getRawHeaders("Set-Cookie")
+ if not headers:
+ return
+
+ for h in headers:
parts = h.split(";")
k, v = parts[0].split("=", maxsplit=1)
cookies[k] = v
@@ -311,8 +318,8 @@ class ThreadedMemoryReactorClock(MemoryReactorClock):
self._tcp_callbacks = {}
self._udp = []
- lookups = self.lookups = {}
- self._thread_callbacks = deque() # type: Deque[Callable[[], None]]()
+ lookups = self.lookups = {} # type: Dict[str, str]
+ self._thread_callbacks = deque() # type: Deque[Callable[[], None]]
@implementer(IResolverSimple)
class FakeResolver:
@@ -324,6 +331,9 @@ class ThreadedMemoryReactorClock(MemoryReactorClock):
self.nameResolver = SimpleResolverComplexifier(FakeResolver())
super().__init__()
+ def installNameResolver(self, resolver: IHostnameResolver) -> IHostnameResolver:
+ raise NotImplementedError()
+
def listenUDP(self, port, protocol, interface="", maxPacketSize=8196):
p = udp.Port(port, protocol, interface, maxPacketSize, self)
p.startListening()
@@ -593,7 +603,7 @@ class FakeTransport:
if self.disconnected:
return
- if getattr(self.other, "transport") is None:
+ if not hasattr(self.other, "transport"):
# the other has no transport yet; reschedule
if self.autoflush:
self._reactor.callLater(0.0, self.flush)
@@ -621,7 +631,9 @@ class FakeTransport:
self.disconnected = True
-def connect_client(reactor: IReactorTCP, client_id: int) -> AccumulatingProtocol:
+def connect_client(
+ reactor: ThreadedMemoryReactorClock, client_id: int
+) -> Tuple[IProtocol, AccumulatingProtocol]:
"""
Connect a client to a fake TCP transport.
diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py
index 8bd12fa8..2471f126 100644
--- a/tests/storage/test_state.py
+++ b/tests/storage/test_state.py
@@ -377,14 +377,11 @@ class StateStoreTestCase(tests.unittest.TestCase):
#######################################################
# deliberately remove e2 (room name) from the _state_group_cache
- (
- is_all,
- known_absent,
- state_dict_ids,
- ) = self.state_datastore._state_group_cache.get(group)
+ cache_entry = self.state_datastore._state_group_cache.get(group)
+ state_dict_ids = cache_entry.value
- self.assertEqual(is_all, True)
- self.assertEqual(known_absent, set())
+ self.assertEqual(cache_entry.full, True)
+ self.assertEqual(cache_entry.known_absent, set())
self.assertDictEqual(
state_dict_ids,
{
@@ -403,14 +400,11 @@ class StateStoreTestCase(tests.unittest.TestCase):
fetched_keys=((e1.type, e1.state_key),),
)
- (
- is_all,
- known_absent,
- state_dict_ids,
- ) = self.state_datastore._state_group_cache.get(group)
+ cache_entry = self.state_datastore._state_group_cache.get(group)
+ state_dict_ids = cache_entry.value
- self.assertEqual(is_all, False)
- self.assertEqual(known_absent, {(e1.type, e1.state_key)})
+ self.assertEqual(cache_entry.full, False)
+ self.assertEqual(cache_entry.known_absent, {(e1.type, e1.state_key)})
self.assertDictEqual(state_dict_ids, {(e1.type, e1.state_key): e1.event_id})
############################################
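These assertions (and the test_dict_cache.py changes below) read named fields off the cache entry instead of unpacking a three-tuple. A sketch of the entry shape the tests assume, using attrs as the surrounding code does (field names taken from the assertions; the real class lives in Synapse's cache code):

    import attr

    @attr.s(slots=True, frozen=True)
    class DictionaryEntry:
        # Whether the cached dict holds the complete mapping for the group.
        full = attr.ib(type=bool)
        # Keys known to be absent even when the dict is partial.
        known_absent = attr.ib(type=set)
        # The cached (type, state_key) -> event_id mapping.
        value = attr.ib(type=dict)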
diff --git a/tests/unittest.py b/tests/unittest.py
index ca7031c7..58a4daa1 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -32,6 +32,7 @@ from twisted.python.threadpool import ThreadPool
from twisted.trial import unittest
from twisted.web.resource import Resource
+from synapse import events
from synapse.api.constants import EventTypes, Membership
from synapse.config.homeserver import HomeServerConfig
from synapse.config.ratelimiting import FederationRateLimitConfig
@@ -140,7 +141,7 @@ class TestCase(unittest.TestCase):
try:
self.assertEquals(attrs[key], getattr(obj, key))
except AssertionError as e:
- raise (type(e))(e.message + " for '.%s'" % key)
+ raise (type(e))("Assert error for '.{}':".format(key)) from e
def assert_dict(self, required, actual):
"""Does a partial assert of a dict.
@@ -229,6 +230,11 @@ class HomeserverTestCase(TestCase):
self._hs_args = {"clock": self.clock, "reactor": self.reactor}
self.hs = self.make_homeserver(self.reactor, self.clock)
+ # Honour the `use_frozen_dicts` config option. We have to do this
+ # manually because this is taken care of in the app `start` code, which
+ # we don't run. Plus we want to reset it on tearDown.
+ events.USE_FROZEN_DICTS = self.hs.config.use_frozen_dicts
+
if self.hs is None:
raise Exception("No homeserver returned from make_homeserver.")
@@ -292,6 +298,10 @@ class HomeserverTestCase(TestCase):
if hasattr(self, "prepare"):
self.prepare(self.reactor, self.clock, self.hs)
+ def tearDown(self):
+ # Reset to not use frozen dicts.
+ events.USE_FROZEN_DICTS = False
+
def wait_on_thread(self, deferred, timeout=10):
"""
Wait until a Deferred is done, where it's waiting on a real thread.
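The use_frozen_dicts hook above exists so tests can catch in-place mutation of cached events. A brief illustration of the failure mode frozen dicts surface, assuming the frozendict package:

    from frozendict import frozendict

    content = frozendict({"msgtype": "m.text", "body": "hi"})
    try:
        content["body"] = "tampered"  # would silently corrupt a cached event
    except TypeError:
        # frozen dicts raise on mutation, so the bug shows up in tests
        pass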
diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py
index 34fdc9a4..2f41333f 100644
--- a/tests/util/test_dict_cache.py
+++ b/tests/util/test_dict_cache.py
@@ -27,7 +27,9 @@ class DictCacheTestCase(unittest.TestCase):
key = "test_simple_cache_hit_full"
v = self.cache.get(key)
- self.assertEqual((False, set(), {}), v)
+ self.assertIs(v.full, False)
+ self.assertEqual(v.known_absent, set())
+ self.assertEqual({}, v.value)
seq = self.cache.sequence
test_value = {"test": "test_simple_cache_hit_full"}