Diffstat:
-rwxr-xr-x  .buildkite/scripts/create_postgres_db.py  1
-rwxr-xr-x  .buildkite/scripts/test_old_deps.sh  8
-rw-r--r--  .github/workflows/tests.yml  322
-rw-r--r--  CHANGES.md  223
-rw-r--r--  README.rst  7
-rw-r--r--  UPGRADE.rst  66
-rwxr-xr-x  contrib/cmdclient/console.py  5
-rw-r--r--  contrib/cmdclient/http.py  25
-rw-r--r--  contrib/experiments/test_messaging.py  1
-rwxr-xr-x  debian/build_virtualenv  23
-rw-r--r--  debian/changelog  40
-rwxr-xr-x  demo/start.sh  54
-rw-r--r--  docker/Dockerfile  10
-rw-r--r--  docker/Dockerfile-workers  23
-rw-r--r--  docker/README-testing.md  140
-rw-r--r--  docker/README.md  19
-rw-r--r--  docker/conf-workers/nginx.conf.j2  27
-rw-r--r--  docker/conf-workers/shared.yaml.j2  9
-rw-r--r--  docker/conf-workers/supervisord.conf.j2  41
-rw-r--r--  docker/conf-workers/worker.yaml.j2  26
-rw-r--r--  docker/conf/homeserver.yaml  12
-rw-r--r--  docker/conf/log.config  32
-rwxr-xr-x  docker/configure_workers_and_start.py  558
-rw-r--r--  docs/admin_api/user_admin_api.rst  200
-rw-r--r--  docs/code_style.md  3
-rw-r--r--  docs/presence_router_module.md  235
-rw-r--r--  docs/sample_config.yaml  208
-rw-r--r--  docs/sso_mapping_providers.md  4
-rw-r--r--  mypy.ini  2
-rw-r--r--  pyproject.toml  2
-rwxr-xr-x  scripts-dev/build_debian_packages  26
-rwxr-xr-x  scripts-dev/complement.sh  49
-rwxr-xr-x  scripts-dev/definitions.py  2
-rwxr-xr-x  scripts-dev/list_url_patterns.py  2
-rw-r--r--  scripts-dev/mypy_synapse_plugin.py  1
-rwxr-xr-x  scripts-dev/release.py  244
-rwxr-xr-x  scripts-dev/sign_json  1
-rwxr-xr-x  scripts-dev/update_database  1
-rwxr-xr-x  scripts/export_signing_key  1
-rwxr-xr-x  scripts/generate_log_config  1
-rwxr-xr-x  scripts/generate_signing_key.py  1
-rwxr-xr-x  scripts/move_remote_media_to_new_store.py  1
-rwxr-xr-x  scripts/register_new_matrix_user  1
-rwxr-xr-x  scripts/synapse_port_db  8
-rw-r--r--  setup.cfg  6
-rwxr-xr-x  setup.py  12
-rw-r--r--  stubs/frozendict.pyi  1
-rw-r--r--  stubs/txredisapi.pyi  1
-rw-r--r--  synapse/__init__.py  7
-rw-r--r--  synapse/_scripts/register_new_matrix_user.py  1
-rw-r--r--  synapse/api/__init__.py  1
-rw-r--r--  synapse/api/auth.py  86
-rw-r--r--  synapse/api/auth_blocking.py  10
-rw-r--r--  synapse/api/constants.py  11
-rw-r--r--  synapse/api/errors.py  1
-rw-r--r--  synapse/api/filtering.py  1
-rw-r--r--  synapse/api/presence.py  1
-rw-r--r--  synapse/api/ratelimiting.py  100
-rw-r--r--  synapse/api/room_versions.py  25
-rw-r--r--  synapse/api/urls.py  1
-rw-r--r--  synapse/app/__init__.py  1
-rw-r--r--  synapse/app/_base.py  31
-rw-r--r--  synapse/app/admin_cmd.py  9
-rw-r--r--  synapse/app/appservice.py  1
-rw-r--r--  synapse/app/client_reader.py  1
-rw-r--r--  synapse/app/event_creator.py  1
-rw-r--r--  synapse/app/federation_reader.py  1
-rw-r--r--  synapse/app/federation_sender.py  1
-rw-r--r--  synapse/app/frontend_proxy.py  1
-rw-r--r--  synapse/app/generic_worker.py  513
-rw-r--r--  synapse/app/homeserver.py  61
-rw-r--r--  synapse/app/media_repository.py  1
-rw-r--r--  synapse/app/pusher.py  1
-rw-r--r--  synapse/app/synchrotron.py  1
-rw-r--r--  synapse/app/user_dir.py  1
-rw-r--r--  synapse/appservice/__init__.py  1
-rw-r--r--  synapse/appservice/api.py  1
-rw-r--r--  synapse/appservice/scheduler.py  7
-rw-r--r--  synapse/config/__init__.py  1
-rw-r--r--  synapse/config/__main__.py  1
-rw-r--r--  synapse/config/_base.py  1
-rw-r--r--  synapse/config/_base.pyi  22
-rw-r--r--  synapse/config/_util.py  1
-rw-r--r--  synapse/config/account_validity.py  165
-rw-r--r--  synapse/config/api.py  139
-rw-r--r--  synapse/config/auth.py  1
-rw-r--r--  synapse/config/cache.py  1
-rw-r--r--  synapse/config/cas.py  1
-rw-r--r--  synapse/config/consent.py (renamed from synapse/config/consent_config.py)  1
-rw-r--r--  synapse/config/database.py  1
-rw-r--r--  synapse/config/emailconfig.py  3
-rw-r--r--  synapse/config/experimental.py  8
-rw-r--r--  synapse/config/federation.py  1
-rw-r--r--  synapse/config/groups.py  1
-rw-r--r--  synapse/config/homeserver.py  14
-rw-r--r--  synapse/config/jwt.py (renamed from synapse/config/jwt_config.py)  1
-rw-r--r--  synapse/config/key.py  1
-rw-r--r--  synapse/config/logger.py  4
-rw-r--r--  synapse/config/metrics.py  1
-rw-r--r--  synapse/config/oidc.py (renamed from synapse/config/oidc_config.py)  12
-rw-r--r--  synapse/config/password_auth_providers.py  1
-rw-r--r--  synapse/config/push.py  1
-rw-r--r--  synapse/config/ratelimiting.py  6
-rw-r--r--  synapse/config/redis.py  1
-rw-r--r--  synapse/config/registration.py  134
-rw-r--r--  synapse/config/repository.py  2
-rw-r--r--  synapse/config/room.py  1
-rw-r--r--  synapse/config/room_directory.py  1
-rw-r--r--  synapse/config/saml2.py (renamed from synapse/config/saml2_config.py)  8
-rw-r--r--  synapse/config/server.py  48
-rw-r--r--  synapse/config/server_notices.py (renamed from synapse/config/server_notices_config.py)  1
-rw-r--r--  synapse/config/spam_checker.py  1
-rw-r--r--  synapse/config/sso.py  1
-rw-r--r--  synapse/config/stats.py  1
-rw-r--r--  synapse/config/third_party_event_rules.py  1
-rw-r--r--  synapse/config/tls.py  7
-rw-r--r--  synapse/config/tracer.py  1
-rw-r--r--  synapse/config/user_directory.py  1
-rw-r--r--  synapse/config/workers.py  28
-rw-r--r--  synapse/crypto/__init__.py  1
-rw-r--r--  synapse/crypto/event_signing.py  1
-rw-r--r--  synapse/crypto/keyring.py  3
-rw-r--r--  synapse/event_auth.py  39
-rw-r--r--  synapse/events/__init__.py  15
-rw-r--r--  synapse/events/builder.py  1
-rw-r--r--  synapse/events/presence_router.py  103
-rw-r--r--  synapse/events/snapshot.py  1
-rw-r--r--  synapse/events/spamcheck.py  4
-rw-r--r--  synapse/events/third_party_rules.py  1
-rw-r--r--  synapse/events/utils.py  1
-rw-r--r--  synapse/events/validator.py  1
-rw-r--r--  synapse/federation/__init__.py  1
-rw-r--r--  synapse/federation/federation_base.py  1
-rw-r--r--  synapse/federation/federation_client.py  121
-rw-r--r--  synapse/federation/federation_server.py  34
-rw-r--r--  synapse/federation/persistence.py  1
-rw-r--r--  synapse/federation/send_queue.py  75
-rw-r--r--  synapse/federation/sender/__init__.py  99
-rw-r--r--  synapse/federation/sender/per_destination_queue.py  9
-rw-r--r--  synapse/federation/sender/transaction_manager.py  3
-rw-r--r--  synapse/federation/transport/__init__.py  1
-rw-r--r--  synapse/federation/transport/client.py  1
-rw-r--r--  synapse/federation/transport/server.py  15
-rw-r--r--  synapse/federation/units.py  6
-rw-r--r--  synapse/groups/attestations.py  1
-rw-r--r--  synapse/groups/groups_server.py  1
-rw-r--r--  synapse/handlers/__init__.py  1
-rw-r--r--  synapse/handlers/_base.py  15
-rw-r--r--  synapse/handlers/account_data.py  1
-rw-r--r--  synapse/handlers/account_validity.py  107
-rw-r--r--  synapse/handlers/acme.py  1
-rw-r--r--  synapse/handlers/acme_issuing_service.py  1
-rw-r--r--  synapse/handlers/admin.py  1
-rw-r--r--  synapse/handlers/appservice.py  9
-rw-r--r--  synapse/handlers/auth.py  27
-rw-r--r--  synapse/handlers/cas.py (renamed from synapse/handlers/cas_handler.py)  1
-rw-r--r--  synapse/handlers/deactivate_account.py  5
-rw-r--r--  synapse/handlers/device.py  28
-rw-r--r--  synapse/handlers/devicemessage.py  41
-rw-r--r--  synapse/handlers/directory.py  1
-rw-r--r--  synapse/handlers/e2e_keys.py  15
-rw-r--r--  synapse/handlers/e2e_room_keys.py  1
-rw-r--r--  synapse/handlers/event_auth.py  86
-rw-r--r--  synapse/handlers/events.py  1
-rw-r--r--  synapse/handlers/federation.py  380
-rw-r--r--  synapse/handlers/groups_local.py  1
-rw-r--r--  synapse/handlers/identity.py  42
-rw-r--r--  synapse/handlers/initial_sync.py  1
-rw-r--r--  synapse/handlers/message.py  14
-rw-r--r--  synapse/handlers/oidc.py (renamed from synapse/handlers/oidc_handler.py)  33
-rw-r--r--  synapse/handlers/pagination.py  1
-rw-r--r--  synapse/handlers/password_policy.py  1
-rw-r--r--  synapse/handlers/presence.py  855
-rw-r--r--  synapse/handlers/profile.py  1
-rw-r--r--  synapse/handlers/read_marker.py  1
-rw-r--r--  synapse/handlers/receipts.py  1
-rw-r--r--  synapse/handlers/register.py  11
-rw-r--r--  synapse/handlers/room.py  1
-rw-r--r--  synapse/handlers/room_list.py  1
-rw-r--r--  synapse/handlers/room_member.py  41
-rw-r--r--  synapse/handlers/room_member_worker.py  1
-rw-r--r--  synapse/handlers/saml.py (renamed from synapse/handlers/saml_handler.py)  1
-rw-r--r--  synapse/handlers/search.py  1
-rw-r--r--  synapse/handlers/set_password.py  1
-rw-r--r--  synapse/handlers/space_summary.py  1
-rw-r--r--  synapse/handlers/sso.py  4
-rw-r--r--  synapse/handlers/state_deltas.py  1
-rw-r--r--  synapse/handlers/stats.py  1
-rw-r--r--  synapse/handlers/sync.py  50
-rw-r--r--  synapse/handlers/typing.py  7
-rw-r--r--  synapse/handlers/ui_auth/__init__.py  1
-rw-r--r--  synapse/handlers/ui_auth/checkers.py  1
-rw-r--r--  synapse/handlers/user_directory.py  16
-rw-r--r--  synapse/http/__init__.py  1
-rw-r--r--  synapse/http/additional_resource.py  1
-rw-r--r--  synapse/http/client.py  22
-rw-r--r--  synapse/http/connectproxyclient.py  1
-rw-r--r--  synapse/http/federation/__init__.py  1
-rw-r--r--  synapse/http/federation/matrix_federation_agent.py  1
-rw-r--r--  synapse/http/federation/srv_resolver.py  1
-rw-r--r--  synapse/http/federation/well_known_resolver.py  1
-rw-r--r--  synapse/http/matrixfederationclient.py  46
-rw-r--r--  synapse/http/proxyagent.py  7
-rw-r--r--  synapse/http/request_metrics.py  1
-rw-r--r--  synapse/http/server.py  1
-rw-r--r--  synapse/http/servlet.py  1
-rw-r--r--  synapse/http/site.py  179
-rw-r--r--  synapse/logging/__init__.py  1
-rw-r--r--  synapse/logging/_remote.py  5
-rw-r--r--  synapse/logging/_structured.py  1
-rw-r--r--  synapse/logging/_terse_json.py  1
-rw-r--r--  synapse/logging/context.py  85
-rw-r--r--  synapse/logging/filter.py  1
-rw-r--r--  synapse/logging/formatter.py  1
-rw-r--r--  synapse/logging/opentracing.py  12
-rw-r--r--  synapse/logging/scopecontextmanager.py  1
-rw-r--r--  synapse/logging/utils.py  1
-rw-r--r--  synapse/metrics/__init__.py  17
-rw-r--r--  synapse/metrics/_exposition.py  1
-rw-r--r--  synapse/metrics/background_process_metrics.py  22
-rw-r--r--  synapse/module_api/__init__.py  66
-rw-r--r--  synapse/module_api/errors.py  1
-rw-r--r--  synapse/notifier.py  76
-rw-r--r--  synapse/push/__init__.py  1
-rw-r--r--  synapse/push/action_generator.py  1
-rw-r--r--  synapse/push/bulk_push_rule_evaluator.py  162
-rw-r--r--  synapse/push/clientformat.py  1
-rw-r--r--  synapse/push/emailpusher.py  10
-rw-r--r--  synapse/push/httppusher.py  1
-rw-r--r--  synapse/push/mailer.py  1
-rw-r--r--  synapse/push/presentable_names.py  1
-rw-r--r--  synapse/push/push_rule_evaluator.py  56
-rw-r--r--  synapse/push/push_tools.py  1
-rw-r--r--  synapse/push/pusher.py  1
-rw-r--r--  synapse/push/pusherpool.py  9
-rw-r--r--  synapse/python_dependencies.py  12
-rw-r--r--  synapse/replication/__init__.py  1
-rw-r--r--  synapse/replication/http/__init__.py  1
-rw-r--r--  synapse/replication/http/_base.py  6
-rw-r--r--  synapse/replication/http/account_data.py  1
-rw-r--r--  synapse/replication/http/devices.py  1
-rw-r--r--  synapse/replication/http/federation.py  1
-rw-r--r--  synapse/replication/http/login.py  1
-rw-r--r--  synapse/replication/http/membership.py  1
-rw-r--r--  synapse/replication/http/presence.py  1
-rw-r--r--  synapse/replication/http/push.py  1
-rw-r--r--  synapse/replication/http/register.py  3
-rw-r--r--  synapse/replication/http/send_event.py  1
-rw-r--r--  synapse/replication/http/streams.py  1
-rw-r--r--  synapse/replication/slave/__init__.py  1
-rw-r--r--  synapse/replication/slave/storage/__init__.py  1
-rw-r--r--  synapse/replication/slave/storage/_base.py  1
-rw-r--r--  synapse/replication/slave/storage/_slaved_id_tracker.py  1
-rw-r--r--  synapse/replication/slave/storage/account_data.py  1
-rw-r--r--  synapse/replication/slave/storage/appservice.py  1
-rw-r--r--  synapse/replication/slave/storage/client_ips.py  1
-rw-r--r--  synapse/replication/slave/storage/deviceinbox.py  1
-rw-r--r--  synapse/replication/slave/storage/devices.py  1
-rw-r--r--  synapse/replication/slave/storage/directory.py  1
-rw-r--r--  synapse/replication/slave/storage/events.py  1
-rw-r--r--  synapse/replication/slave/storage/filtering.py  1
-rw-r--r--  synapse/replication/slave/storage/groups.py  1
-rw-r--r--  synapse/replication/slave/storage/keys.py  1
-rw-r--r--  synapse/replication/slave/storage/presence.py  51
-rw-r--r--  synapse/replication/slave/storage/profile.py  1
-rw-r--r--  synapse/replication/slave/storage/push_rule.py  1
-rw-r--r--  synapse/replication/slave/storage/pushers.py  1
-rw-r--r--  synapse/replication/slave/storage/receipts.py  1
-rw-r--r--  synapse/replication/slave/storage/registration.py  1
-rw-r--r--  synapse/replication/slave/storage/room.py  1
-rw-r--r--  synapse/replication/slave/storage/transactions.py  1
-rw-r--r--  synapse/replication/tcp/__init__.py  1
-rw-r--r--  synapse/replication/tcp/client.py  233
-rw-r--r--  synapse/replication/tcp/commands.py  1
-rw-r--r--  synapse/replication/tcp/external_cache.py  1
-rw-r--r--  synapse/replication/tcp/handler.py  19
-rw-r--r--  synapse/replication/tcp/protocol.py  9
-rw-r--r--  synapse/replication/tcp/redis.py  3
-rw-r--r--  synapse/replication/tcp/resource.py  1
-rw-r--r--  synapse/replication/tcp/streams/__init__.py  4
-rw-r--r--  synapse/replication/tcp/streams/_base.py  42
-rw-r--r--  synapse/replication/tcp/streams/events.py  1
-rw-r--r--  synapse/replication/tcp/streams/federation.py  1
-rw-r--r--  synapse/res/templates/account_previously_renewed.html  1
-rw-r--r--  synapse/res/templates/account_renewed.html  2
-rw-r--r--  synapse/rest/__init__.py  1
-rw-r--r--  synapse/rest/admin/__init__.py  5
-rw-r--r--  synapse/rest/admin/_base.py  1
-rw-r--r--  synapse/rest/admin/devices.py  1
-rw-r--r--  synapse/rest/admin/event_reports.py  1
-rw-r--r--  synapse/rest/admin/groups.py  1
-rw-r--r--  synapse/rest/admin/media.py  1
-rw-r--r--  synapse/rest/admin/purge_room_servlet.py  1
-rw-r--r--  synapse/rest/admin/rooms.py  1
-rw-r--r--  synapse/rest/admin/server_notice_servlet.py  1
-rw-r--r--  synapse/rest/admin/statistics.py  1
-rw-r--r--  synapse/rest/admin/users.py  159
-rw-r--r--  synapse/rest/client/__init__.py  1
-rw-r--r--  synapse/rest/client/transactions.py  1
-rw-r--r--  synapse/rest/client/v1/__init__.py  1
-rw-r--r--  synapse/rest/client/v1/directory.py  1
-rw-r--r--  synapse/rest/client/v1/events.py  1
-rw-r--r--  synapse/rest/client/v1/initial_sync.py  1
-rw-r--r--  synapse/rest/client/v1/login.py  15
-rw-r--r--  synapse/rest/client/v1/logout.py  1
-rw-r--r--  synapse/rest/client/v1/presence.py  8
-rw-r--r--  synapse/rest/client/v1/profile.py  1
-rw-r--r--  synapse/rest/client/v1/push_rule.py  1
-rw-r--r--  synapse/rest/client/v1/pusher.py  1
-rw-r--r--  synapse/rest/client/v1/room.py  1
-rw-r--r--  synapse/rest/client/v1/voip.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/__init__.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/_base.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/account.py  19
-rw-r--r--  synapse/rest/client/v2_alpha/account_data.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/account_validity.py  33
-rw-r--r--  synapse/rest/client/v2_alpha/auth.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/capabilities.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/devices.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/filter.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/groups.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/keys.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/notifications.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/openid.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/password_policy.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/read_marker.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/receipts.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/register.py  42
-rw-r--r--  synapse/rest/client/v2_alpha/relations.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/report_event.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/room_keys.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/sendtodevice.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/shared_rooms.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/sync.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/tags.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/thirdparty.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/tokenrefresh.py  1
-rw-r--r--  synapse/rest/client/v2_alpha/user_directory.py  1
-rw-r--r--  synapse/rest/client/versions.py  1
-rw-r--r--  synapse/rest/consent/consent_resource.py  11
-rw-r--r--  synapse/rest/health.py  1
-rw-r--r--  synapse/rest/key/__init__.py  1
-rw-r--r--  synapse/rest/key/v2/__init__.py  1
-rw-r--r--  synapse/rest/key/v2/local_key_resource.py  1
-rw-r--r--  synapse/rest/key/v2/remote_key_resource.py  4
-rw-r--r--  synapse/rest/media/v1/__init__.py  1
-rw-r--r--  synapse/rest/media/v1/_base.py  1
-rw-r--r--  synapse/rest/media/v1/config_resource.py  3
-rw-r--r--  synapse/rest/media/v1/download_resource.py  1
-rw-r--r--  synapse/rest/media/v1/filepath.py  3
-rw-r--r--  synapse/rest/media/v1/media_repository.py  4
-rw-r--r--  synapse/rest/media/v1/media_storage.py  1
-rw-r--r--  synapse/rest/media/v1/preview_url_resource.py  3
-rw-r--r--  synapse/rest/media/v1/storage_provider.py  1
-rw-r--r--  synapse/rest/media/v1/thumbnail_resource.py  1
-rw-r--r--  synapse/rest/media/v1/thumbnailer.py  1
-rw-r--r--  synapse/rest/media/v1/upload_resource.py  3
-rw-r--r--  synapse/rest/synapse/__init__.py  1
-rw-r--r--  synapse/rest/synapse/client/__init__.py  1
-rw-r--r--  synapse/rest/synapse/client/new_user_consent.py  10
-rw-r--r--  synapse/rest/synapse/client/oidc/__init__.py  1
-rw-r--r--  synapse/rest/synapse/client/oidc/callback_resource.py  1
-rw-r--r--  synapse/rest/synapse/client/password_reset.py  1
-rw-r--r--  synapse/rest/synapse/client/pick_idp.py  1
-rw-r--r--  synapse/rest/synapse/client/pick_username.py  1
-rw-r--r--  synapse/rest/synapse/client/saml2/__init__.py  1
-rw-r--r--  synapse/rest/synapse/client/saml2/metadata_resource.py  1
-rw-r--r--  synapse/rest/synapse/client/saml2/response_resource.py  1
-rw-r--r--  synapse/rest/synapse/client/sso_register.py  1
-rw-r--r--  synapse/rest/well_known.py  1
-rw-r--r--  synapse/secrets.py  45
-rw-r--r--  synapse/server.py  51
-rw-r--r--  synapse/server_notices/consent_server_notices.py  1
-rw-r--r--  synapse/server_notices/resource_limits_server_notices.py  1
-rw-r--r--  synapse/server_notices/server_notices_manager.py  1
-rw-r--r--  synapse/server_notices/server_notices_sender.py  1
-rw-r--r--  synapse/server_notices/worker_server_notices_sender.py  1
-rw-r--r--  synapse/spam_checker_api/__init__.py  1
-rw-r--r--  synapse/state/__init__.py  9
-rw-r--r--  synapse/state/v1.py  1
-rw-r--r--  synapse/state/v2.py  4
-rw-r--r--  synapse/storage/__init__.py  1
-rw-r--r--  synapse/storage/_base.py  7
-rw-r--r--  synapse/storage/background_updates.py  1
-rw-r--r--  synapse/storage/database.py  109
-rw-r--r--  synapse/storage/databases/__init__.py  1
-rw-r--r--  synapse/storage/databases/main/__init__.py  74
-rw-r--r--  synapse/storage/databases/main/account_data.py  1
-rw-r--r--  synapse/storage/databases/main/appservice.py  1
-rw-r--r--  synapse/storage/databases/main/cache.py  1
-rw-r--r--  synapse/storage/databases/main/censor_events.py  1
-rw-r--r--  synapse/storage/databases/main/client_ips.py  2
-rw-r--r--  synapse/storage/databases/main/deviceinbox.py  1
-rw-r--r--  synapse/storage/databases/main/devices.py  26
-rw-r--r--  synapse/storage/databases/main/directory.py  1
-rw-r--r--  synapse/storage/databases/main/e2e_room_keys.py  1
-rw-r--r--  synapse/storage/databases/main/end_to_end_keys.py  1
-rw-r--r--  synapse/storage/databases/main/event_federation.py  4
-rw-r--r--  synapse/storage/databases/main/event_push_actions.py  1
-rw-r--r--  synapse/storage/databases/main/events.py  72
-rw-r--r--  synapse/storage/databases/main/events_bg_updates.py  2
-rw-r--r--  synapse/storage/databases/main/events_forward_extremities.py  1
-rw-r--r--  synapse/storage/databases/main/events_worker.py  16
-rw-r--r--  synapse/storage/databases/main/filtering.py  1
-rw-r--r--  synapse/storage/databases/main/group_server.py  9
-rw-r--r--  synapse/storage/databases/main/keys.py  1
-rw-r--r--  synapse/storage/databases/main/media_repository.py  22
-rw-r--r--  synapse/storage/databases/main/metrics.py  1
-rw-r--r--  synapse/storage/databases/main/monthly_active_users.py  1
-rw-r--r--  synapse/storage/databases/main/presence.py  93
-rw-r--r--  synapse/storage/databases/main/profile.py  1
-rw-r--r--  synapse/storage/databases/main/purge_events.py  1
-rw-r--r--  synapse/storage/databases/main/push_rule.py  1
-rw-r--r--  synapse/storage/databases/main/pusher.py  1
-rw-r--r--  synapse/storage/databases/main/receipts.py  1
-rw-r--r--  synapse/storage/databases/main/registration.py  63
-rw-r--r--  synapse/storage/databases/main/rejections.py  1
-rw-r--r--  synapse/storage/databases/main/relations.py  1
-rw-r--r--  synapse/storage/databases/main/room.py  65
-rw-r--r--  synapse/storage/databases/main/roommember.py  201
-rw-r--r--  synapse/storage/databases/main/schema/delta/50/make_event_content_nullable.py  1
-rw-r--r--  synapse/storage/databases/main/schema/delta/57/local_current_membership.py  1
-rw-r--r--  synapse/storage/databases/main/schema/delta/59/11drop_thumbnail_constraint.sql.postgres  22
-rw-r--r--  synapse/storage/databases/main/schema/delta/59/12account_validity_token_used_ts_ms.sql  18
-rw-r--r--  synapse/storage/databases/main/schema/delta/59/12presence_stream_instance.sql  18
-rw-r--r--  synapse/storage/databases/main/schema/delta/59/12presence_stream_instance_seq.sql.postgres  20
-rw-r--r--  synapse/storage/databases/main/search.py  4
-rw-r--r--  synapse/storage/databases/main/signatures.py  1
-rw-r--r--  synapse/storage/databases/main/state.py  7
-rw-r--r--  synapse/storage/databases/main/state_deltas.py  1
-rw-r--r--  synapse/storage/databases/main/stats.py  26
-rw-r--r--  synapse/storage/databases/main/stream.py  5
-rw-r--r--  synapse/storage/databases/main/tags.py  1
-rw-r--r--  synapse/storage/databases/main/transactions.py  1
-rw-r--r--  synapse/storage/databases/main/ui_auth.py  1
-rw-r--r--  synapse/storage/databases/main/user_directory.py  1
-rw-r--r--  synapse/storage/databases/main/user_erasure_store.py  1
-rw-r--r--  synapse/storage/databases/state/__init__.py  1
-rw-r--r--  synapse/storage/databases/state/bg_updates.py  6
-rw-r--r--  synapse/storage/databases/state/store.py  6
-rw-r--r--  synapse/storage/engines/__init__.py  1
-rw-r--r--  synapse/storage/engines/_base.py  9
-rw-r--r--  synapse/storage/engines/postgres.py  12
-rw-r--r--  synapse/storage/engines/sqlite.py  16
-rw-r--r--  synapse/storage/keys.py  1
-rw-r--r--  synapse/storage/persist_events.py  4
-rw-r--r--  synapse/storage/prepare_database.py  15
-rw-r--r--  synapse/storage/purge_events.py  1
-rw-r--r--  synapse/storage/push_rule.py  1
-rw-r--r--  synapse/storage/relations.py  1
-rw-r--r--  synapse/storage/roommember.py  1
-rw-r--r--  synapse/storage/state.py  27
-rw-r--r--  synapse/storage/types.py  1
-rw-r--r--  synapse/storage/util/__init__.py  1
-rw-r--r--  synapse/storage/util/id_generators.py  12
-rw-r--r--  synapse/storage/util/sequence.py  1
-rw-r--r--  synapse/streams/__init__.py  1
-rw-r--r--  synapse/streams/config.py  1
-rw-r--r--  synapse/streams/events.py  1
-rw-r--r--  synapse/types.py  32
-rw-r--r--  synapse/util/__init__.py  62
-rw-r--r--  synapse/util/async_helpers.py  1
-rw-r--r--  synapse/util/caches/__init__.py  1
-rw-r--r--  synapse/util/caches/cached_call.py  1
-rw-r--r--  synapse/util/caches/deferred_cache.py  5
-rw-r--r--  synapse/util/caches/descriptors.py  1
-rw-r--r--  synapse/util/caches/dictionary_cache.py  1
-rw-r--r--  synapse/util/caches/expiringcache.py  84
-rw-r--r--  synapse/util/caches/lrucache.py  15
-rw-r--r--  synapse/util/caches/response_cache.py  3
-rw-r--r--  synapse/util/caches/stream_change_cache.py  4
-rw-r--r--  synapse/util/caches/ttlcache.py  1
-rw-r--r--  synapse/util/daemonize.py  1
-rw-r--r--  synapse/util/distributor.py  1
-rw-r--r--  synapse/util/file_consumer.py  1
-rw-r--r--  synapse/util/frozenutils.py  1
-rw-r--r--  synapse/util/hash.py  2
-rw-r--r--  synapse/util/iterutils.py  4
-rw-r--r--  synapse/util/jsonobject.py  1
-rw-r--r--  synapse/util/macaroons.py  1
-rw-r--r--  synapse/util/metrics.py  15
-rw-r--r--  synapse/util/module_loader.py  1
-rw-r--r--  synapse/util/msisdn.py  1
-rw-r--r--  synapse/util/patch_inline_callbacks.py  1
-rw-r--r--  synapse/util/ratelimitutils.py  1
-rw-r--r--  synapse/util/retryutils.py  1
-rw-r--r--  synapse/util/rlimit.py  1
-rw-r--r--  synapse/util/stringutils.py  33
-rw-r--r--  synapse/util/templates.py  1
-rw-r--r--  synapse/util/threepids.py  31
-rw-r--r--  synapse/util/versionstring.py  1
-rw-r--r--  synapse/util/wheel_timer.py  1
-rw-r--r--  synapse/visibility.py  1
-rwxr-xr-x  synctl  1
-rw-r--r--  synmark/__init__.py  1
-rw-r--r--  synmark/__main__.py  1
-rw-r--r--  synmark/suites/logging.py  4
-rw-r--r--  synmark/suites/lrucache.py  1
-rw-r--r--  synmark/suites/lrucache_evict.py  1
-rw-r--r--  tests/__init__.py  1
-rw-r--r--  tests/api/test_auth.py  3
-rw-r--r--  tests/api/test_filtering.py  1
-rw-r--r--  tests/api/test_ratelimiting.py  168
-rw-r--r--  tests/app/test_frontend_proxy.py  84
-rw-r--r--  tests/app/test_openid_listener.py  5
-rw-r--r--  tests/appservice/__init__.py  1
-rw-r--r--  tests/appservice/test_appservice.py  4
-rw-r--r--  tests/appservice/test_scheduler.py  3
-rw-r--r--  tests/config/__init__.py  1
-rw-r--r--  tests/config/test_base.py  1
-rw-r--r--  tests/config/test_cache.py  1
-rw-r--r--  tests/config/test_database.py  1
-rw-r--r--  tests/config/test_generate.py  1
-rw-r--r--  tests/config/test_load.py  6
-rw-r--r--  tests/config/test_ratelimiting.py  1
-rw-r--r--  tests/config/test_room_directory.py  1
-rw-r--r--  tests/config/test_server.py  1
-rw-r--r--  tests/config/test_tls.py  1
-rw-r--r--  tests/config/test_util.py  1
-rw-r--r--  tests/crypto/__init__.py  1
-rw-r--r--  tests/crypto/test_event_signing.py  1
-rw-r--r--  tests/crypto/test_keyring.py  27
-rw-r--r--  tests/events/test_presence_router.py  385
-rw-r--r--  tests/events/test_snapshot.py  1
-rw-r--r--  tests/events/test_utils.py  1
-rw-r--r--  tests/federation/test_complexity.py  3
-rw-r--r--  tests/federation/test_federation_catch_up.py  3
-rw-r--r--  tests/federation/test_federation_sender.py  4
-rw-r--r--  tests/federation/test_federation_server.py  20
-rw-r--r--  tests/federation/transport/test_server.py  1
-rw-r--r--  tests/handlers/test_admin.py  4
-rw-r--r--  tests/handlers/test_appservice.py  3
-rw-r--r--  tests/handlers/test_auth.py  3
-rw-r--r--  tests/handlers/test_cas.py  4
-rw-r--r--  tests/handlers/test_device.py  1
-rw-r--r--  tests/handlers/test_directory.py  3
-rw-r--r--  tests/handlers/test_e2e_keys.py  3
-rw-r--r--  tests/handlers/test_e2e_room_keys.py  4
-rw-r--r--  tests/handlers/test_federation.py  3
-rw-r--r--  tests/handlers/test_message.py  1
-rw-r--r--  tests/handlers/test_oidc.py  12
-rw-r--r--  tests/handlers/test_password_providers.py  4
-rw-r--r--  tests/handlers/test_presence.py  204
-rw-r--r--  tests/handlers/test_profile.py  3
-rw-r--r--  tests/handlers/test_register.py  3
-rw-r--r--  tests/handlers/test_saml.py  3
-rw-r--r--  tests/handlers/test_stats.py  1
-rw-r--r--  tests/handlers/test_sync.py  22
-rw-r--r--  tests/handlers/test_typing.py  4
-rw-r--r--  tests/handlers/test_user_directory.py  3
-rw-r--r--  tests/http/__init__.py  1
-rw-r--r--  tests/http/federation/__init__.py  1
-rw-r--r--  tests/http/federation/test_matrix_federation_agent.py  19
-rw-r--r--  tests/http/federation/test_srv_resolver.py  3
-rw-r--r--  tests/http/test_additional_resource.py  1
-rw-r--r--  tests/http/test_client.py  3
-rw-r--r--  tests/http/test_endpoint.py  1
-rw-r--r--  tests/http/test_fedclient.py  62
-rw-r--r--  tests/http/test_proxyagent.py  1
-rw-r--r--  tests/http/test_servlet.py  4
-rw-r--r--  tests/http/test_simple_client.py  3
-rw-r--r--  tests/http/test_site.py  83
-rw-r--r--  tests/logging/__init__.py  1
-rw-r--r--  tests/logging/test_remote_handler.py  1
-rw-r--r--  tests/logging/test_terse_json.py  72
-rw-r--r--  tests/module_api/test_api.py  179
-rw-r--r--  tests/push/test_email.py  1
-rw-r--r--  tests/push/test_http.py  3
-rw-r--r--  tests/push/test_push_rule_evaluator.py  167
-rw-r--r--  tests/replication/__init__.py  1
-rw-r--r--  tests/replication/_base.py  149
-rw-r--r--  tests/replication/slave/__init__.py  1
-rw-r--r--  tests/replication/slave/storage/__init__.py  1
-rw-r--r--  tests/replication/slave/storage/_base.py  2
-rw-r--r--  tests/replication/slave/storage/test_events.py  18
-rw-r--r--  tests/replication/tcp/__init__.py  1
-rw-r--r--  tests/replication/tcp/streams/__init__.py  1
-rw-r--r--  tests/replication/tcp/streams/test_account_data.py  1
-rw-r--r--  tests/replication/tcp/streams/test_events.py  5
-rw-r--r--  tests/replication/tcp/streams/test_federation.py  1
-rw-r--r--  tests/replication/tcp/streams/test_receipts.py  3
-rw-r--r--  tests/replication/tcp/streams/test_typing.py  3
-rw-r--r--  tests/replication/tcp/test_commands.py  1
-rw-r--r--  tests/replication/tcp/test_remote_server_up.py  1
-rw-r--r--  tests/replication/test_auth.py  1
-rw-r--r--  tests/replication/test_client_reader_shard.py  1
-rw-r--r--  tests/replication/test_federation_ack.py  3
-rw-r--r--  tests/replication/test_federation_sender_shard.py  4
-rw-r--r--  tests/replication/test_multi_media_repo.py  1
-rw-r--r--  tests/replication/test_pusher_shard.py  4
-rw-r--r--  tests/replication/test_sharded_event_persister.py  4
-rw-r--r--  tests/rest/__init__.py  1
-rw-r--r--  tests/rest/admin/__init__.py  1
-rw-r--r--  tests/rest/admin/test_admin.py  4
-rw-r--r--  tests/rest/admin/test_device.py  5
-rw-r--r--  tests/rest/admin/test_event_reports.py  9
-rw-r--r--  tests/rest/admin/test_media.py  1
-rw-r--r--  tests/rest/admin/test_room.py  12
-rw-r--r--  tests/rest/admin/test_statistics.py  3
-rw-r--r--  tests/rest/admin/test_user.py  426
-rw-r--r--  tests/rest/client/__init__.py  1
-rw-r--r--  tests/rest/client/test_consent.py  1
-rw-r--r--  tests/rest/client/test_ephemeral_message.py  1
-rw-r--r--  tests/rest/client/test_identity.py  1
-rw-r--r--  tests/rest/client/test_power_levels.py  1
-rw-r--r--  tests/rest/client/test_redactions.py  1
-rw-r--r--  tests/rest/client/test_retention.py  3
-rw-r--r--  tests/rest/client/test_shadow_banned.py  2
-rw-r--r--  tests/rest/client/test_third_party_rules.py  4
-rw-r--r--  tests/rest/client/test_transactions.py  2
-rw-r--r--  tests/rest/client/v1/__init__.py  1
-rw-r--r--  tests/rest/client/v1/test_directory.py  1
-rw-r--r--  tests/rest/client/v1/test_events.py  3
-rw-r--r--  tests/rest/client/v1/test_login.py  4
-rw-r--r--  tests/rest/client/v1/test_presence.py  8
-rw-r--r--  tests/rest/client/v1/test_profile.py  1
-rw-r--r--  tests/rest/client/v1/test_push_rule_attrs.py  1
-rw-r--r--  tests/rest/client/v1/test_rooms.py  15
-rw-r--r--  tests/rest/client/v1/test_typing.py  3
-rw-r--r--  tests/rest/client/v1/utils.py  18
-rw-r--r--  tests/rest/client/v2_alpha/test_account.py  1
-rw-r--r--  tests/rest/client/v2_alpha/test_auth.py  8
-rw-r--r--  tests/rest/client/v2_alpha/test_capabilities.py  1
-rw-r--r--  tests/rest/client/v2_alpha/test_filter.py  1
-rw-r--r--  tests/rest/client/v2_alpha/test_password_policy.py  1
-rw-r--r--  tests/rest/client/v2_alpha/test_register.py  135
-rw-r--r--  tests/rest/client/v2_alpha/test_relations.py  6
-rw-r--r--  tests/rest/client/v2_alpha/test_shared_rooms.py  1
-rw-r--r--  tests/rest/client/v2_alpha/test_sync.py  1
-rw-r--r--  tests/rest/client/v2_alpha/test_upgrade_room.py  1
-rw-r--r--  tests/rest/key/v2/test_remote_key_resource.py  4
-rw-r--r--  tests/rest/media/__init__.py  1
-rw-r--r--  tests/rest/media/v1/__init__.py  1
-rw-r--r--  tests/rest/media/v1/test_base.py  1
-rw-r--r--  tests/rest/media/v1/test_media_storage.py  4
-rw-r--r--  tests/rest/media/v1/test_url_preview.py  4
-rw-r--r--  tests/rest/test_health.py  1
-rw-r--r--  tests/rest/test_well_known.py  1
-rw-r--r--  tests/scripts/test_new_matrix_user.py  3
-rw-r--r--  tests/server.py  6
-rw-r--r--  tests/server_notices/test_consent.py  1
-rw-r--r--  tests/server_notices/test_resource_limits_server_notices.py  3
-rw-r--r--  tests/state/test_v2.py  1
-rw-r--r--  tests/storage/test__base.py  4
-rw-r--r--  tests/storage/test_account_data.py  1
-rw-r--r--  tests/storage/test_appservice.py  4
-rw-r--r--  tests/storage/test_background_update.py  2
-rw-r--r--  tests/storage/test_base.py  4
-rw-r--r--  tests/storage/test_cleanup_extrems.py  5
-rw-r--r--  tests/storage/test_client_ips.py  7
-rw-r--r--  tests/storage/test_database.py  14
-rw-r--r--  tests/storage/test_devices.py  81
-rw-r--r--  tests/storage/test_directory.py  45
-rw-r--r--  tests/storage/test_e2e_room_keys.py  1
-rw-r--r--  tests/storage/test_end_to_end_keys.py  60
-rw-r--r--  tests/storage/test_event_chain.py  1
-rw-r--r--  tests/storage/test_event_federation.py  1
-rw-r--r--  tests/storage/test_event_metrics.py  5
-rw-r--r--  tests/storage/test_event_push_actions.py  136
-rw-r--r--  tests/storage/test_events.py  1
-rw-r--r--  tests/storage/test_id_generators.py  15
-rw-r--r--  tests/storage/test_keys.py  1
-rw-r--r--  tests/storage/test_main.py  1
-rw-r--r--  tests/storage/test_monthly_active_users.py  3
-rw-r--r--  tests/storage/test_profile.py  36
-rw-r--r--  tests/storage/test_purge.py  1
-rw-r--r--  tests/storage/test_redaction.py  23
-rw-r--r--  tests/storage/test_registration.py  109
-rw-r--r--  tests/storage/test_room.py  62
-rw-r--r--  tests/storage/test_roommember.py  1
-rw-r--r--  tests/storage/test_state.py  146
-rw-r--r--  tests/storage/test_transactions.py  1
-rw-r--r--  tests/storage/test_user_directory.py  87
-rw-r--r--  tests/test_distributor.py  3
-rw-r--r--  tests/test_event_auth.py  247
-rw-r--r--  tests/test_federation.py  11
-rw-r--r--  tests/test_mau.py  24
-rw-r--r--  tests/test_metrics.py  1
-rw-r--r--  tests/test_phone_home.py  4
-rw-r--r--  tests/test_preview.py  1
-rw-r--r--  tests/test_server.py  2
-rw-r--r--  tests/test_state.py  11
-rw-r--r--  tests/test_terms_auth.py  3
-rw-r--r--  tests/test_test_utils.py  1
-rw-r--r--  tests/test_types.py  1
-rw-r--r--  tests/test_utils/__init__.py  4
-rw-r--r--  tests/test_utils/event_injection.py  7
-rw-r--r--  tests/test_utils/html_parsers.py  1
-rw-r--r--  tests/test_utils/logging_setup.py  1
-rw-r--r--  tests/test_visibility.py  11
-rw-r--r--  tests/unittest.py  12
-rw-r--r--  tests/util/__init__.py  1
-rw-r--r--  tests/util/caches/__init__.py  1
-rw-r--r--  tests/util/caches/test_cached_call.py  1
-rw-r--r--  tests/util/caches/test_deferred_cache.py  1
-rw-r--r--  tests/util/caches/test_descriptors.py  17
-rw-r--r--  tests/util/caches/test_ttlcache.py  3
-rw-r--r--  tests/util/test_async_utils.py  1
-rw-r--r--  tests/util/test_dict_cache.py  1
-rw-r--r--  tests/util/test_expiring_cache.py  1
-rw-r--r--  tests/util/test_file_consumer.py  4
-rw-r--r--  tests/util/test_glob_to_regex.py  59
-rw-r--r--  tests/util/test_itertools.py  1
-rw-r--r--  tests/util/test_linearizer.py  1
-rw-r--r--  tests/util/test_logcontext.py  35
-rw-r--r--  tests/util/test_logformatter.py  1
-rw-r--r--  tests/util/test_lrucache.py  3
-rw-r--r--  tests/util/test_ratelimitutils.py  7
-rw-r--r--  tests/util/test_retryutils.py  1
-rw-r--r--  tests/util/test_rwlock.py  1
-rw-r--r--  tests/util/test_stringutils.py  1
-rw-r--r--  tests/util/test_threepids.py  1
-rw-r--r--  tests/util/test_treecache.py  1
-rw-r--r--  tests/util/test_wheel_timer.py  1
-rw-r--r--  tests/utils.py  13
-rw-r--r--  tox.ini  43
717 files changed, 9333 insertions, 4260 deletions
diff --git a/.buildkite/scripts/create_postgres_db.py b/.buildkite/scripts/create_postgres_db.py
index 956339de..cc829db2 100755
--- a/.buildkite/scripts/create_postgres_db.py
+++ b/.buildkite/scripts/create_postgres_db.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/.buildkite/scripts/test_old_deps.sh b/.buildkite/scripts/test_old_deps.sh
index 9fe5b696..9270d55f 100755
--- a/.buildkite/scripts/test_old_deps.sh
+++ b/.buildkite/scripts/test_old_deps.sh
@@ -1,16 +1,16 @@
#!/usr/bin/env bash
-# this script is run by buildkite in a plain `xenial` container; it installs the
-# minimal requirements for tox and hands over to the py35-old tox environment.
+# this script is run by buildkite in a plain `bionic` container; it installs the
+# minimal requirements for tox and hands over to the py3-old tox environment.
set -ex
apt-get update
-apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
+apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
export LANG="C.UTF-8"
# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1
-exec tox -e py35-old,combine
+exec tox -e py3-old,combine
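For reference, the `trial-olddeps` job in the workflow added below runs this script inside a plain `ubuntu:bionic` container. A rough local equivalent is sketched here; the bind-mount path and the `TRIAL_FLAGS` value are illustrative assumptions, not something the script itself requires:

```sh
# Run the old-deps test script in a throwaway bionic container,
# mounting the current checkout and working from the mount point.
docker run --rm \
  -v "$(pwd):/src" -w /src \
  -e TRIAL_FLAGS="--jobs=2" \
  ubuntu:bionic \
  .buildkite/scripts/test_old_deps.sh
```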
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 00000000..12c82ac6
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,322 @@
+name: Tests
+
+on:
+ push:
+ branches: ["develop", "release-*"]
+ pull_request:
+
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ toxenv:
+ - "check-sampleconfig"
+ - "check_codestyle"
+ - "check_isort"
+ - "mypy"
+ - "packaging"
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ - run: pip install tox
+ - run: tox -e ${{ matrix.toxenv }}
+
+ lint-crlf:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Check line endings
+ run: scripts-dev/check_line_terminators.sh
+
+ lint-newsfile:
+ if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ - run: pip install tox
+ - name: Patch Buildkite-specific test script
+ run: |
+ sed -i -e 's/\$BUILDKITE_PULL_REQUEST/${{ github.event.number }}/' \
+ scripts-dev/check-newsfragment
+ - run: scripts-dev/check-newsfragment
+
+ lint-sdist:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: "3.x"
+ - run: pip install wheel
+ - run: python setup.py sdist bdist_wheel
+ - uses: actions/upload-artifact@v2
+ with:
+ name: Python Distributions
+ path: dist/*
+
+ # Dummy step to gate other tests on without repeating the whole list
+ linting-done:
+ if: ${{ always() }} # Run this even if prior jobs were skipped
+ needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
+ runs-on: ubuntu-latest
+ steps:
+ - run: "true"
+
+ trial:
+ if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
+ needs: linting-done
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["3.6", "3.7", "3.8", "3.9"]
+ database: ["sqlite"]
+ include:
+ # Newest Python without optional deps
+ - python-version: "3.9"
+ toxenv: "py-noextras,combine"
+
+ # Oldest Python with PostgreSQL
+ - python-version: "3.6"
+ database: "postgres"
+ postgres-version: "9.6"
+
+ # Newest Python with PostgreSQL
+ - python-version: "3.9"
+ database: "postgres"
+ postgres-version: "13"
+
+ steps:
+ - uses: actions/checkout@v2
+ - run: sudo apt-get -qq install xmlsec1
+ - name: Set up PostgreSQL ${{ matrix.postgres-version }}
+ if: ${{ matrix.postgres-version }}
+ run: |
+ docker run -d -p 5432:5432 \
+ -e POSTGRES_PASSWORD=postgres \
+ -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
+ postgres:${{ matrix.postgres-version }}
+ - uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - run: pip install tox
+ - name: Await PostgreSQL
+ if: ${{ matrix.postgres-version }}
+ timeout-minutes: 2
+ run: until pg_isready -h localhost; do sleep 1; done
+ - run: tox -e py,combine
+ env:
+ TRIAL_FLAGS: "--jobs=2"
+ SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
+ SYNAPSE_POSTGRES_HOST: localhost
+ SYNAPSE_POSTGRES_USER: postgres
+ SYNAPSE_POSTGRES_PASSWORD: postgres
+ - name: Dump logs
+ # Note: Dumps to workflow logs instead of using actions/upload-artifact
+ # This keeps logs colocated with failing jobs
+ # It also ignores find's exit code; this is a best effort affair
+ run: >-
+ find _trial_temp -name '*.log'
+ -exec echo "::group::{}" \;
+ -exec cat {} \;
+ -exec echo "::endgroup::" \;
+ || true
+
+ trial-olddeps:
+ if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
+ needs: linting-done
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Test with old deps
+ uses: docker://ubuntu:bionic # For old python and sqlite
+ with:
+ workdir: /github/workspace
+ entrypoint: .buildkite/scripts/test_old_deps.sh
+ env:
+ TRIAL_FLAGS: "--jobs=2"
+ - name: Dump logs
+ # Note: Dumps to workflow logs instead of using actions/upload-artifact
+ # This keeps logs colocated with failing jobs
+ # It also ignores find's exit code; this is a best effort affair
+ run: >-
+ find _trial_temp -name '*.log'
+ -exec echo "::group::{}" \;
+ -exec cat {} \;
+ -exec echo "::endgroup::" \;
+ || true
+
+ trial-pypy:
+ # Very slow; only run if the branch name includes 'pypy'
+ if: ${{ contains(github.ref, 'pypy') && !failure() }}
+ needs: linting-done
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["pypy-3.6"]
+
+ steps:
+ - uses: actions/checkout@v2
+ - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
+ - uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - run: pip install tox
+ - run: tox -e py,combine
+ env:
+ TRIAL_FLAGS: "--jobs=2"
+ - name: Dump logs
+ # Note: Dumps to workflow logs instead of using actions/upload-artifact
+ # This keeps logs colocated with failing jobs
+ # It also ignores find's exit code; this is a best effort affair
+ run: >-
+ find _trial_temp -name '*.log'
+ -exec echo "::group::{}" \;
+ -exec cat {} \;
+ -exec echo "::endgroup::" \;
+ || true
+
+ sytest:
+ if: ${{ !failure() }}
+ needs: linting-done
+ runs-on: ubuntu-latest
+ container:
+ image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
+ volumes:
+ - ${{ github.workspace }}:/src
+ env:
+ BUILDKITE_BRANCH: ${{ github.head_ref }}
+ POSTGRES: ${{ matrix.postgres && 1}}
+ MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
+ WORKERS: ${{ matrix.workers && 1 }}
+ REDIS: ${{ matrix.redis && 1 }}
+ BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
+
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - sytest-tag: bionic
+
+ - sytest-tag: bionic
+ postgres: postgres
+
+ - sytest-tag: testing
+ postgres: postgres
+
+ - sytest-tag: bionic
+ postgres: multi-postgres
+ workers: workers
+
+ - sytest-tag: buster
+ postgres: multi-postgres
+ workers: workers
+
+ - sytest-tag: buster
+ postgres: postgres
+ workers: workers
+ redis: redis
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Prepare test blacklist
+ run: cat sytest-blacklist .buildkite/worker-blacklist > synapse-blacklist-with-workers
+ - name: Run SyTest
+ run: /bootstrap.sh synapse
+ working-directory: /src
+ - name: Dump results.tap
+ if: ${{ always() }}
+ run: cat /logs/results.tap
+ - name: Upload SyTest logs
+ uses: actions/upload-artifact@v2
+ if: ${{ always() }}
+ with:
+ name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
+ path: |
+ /logs/results.tap
+ /logs/**/*.log*
+
+ portdb:
+ if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
+ needs: linting-done
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ include:
+ - python-version: "3.6"
+ postgres-version: "9.6"
+
+ - python-version: "3.9"
+ postgres-version: "13"
+
+ services:
+ postgres:
+ image: postgres:${{ matrix.postgres-version }}
+ ports:
+ - 5432:5432
+ env:
+ POSTGRES_PASSWORD: "postgres"
+ POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
+ options: >-
+ --health-cmd pg_isready
+ --health-interval 10s
+ --health-timeout 5s
+ --health-retries 5
+
+ steps:
+ - uses: actions/checkout@v2
+ - run: sudo apt-get -qq install xmlsec1
+ - uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Patch Buildkite-specific test scripts
+ run: |
+ sed -i -e 's/host="postgres"/host="localhost"/' .buildkite/scripts/create_postgres_db.py
+ sed -i -e 's/host: postgres/host: localhost/' .buildkite/postgres-config.yaml
+ sed -i -e 's|/src/||' .buildkite/{sqlite,postgres}-config.yaml
+ sed -i -e 's/\$TOP/\$GITHUB_WORKSPACE/' .coveragerc
+ - run: .buildkite/scripts/test_synapse_port_db.sh
+
+ complement:
+ if: ${{ !failure() }}
+ needs: linting-done
+ runs-on: ubuntu-latest
+ container:
+ # https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
+ image: matrixdotorg/complement:latest
+ env:
+ CI: true
+ ports:
+ - 8448:8448
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock
+
+ steps:
+ - name: Run actions/checkout@v2 for synapse
+ uses: actions/checkout@v2
+ with:
+ path: synapse
+
+ - name: Run actions/checkout@v2 for complement
+ uses: actions/checkout@v2
+ with:
+ repository: "matrix-org/complement"
+ path: complement
+
+ # Build initial Synapse image
+ - run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
+ working-directory: synapse
+
+ # Build a ready-to-run Synapse image based on the initial image above.
+ # This new image includes a config file, keys for signing and TLS, and
+ # other settings to make it suitable for testing under Complement.
+ - run: docker build -t complement-synapse -f Synapse.Dockerfile .
+ working-directory: complement/dockerfiles
+
+ # Run Complement
+ - run: go test -v -tags synapse_blacklist ./tests
+ env:
+ COMPLEMENT_BASE_IMAGE: complement-synapse:latest
+ working-directory: complement
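The Complement job above can be reproduced outside CI with the same commands. A minimal local sketch, assuming sibling checkouts named `synapse` and `complement` (directory names are illustrative) and a working Docker daemon:

```sh
# Build the base Synapse image from the synapse checkout.
(cd synapse && docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .)

# Layer Complement's config, signing keys and TLS support on top.
(cd complement/dockerfiles && docker build -t complement-synapse -f Synapse.Dockerfile .)

# Point Complement at the resulting image and run the tests.
(cd complement && COMPLEMENT_BASE_IMAGE=complement-synapse:latest \
    go test -v -tags synapse_blacklist ./tests)
```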
diff --git a/CHANGES.md b/CHANGES.md
index 27483532..93efa3ce 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,3 +1,226 @@
+Synapse 1.33.2 (2021-05-11)
+===========================
+
+Due to the security issue highlighted below, server administrators are encouraged to update Synapse. We are not aware of this vulnerability being exploited in the wild.
+
+Security advisory
+-----------------
+
+This release fixes a denial of service attack ([CVE-2021-29471](https://github.com/matrix-org/synapse/security/advisories/GHSA-x345-32rc-8h85)) against Synapse's push rules implementation. Server admins are encouraged to upgrade.
+
+Internal Changes
+----------------
+
+- Unpin attrs dependency. ([\#9946](https://github.com/matrix-org/synapse/issues/9946))
+
+
+Synapse 1.33.1 (2021-05-06)
+===========================
+
+Bugfixes
+--------
+
+- Fix bug where `/sync` would break if using the latest version of `attrs` dependency, by pinning to a previous version. ([\#9937](https://github.com/matrix-org/synapse/issues/9937))
+
+
+Synapse 1.33.0 (2021-05-05)
+===========================
+
+Features
+--------
+
+- Build Debian packages for Ubuntu 21.04 (Hirsute Hippo). ([\#9909](https://github.com/matrix-org/synapse/issues/9909))
+
+
+Synapse 1.33.0rc2 (2021-04-29)
+==============================
+
+Bugfixes
+--------
+
+- Fix tight loop when handling presence replication when using workers. Introduced in v1.33.0rc1. ([\#9900](https://github.com/matrix-org/synapse/issues/9900))
+
+
+Synapse 1.33.0rc1 (2021-04-28)
+==============================
+
+Features
+--------
+
+- Update experimental support for [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083): restricting room access via group membership. ([\#9800](https://github.com/matrix-org/synapse/issues/9800), [\#9814](https://github.com/matrix-org/synapse/issues/9814))
+- Add experimental support for handling presence on a worker. ([\#9819](https://github.com/matrix-org/synapse/issues/9819), [\#9820](https://github.com/matrix-org/synapse/issues/9820), [\#9828](https://github.com/matrix-org/synapse/issues/9828), [\#9850](https://github.com/matrix-org/synapse/issues/9850))
+- Return a new template when a user attempts to renew their account multiple times with the same token, stating that their account is set to expire. This replaces the invalid token template that would previously be shown in this case. This change concerns the optional account validity feature. ([\#9832](https://github.com/matrix-org/synapse/issues/9832))
+
+
+Bugfixes
+--------
+
+- Fixes the OIDC SSO flow when using a `public_baseurl` value including a non-root URL path. ([\#9726](https://github.com/matrix-org/synapse/issues/9726))
+- Fix thumbnail generation for some sites with non-standard content types. Contributed by @rkfg. ([\#9788](https://github.com/matrix-org/synapse/issues/9788))
+- Add some sanity checks to identity server passed to 3PID bind/unbind endpoints. ([\#9802](https://github.com/matrix-org/synapse/issues/9802))
+- Limit the size of HTTP responses read over federation. ([\#9833](https://github.com/matrix-org/synapse/issues/9833))
+- Fix a bug which could cause Synapse to get stuck in a loop of resyncing device lists. ([\#9867](https://github.com/matrix-org/synapse/issues/9867))
+- Fix a long-standing bug where errors from federation did not propagate to the client. ([\#9868](https://github.com/matrix-org/synapse/issues/9868))
+
+
+Improved Documentation
+----------------------
+
+- Add a note to the docker docs mentioning that we mirror upstream's supported Docker platforms. ([\#9801](https://github.com/matrix-org/synapse/issues/9801))
+
+
+Internal Changes
+----------------
+
+- Add a dockerfile for running Synapse in worker-mode under Complement. ([\#9162](https://github.com/matrix-org/synapse/issues/9162))
+- Apply `pyupgrade` across the codebase. ([\#9786](https://github.com/matrix-org/synapse/issues/9786))
+- Move some replication processing out of `generic_worker`. ([\#9796](https://github.com/matrix-org/synapse/issues/9796))
+- Replace `HomeServer.get_config()` with inline references. ([\#9815](https://github.com/matrix-org/synapse/issues/9815))
+- Rename some handlers and config modules to not duplicate the top-level module. ([\#9816](https://github.com/matrix-org/synapse/issues/9816))
+- Fix a long-standing bug which caused `max_upload_size` to not be correctly enforced. ([\#9817](https://github.com/matrix-org/synapse/issues/9817))
+- Reduce CPU usage of the user directory by reusing existing calculated room membership. ([\#9821](https://github.com/matrix-org/synapse/issues/9821))
+- Small speed up for joining large remote rooms. ([\#9825](https://github.com/matrix-org/synapse/issues/9825))
+- Introduce flake8-bugbear to the test suite and fix some of its lint violations. ([\#9838](https://github.com/matrix-org/synapse/issues/9838))
+- Only store the raw data in the in-memory caches, rather than objects that include references to e.g. the data stores. ([\#9845](https://github.com/matrix-org/synapse/issues/9845))
+- Limit length of accepted email addresses. ([\#9855](https://github.com/matrix-org/synapse/issues/9855))
+- Remove redundant `synapse.types.Collection` type definition. ([\#9856](https://github.com/matrix-org/synapse/issues/9856))
+- Handle recently added rate limits correctly when using `--no-rate-limit` with the demo scripts. ([\#9858](https://github.com/matrix-org/synapse/issues/9858))
+- Disable invite rate-limiting by default when running the unit tests. ([\#9871](https://github.com/matrix-org/synapse/issues/9871))
+- Pass a reactor into `SynapseSite` to make testing easier. ([\#9874](https://github.com/matrix-org/synapse/issues/9874))
+- Make `DomainSpecificString` an `attrs` class. ([\#9875](https://github.com/matrix-org/synapse/issues/9875))
+- Add type hints to `synapse.api.auth` and `synapse.api.auth_blocking` modules. ([\#9876](https://github.com/matrix-org/synapse/issues/9876))
+- Remove redundant `_PushHTTPChannel` test class. ([\#9878](https://github.com/matrix-org/synapse/issues/9878))
+- Remove backwards-compatibility code for Python versions < 3.6. ([\#9879](https://github.com/matrix-org/synapse/issues/9879))
+- Small performance improvement around handling new local presence updates. ([\#9887](https://github.com/matrix-org/synapse/issues/9887))
+
+
+Synapse 1.32.2 (2021-04-22)
+===========================
+
+This release includes a fix for a regression introduced in 1.32.0.
+
+Bugfixes
+--------
+
+- Fix a regression in Synapse 1.32.0 and 1.32.1 which caused `LoggingContext` errors in plugins. ([\#9857](https://github.com/matrix-org/synapse/issues/9857))
+
+
+Synapse 1.32.1 (2021-04-21)
+===========================
+
+This release fixes [a regression](https://github.com/matrix-org/synapse/issues/9853)
+in Synapse 1.32.0 that caused connected Prometheus instances to become unstable.
+
+However, as this release is still subject to the `LoggingContext` change in 1.32.0,
+it is recommended to remain on or downgrade to 1.31.0.
+
+Bugfixes
+--------
+
+- Fix a regression in Synapse 1.32.0 which caused Synapse to report large numbers of Prometheus time series, potentially overwhelming Prometheus instances. ([\#9854](https://github.com/matrix-org/synapse/issues/9854))
+
+
+Synapse 1.32.0 (2021-04-20)
+===========================
+
+**Note:** This release introduces [a regression](https://github.com/matrix-org/synapse/issues/9853)
+that can overwhelm connected Prometheus instances. This issue was not present in
+1.32.0rc1. If affected, it is recommended to downgrade to 1.31.0 in the meantime, and
+follow [these instructions](https://github.com/matrix-org/synapse/pull/9854#issuecomment-823472183)
+to clean up any excess writeahead logs.
+
+**Note:** This release also mistakenly included a change that may affect Synapse
+modules that import `synapse.logging.context.LoggingContext`, such as
+[synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider).
+This will be fixed in a later Synapse version.
+
+**Note:** This release requires Python 3.6+ and Postgres 9.6+ or SQLite 3.22+.
+
+This release removes the deprecated `GET /_synapse/admin/v1/users/<user_id>` admin API. Please use the [v2 API](https://github.com/matrix-org/synapse/blob/develop/docs/admin_api/user_admin_api.rst#query-user-account) instead, which has improved capabilities.
+
+This release requires Application Services to use type `m.login.application_service` when registering users via the `/_matrix/client/r0/register` endpoint to comply with the spec. Please ensure your Application Services are up to date.
+
+If you are using the `packages.matrix.org` Debian repository for Synapse packages,
+note that we have recently updated the expiry date on the gpg signing key. If you see an
+error similar to `The following signatures were invalid: EXPKEYSIG F473DD4473365DE1`, you
+will need to get a fresh copy of the keys. You can do so with:
+
+```sh
+sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
+```
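After replacing the keyring, apt also needs to refresh its package indexes before installs succeed against the repository again; that follow-up step is assumed here rather than spelled out above:

```sh
# Refresh package indexes so apt re-validates them against the new key.
sudo apt-get update
```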
+
+Bugfixes
+--------
+
+- Fix the log lines of nested logging contexts. Broke in 1.32.0rc1. ([\#9829](https://github.com/matrix-org/synapse/issues/9829))
+
+
+Synapse 1.32.0rc1 (2021-04-13)
+==============================
+
+Features
+--------
+
+- Add a Synapse module for routing presence updates between users. ([\#9491](https://github.com/matrix-org/synapse/issues/9491))
+- Add an admin API to manage ratelimit for a specific user. ([\#9648](https://github.com/matrix-org/synapse/issues/9648))
+- Include request information in structured logging output. ([\#9654](https://github.com/matrix-org/synapse/issues/9654))
+- Add `order_by` to the admin API `GET /_synapse/admin/v2/users`. Contributed by @dklimpel. ([\#9691](https://github.com/matrix-org/synapse/issues/9691))
+- Replace the `room_invite_state_types` configuration setting with `room_prejoin_state`. ([\#9700](https://github.com/matrix-org/synapse/issues/9700))
+- Add experimental support for [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083): restricting room access via group membership. ([\#9717](https://github.com/matrix-org/synapse/issues/9717), [\#9735](https://github.com/matrix-org/synapse/issues/9735))
+- Update experimental support for Spaces: include `m.room.create` in the room state sent with room-invites. ([\#9710](https://github.com/matrix-org/synapse/issues/9710))
+- Synapse now requires Python 3.6 or later. It also requires Postgres 9.6 or later or SQLite 3.22 or later. ([\#9766](https://github.com/matrix-org/synapse/issues/9766))
+
+
+Bugfixes
+--------
+
+- Prevent `synapse_forward_extremities` and `synapse_excess_extremity_events` Prometheus metrics from initially reporting zero-values after startup. ([\#8926](https://github.com/matrix-org/synapse/issues/8926))
+- Fix recently added ratelimits to correctly honour the application service `rate_limited` flag. ([\#9711](https://github.com/matrix-org/synapse/issues/9711))
+- Fix longstanding bug which caused `duplicate key value violates unique constraint "remote_media_cache_thumbnails_media_origin_media_id_thumbna_key"` errors. ([\#9725](https://github.com/matrix-org/synapse/issues/9725))
+- Fix bug where sharded federation senders could get stuck repeatedly querying the DB in a loop, using lots of CPU. ([\#9770](https://github.com/matrix-org/synapse/issues/9770))
+- Fix duplicate logging of exceptions thrown during federation transaction processing. ([\#9780](https://github.com/matrix-org/synapse/issues/9780))
+
+
+Updates to the Docker image
+---------------------------
+
+- Move opencontainers labels to the final Docker image such that users can inspect them. ([\#9765](https://github.com/matrix-org/synapse/issues/9765))
+
+
+Improved Documentation
+----------------------
+
+- Make the `allowed_local_3pids` regex example in the sample config stricter. ([\#9719](https://github.com/matrix-org/synapse/issues/9719))
+
+
+Deprecations and Removals
+-------------------------
+
+- Remove old admin API `GET /_synapse/admin/v1/users/<user_id>`. ([\#9401](https://github.com/matrix-org/synapse/issues/9401))
+- Make `/_matrix/client/r0/register` expect a type of `m.login.application_service` when an Application Service registers a user, to align with [the relevant spec](https://spec.matrix.org/unstable/application-service-api/#server-admin-style-permissions). ([\#9548](https://github.com/matrix-org/synapse/issues/9548))
+
+
+Internal Changes
+----------------
+
+- Replace deprecated `imp` module with successor `importlib`. Contributed by Cristina Muñoz. ([\#9718](https://github.com/matrix-org/synapse/issues/9718))
+- Experiment with GitHub Actions for CI. ([\#9661](https://github.com/matrix-org/synapse/issues/9661))
+- Introduce flake8-bugbear to the test suite and fix some of its lint violations. ([\#9682](https://github.com/matrix-org/synapse/issues/9682))
+- Update `scripts-dev/complement.sh` to use a local checkout of Complement, allow running a subset of tests and have it use Synapse's Complement test blacklist. ([\#9685](https://github.com/matrix-org/synapse/issues/9685))
+- Improve Jaeger tracing for `to_device` messages. ([\#9686](https://github.com/matrix-org/synapse/issues/9686))
+- Add release helper script for automating part of the Synapse release process. ([\#9713](https://github.com/matrix-org/synapse/issues/9713))
+- Add type hints to expiring cache. ([\#9730](https://github.com/matrix-org/synapse/issues/9730))
+- Convert various testcases to `HomeserverTestCase`. ([\#9736](https://github.com/matrix-org/synapse/issues/9736))
+- Start linting mypy with `no_implicit_optional`. ([\#9742](https://github.com/matrix-org/synapse/issues/9742))
+- Add missing type hints to federation handler and server. ([\#9743](https://github.com/matrix-org/synapse/issues/9743))
+- Check that a `ConfigError` is raised, rather than simply `Exception`, when appropriate in homeserver config file generation tests. ([\#9753](https://github.com/matrix-org/synapse/issues/9753))
+- Fix incompatibility with `tox` 2.5. ([\#9769](https://github.com/matrix-org/synapse/issues/9769))
+- Enable Complement tests for [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946): Spaces Summary API. ([\#9771](https://github.com/matrix-org/synapse/issues/9771))
+- Use mock from the standard library instead of a separate package. ([\#9772](https://github.com/matrix-org/synapse/issues/9772))
+- Update Black configuration to target Python 3.6. ([\#9781](https://github.com/matrix-org/synapse/issues/9781))
+- Add option to skip unit tests when building Debian packages. ([\#9793](https://github.com/matrix-org/synapse/issues/9793))
+
+
Synapse 1.31.0 (2021-04-06)
===========================
diff --git a/README.rst b/README.rst
index 655a2bf3..1a550357 100644
--- a/README.rst
+++ b/README.rst
@@ -393,7 +393,12 @@ massive excess of outgoing federation requests (see `discussion
indicate that your server is also issuing far more outgoing federation
requests than can be accounted for by your users' activity, this is a
likely cause. The misbehavior can be worked around by setting
-``use_presence: false`` in the Synapse config file.
+the following in the Synapse config file:
+
+.. code-block:: yaml
+
+ presence:
+ enabled: false
People can't accept room invitations from me
--------------------------------------------
diff --git a/UPGRADE.rst b/UPGRADE.rst
index ba488e10..e921e0c0 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -85,6 +85,72 @@ for example:
wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
+Upgrading to v1.33.0
+====================
+
+Account Validity HTML templates can now display a user's expiration date
+------------------------------------------------------------------------
+
+This may affect you if you have enabled the account validity feature, and have made use of a
+custom HTML template specified by the ``account_validity.template_dir`` or ``account_validity.account_renewed_html_path``
+Synapse config options.
+
+The template can now accept an ``expiration_ts`` variable, which is the unix timestamp in milliseconds of the
+date until which the user's account has been renewed. See the
+`default template <https://github.com/matrix-org/synapse/blob/release-v1.33.0/synapse/res/templates/account_renewed.html>`_
+for an example of its usage.
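+
+For instance, a custom template could display this value directly (a minimal
+illustration only; the default template linked above shows fuller usage)::
+
+    <p>Your account has been renewed, and is now valid until {{ expiration_ts }}.</p>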
+
+Also note that a new HTML template, ``account_previously_renewed.html``, has been added. This is shown to users
+when they attempt to renew their account with a valid renewal token that has already been used before. The default
+template contents can be found
+`here <https://github.com/matrix-org/synapse/blob/release-v1.33.0/synapse/res/templates/account_previously_renewed.html>`_,
+and can also accept an ``expiration_ts`` variable. This template replaces the error message users would previously see
+upon attempting to use a valid renewal token more than once.
+
+
+Upgrading to v1.32.0
+====================
+
+Regression causing connected Prometheus instances to become overwhelmed
+-----------------------------------------------------------------------
+
+This release introduces `a regression <https://github.com/matrix-org/synapse/issues/9853>`_
+that can overwhelm connected Prometheus instances. This issue is not present in
+Synapse v1.32.0rc1.
+
+If you have been affected, please downgrade to 1.31.0. You then may need to
+remove excess writeahead logs in order for Prometheus to recover. Instructions
+for doing so are provided
+`here <https://github.com/matrix-org/synapse/pull/9854#issuecomment-823472183>`_.
+
+Dropping support for old Python, Postgres and SQLite versions
+-------------------------------------------------------------
+
+In line with our `deprecation policy <https://github.com/matrix-org/synapse/blob/release-v1.32.0/docs/deprecation_policy.md>`_,
+we've dropped support for Python 3.5 and PostgreSQL 9.5, as they are no longer supported upstream.
+
+This release of Synapse requires Python 3.6+ and PostgreSQL 9.6+ or SQLite 3.22+.
+
+Removal of old List Accounts Admin API
+--------------------------------------
+
+The deprecated v1 "list accounts" admin API (``GET /_synapse/admin/v1/users/<user_id>``) has been removed in this version.
+
+The `v2 list accounts API <https://github.com/matrix-org/synapse/blob/master/docs/admin_api/user_admin_api.rst#list-accounts>`_
+has been available since Synapse 1.7.0 (2019-12-13), and is accessible under ``GET /_synapse/admin/v2/users``.
+
+The deprecation of the old endpoint was announced with Synapse 1.28.0 (released on 2021-02-25).
+
+Application Services must use type ``m.login.application_service`` when registering users
+-----------------------------------------------------------------------------------------
+
+In compliance with the
+`Application Service spec <https://matrix.org/docs/spec/application_service/r0.1.2#server-admin-style-permissions>`_,
+Application Services are now required to use the ``m.login.application_service`` type when registering users via the
+``/_matrix/client/r0/register`` endpoint. This behaviour was deprecated in Synapse v1.30.0.
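+
+For example, a registration request from an Application Service now includes a
+``type`` field (an illustrative body; the request must also be authenticated
+with the Application Service's ``access_token``)::
+
+    {
+      "type": "m.login.application_service",
+      "username": "_example_bot"
+    }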
+
+Please ensure your Application Services are up to date.
+
Upgrading to v1.29.0
====================
diff --git a/contrib/cmdclient/console.py b/contrib/cmdclient/console.py
index 67e03224..856dd437 100755
--- a/contrib/cmdclient/console.py
+++ b/contrib/cmdclient/console.py
@@ -24,6 +24,7 @@ import sys
import time
import urllib
from http import TwistedHttpClient
+from typing import Optional
import nacl.encoding
import nacl.signing
@@ -718,7 +719,7 @@ class SynapseCmd(cmd.Cmd):
method,
path,
data=None,
- query_params={"access_token": None},
+ query_params: Optional[dict] = None,
alt_text=None,
):
"""Runs an HTTP request and pretty prints the output.
@@ -729,6 +730,8 @@ class SynapseCmd(cmd.Cmd):
data: Raw JSON data if any
query_params: dict of query parameters to add to the url
"""
+ query_params = query_params or {"access_token": None}
+
url = self._url() + path
if "access_token" in query_params:
query_params["access_token"] = self._tok()
diff --git a/contrib/cmdclient/http.py b/contrib/cmdclient/http.py
index 851e80c2..1310f078 100644
--- a/contrib/cmdclient/http.py
+++ b/contrib/cmdclient/http.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
import json
import urllib
from pprint import pformat
+from typing import Optional
from twisted.internet import defer, reactor
from twisted.web.client import Agent, readBody
@@ -85,8 +85,9 @@ class TwistedHttpClient(HttpClient):
body = yield readBody(response)
defer.returnValue(json.loads(body))
- def _create_put_request(self, url, json_data, headers_dict={}):
+ def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a PUT request"""
+ headers_dict = headers_dict or {}
if "Content-Type" not in headers_dict:
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
@@ -95,14 +96,22 @@ class TwistedHttpClient(HttpClient):
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
)
- def _create_get_request(self, url, headers_dict={}):
+ def _create_get_request(self, url, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a GET request"""
- return self._create_request("GET", url, headers_dict=headers_dict)
+ return self._create_request("GET", url, headers_dict=headers_dict or {})
@defer.inlineCallbacks
def do_request(
- self, method, url, data=None, qparams=None, jsonreq=True, headers={}
+ self,
+ method,
+ url,
+ data=None,
+ qparams=None,
+ jsonreq=True,
+ headers: Optional[dict] = None,
):
+ headers = headers or {}
+
if qparams:
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
@@ -123,8 +132,12 @@ class TwistedHttpClient(HttpClient):
defer.returnValue(json.loads(body))
@defer.inlineCallbacks
- def _create_request(self, method, url, producer=None, headers_dict={}):
+ def _create_request(
+ self, method, url, producer=None, headers_dict: Optional[dict] = None
+ ):
"""Creates and sends a request to the given url"""
+ headers_dict = headers_dict or {}
+
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
retries_left = 5
diff --git a/contrib/experiments/test_messaging.py b/contrib/experiments/test_messaging.py
index 7fbc7d8f..31b8a682 100644
--- a/contrib/experiments/test_messaging.py
+++ b/contrib/experiments/test_messaging.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/debian/build_virtualenv b/debian/build_virtualenv
index cad7d168..21caad90 100755
--- a/debian/build_virtualenv
+++ b/debian/build_virtualenv
@@ -50,15 +50,24 @@ PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"
-# we copy the tests to a temporary directory so that we can put them on the
-# PYTHONPATH without putting the uninstalled synapse on the pythonpath.
-tmpdir=`mktemp -d`
-trap "rm -r $tmpdir" EXIT
+case "$DEB_BUILD_OPTIONS" in
+ *nocheck*)
+ # Skip running tests if "nocheck" present in $DEB_BUILD_OPTIONS
+ ;;
+
+ *)
+ # Copy tests to a temporary directory so that we can put them on the
+ # PYTHONPATH without putting the uninstalled synapse on the pythonpath.
+ tmpdir=`mktemp -d`
+ trap "rm -r $tmpdir" EXIT
+
+ cp -r tests "$tmpdir"
-cp -r tests "$tmpdir"
+ PYTHONPATH="$tmpdir" \
+ "${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
-PYTHONPATH="$tmpdir" \
- "${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
+ ;;
+esac
# build the config file
"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_config" \
diff --git a/debian/changelog b/debian/changelog
index 09602ff5..76b82c17 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,43 @@
+matrix-synapse-py3 (1.33.2) stable; urgency=medium
+
+ * New synapse release 1.33.2.
+
+ -- Synapse Packaging team <packages@matrix.org> Tue, 11 May 2021 11:17:59 +0100
+
+matrix-synapse-py3 (1.33.1) stable; urgency=medium
+
+ * New synapse release 1.33.1.
+
+ -- Synapse Packaging team <packages@matrix.org> Thu, 06 May 2021 14:06:33 +0100
+
+matrix-synapse-py3 (1.33.0) stable; urgency=medium
+
+ * New synapse release 1.33.0.
+
+ -- Synapse Packaging team <packages@matrix.org> Wed, 05 May 2021 14:15:27 +0100
+
+matrix-synapse-py3 (1.32.2) stable; urgency=medium
+
+ * New synapse release 1.32.2.
+
+ -- Synapse Packaging team <packages@matrix.org> Wed, 22 Apr 2021 12:43:52 +0100
+
+matrix-synapse-py3 (1.32.1) stable; urgency=medium
+
+ * New synapse release 1.32.1.
+
+ -- Synapse Packaging team <packages@matrix.org> Wed, 21 Apr 2021 14:00:55 +0100
+
+matrix-synapse-py3 (1.32.0) stable; urgency=medium
+
+ [ Dan Callahan ]
+ * Skip tests when DEB_BUILD_OPTIONS contains "nocheck".
+
+ [ Synapse Packaging team ]
+ * New synapse release 1.32.0.
+
+ -- Synapse Packaging team <packages@matrix.org> Tue, 20 Apr 2021 14:28:39 +0100
+
matrix-synapse-py3 (1.31.0) stable; urgency=medium
* New synapse release 1.31.0.
diff --git a/demo/start.sh b/demo/start.sh
index 621a5698..bc485409 100755
--- a/demo/start.sh
+++ b/demo/start.sh
@@ -96,18 +96,48 @@ for port in 8080 8081 8082; do
# Check script parameters
if [ $# -eq 1 ]; then
if [ $1 = "--no-rate-limit" ]; then
- # messages rate limit
- echo 'rc_messages_per_second: 1000' >> $DIR/etc/$port.config
- echo 'rc_message_burst_count: 1000' >> $DIR/etc/$port.config
-
- # registration rate limit
- printf 'rc_registration:\n per_second: 1000\n burst_count: 1000\n' >> $DIR/etc/$port.config
-
- # login rate limit
- echo 'rc_login:' >> $DIR/etc/$port.config
- printf ' address:\n per_second: 1000\n burst_count: 1000\n' >> $DIR/etc/$port.config
- printf ' account:\n per_second: 1000\n burst_count: 1000\n' >> $DIR/etc/$port.config
- printf ' failed_attempts:\n per_second: 1000\n burst_count: 1000\n' >> $DIR/etc/$port.config
+
+ # Disable any rate limiting
+ ratelimiting=$(cat <<-RC
+ rc_message:
+ per_second: 1000
+ burst_count: 1000
+ rc_registration:
+ per_second: 1000
+ burst_count: 1000
+ rc_login:
+ address:
+ per_second: 1000
+ burst_count: 1000
+ account:
+ per_second: 1000
+ burst_count: 1000
+ failed_attempts:
+ per_second: 1000
+ burst_count: 1000
+ rc_admin_redaction:
+ per_second: 1000
+ burst_count: 1000
+ rc_joins:
+ local:
+ per_second: 1000
+ burst_count: 1000
+ remote:
+ per_second: 1000
+ burst_count: 1000
+ rc_3pid_validation:
+ per_second: 1000
+ burst_count: 1000
+ rc_invites:
+ per_room:
+ per_second: 1000
+ burst_count: 1000
+ per_user:
+ per_second: 1000
+ burst_count: 1000
+ RC
+ )
+ echo "${ratelimiting}" >> $DIR/etc/$port.config
fi
fi
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 5b7bf027..4f5cd06d 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -18,11 +18,6 @@ ARG PYTHON_VERSION=3.8
###
FROM docker.io/python:${PYTHON_VERSION}-slim as builder
-LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
-LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
-LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git'
-LABEL org.opencontainers.image.licenses='Apache-2.0'
-
# install the OS build deps
RUN apt-get update && apt-get install -y \
build-essential \
@@ -66,6 +61,11 @@ RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
FROM docker.io/python:${PYTHON_VERSION}-slim
+LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
+LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
+LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git'
+LABEL org.opencontainers.image.licenses='Apache-2.0'
+
RUN apt-get update && apt-get install -y \
curl \
gosu \
diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers
new file mode 100644
index 00000000..969cf972
--- /dev/null
+++ b/docker/Dockerfile-workers
@@ -0,0 +1,23 @@
+# Inherit from the official Synapse docker image
+FROM matrixdotorg/synapse
+
+# Install deps
+RUN apt-get update
+RUN apt-get install -y supervisor redis nginx
+
+# Remove the default nginx sites
+RUN rm /etc/nginx/sites-enabled/default
+
+# Copy Synapse worker, nginx and supervisord configuration template files
+COPY ./docker/conf-workers/* /conf/
+
+# Expose nginx listener port
+EXPOSE 8080/tcp
+
+# Volume for user-editable config files, logs etc.
+VOLUME ["/data"]
+
+# A script to read environment variables and create the necessary
+# files to run the desired worker configuration. Will start supervisord.
+COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
+ENTRYPOINT ["/configure_workers_and_start.py"]
diff --git a/docker/README-testing.md b/docker/README-testing.md
new file mode 100644
index 00000000..6a5baf9e
--- /dev/null
+++ b/docker/README-testing.md
@@ -0,0 +1,140 @@
+# Running tests against a dockerised Synapse
+
+It's possible to run integration tests against Synapse
+using [Complement](https://github.com/matrix-org/complement). Complement is a Matrix Spec
+compliance test suite for homeservers, and supports any homeserver docker image configured
+to listen on ports 8008/8448. This document contains instructions for building Synapse
+docker images that can be run inside Complement for testing purposes.
+
+Note that running Synapse's unit tests from within the docker image is not supported.
+
+## Testing with SQLite and single-process Synapse
+
+> Note that `scripts-dev/complement.sh` is a script that will automatically build
+> and run an SQLite-based, single-process Synapse against Complement.
+
+The instructions below will set up Complement testing for a single-process,
+SQLite-based Synapse deployment.
+
+Start by building the base Synapse docker image. If you wish to run tests with the latest
+release of Synapse, instead of your current checkout, you can skip this step. From the
+root of the repository:
+
+```sh
+docker build -t matrixdotorg/synapse -f docker/Dockerfile .
+```
+
+This will build an image with the tag `matrixdotorg/synapse`.
+
+Next, build the Synapse image for Complement. You will need a local checkout
+of Complement. Change to the root of your Complement checkout and run:
+
+```sh
+docker build -t complement-synapse -f "dockerfiles/Synapse.Dockerfile" dockerfiles
+```
+
+This will build an image with the tag `complement-synapse`, which can be handed to
+Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
+[Complement's documentation](https://github.com/matrix-org/complement/#running) for
+how to run the tests, as well as the various available command line flags.
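+
+For instance, from the root of a Complement checkout, a run against this image
+might look like the following (an illustrative invocation; consult Complement's
+docs for the current flags and test selection):
+
+```sh
+COMPLEMENT_BASE_IMAGE=complement-synapse go test -v ./tests/...
+```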
+
+## Testing with PostgreSQL and single or multi-process Synapse
+
+The above docker image only supports running Synapse with SQLite and in a
+single-process topology. The following instructions are used to build a Synapse image for
+Complement that supports either single or multi-process topology with a PostgreSQL
+database backend.
+
+As with the single-process image, build the base Synapse docker image. If you wish to run
+tests with the latest release of Synapse, instead of your current checkout, you can skip
+this step. From the root of the repository:
+
+```sh
+docker build -t matrixdotorg/synapse -f docker/Dockerfile .
+```
+
+This will build an image with the tag `matrixdotorg/synapse`.
+
+Next, we build a new image with worker support based on `matrixdotorg/synapse:latest`.
+Again, from the root of the repository:
+
+```sh
+docker build -t matrixdotorg/synapse-workers -f docker/Dockerfile-workers .
+```
+
+This will build an image with the tag `matrixdotorg/synapse-workers`.
+
+It's worth noting at this point that this image is fully functional, and
+can be used for local testing. See instructions for using the container
+under
+[Running the Dockerfile-worker image standalone](#running-the-dockerfile-worker-image-standalone)
+below.
+
+Finally, build the Synapse image for Complement, which is based on
+`matrixdotorg/synapse-workers`. You will need a local checkout of Complement. Change to
+the root of your Complement checkout and run:
+
+```sh
+docker build -t matrixdotorg/complement-synapse-workers -f dockerfiles/SynapseWorkers.Dockerfile dockerfiles
+```
+
+This will build an image with the tag `matrixdotorg/complement-synapse-workers`, which can be handed to
+Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
+[Complement's documentation](https://github.com/matrix-org/complement/#running) for
+how to run the tests, as well as the various available command line flags.
+
+## Running the Dockerfile-worker image standalone
+
+For manual testing of a multi-process Synapse instance in Docker,
+[Dockerfile-workers](Dockerfile-workers) is a Dockerfile that will produce an image
+bundling all necessary components together for a workerised homeserver instance.
+
+This includes any desired Synapse worker processes, an nginx instance to route traffic accordingly,
+a redis instance for worker communication and a supervisord instance to start up and monitor all
+processes. You will need to provide your own postgres container to connect to, and TLS
+is not handled by the container.
+
+Once you've built the image using the above instructions, you can run it. Be sure
+you've set up a volume according to the [usual Synapse docker instructions](README.md).
+Then run something along the lines of:
+
+```
+docker run -d --name synapse \
+ --mount type=volume,src=synapse-data,dst=/data \
+ -p 8008:8008 \
+ -e SYNAPSE_SERVER_NAME=my.matrix.host \
+ -e SYNAPSE_REPORT_STATS=no \
+ -e POSTGRES_HOST=postgres \
+ -e POSTGRES_USER=postgres \
+ -e POSTGRES_PASSWORD=somesecret \
+ -e SYNAPSE_WORKER_TYPES=synchrotron,media_repository,user_dir \
+ -e SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK=1 \
+ matrixdotorg/synapse-workers
+```
+
+...substituting `POSTGRES*` variables for those that match a postgres host you have
+available (usually a running postgres docker container).
+
+The `SYNAPSE_WORKER_TYPES` environment variable is a comma-separated list of workers to
+use when running the container. All possible worker names are defined by the keys of the
+`WORKERS_CONFIG` variable in [this script](configure_workers_and_start.py), which the
+Dockerfile makes use of to generate appropriate worker, nginx and supervisord config
+files.
+
+Sharding is supported for a subset of workers, in line with the
+[worker documentation](../docs/workers.md). To run multiple instances of a given worker
+type, simply specify the type multiple times in `SYNAPSE_WORKER_TYPES`
+(e.g. `SYNAPSE_WORKER_TYPES=event_creator,event_creator...`).
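+
+For example, a variant of the earlier `docker run` command that shards the
+federation senders might look like this (a sketch; set the remaining flags,
+such as the `POSTGRES*` variables, as in the full example above):
+
+```sh
+docker run -d --name synapse \
+    --mount type=volume,src=synapse-data,dst=/data \
+    -e SYNAPSE_SERVER_NAME=my.matrix.host \
+    -e SYNAPSE_REPORT_STATS=no \
+    -e SYNAPSE_WORKER_TYPES=federation_sender,federation_sender,synchrotron \
+    matrixdotorg/synapse-workers
+```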
+
+Otherwise, `SYNAPSE_WORKER_TYPES` can either be left empty or unset to spawn no workers
+(leaving only the main process). The container is configured to use redis-based worker
+mode.
+
+Logs for workers and the main process are logged to stdout and can be viewed with
+standard `docker logs` tooling. Worker logs contain their worker name
+after the timestamp.
+
+Setting `SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK=1` will cause worker logs to be written to
+`<data_dir>/logs/<worker_name>.log`. Logs are kept for 1 week and rotate every day at
+00:00, according to the container's clock. Logging for the main process must still be
+configured by modifying the homeserver's log config in your Synapse data volume.
diff --git a/docker/README.md b/docker/README.md
index 3a7dc585..a7d1e670 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -2,13 +2,16 @@
This Docker image will run Synapse as a single process. By default it uses a
sqlite database; for production use you should connect it to a separate
-postgres database.
+postgres database. The image also does *not* provide a TURN server.
-The image also does *not* provide a TURN server.
+This image should work on all platforms that are supported by Docker upstream.
+Note that Docker's Linux Containers on Windows (LCOW)
+platform is [experimental](https://github.com/docker/for-win/issues/6470) and
+is not supported by this image.
## Volumes
-By default, the image expects a single volume, located at ``/data``, that will hold:
+By default, the image expects a single volume, located at `/data`, that will hold:
* configuration files;
* uploaded media and thumbnails;
@@ -16,11 +19,11 @@ By default, the image expects a single volume, located at ``/data``, that will h
* the appservices configuration.
You are free to use separate volumes depending on storage endpoints at your
-disposal. For instance, ``/data/media`` could be stored on a large but low
+disposal. For instance, `/data/media` could be stored on a large but low
performance hdd storage while other files could be stored on high performance
endpoints.
-In order to setup an application service, simply create an ``appservices``
+In order to set up an application service, simply create an `appservices`
directory in the data volume and write the application service Yaml
configuration file there. Multiple application services are supported.
@@ -53,6 +56,8 @@ The following environment variables are supported in `generate` mode:
* `SYNAPSE_SERVER_NAME` (mandatory): the server public hostname.
* `SYNAPSE_REPORT_STATS` (mandatory, `yes` or `no`): whether to enable
anonymous statistics reporting.
+* `SYNAPSE_HTTP_PORT`: the port Synapse should listen on for http traffic.
+ Defaults to `8008`.
* `SYNAPSE_CONFIG_DIR`: where additional config files (such as the log config
and event signing key) will be stored. Defaults to `/data`.
* `SYNAPSE_CONFIG_PATH`: path to the file to be generated. Defaults to
@@ -73,6 +78,8 @@ docker run -d --name synapse \
matrixdotorg/synapse:latest
```
+(assuming 8008 is the port Synapse is configured to listen on for http traffic).
+
You can then check that it has started correctly with:
```
@@ -208,4 +215,4 @@ healthcheck:
## Using jemalloc
Jemalloc is embedded in the image and will be used instead of the default allocator.
-You can read about jemalloc by reading the Synapse [README](../README.md) \ No newline at end of file
+You can read about jemalloc by reading the Synapse [README](../README.md).
diff --git a/docker/conf-workers/nginx.conf.j2 b/docker/conf-workers/nginx.conf.j2
new file mode 100644
index 00000000..1081979e
--- /dev/null
+++ b/docker/conf-workers/nginx.conf.j2
@@ -0,0 +1,27 @@
+# This file contains the base config for the reverse proxy, as part of ../Dockerfile-workers.
+# configure_workers_and_start.py uses and amends this file depending on the workers
+# that have been selected.
+
+{{ upstream_directives }}
+
+server {
+ # Listen on an unoccupied port number
+ listen 8008;
+ listen [::]:8008;
+
+ server_name localhost;
+
+ # Nginx by default only allows file uploads up to 1M in size
+ # Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
+ client_max_body_size 100M;
+
+{{ worker_locations }}
+
+ # Send all other traffic to the main process
+ location ~* ^(\\/_matrix|\\/_synapse) {
+ proxy_pass http://localhost:8080;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header Host $host;
+ }
+}
diff --git a/docker/conf-workers/shared.yaml.j2 b/docker/conf-workers/shared.yaml.j2
new file mode 100644
index 00000000..f94b8c6a
--- /dev/null
+++ b/docker/conf-workers/shared.yaml.j2
@@ -0,0 +1,9 @@
+# This file contains the base for the shared homeserver config file between Synapse workers,
+# as part of ./Dockerfile-workers.
+# configure_workers_and_start.py uses and amends this file depending on the workers
+# that have been selected.
+
+redis:
+ enabled: true
+
+{{ shared_worker_config }} \ No newline at end of file
diff --git a/docker/conf-workers/supervisord.conf.j2 b/docker/conf-workers/supervisord.conf.j2
new file mode 100644
index 00000000..0de2c614
--- /dev/null
+++ b/docker/conf-workers/supervisord.conf.j2
@@ -0,0 +1,41 @@
+# This file contains the base config for supervisord, as part of ../Dockerfile-workers.
+# configure_workers_and_start.py uses and amends this file depending on the workers
+# that have been selected.
+[supervisord]
+nodaemon=true
+user=root
+
+[program:nginx]
+command=/usr/sbin/nginx -g "daemon off;"
+priority=500
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+username=www-data
+autorestart=true
+
+[program:redis]
+command=/usr/bin/redis-server /etc/redis/redis.conf --daemonize no
+priority=1
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+username=redis
+autorestart=true
+
+[program:synapse_main]
+command=/usr/local/bin/python -m synapse.app.homeserver --config-path="{{ main_config_path }}" --config-path=/conf/workers/shared.yaml
+priority=10
+# Log startup failures to supervisord's stdout/err
+# Regular synapse logs will still go in the configured data directory
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+autorestart=unexpected
+exitcodes=0
+
+# Additional process blocks
+{{ worker_config }} \ No newline at end of file
diff --git a/docker/conf-workers/worker.yaml.j2 b/docker/conf-workers/worker.yaml.j2
new file mode 100644
index 00000000..42131afc
--- /dev/null
+++ b/docker/conf-workers/worker.yaml.j2
@@ -0,0 +1,26 @@
+# This is a configuration template for a single worker instance, and is
+# used by Dockerfile-workers.
+# Values will change depending on which workers are selected when
+# running that image.
+
+worker_app: "{{ app }}"
+worker_name: "{{ name }}"
+
+# The replication listener on the main synapse process.
+worker_replication_host: 127.0.0.1
+worker_replication_http_port: 9093
+
+worker_listeners:
+ - type: http
+ port: {{ port }}
+{% if listener_resources %}
+ resources:
+ - names:
+{%- for resource in listener_resources %}
+ - {{ resource }}
+{%- endfor %}
+{% endif %}
+
+worker_log_config: {{ worker_log_config_filepath }}
+
+{{ worker_extra_conf }}
diff --git a/docker/conf/homeserver.yaml b/docker/conf/homeserver.yaml
index 0dea62a8..2b23d7f4 100644
--- a/docker/conf/homeserver.yaml
+++ b/docker/conf/homeserver.yaml
@@ -40,7 +40,9 @@ listeners:
compress: false
{% endif %}
- - port: 8008
+ # Allow configuring in case we want to reverse proxy 8008
+ # using another process in the same container
+ - port: {{ SYNAPSE_HTTP_PORT or 8008 }}
tls: false
bind_addresses: ['::']
type: http
@@ -173,18 +175,10 @@ report_stats: False
## API Configuration ##
-room_invite_state_types:
- - "m.room.join_rules"
- - "m.room.canonical_alias"
- - "m.room.avatar"
- - "m.room.name"
-
{% if SYNAPSE_APPSERVICES %}
app_service_config_files:
{% for appservice in SYNAPSE_APPSERVICES %} - "{{ appservice }}"
{% endfor %}
-{% else %}
-app_service_config_files: []
{% endif %}
macaroon_secret_key: "{{ SYNAPSE_MACAROON_SECRET_KEY }}"
diff --git a/docker/conf/log.config b/docker/conf/log.config
index 491bbcc8..34572bc0 100644
--- a/docker/conf/log.config
+++ b/docker/conf/log.config
@@ -2,9 +2,34 @@ version: 1
formatters:
precise:
- format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
+{% if worker_name %}
+ format: '%(asctime)s - worker:{{ worker_name }} - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
+{% else %}
+ format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
+{% endif %}
handlers:
+ file:
+ class: logging.handlers.TimedRotatingFileHandler
+ formatter: precise
+ filename: {{ LOG_FILE_PATH or "homeserver.log" }}
+ when: "midnight"
+ backupCount: 6 # Does not include the current log file.
+ encoding: utf8
+
+ # Default to buffering writes to log file for efficiency. This means that
+ # there will be a delay for INFO/DEBUG logs to get written, but WARNING/ERROR
+ # logs will still be flushed immediately.
+ buffer:
+ class: logging.handlers.MemoryHandler
+ target: file
+ # The capacity is the number of log lines that are buffered before
+ # being written to disk. Increasing this will lead to better
+      # performance, at the expense of it taking longer for log lines to
+ # be written to disk.
+ capacity: 10
+ flushLevel: 30 # Flush for WARNING logs as well
+
console:
class: logging.StreamHandler
formatter: precise
@@ -17,6 +42,11 @@ loggers:
root:
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
+
+{% if LOG_FILE_PATH %}
+ handlers: [console, buffer]
+{% else %}
handlers: [console]
+{% endif %}
disable_existing_loggers: false
diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py
new file mode 100755
index 00000000..4be6afc6
--- /dev/null
+++ b/docker/configure_workers_and_start.py
@@ -0,0 +1,558 @@
+#!/usr/bin/env python
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script reads environment variables and generates a shared Synapse worker,
+# nginx and supervisord configs depending on the workers requested.
+#
+# The environment variables it reads are:
+# * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
+# * SYNAPSE_REPORT_STATS: Whether to report stats.
+# * SYNAPSE_WORKER_TYPES: A comma-separated list of worker names as specified in WORKERS_CONFIG
+# below. Leave empty for no workers, or set to '*' for all possible workers.
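+#     For example, SYNAPSE_WORKER_TYPES=federation_reader,federation_reader,synchrotron
+#     would spawn two federation readers and one synchrotron (an illustrative value).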
+#
+# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
+# in the project's README), this script may be run multiple times, and functionality should
+# continue to work if so.
+
+import os
+import subprocess
+import sys
+
+import jinja2
+import yaml
+
+MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
+
+
+WORKERS_CONFIG = {
+ "pusher": {
+ "app": "synapse.app.pusher",
+ "listener_resources": [],
+ "endpoint_patterns": [],
+ "shared_extra_conf": {"start_pushers": False},
+ "worker_extra_conf": "",
+ },
+ "user_dir": {
+ "app": "synapse.app.user_dir",
+ "listener_resources": ["client"],
+ "endpoint_patterns": [
+ "^/_matrix/client/(api/v1|r0|unstable)/user_directory/search$"
+ ],
+ "shared_extra_conf": {"update_user_directory": False},
+ "worker_extra_conf": "",
+ },
+ "media_repository": {
+ "app": "synapse.app.media_repository",
+ "listener_resources": ["media"],
+ "endpoint_patterns": [
+ "^/_matrix/media/",
+ "^/_synapse/admin/v1/purge_media_cache$",
+ "^/_synapse/admin/v1/room/.*/media.*$",
+ "^/_synapse/admin/v1/user/.*/media.*$",
+ "^/_synapse/admin/v1/media/.*$",
+ "^/_synapse/admin/v1/quarantine_media/.*$",
+ ],
+ "shared_extra_conf": {"enable_media_repo": False},
+ "worker_extra_conf": "enable_media_repo: true",
+ },
+ "appservice": {
+ "app": "synapse.app.appservice",
+ "listener_resources": [],
+ "endpoint_patterns": [],
+ "shared_extra_conf": {"notify_appservices": False},
+ "worker_extra_conf": "",
+ },
+ "federation_sender": {
+ "app": "synapse.app.federation_sender",
+ "listener_resources": [],
+ "endpoint_patterns": [],
+ "shared_extra_conf": {"send_federation": False},
+ "worker_extra_conf": "",
+ },
+ "synchrotron": {
+ "app": "synapse.app.generic_worker",
+ "listener_resources": ["client"],
+ "endpoint_patterns": [
+ "^/_matrix/client/(v2_alpha|r0)/sync$",
+ "^/_matrix/client/(api/v1|v2_alpha|r0)/events$",
+ "^/_matrix/client/(api/v1|r0)/initialSync$",
+ "^/_matrix/client/(api/v1|r0)/rooms/[^/]+/initialSync$",
+ ],
+ "shared_extra_conf": {},
+ "worker_extra_conf": "",
+ },
+ "federation_reader": {
+ "app": "synapse.app.generic_worker",
+ "listener_resources": ["federation"],
+ "endpoint_patterns": [
+ "^/_matrix/federation/(v1|v2)/event/",
+ "^/_matrix/federation/(v1|v2)/state/",
+ "^/_matrix/federation/(v1|v2)/state_ids/",
+ "^/_matrix/federation/(v1|v2)/backfill/",
+ "^/_matrix/federation/(v1|v2)/get_missing_events/",
+ "^/_matrix/federation/(v1|v2)/publicRooms",
+ "^/_matrix/federation/(v1|v2)/query/",
+ "^/_matrix/federation/(v1|v2)/make_join/",
+ "^/_matrix/federation/(v1|v2)/make_leave/",
+ "^/_matrix/federation/(v1|v2)/send_join/",
+ "^/_matrix/federation/(v1|v2)/send_leave/",
+ "^/_matrix/federation/(v1|v2)/invite/",
+ "^/_matrix/federation/(v1|v2)/query_auth/",
+ "^/_matrix/federation/(v1|v2)/event_auth/",
+ "^/_matrix/federation/(v1|v2)/exchange_third_party_invite/",
+ "^/_matrix/federation/(v1|v2)/user/devices/",
+ "^/_matrix/federation/(v1|v2)/get_groups_publicised$",
+ "^/_matrix/key/v2/query",
+ ],
+ "shared_extra_conf": {},
+ "worker_extra_conf": "",
+ },
+ "federation_inbound": {
+ "app": "synapse.app.generic_worker",
+ "listener_resources": ["federation"],
+ "endpoint_patterns": ["/_matrix/federation/(v1|v2)/send/"],
+ "shared_extra_conf": {},
+ "worker_extra_conf": "",
+ },
+ "event_persister": {
+ "app": "synapse.app.generic_worker",
+ "listener_resources": ["replication"],
+ "endpoint_patterns": [],
+ "shared_extra_conf": {},
+ "worker_extra_conf": "",
+ },
+ "background_worker": {
+ "app": "synapse.app.generic_worker",
+ "listener_resources": [],
+ "endpoint_patterns": [],
+ # This worker cannot be sharded. Therefore there should only ever be one background
+ # worker, and it should be named background_worker1
+ "shared_extra_conf": {"run_background_tasks_on": "background_worker1"},
+ "worker_extra_conf": "",
+ },
+ "event_creator": {
+ "app": "synapse.app.generic_worker",
+ "listener_resources": ["client"],
+ "endpoint_patterns": [
+ "^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/redact",
+ "^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/send",
+ "^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$",
+ "^/_matrix/client/(api/v1|r0|unstable)/join/",
+ "^/_matrix/client/(api/v1|r0|unstable)/profile/",
+ ],
+ "shared_extra_conf": {},
+ "worker_extra_conf": "",
+ },
+ "frontend_proxy": {
+ "app": "synapse.app.frontend_proxy",
+ "listener_resources": ["client", "replication"],
+ "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|unstable)/keys/upload"],
+ "shared_extra_conf": {},
+ "worker_extra_conf": (
+ "worker_main_http_uri: http://127.0.0.1:%d"
+            % (MAIN_PROCESS_HTTP_LISTENER_PORT,)
+ ),
+ },
+}
+
+# Templates for sections that may be inserted multiple times in config files
+SUPERVISORD_PROCESS_CONFIG_BLOCK = """
+[program:synapse_{name}]
+command=/usr/local/bin/python -m {app} \
+ --config-path="{config_path}" \
+ --config-path=/conf/workers/shared.yaml \
+ --config-path=/conf/workers/{name}.yaml
+autorestart=unexpected
+priority=500
+exitcodes=0
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+"""
+
+NGINX_LOCATION_CONFIG_BLOCK = """
+    location ~* {endpoint} {{
+ proxy_pass {upstream};
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header Host $host;
+    }}
+"""
+
+NGINX_UPSTREAM_CONFIG_BLOCK = """
+upstream {upstream_worker_type} {{
+{body}
+}}
+"""
+
+
+# Utility functions
+def log(txt: str):
+ """Log something to the stdout.
+
+ Args:
+ txt: The text to log.
+ """
+ print(txt)
+
+
+def error(txt: str):
+ """Log something and exit with an error code.
+
+ Args:
+ txt: The text to log in error.
+ """
+ log(txt)
+ sys.exit(2)
+
+
+def convert(src: str, dst: str, **template_vars):
+ """Generate a file from a template
+
+ Args:
+ src: Path to the input file.
+ dst: Path to write to.
+ template_vars: The arguments to replace placeholder variables in the template with.
+ """
+ # Read the template file
+ with open(src) as infile:
+ template = infile.read()
+
+ # Generate a string from the template. We disable autoescape to prevent template
+ # variables from being escaped.
+ rendered = jinja2.Template(template, autoescape=False).render(**template_vars)
+
+ # Write the generated contents to a file
+ #
+ # We use append mode in case the files have already been written to by something else
+ # (for instance, as part of the instructions in a dockerfile).
+ with open(dst, "a") as outfile:
+ # In case the existing file doesn't end with a newline
+ outfile.write("\n")
+
+ outfile.write(rendered)
+
+
+def add_sharding_to_shared_config(
+ shared_config: dict,
+ worker_type: str,
+ worker_name: str,
+ worker_port: int,
+) -> None:
+ """Given a dictionary representing a config file shared across all workers,
+ append sharded worker information to it for the current worker_type instance.
+
+ Args:
+ shared_config: The config dict that all worker instances share (after being converted to YAML)
+ worker_type: The type of worker (one of those defined in WORKERS_CONFIG).
+ worker_name: The name of the worker instance.
+ worker_port: The HTTP replication port that the worker instance is listening on.
+ """
+ # The instance_map config field marks the workers that write to various replication streams
+ instance_map = shared_config.setdefault("instance_map", {})
+
+ # Worker-type specific sharding config
+ if worker_type == "pusher":
+ shared_config.setdefault("pusher_instances", []).append(worker_name)
+
+ elif worker_type == "federation_sender":
+ shared_config.setdefault("federation_sender_instances", []).append(worker_name)
+
+ elif worker_type == "event_persister":
+ # Event persisters write to the events stream, so we need to update
+ # the list of event stream writers
+ shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
+ worker_name
+ )
+
+ # Map of stream writer instance names to host/ports combos
+ instance_map[worker_name] = {
+ "host": "localhost",
+ "port": worker_port,
+ }
+
+ elif worker_type == "media_repository":
+ # The first configured media worker will run the media background jobs
+ shared_config.setdefault("media_instance_running_background_jobs", worker_name)
+
+
+def generate_base_homeserver_config():
+ """Starts Synapse and generates a basic homeserver config, which will later be
+ modified for worker support.
+
+ Raises: CalledProcessError if calling start.py returned a non-zero exit code.
+ """
+ # start.py already does this for us, so just call that.
+    # note that this script is copied in by the official, monolith dockerfile
+ os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
+ subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"])
+
+
+def generate_worker_files(environ, config_path: str, data_dir: str):
+ """Read the desired list of workers from environment variables and generate
+ shared homeserver, nginx and supervisord configs.
+
+ Args:
+        environ: The environment variables (e.g. os.environ) to read the worker configuration from.
+ config_path: Where to output the generated Synapse main worker config file.
+ data_dir: The location of the synapse data directory. Where log and
+ user-facing config files live.
+ """
+ # Note that yaml cares about indentation, so care should be taken to insert lines
+ # into files at the correct indentation below.
+
+ # shared_config is the contents of a Synapse config file that will be shared amongst
+ # the main Synapse process as well as all workers.
+ # It is intended mainly for disabling functionality when certain workers are spun up,
+ # and adding a replication listener.
+
+ # First read the original config file and extract the listeners block. Then we'll add
+ # another listener for replication. Later we'll write out the result.
+ listeners = [
+ {
+ "port": 9093,
+ "bind_address": "127.0.0.1",
+ "type": "http",
+ "resources": [{"names": ["replication"]}],
+ }
+ ]
+ with open(config_path) as file_stream:
+ original_config = yaml.safe_load(file_stream)
+ original_listeners = original_config.get("listeners")
+ if original_listeners:
+ listeners += original_listeners
+
+ # The shared homeserver config. The contents of which will be inserted into the
+ # base shared worker jinja2 template.
+ #
+ # This config file will be passed to all workers, included Synapse's main process.
+ shared_config = {"listeners": listeners}
+
+ # The supervisord config. The contents of which will be inserted into the
+ # base supervisord jinja2 template.
+ #
+ # Supervisord will be in charge of running everything, from redis to nginx to Synapse
+ # and all of its worker processes. Load the config template, which defines a few
+ # services that are necessary to run.
+ supervisord_config = ""
+
+    # Upstreams for load-balancing purposes. This dict maps each worker type to the
+    # ports of its instances. For example:
+    # {
+    #   worker_type: {1234, 1235, ...}
+ # }
+ # and will be used to construct 'upstream' nginx directives.
+ nginx_upstreams = {}
+
+ # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
+ # placed after the proxy_pass directive. The main benefit to representing this data as a
+ # dict over a str is that we can easily deduplicate endpoints across multiple instances
+ # of the same worker.
+ #
+    # An nginx site config that will be amended depending on the workers that are
+ # spun up. To be placed in /etc/nginx/conf.d.
+ nginx_locations = {}
+
+ # Read the desired worker configuration from the environment
+ worker_types = environ.get("SYNAPSE_WORKER_TYPES")
+ if worker_types is None:
+ # No workers, just the main process
+ worker_types = []
+ else:
+ # Split type names by comma
+ worker_types = worker_types.split(",")
+
+ # Create the worker configuration directory if it doesn't already exist
+ os.makedirs("/conf/workers", exist_ok=True)
+
+ # Start worker ports from this arbitrary port
+ worker_port = 18009
+
+ # A counter of worker_type -> int. Used for determining the name for a given
+ # worker type when generating its config file, as each worker's name is just
+ # worker_type + instance #
+ worker_type_counter = {}
+
+ # For each worker type specified by the user, create config values
+ for worker_type in worker_types:
+ worker_type = worker_type.strip()
+
+ worker_config = WORKERS_CONFIG.get(worker_type)
+ if worker_config:
+ worker_config = worker_config.copy()
+ else:
+ log(worker_type + " is an unknown worker type! It will be ignored")
+ continue
+
+ new_worker_count = worker_type_counter.setdefault(worker_type, 0) + 1
+ worker_type_counter[worker_type] = new_worker_count
+
+ # Name workers by their type concatenated with an incrementing number
+ # e.g. federation_reader1
+ worker_name = worker_type + str(new_worker_count)
+ worker_config.update(
+ {"name": worker_name, "port": worker_port, "config_path": config_path}
+ )
+
+ # Update the shared config with any worker-type specific options
+ shared_config.update(worker_config["shared_extra_conf"])
+
+ # Check if more than one instance of this worker type has been specified
+ worker_type_total_count = worker_types.count(worker_type)
+ if worker_type_total_count > 1:
+ # Update the shared config with sharding-related options if necessary
+ add_sharding_to_shared_config(
+ shared_config, worker_type, worker_name, worker_port
+ )
+
+ # Enable the worker in supervisord
+ supervisord_config += SUPERVISORD_PROCESS_CONFIG_BLOCK.format_map(worker_config)
+
+ # Add nginx location blocks for this worker's endpoints (if any are defined)
+ for pattern in worker_config["endpoint_patterns"]:
+ # Determine whether we need to load-balance this worker
+ if worker_type_total_count > 1:
+ # Create or add to a load-balanced upstream for this worker
+ nginx_upstreams.setdefault(worker_type, set()).add(worker_port)
+
+ # Upstreams are named after the worker_type
+ upstream = "http://" + worker_type
+ else:
+ upstream = "http://localhost:%d" % (worker_port,)
+
+ # Note that this endpoint should proxy to this upstream
+ nginx_locations[pattern] = upstream
+
+ # Write out the worker's logging config file
+
+ # Check whether we should write worker logs to disk, in addition to the console
+ extra_log_template_args = {}
+ if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
+ extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
+ dir=data_dir, name=worker_name
+ )
+
+ # Render and write the file
+ log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
+ convert(
+ "/conf/log.config",
+ log_config_filepath,
+ worker_name=worker_name,
+ **extra_log_template_args,
+ )
+
+ # Then a worker config file
+ convert(
+ "/conf/worker.yaml.j2",
+ "/conf/workers/{name}.yaml".format(name=worker_name),
+ **worker_config,
+ worker_log_config_filepath=log_config_filepath,
+ )
+
+ worker_port += 1
+
+ # Build the nginx location config blocks
+ nginx_location_config = ""
+ for endpoint, upstream in nginx_locations.items():
+ nginx_location_config += NGINX_LOCATION_CONFIG_BLOCK.format(
+ endpoint=endpoint,
+ upstream=upstream,
+ )
+
+ # Determine the load-balancing upstreams to configure
+ nginx_upstream_config = ""
+ for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
+ body = ""
+ for port in upstream_worker_ports:
+ body += " server localhost:%d;\n" % (port,)
+
+ # Add to the list of configured upstreams
+ nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
+ upstream_worker_type=upstream_worker_type,
+ body=body,
+ )
+
+ # Finally, we'll write out the config files.
+
+ # Shared homeserver config
+ convert(
+ "/conf/shared.yaml.j2",
+ "/conf/workers/shared.yaml",
+ shared_worker_config=yaml.dump(shared_config),
+ )
+
+ # Nginx config
+ convert(
+ "/conf/nginx.conf.j2",
+ "/etc/nginx/conf.d/matrix-synapse.conf",
+ worker_locations=nginx_location_config,
+ upstream_directives=nginx_upstream_config,
+ )
+
+ # Supervisord config
+ convert(
+ "/conf/supervisord.conf.j2",
+ "/etc/supervisor/conf.d/supervisord.conf",
+ main_config_path=config_path,
+ worker_config=supervisord_config,
+ )
+
+ # Ensure the logging directory exists
+ log_dir = data_dir + "/logs"
+ if not os.path.exists(log_dir):
+ os.mkdir(log_dir)
+
+
+def start_supervisord():
+ """Starts up supervisord which then starts and monitors all other necessary processes
+
+    Raises: CalledProcessError if supervisord exits with a non-zero exit code.
+ """
+ subprocess.run(["/usr/bin/supervisord"], stdin=subprocess.PIPE)
+
+
+def main(args, environ):
+ config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
+ config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
+ data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
+
+ # override SYNAPSE_NO_TLS, we don't support TLS in worker mode,
+ # this needs to be handled by a frontend proxy
+ environ["SYNAPSE_NO_TLS"] = "yes"
+
+ # Generate the base homeserver config if one does not yet exist
+ if not os.path.exists(config_path):
+ log("Generating base homeserver config")
+ generate_base_homeserver_config()
+
+ # This script may be run multiple times (mostly by Complement, see note at top of file).
+ # Don't re-configure workers in this instance.
+ mark_filepath = "/conf/workers_have_been_configured"
+ if not os.path.exists(mark_filepath):
+ # Always regenerate all other config files
+ generate_worker_files(environ, config_path, data_dir)
+
+ # Mark workers as being configured
+ with open(mark_filepath, "w") as f:
+ f.write("")
+
+ # Start supervisord, which will start Synapse, all of the configured worker
+ # processes, redis, nginx etc. according to the config we created above.
+ start_supervisord()
+
+
+if __name__ == "__main__":
+ main(sys.argv, os.environ)
diff --git a/docs/admin_api/user_admin_api.rst b/docs/admin_api/user_admin_api.rst
index 8d4ec5a6..dbce9c90 100644
--- a/docs/admin_api/user_admin_api.rst
+++ b/docs/admin_api/user_admin_api.rst
@@ -111,35 +111,16 @@ List Accounts
=============
This API returns all local user accounts.
+By default, the response is ordered by ascending user ID.
-The api is::
+The API is::
GET /_synapse/admin/v2/users?from=0&limit=10&guests=false
To use it, you will need to authenticate by providing an ``access_token`` for a
server admin: see `README.rst <README.rst>`_.
-The parameter ``from`` is optional but used for pagination, denoting the
-offset in the returned results. This should be treated as an opaque value and
-not explicitly set to anything other than the return value of ``next_token``
-from a previous call.
-
-The parameter ``limit`` is optional but is used for pagination, denoting the
-maximum number of items to return in this call. Defaults to ``100``.
-
-The parameter ``user_id`` is optional and filters to only return users with user IDs
-that contain this value. This parameter is ignored when using the ``name`` parameter.
-
-The parameter ``name`` is optional and filters to only return users with user ID localparts
-**or** displaynames that contain this value.
-
-The parameter ``guests`` is optional and if ``false`` will **exclude** guest users.
-Defaults to ``true`` to include guest users.
-
-The parameter ``deactivated`` is optional and if ``true`` will **include** deactivated users.
-Defaults to ``false`` to exclude deactivated users.
-
-A JSON body is returned with the following shape:
+A response body like the following is returned:
.. code:: json
@@ -175,6 +156,66 @@ with ``from`` set to the value of ``next_token``. This will return a new page.
If the endpoint does not return a ``next_token`` then there are no more users
to paginate through.
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- ``user_id`` - Is optional and filters to only return users with user IDs
+ that contain this value. This parameter is ignored when using the ``name`` parameter.
+- ``name`` - Is optional and filters to only return users with user ID localparts
+ **or** displaynames that contain this value.
+- ``guests`` - string representing a bool - Is optional and if ``false`` will **exclude** guest users.
+ Defaults to ``true`` to include guest users.
+- ``deactivated`` - string representing a bool - Is optional and if ``true`` will **include** deactivated users.
+ Defaults to ``false`` to exclude deactivated users.
+- ``limit`` - string representing a positive integer - Is optional but is used for pagination,
+ denoting the maximum number of items to return in this call. Defaults to ``100``.
+- ``from`` - string representing a positive integer - Is optional but used for pagination,
+ denoting the offset in the returned results. This should be treated as an opaque value and
+ not explicitly set to anything other than the return value of ``next_token`` from a previous call.
+ Defaults to ``0``.
+- ``order_by`` - The method by which to sort the returned list of users.
+ If the ordered field has duplicates, the second order is always by ascending ``name``,
+ which guarantees a stable ordering. Valid values are:
+
+ - ``name`` - Users are ordered alphabetically by ``name``. This is the default.
+ - ``is_guest`` - Users are ordered by ``is_guest`` status.
+ - ``admin`` - Users are ordered by ``admin`` status.
+ - ``user_type`` - Users are ordered alphabetically by ``user_type``.
+ - ``deactivated`` - Users are ordered by ``deactivated`` status.
+ - ``shadow_banned`` - Users are ordered by ``shadow_banned`` status.
+ - ``displayname`` - Users are ordered alphabetically by ``displayname``.
+ - ``avatar_url`` - Users are ordered alphabetically by avatar URL.
+
+- ``dir`` - Direction of user order. Either ``f`` for forwards or ``b`` for backwards.
+ Setting this value to ``b`` will reverse the above sort order. Defaults to ``f``.
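+
+For example, the following request lists users ordered by display name, in
+reverse order (an illustrative request)::
+
+    GET /_synapse/admin/v2/users?order_by=displayname&dir=b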
+
+Caution. The database only has indexes on the columns ``name`` and ``created_ts``.
+This means that if a different sort order is used (``is_guest``, ``admin``,
+``user_type``, ``deactivated``, ``shadow_banned``, ``avatar_url`` or ``displayname``),
+this can cause a large load on the database, especially for large environments.
+
+**Response**
+
+The following fields are returned in the JSON response body:
+
+- ``users`` - An array of objects, each containing information about a user.
+ User objects contain the following fields:
+
+ - ``name`` - string - Fully-qualified user ID (ex. ``@user:server.com``).
+ - ``is_guest`` - bool - Status if that user is a guest account.
+ - ``admin`` - bool - Status if that user is a server administrator.
+ - ``user_type`` - string - Type of the user. Normal users are type ``None``.
+ This allows user type specific behaviour. There are also types ``support`` and ``bot``.
+ - ``deactivated`` - bool - Status if that user has been marked as deactivated.
+ - ``shadow_banned`` - bool - Status if that user has been marked as shadow banned.
+ - ``displayname`` - string - The user's display name if they have set one.
+ - ``avatar_url`` - string - The user's avatar URL if they have set one.
+
+- ``next_token`` - string representing a positive integer - Indication for pagination. See above.
+- ``total`` - integer - Total number of users.
+
+
Query current sessions for a user
=================================
@@ -823,3 +864,118 @@ The following parameters should be set in the URL:
- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
be local.
+
+Override ratelimiting for users
+===============================
+
+This API allows you to override or disable ratelimiting for a specific user.
+There are specific APIs to set, get and delete a ratelimit.
+
+Get status of ratelimit
+-----------------------
+
+The API is::
+
+ GET /_synapse/admin/v1/users/<user_id>/override_ratelimit
+
+To use it, you will need to authenticate by providing an ``access_token`` for a
+server admin: see `README.rst <README.rst>`_.
+
+A response body like the following is returned:
+
+.. code:: json
+
+ {
+ "messages_per_second": 0,
+ "burst_count": 0
+ }
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
+ be local.
+
+**Response**
+
+The following fields are returned in the JSON response body:
+
+- ``messages_per_second`` - integer - The number of actions that can
+  be performed in a second. ``0`` means that ratelimiting is disabled for this user.
+- ``burst_count`` - integer - How many actions can be performed before
+ being limited.
+
+If **no** custom ratelimit is set, an empty JSON dict is returned.
+
+.. code:: json
+
+ {}
+
+Set ratelimit
+-------------
+
+The API is::
+
+ POST /_synapse/admin/v1/users/<user_id>/override_ratelimit
+
+To use it, you will need to authenticate by providing an ``access_token`` for a
+server admin: see `README.rst <README.rst>`_.
+
+A response body like the following is returned:
+
+.. code:: json
+
+ {
+ "messages_per_second": 0,
+ "burst_count": 0
+ }
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
+ be local.
+
+Body parameters:
+
+- ``messages_per_second`` - positive integer, optional. The number of actions that can
+ be performed in a second. Defaults to ``0``.
+- ``burst_count`` - positive integer, optional. How many actions can be performed
+ before being limited. Defaults to ``0``.
+
+To disable ratelimiting for the user, set both values to ``0``.
+
+**Response**
+
+The following fields are returned in the JSON response body:
+
+- ``messages_per_second`` - integer - The number of actions that can
+ be performed in a second.
+- ``burst_count`` - integer - How many actions can be performed before
+ being limited.
+
+Delete ratelimit
+----------------
+
+The API is::
+
+ DELETE /_synapse/admin/v1/users/<user_id>/override_ratelimit
+
+To use it, you will need to authenticate by providing an ``access_token`` for a
+server admin: see `README.rst <README.rst>`_.
+
+An empty JSON dict is returned.
+
+.. code:: json
+
+ {}
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
+ be local.
+
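+These endpoints can be driven with any HTTP client. As a sketch, using the
+Python ``requests`` library (the homeserver URL and admin token are
+placeholders):
+
+.. code:: python
+
+    import requests
+
+    base = "https://matrix.example.com"  # hypothetical homeserver
+    headers = {"Authorization": "Bearer <admin_access_token>"}
+    url = base + "/_synapse/admin/v1/users/@user:server.com/override_ratelimit"
+
+    # Disable ratelimiting for the user entirely.
+    requests.post(
+        url, headers=headers, json={"messages_per_second": 0, "burst_count": 0}
+    )
+
+    # Read back the current override ({} if none is set).
+    print(requests.get(url, headers=headers).json())
+
+    # Remove the override, restoring the server's default ratelimits.
+    requests.delete(url, headers=headers)
+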
diff --git a/docs/code_style.md b/docs/code_style.md
index 190f8ab2..28fb7277 100644
--- a/docs/code_style.md
+++ b/docs/code_style.md
@@ -128,6 +128,9 @@ Some guidelines follow:
will be if no sub-options are enabled).
- Lines should be wrapped at 80 characters.
- Use two-space indents.
+- `true` and `false` are spelt thus (as opposed to `True`, etc.)
+- Use single quotes (`'`) rather than double-quotes (`"`) or backticks
+ (`` ` ``) to refer to configuration options.
Example:
diff --git a/docs/presence_router_module.md b/docs/presence_router_module.md
new file mode 100644
index 00000000..d6566d97
--- /dev/null
+++ b/docs/presence_router_module.md
@@ -0,0 +1,235 @@
+# Presence Router Module
+
+Synapse supports configuring a module that can specify additional users
+(local or remote) that should receive certain presence updates from local
+users.
+
+Note that routing presence via Application Service transactions is not
+currently supported.
+
+The presence routing module is implemented as a Python class, which will
+be imported by the running Synapse.
+
+## Python Presence Router Class
+
+The Python class is instantiated with two objects:
+
+* A configuration object of some type (see below).
+* An instance of `synapse.module_api.ModuleApi`.
+
+It then implements methods related to presence routing.
+
+Note that one method of `ModuleApi` that may be useful is:
+
+```python
+async def ModuleApi.send_local_online_presence_to(users: Iterable[str]) -> None
+```
+
+which can be given a list of local or remote MXIDs to broadcast known, online user
+presence to (for those users that the receiving user is considered interested in).
+It does not include state for users who are currently offline, and it can only be
+called on workers that support sending federation.
+
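+For example, from within an async method of your module (a sketch; here
+`module_api` is the `ModuleApi` instance your class was constructed with):
+
+```python
+await module_api.send_local_online_presence_to(
+    ["@alice:example.org", "@bob:matrix.org"]
+)
+```
+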
+### Module structure
+
+Below is a list of possible methods that can be implemented, and whether they are
+required.
+
+#### `parse_config`
+
+```python
+def parse_config(config_dict: dict) -> Any
+```
+
+**Required.** A static method that is passed a dictionary of config options, and
+ should return a validated config object. This method is described further in
+ [Configuration](#configuration).
+
+#### `get_users_for_states`
+
+```python
+async def get_users_for_states(
+ self,
+ state_updates: Iterable[UserPresenceState],
+) -> Dict[str, Set[UserPresenceState]]:
+```
+
+**Required.** An asynchronous method that is passed an iterable of user presence
+state. This method can determine whether a given presence update should be sent to certain
+users. It does this by returning a dictionary with keys representing local or remote
+Matrix User IDs, and values being a Python set
+of `synapse.handlers.presence.UserPresenceState` instances.
+
+Synapse will then attempt to send the specified presence updates to each user when
+possible.
+
+#### `get_interested_users`
+
+```python
+async def get_interested_users(self, user_id: str) -> Union[Set[str], str]
+```
+
+**Required.** An asynchronous method that is passed a single Matrix User ID. This
+method is expected to return the users whose presence the passed-in user may be
+interested in. Returned users may be local or remote. The presence routed as a result of
+what this method returns is sent in addition to the updates already sent between users
+that share a room together. Presence updates are deduplicated.
+
+This method should return a Python set of Matrix User IDs, or the object
+`synapse.events.presence_router.PresenceRouter.ALL_USERS` to indicate that the passed
+user should receive presence information for *all* known users.
+
+For clarity, if the user `@alice:example.org` is passed to this method, and the Set
+`{"@bob:example.com", "@charlie:somewhere.org"}` is returned, this signifies that Alice
+should receive presence updates sent by Bob and Charlie, regardless of whether these
+users share a room.
+
+### Example
+
+Below is an example implementation of a presence router class.
+
+```python
+from typing import Dict, Iterable, List, Set, Union
+from synapse.events.presence_router import PresenceRouter
+from synapse.handlers.presence import UserPresenceState
+from synapse.module_api import ModuleApi
+
+class PresenceRouterConfig:
+ def __init__(self):
+ # Config options with their defaults
+ # A list of users to always send all user presence updates to
+ self.always_send_to_users = [] # type: List[str]
+
+ # A list of users to ignore presence updates for. Does not affect
+ # shared-room presence relationships
+ self.blacklisted_users = [] # type: List[str]
+
+class ExamplePresenceRouter:
+ """An example implementation of synapse.presence_router.PresenceRouter.
+ Supports routing all presence to a configured set of users, or a subset
+ of presence from certain users to members of certain rooms.
+
+ Args:
+ config: A configuration object.
+ module_api: An instance of Synapse's ModuleApi.
+ """
+ def __init__(self, config: PresenceRouterConfig, module_api: ModuleApi):
+ self._config = config
+ self._module_api = module_api
+
+ @staticmethod
+ def parse_config(config_dict: dict) -> PresenceRouterConfig:
+ """Parse a configuration dictionary from the homeserver config, do
+ some validation and return a typed PresenceRouterConfig.
+
+ Args:
+ config_dict: The configuration dictionary.
+
+ Returns:
+ A validated config object.
+ """
+ # Initialise a typed config object
+ config = PresenceRouterConfig()
+ always_send_to_users = config_dict.get("always_send_to_users", [])
+ blacklisted_users = config_dict.get("blacklisted_users", [])
+
+ # Do some validation of config options... otherwise raise a
+ # synapse.config.ConfigError.
+ config.always_send_to_users = always_send_to_users
+ config.blacklisted_users = blacklisted_users
+
+ return config
+
+ async def get_users_for_states(
+ self,
+ state_updates: Iterable[UserPresenceState],
+ ) -> Dict[str, Set[UserPresenceState]]:
+ """Given an iterable of user presence updates, determine where each one
+ needs to go. Returned results will not affect presence updates that are
+ sent between users who share a room.
+
+ Args:
+ state_updates: An iterable of user presence state updates.
+
+ Returns:
+ A dictionary of user_id -> set of UserPresenceState that the user should
+ receive.
+ """
+ destination_users = {} # type: Dict[str, Set[UserPresenceState]]
+
+ # Ignore any updates for blacklisted users
+ desired_updates = set()
+ for update in state_updates:
+ if update.state_key not in self._config.blacklisted_users:
+ desired_updates.add(update)
+
+ # Send all presence updates to specific users
+ for user_id in self._config.always_send_to_users:
+ destination_users[user_id] = desired_updates
+
+ return destination_users
+
+ async def get_interested_users(
+ self,
+ user_id: str,
+ ) -> Union[Set[str], str]:
+ """
+ Retrieve a list of users that `user_id` is interested in receiving the
+ presence of. This will be in addition to those they share a room with.
+ Optionally, the object PresenceRouter.ALL_USERS can be returned to indicate
+ that this user should receive all incoming local and remote presence updates.
+
+ Note that this method will only be called for local users.
+
+ Args:
+ user_id: A user requesting presence updates.
+
+ Returns:
+ A set of user IDs to return additional presence updates for, or
+ PresenceRouter.ALL_USERS to return presence updates for all other users.
+ """
+ if user_id in self._config.always_send_to_users:
+ return PresenceRouter.ALL_USERS
+
+ return set()
+```
+
+#### A note on `get_users_for_states` and `get_interested_users`
+
+Both of these methods are effectively two sides of the same coin. The logic
+regarding which users should receive updates for other users should be the same
+between them.
+
+`get_users_for_states` is called when presence updates come in from either federation
+or local users, and is used to either direct local presence to remote users, or to
+wake up the sync streams of local users to collect remote presence.
+
+In contrast, `get_interested_users` is used to determine the users that presence should
+be fetched for when a local user is syncing. This presence is then retrieved, before
+being fed through `get_users_for_states` once again, with only the syncing user's
+routing information pulled from the resulting dictionary.
+
+Their routing logic should thus line up, else you may run into unintended behaviour.
+
+## Configuration
+
+Once you've crafted your module and installed it into the same Python environment as
+Synapse, amend your homeserver config file with the following.
+
+```yaml
+presence:
+ routing_module:
+ module: my_module.ExamplePresenceRouter
+ config:
+ # Any configuration options for your module. The below is an example
+ # of setting options for ExamplePresenceRouter.
+ always_send_to_users: ["@presence_gobbler:example.org"]
+ blacklisted_users:
+ - "@alice:example.com"
+ - "@bob:example.com"
+ ...
+```
+
+The contents of `config` will be passed as a Python dictionary to the static
+`parse_config` method of your class. The object returned by this method will
+then be passed to the `__init__` method of your module as `config`.
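+
+Conceptually, the wiring looks something like this (a sketch, not Synapse's
+actual module-loading code):
+
+```python
+config = my_module.ExamplePresenceRouter.parse_config(config_dict)
+router = my_module.ExamplePresenceRouter(config=config, module_api=module_api)
+```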
diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml
index 17cda71a..e0350279 100644
--- a/docs/sample_config.yaml
+++ b/docs/sample_config.yaml
@@ -82,9 +82,28 @@ pid_file: DATADIR/homeserver.pid
#
#soft_file_limit: 0
-# Set to false to disable presence tracking on this homeserver.
+# Presence tracking allows users to see the state (e.g. online/offline)
+# of other local and remote users.
#
-#use_presence: false
+presence:
+ # Uncomment to disable presence tracking on this homeserver. This option
+ # replaces the previous top-level 'use_presence' option.
+ #
+ #enabled: false
+
+ # Presence routers are third-party modules that can specify additional logic
+# for where presence updates from users are routed.
+ #
+ presence_router:
+ # The custom module's class. Uncomment to use a custom presence router module.
+ #
+ #module: "my_custom_router.PresenceRouter"
+
+ # Configuration options of the custom module. Refer to your module's
+ # documentation for available options.
+ #
+ #config:
+ # example_option: 'something'
# Whether to require authentication to retrieve profile data (avatars,
# display names) of other users through the client API. Defaults to
@@ -1156,69 +1175,6 @@ url_preview_accept_language:
#
#enable_registration: false
-# Optional account validity configuration. This allows for accounts to be denied
-# any request after a given period.
-#
-# Once this feature is enabled, Synapse will look for registered users without an
-# expiration date at startup and will add one to every account it found using the
-# current settings at that time.
-# This means that, if a validity period is set, and Synapse is restarted (it will
-# then derive an expiration date from the current validity period), and some time
-# after that the validity period changes and Synapse is restarted, the users'
-# expiration dates won't be updated unless their account is manually renewed. This
-# date will be randomly selected within a range [now + period - d ; now + period],
-# where d is equal to 10% of the validity period.
-#
-account_validity:
- # The account validity feature is disabled by default. Uncomment the
- # following line to enable it.
- #
- #enabled: true
-
- # The period after which an account is valid after its registration. When
- # renewing the account, its validity period will be extended by this amount
- # of time. This parameter is required when using the account validity
- # feature.
- #
- #period: 6w
-
- # The amount of time before an account's expiry date at which Synapse will
- # send an email to the account's email address with a renewal link. By
- # default, no such emails are sent.
- #
- # If you enable this setting, you will also need to fill out the 'email' and
- # 'public_baseurl' configuration sections.
- #
- #renew_at: 1w
-
- # The subject of the email sent out with the renewal link. '%(app)s' can be
- # used as a placeholder for the 'app_name' parameter from the 'email'
- # section.
- #
- # Note that the placeholder must be written '%(app)s', including the
- # trailing 's'.
- #
- # If this is not set, a default value is used.
- #
- #renew_email_subject: "Renew your %(app)s account"
-
- # Directory in which Synapse will try to find templates for the HTML files to
- # serve to the user when trying to renew an account. If not set, default
- # templates from within the Synapse package will be used.
- #
- #template_dir: "res/templates"
-
- # File within 'template_dir' giving the HTML to be displayed to the user after
- # they successfully renewed their account. If not set, default text is used.
- #
- #account_renewed_html_path: "account_renewed.html"
-
- # File within 'template_dir' giving the HTML to be displayed when the user
- # tries to renew an account with an invalid renewal token. If not set,
- # default text is used.
- #
- #invalid_token_html_path: "invalid_token.html"
-
# Time that a user's session remains valid for, after they log in.
#
# Note that this is not currently compatible with guest logins.
@@ -1246,9 +1202,9 @@ account_validity:
#
#allowed_local_3pids:
# - medium: email
-# pattern: '.*@matrix\.org'
+# pattern: '^[^@]+@matrix\.org$'
# - medium: email
-# pattern: '.*@vector\.im'
+# pattern: '^[^@]+@vector\.im$'
# - medium: msisdn
# pattern: '\+44'
@@ -1413,6 +1369,91 @@ account_threepid_delegates:
#auto_join_rooms_for_guests: false
+## Account Validity ##
+
+# Optional account validity configuration. This allows for accounts to be denied
+# any request after a given period.
+#
+# Once this feature is enabled, Synapse will look for registered users without an
+# expiration date at startup and will add one to every account it found using the
+# current settings at that time.
+# This means that, if a validity period is set, and Synapse is restarted (it will
+# then derive an expiration date from the current validity period), and some time
+# after that the validity period changes and Synapse is restarted, the users'
+# expiration dates won't be updated unless their account is manually renewed. This
+# date will be randomly selected within a range [now + period - d ; now + period],
+# where d is equal to 10% of the validity period.
+#
+account_validity:
+ # The account validity feature is disabled by default. Uncomment the
+ # following line to enable it.
+ #
+ #enabled: true
+
+ # The period after which an account is valid after its registration. When
+ # renewing the account, its validity period will be extended by this amount
+ # of time. This parameter is required when using the account validity
+ # feature.
+ #
+ #period: 6w
+
+ # The amount of time before an account's expiry date at which Synapse will
+ # send an email to the account's email address with a renewal link. By
+ # default, no such emails are sent.
+ #
+ # If you enable this setting, you will also need to fill out the 'email' and
+ # 'public_baseurl' configuration sections.
+ #
+ #renew_at: 1w
+
+ # The subject of the email sent out with the renewal link. '%(app)s' can be
+ # used as a placeholder for the 'app_name' parameter from the 'email'
+ # section.
+ #
+ # Note that the placeholder must be written '%(app)s', including the
+ # trailing 's'.
+ #
+ # If this is not set, a default value is used.
+ #
+ #renew_email_subject: "Renew your %(app)s account"
+
+ # Directory in which Synapse will try to find templates for the HTML files to
+ # serve to the user when trying to renew an account. If not set, default
+ # templates from within the Synapse package will be used.
+ #
+ # The currently available templates are:
+ #
+ # * account_renewed.html: Displayed to the user after they have successfully
+ # renewed their account.
+ #
+ # * account_previously_renewed.html: Displayed to the user if they attempt to
+ # renew their account with a token that is valid, but that has already
+ # been used. In this case the account is not renewed again.
+ #
+ # * invalid_token.html: Displayed to the user when they try to renew an account
+ # with an unknown or invalid renewal token.
+ #
+ # See https://github.com/matrix-org/synapse/tree/master/synapse/res/templates for
+ # default template contents.
+ #
+ # The file name of some of these templates can be configured below for legacy
+ # reasons.
+ #
+ #template_dir: "res/templates"
+
+ # A custom file name for the 'account_renewed.html' template.
+ #
+ # If not set, the file is assumed to be named "account_renewed.html".
+ #
+ #account_renewed_html_path: "account_renewed.html"
+
+ # A custom file name for the 'invalid_token.html' template.
+ #
+ # If not set, the file is assumed to be named "invalid_token.html".
+ #
+ #invalid_token_html_path: "invalid_token.html"
+
+
## Metrics ###
# Enable collection and rendering of performance metrics
@@ -1451,14 +1492,31 @@ metrics_flags:
## API Configuration ##
-# A list of event types that will be included in the room_invite_state
+# Controls for the state that is shared with users who receive an invite
+# to a room
#
-#room_invite_state_types:
-# - "m.room.join_rules"
-# - "m.room.canonical_alias"
-# - "m.room.avatar"
-# - "m.room.encryption"
-# - "m.room.name"
+room_prejoin_state:
+ # By default, the following state event types are shared with users who
+ # receive invites to the room:
+ #
+ # - m.room.join_rules
+ # - m.room.canonical_alias
+ # - m.room.avatar
+ # - m.room.encryption
+ # - m.room.name
+ #
+ # Uncomment the following to disable these defaults (so that only the event
+ # types listed in 'additional_event_types' are shared). Defaults to 'false'.
+ #
+ #disable_default_event_types: true
+
+ # Additional state event types to share with users when they are invited
+ # to a room.
+ #
+ # By default, this list is empty (so only the default event types are shared).
+ #
+ #additional_event_types:
+ # - org.example.custom.event.type
# A list of application service config files to use
@@ -1842,7 +1900,7 @@ saml2_config:
# sub-properties:
#
# module: The class name of a custom mapping module. Default is
-# 'synapse.handlers.oidc_handler.JinjaOidcMappingProvider'.
+# 'synapse.handlers.oidc.JinjaOidcMappingProvider'.
# See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers
# for information on implementing a custom mapping provider.
#
diff --git a/docs/sso_mapping_providers.md b/docs/sso_mapping_providers.md
index e1d6ede7..50020d1a 100644
--- a/docs/sso_mapping_providers.md
+++ b/docs/sso_mapping_providers.md
@@ -106,7 +106,7 @@ A custom mapping provider must specify the following methods:
Synapse has a built-in OpenID mapping provider if a custom provider isn't
specified in the config. It is located at
-[`synapse.handlers.oidc_handler.JinjaOidcMappingProvider`](../synapse/handlers/oidc_handler.py).
+[`synapse.handlers.oidc.JinjaOidcMappingProvider`](../synapse/handlers/oidc.py).
## SAML Mapping Providers
@@ -190,4 +190,4 @@ A custom mapping provider must specify the following methods:
Synapse has a built-in SAML mapping provider if a custom provider isn't
specified in the config. It is located at
-[`synapse.handlers.saml_handler.DefaultSamlMappingProvider`](../synapse/handlers/saml_handler.py).
+[`synapse.handlers.saml.DefaultSamlMappingProvider`](../synapse/handlers/saml.py).
diff --git a/mypy.ini b/mypy.ini
index 3ae5d457..a40f705b 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -8,6 +8,7 @@ show_traceback = True
mypy_path = stubs
warn_unreachable = True
local_partial_types = True
+no_implicit_optional = True
# To find all folders that pass mypy you run:
#
@@ -40,7 +41,6 @@ files =
synapse/push,
synapse/replication,
synapse/rest,
- synapse/secrets.py,
synapse/server.py,
synapse/server_notices,
synapse/spam_checker_api,
diff --git a/pyproject.toml b/pyproject.toml
index cd880d4e..8bca1fa4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -35,7 +35,7 @@
showcontent = true
[tool.black]
-target-version = ['py35']
+target-version = ['py36']
exclude = '''
(
diff --git a/scripts-dev/build_debian_packages b/scripts-dev/build_debian_packages
index d0685c8b..07d018db 100755
--- a/scripts-dev/build_debian_packages
+++ b/scripts-dev/build_debian_packages
@@ -18,14 +18,13 @@ import threading
from concurrent.futures import ThreadPoolExecutor
DISTS = (
- "debian:stretch",
"debian:buster",
"debian:bullseye",
"debian:sid",
- "ubuntu:xenial",
- "ubuntu:bionic",
- "ubuntu:focal",
- "ubuntu:groovy",
+ "ubuntu:bionic", # 18.04 LTS (our EOL forced by Py36 on 2021-12-23)
+ "ubuntu:focal", # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
+ "ubuntu:groovy", # 20.10 (EOL 2021-07-07)
+ "ubuntu:hirsute", # 21.04 (EOL 2022-01-05)
)
DESC = '''\
@@ -43,7 +42,7 @@ class Builder(object):
self._lock = threading.Lock()
self._failed = False
- def run_build(self, dist):
+ def run_build(self, dist, skip_tests=False):
"""Build deb for a single distribution"""
if self._failed:
@@ -51,13 +50,13 @@ class Builder(object):
raise Exception("failed")
try:
- self._inner_build(dist)
+ self._inner_build(dist, skip_tests)
except Exception as e:
print("build of %s failed: %s" % (dist, e), file=sys.stderr)
self._failed = True
raise
- def _inner_build(self, dist):
+ def _inner_build(self, dist, skip_tests=False):
projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
os.chdir(projdir)
@@ -101,6 +100,7 @@ class Builder(object):
"--volume=" + debsdir + ":/debs",
"-e", "TARGET_USERID=%i" % (os.getuid(), ),
"-e", "TARGET_GROUPID=%i" % (os.getgid(), ),
+ "-e", "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
"dh-venv-builder:" + tag,
], stdout=stdout, stderr=subprocess.STDOUT)
@@ -124,7 +124,7 @@ class Builder(object):
self.active_containers.remove(c)
-def run_builds(dists, jobs=1):
+def run_builds(dists, jobs=1, skip_tests=False):
builder = Builder(redirect_stdout=(jobs > 1))
def sig(signum, _frame):
@@ -133,7 +133,7 @@ def run_builds(dists, jobs=1):
signal.signal(signal.SIGINT, sig)
with ThreadPoolExecutor(max_workers=jobs) as e:
- res = e.map(builder.run_build, dists)
+ res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists)
# make sure we consume the iterable so that exceptions are raised.
for r in res:
@@ -149,8 +149,12 @@ if __name__ == '__main__':
help='specify the number of builds to run in parallel',
)
parser.add_argument(
+ '--no-check', action='store_true',
+ help='skip running tests after building',
+ )
+ parser.add_argument(
'dist', nargs='*', default=DISTS,
help='a list of distributions to build for. Default: %(default)s',
)
args = parser.parse_args()
- run_builds(dists=args.dist, jobs=args.jobs)
+ run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh
index 3cde53f5..1612ab52 100755
--- a/scripts-dev/complement.sh
+++ b/scripts-dev/complement.sh
@@ -1,22 +1,49 @@
-#! /bin/bash -eu
+#!/usr/bin/env bash
# This script is designed for developers who want to test their code
# against Complement.
#
# It makes a Synapse image which represents the current checkout,
-# then downloads Complement and runs it with that image.
+# builds a synapse-complement image on top, then runs tests with it.
+#
+# By default the script will fetch the latest Complement master branch and
+# run tests with that. This can be overridden to use a custom Complement
+# checkout by setting the COMPLEMENT_DIR environment variable to the
+# filepath of a local Complement checkout.
+#
+# A regular expression of test method names can be supplied as the first
+# argument to the script. Complement will then only run those tests. If
+# no regex is supplied, all tests are run. For example:
+#
+# ./complement.sh "TestOutboundFederation(Profile|Send)"
+#
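+# To run against a local Complement checkout instead of fetching the latest
+# master, for example:
+#
+#   COMPLEMENT_DIR=../complement ./complement.sh
+#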
+
+# Exit immediately if a command exits with a non-zero status
+set -e
+# Change to the repository root
cd "$(dirname $0)/.."
+# Check for a user-specified Complement checkout
+if [[ -z "$COMPLEMENT_DIR" ]]; then
+ echo "COMPLEMENT_DIR not set. Fetching the latest Complement checkout..."
+ wget -Nq https://github.com/matrix-org/complement/archive/master.tar.gz
+ tar -xzf master.tar.gz
+ COMPLEMENT_DIR=complement-master
+ echo "Checkout available at 'complement-master'"
+fi
+
# Build the base Synapse image from the local checkout
-docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
+docker build -t matrixdotorg/synapse -f docker/Dockerfile .
+# Build the Synapse monolith image from Complement, based on the above image we just built
+docker build -t complement-synapse -f "$COMPLEMENT_DIR/dockerfiles/Synapse.Dockerfile" "$COMPLEMENT_DIR/dockerfiles"
-# Download Complement
-wget -N https://github.com/matrix-org/complement/archive/master.tar.gz
-tar -xzf master.tar.gz
-cd complement-master
+cd "$COMPLEMENT_DIR"
-# Build the Synapse image from Complement, based on the above image we just built
-docker build -t complement-synapse -f dockerfiles/Synapse.Dockerfile ./dockerfiles
+EXTRA_COMPLEMENT_ARGS=""
+if [[ -n "$1" ]]; then
+ # A test name regex has been set, supply it to Complement
+ EXTRA_COMPLEMENT_ARGS+="-run $1 "
+fi
-# Run the tests on the resulting image!
-COMPLEMENT_BASE_IMAGE=complement-synapse go test -v -count=1 ./tests
+# Run the tests!
+COMPLEMENT_BASE_IMAGE=complement-synapse go test -v -tags synapse_blacklist,msc2946,msc3083 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests
diff --git a/scripts-dev/definitions.py b/scripts-dev/definitions.py
index 313860df..c82ddd96 100755
--- a/scripts-dev/definitions.py
+++ b/scripts-dev/definitions.py
@@ -140,7 +140,7 @@ if __name__ == "__main__":
definitions = {}
for directory in args.directories:
- for root, dirs, files in os.walk(directory):
+ for root, _, files in os.walk(directory):
for filename in files:
if filename.endswith(".py"):
filepath = os.path.join(root, filename)
diff --git a/scripts-dev/list_url_patterns.py b/scripts-dev/list_url_patterns.py
index 26ad7c67..e85420de 100755
--- a/scripts-dev/list_url_patterns.py
+++ b/scripts-dev/list_url_patterns.py
@@ -48,7 +48,7 @@ args = parser.parse_args()
for directory in args.directories:
- for root, dirs, files in os.walk(directory):
+ for root, _, files in os.walk(directory):
for filename in files:
if filename.endswith(".py"):
filepath = os.path.join(root, filename)
diff --git a/scripts-dev/mypy_synapse_plugin.py b/scripts-dev/mypy_synapse_plugin.py
index 18df6830..1217e148 100644
--- a/scripts-dev/mypy_synapse_plugin.py
+++ b/scripts-dev/mypy_synapse_plugin.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/scripts-dev/release.py b/scripts-dev/release.py
new file mode 100755
index 00000000..1042fa48
--- /dev/null
+++ b/scripts-dev/release.py
@@ -0,0 +1,244 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An interactive script for doing a release. See `run()` below.
+"""
+
+import subprocess
+import sys
+from typing import Optional
+
+import click
+import git
+from packaging import version
+from redbaron import RedBaron
+
+
+@click.command()
+def run():
+ """An interactive script to walk through the initial stages of creating a
+ release, including creating release branch, updating changelog and pushing to
+ GitHub.
+
+ Requires the dev dependencies be installed, which can be done via:
+
+ pip install -e .[dev]
+
+ """
+
+ # Make sure we're in a git repo.
+ try:
+ repo = git.Repo()
+ except git.InvalidGitRepositoryError:
+ raise click.ClickException("Not in Synapse repo.")
+
+ if repo.is_dirty():
+ raise click.ClickException("Uncommitted changes exist.")
+
+ click.secho("Updating git repo...")
+ repo.remote().fetch()
+
+ # Parse the AST and load the `__version__` node so that we can edit it
+ # later.
+ with open("synapse/__init__.py") as f:
+ red = RedBaron(f.read())
+
+ version_node = None
+ for node in red:
+ if node.type != "assignment":
+ continue
+
+ if node.target.type != "name":
+ continue
+
+ if node.target.value != "__version__":
+ continue
+
+ version_node = node
+ break
+
+ if not version_node:
+ print("Failed to find '__version__' definition in synapse/__init__.py")
+ sys.exit(1)
+
+ # Parse the current version.
+ current_version = version.parse(version_node.value.value.strip('"'))
+ assert isinstance(current_version, version.Version)
+
+ # Figure out what sort of release we're doing and calculate the new version.
+ rc = click.confirm("RC", default=True)
+ if current_version.pre:
+ # If the current version is an RC we don't need to bump any of the
+ # version numbers (other than the RC number).
+ base_version = "{}.{}.{}".format(
+ current_version.major,
+ current_version.minor,
+ current_version.micro,
+ )
+
+ if rc:
+ new_version = "{}.{}.{}rc{}".format(
+ current_version.major,
+ current_version.minor,
+ current_version.micro,
+ current_version.pre[1] + 1,
+ )
+ else:
+ new_version = base_version
+ else:
+ # If this is a new release cycle then we need to know if it's a major
+ # version bump or a hotfix.
+ release_type = click.prompt(
+ "Release type",
+ type=click.Choice(("major", "hotfix")),
+ show_choices=True,
+ default="major",
+ )
+
+ if release_type == "major":
+ base_version = new_version = "{}.{}.{}".format(
+ current_version.major,
+ current_version.minor + 1,
+ 0,
+ )
+ if rc:
+ new_version = "{}.{}.{}rc1".format(
+ current_version.major,
+ current_version.minor + 1,
+ 0,
+ )
+
+ else:
+ base_version = new_version = "{}.{}.{}".format(
+ current_version.major,
+ current_version.minor,
+ current_version.micro + 1,
+ )
+ if rc:
+ new_version = "{}.{}.{}rc1".format(
+ current_version.major,
+ current_version.minor,
+ current_version.micro + 1,
+ )
+
+ # Confirm the calculated version is OK.
+ if not click.confirm(f"Create new version: {new_version}?", default=True):
+ click.get_current_context().abort()
+
+ # Switch to the release branch.
+ release_branch_name = f"release-v{base_version}"
+ release_branch = find_ref(repo, release_branch_name)
+ if release_branch:
+ if release_branch.is_remote():
+ # If the release branch only exists on the remote we check it out
+ # locally.
+ repo.git.checkout(release_branch_name)
+ release_branch = repo.active_branch
+ else:
+ # If a branch doesn't exist we create one. We ask which branch it
+ # should be based on, defaulting to sensible values depending on the
+ # release type.
+ if current_version.is_prerelease:
+ default = release_branch_name
+ elif release_type == "major":
+ default = "develop"
+ else:
+ default = "master"
+
+ branch_name = click.prompt(
+ "Which branch should the release be based on?", default=default
+ )
+
+ base_branch = find_ref(repo, branch_name)
+ if not base_branch:
+ print(f"Could not find base branch {branch_name}!")
+ click.get_current_context().abort()
+
+ # Check out the base branch and ensure it's up to date
+ repo.head.reference = base_branch
+ repo.head.reset(index=True, working_tree=True)
+ if not base_branch.is_remote():
+ update_branch(repo)
+
+ # Create the new release branch
+ release_branch = repo.create_head(release_branch_name, commit=base_branch)
+
+ # Switch to the release branch and ensure it's up to date.
+ repo.git.checkout(release_branch_name)
+ update_branch(repo)
+
+ # Update the `__version__` variable and write it back to the file.
+ version_node.value = '"' + new_version + '"'
+ with open("synapse/__init__.py", "w") as f:
+ f.write(red.dumps())
+
+ # Generate changelogs
+ subprocess.run("python3 -m towncrier", shell=True)
+
+ # Generate Debian changelogs if it's not an RC.
+ if not rc:
+ subprocess.run(
+ f'dch -M -v {new_version} "New synapse release {new_version}."', shell=True
+ )
+ subprocess.run('dch -M -r -D stable ""', shell=True)
+
+ # Show the user the changes and ask if they want to edit the change log.
+ repo.git.add("-u")
+ subprocess.run("git diff --cached", shell=True)
+
+ if click.confirm("Edit changelog?", default=False):
+ click.edit(filename="CHANGES.md")
+
+ # Commit the changes.
+ repo.git.add("-u")
+ repo.git.commit(f"-m {new_version}")
+
+ # We give the option to bail here in case the user wants to make sure things
+ # are OK before pushing.
+ if not click.confirm("Push branch to github?", default=True):
+ print("")
+ print("Run when ready to push:")
+ print("")
+ print(f"\tgit push -u {repo.remote().name} {repo.active_branch.name}")
+ print("")
+ sys.exit(0)
+
+ # Otherwise, push and open the changelog in the browser.
+ repo.git.push("-u", repo.remote().name, repo.active_branch.name)
+
+ click.launch(
+ f"https://github.com/matrix-org/synapse/blob/{repo.active_branch.name}/CHANGES.md"
+ )
+
+
+def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
+ """Find the branch/ref, looking first locally then in the remote."""
+ if ref_name in repo.refs:
+ return repo.refs[ref_name]
+ elif ref_name in repo.remote().refs:
+ return repo.remote().refs[ref_name]
+ else:
+ return None
+
+
+def update_branch(repo: git.Repo):
+ """Ensure branch is up to date if it has a remote"""
+ if repo.active_branch.tracking_branch():
+ repo.git.merge(repo.active_branch.tracking_branch().name)
+
+
+if __name__ == "__main__":
+ run()
diff --git a/scripts-dev/sign_json b/scripts-dev/sign_json
index 44553fb7..4a43d3f2 100755
--- a/scripts-dev/sign_json
+++ b/scripts-dev/sign_json
@@ -1,6 +1,5 @@
#!/usr/bin/env python
#
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/scripts-dev/update_database b/scripts-dev/update_database
index 56365e2b..87f709b6 100755
--- a/scripts-dev/update_database
+++ b/scripts-dev/update_database
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/scripts/export_signing_key b/scripts/export_signing_key
index 8aec9d80..0ed167ea 100755
--- a/scripts/export_signing_key
+++ b/scripts/export_signing_key
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/scripts/generate_log_config b/scripts/generate_log_config
index a13a5634..e72a0daf 100755
--- a/scripts/generate_log_config
+++ b/scripts/generate_log_config
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/scripts/generate_signing_key.py b/scripts/generate_signing_key.py
index 16d7c4f3..07df25a8 100755
--- a/scripts/generate_signing_key.py
+++ b/scripts/generate_signing_key.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/scripts/move_remote_media_to_new_store.py b/scripts/move_remote_media_to_new_store.py
index 8477955a..875aa478 100755
--- a/scripts/move_remote_media_to_new_store.py
+++ b/scripts/move_remote_media_to_new_store.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/scripts/register_new_matrix_user b/scripts/register_new_matrix_user
index 8b9d3087..00104b9d 100755
--- a/scripts/register_new_matrix_user
+++ b/scripts/register_new_matrix_user
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/scripts/synapse_port_db b/scripts/synapse_port_db
index 58edf6af..f0c93d52 100755
--- a/scripts/synapse_port_db
+++ b/scripts/synapse_port_db
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -635,8 +634,11 @@ class Porter(object):
"device_inbox_sequence", ("device_inbox", "device_federation_outbox")
)
await self._setup_sequence(
- "account_data_sequence", ("room_account_data", "room_tags_revisions", "account_data"))
- await self._setup_sequence("receipts_sequence", ("receipts_linearized", ))
+ "account_data_sequence",
+ ("room_account_data", "room_tags_revisions", "account_data"),
+ )
+ await self._setup_sequence("receipts_sequence", ("receipts_linearized",))
+ await self._setup_sequence("presence_stream_sequence", ("presence_stream",))
await self._setup_auth_chain_sequence()
# Step 3. Get tables.
diff --git a/setup.cfg b/setup.cfg
index 7329eed2..e5ceb7ed 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -18,16 +18,14 @@ ignore =
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
-# B00*: Subsection of the bugbear suite (TODO: add in remaining fixes)
-ignore=W503,W504,E203,E731,E501,B006,B007,B008
+ignore=W503,W504,E203,E731,E501
[isort]
line_length = 88
-sections=FUTURE,STDLIB,COMPAT,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER
+sections=FUTURE,STDLIB,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER
default_section=THIRDPARTY
known_first_party = synapse
known_tests=tests
-known_compat = mock
known_twisted=twisted,OpenSSL
multi_line_output=3
include_trailing_comma=true
diff --git a/setup.py b/setup.py
index 29e9971d..e2e48876 100755
--- a/setup.py
+++ b/setup.py
@@ -103,6 +103,13 @@ CONDITIONAL_REQUIREMENTS["lint"] = [
"flake8",
]
+CONDITIONAL_REQUIREMENTS["dev"] = CONDITIONAL_REQUIREMENTS["lint"] + [
+ # The following are used by the release script
+ "click==7.1.2",
+ "redbaron==0.9.2",
+ "GitPython==3.1.14",
+]
+
CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
# Dependencies which are exclusively required by unit test code. This is
@@ -110,7 +117,7 @@ CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
# Tests assume that all optional dependencies are installed.
#
# parameterized_class decorator was introduced in parameterized 0.7.0
-CONDITIONAL_REQUIREMENTS["test"] = ["mock>=2.0", "parameterized>=0.7.0"]
+CONDITIONAL_REQUIREMENTS["test"] = ["parameterized>=0.7.0"]
setup(
name="matrix-synapse",
@@ -123,13 +130,12 @@ setup(
zip_safe=False,
long_description=long_description,
long_description_content_type="text/x-rst",
- python_requires="~=3.5",
+ python_requires="~=3.6",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Chat",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3 :: Only",
- "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
diff --git a/stubs/frozendict.pyi b/stubs/frozendict.pyi
index 0368ba47..24c6f3af 100644
--- a/stubs/frozendict.pyi
+++ b/stubs/frozendict.pyi
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/stubs/txredisapi.pyi b/stubs/txredisapi.pyi
index 080ca402..c1a06ae0 100644
--- a/stubs/txredisapi.pyi
+++ b/stubs/txredisapi.pyi
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 1d2883ac..ce822ccb 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-9 New Vector Ltd
#
@@ -22,8 +21,8 @@ import os
import sys
# Check that we're not running on an unsupported Python version.
-if sys.version_info < (3, 5):
- print("Synapse requires Python 3.5 or above.")
+if sys.version_info < (3, 6):
+ print("Synapse requires Python 3.6 or above.")
sys.exit(1)
# Twisted and canonicaljson will fail to import when this file is executed to
@@ -48,7 +47,7 @@ try:
except ImportError:
pass
-__version__ = "1.31.0"
+__version__ = "1.33.2"
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
# We import here so that we don't have to install a bunch of deps when
diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py
index dfe26dea..dae986c7 100644
--- a/synapse/_scripts/register_new_matrix_user.py
+++ b/synapse/_scripts/register_new_matrix_user.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector
#
diff --git a/synapse/api/__init__.py b/synapse/api/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/synapse/api/__init__.py
+++ b/synapse/api/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 7d9930ae..efc926d0 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,14 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import List, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
import pymacaroons
from netaddr import IPAddress
from twisted.web.server import Request
-import synapse.types
from synapse import event_auth
from synapse.api.auth_blocking import AuthBlocking
from synapse.api.constants import EventTypes, HistoryVisibility, Membership
@@ -37,11 +35,14 @@ from synapse.http import get_request_user_agent
from synapse.http.site import SynapseRequest
from synapse.logging import opentracing as opentracing
from synapse.storage.databases.main.registration import TokenLookupResult
-from synapse.types import StateMap, UserID
+from synapse.types import Requester, StateMap, UserID, create_requester
from synapse.util.caches.lrucache import LruCache
from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry
from synapse.util.metrics import Measure
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
logger = logging.getLogger(__name__)
@@ -66,9 +67,10 @@ class Auth:
"""
FIXME: This class contains a mix of functions for authenticating users
of our client-server API and authenticating events added to room graphs.
+ The latter should be moved to synapse.handlers.event_auth.EventAuthHandler.
"""
- def __init__(self, hs):
+ def __init__(self, hs: "HomeServer"):
self.hs = hs
self.clock = hs.get_clock()
self.store = hs.get_datastore()
@@ -80,19 +82,21 @@ class Auth:
self._auth_blocking = AuthBlocking(self.hs)
- self._account_validity = hs.config.account_validity
+ self._account_validity_enabled = (
+ hs.config.account_validity.account_validity_enabled
+ )
self._track_appservice_user_ips = hs.config.track_appservice_user_ips
self._macaroon_secret_key = hs.config.macaroon_secret_key
async def check_from_context(
self, room_version: str, event, context, do_sig_check=True
- ):
+ ) -> None:
prev_state_ids = await context.get_prev_state_ids()
auth_events_ids = self.compute_auth_events(
event, prev_state_ids, for_verification=True
)
- auth_events = await self.store.get_events(auth_events_ids)
- auth_events = {(e.type, e.state_key): e for e in auth_events.values()}
+ auth_events_by_id = await self.store.get_events(auth_events_ids)
+ auth_events = {(e.type, e.state_key): e for e in auth_events_by_id.values()}
room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
event_auth.check(
@@ -149,17 +153,11 @@ class Auth:
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
- async def check_host_in_room(self, room_id, host):
+ async def check_host_in_room(self, room_id: str, host: str) -> bool:
with Measure(self.clock, "check_host_in_room"):
- latest_event_ids = await self.store.is_host_joined(room_id, host)
- return latest_event_ids
-
- def can_federate(self, event, auth_events):
- creation_event = auth_events.get((EventTypes.Create, ""))
+ return await self.store.is_host_joined(room_id, host)
- return creation_event.content.get("m.federate", True) is True
-
- def get_public_keys(self, invite_event):
+ def get_public_keys(self, invite_event: EventBase) -> List[Dict[str, Any]]:
return event_auth.get_public_keys(invite_event)
async def get_user_by_req(
@@ -168,7 +166,7 @@ class Auth:
allow_guest: bool = False,
rights: str = "access",
allow_expired: bool = False,
- ) -> synapse.types.Requester:
+ ) -> Requester:
"""Get a registered user's ID.
Args:
@@ -194,7 +192,7 @@ class Auth:
access_token = self.get_access_token_from_request(request)
user_id, app_service = await self._get_appservice_user_id(request)
- if user_id:
+ if user_id and app_service:
if ip_addr and self._track_appservice_user_ips:
await self.store.insert_client_ip(
user_id=user_id,
@@ -204,9 +202,7 @@ class Auth:
device_id="dummy-device", # stubbed
)
- requester = synapse.types.create_requester(
- user_id, app_service=app_service
- )
+ requester = create_requester(user_id, app_service=app_service)
request.requester = user_id
opentracing.set_tag("authenticated_entity", user_id)
@@ -223,7 +219,7 @@ class Auth:
shadow_banned = user_info.shadow_banned
# Deny the request if the user account has expired.
- if self._account_validity.enabled and not allow_expired:
+ if self._account_validity_enabled and not allow_expired:
if await self.store.is_account_expired(
user_info.user_id, self.clock.time_msec()
):
@@ -249,7 +245,7 @@ class Auth:
errcode=Codes.GUEST_ACCESS_FORBIDDEN,
)
- requester = synapse.types.create_requester(
+ requester = create_requester(
user_info.user_id,
token_id,
is_guest,
@@ -269,7 +265,9 @@ class Auth:
except KeyError:
raise MissingClientTokenError()
- async def _get_appservice_user_id(self, request):
+ async def _get_appservice_user_id(
+ self, request: Request
+ ) -> Tuple[Optional[str], Optional[ApplicationService]]:
app_service = self.store.get_app_service_by_token(
self.get_access_token_from_request(request)
)
@@ -281,6 +279,9 @@ class Auth:
if ip_address not in app_service.ip_range_whitelist:
return None, None
+ # This will always be set by the time Twisted calls us.
+ assert request.args is not None
+
if b"user_id" not in request.args:
return app_service.sender, app_service
@@ -385,7 +386,9 @@ class Auth:
logger.warning("Invalid macaroon in auth: %s %s", type(e), e)
raise InvalidClientTokenError("Invalid macaroon passed.")
- def _parse_and_validate_macaroon(self, token, rights="access"):
+ def _parse_and_validate_macaroon(
+ self, token: str, rights: str = "access"
+ ) -> Tuple[str, bool]:
"""Takes a macaroon and tries to parse and validate it. This is cached
if and only if rights == access and there isn't an expiry.
@@ -430,15 +433,16 @@ class Auth:
return user_id, guest
- def validate_macaroon(self, macaroon, type_string, user_id):
+ def validate_macaroon(
+ self, macaroon: pymacaroons.Macaroon, type_string: str, user_id: str
+ ) -> None:
"""
validate that a Macaroon is understood by and was signed by this server.
Args:
- macaroon(pymacaroons.Macaroon): The macaroon to validate
- type_string(str): The kind of token required (e.g. "access",
- "delete_pusher")
- user_id (str): The user_id required
+ macaroon: The macaroon to validate
+ type_string: The kind of token required (e.g. "access", "delete_pusher")
+ user_id: The user_id required
"""
v = pymacaroons.Verifier()
@@ -463,9 +467,7 @@ class Auth:
if not service:
logger.warning("Unrecognised appservice access token.")
raise InvalidClientTokenError()
- request.requester = synapse.types.create_requester(
- service.sender, app_service=service
- )
+ request.requester = create_requester(service.sender, app_service=service)
return service
async def is_server_admin(self, user: UserID) -> bool:
@@ -517,7 +519,7 @@ class Auth:
return auth_ids
- async def check_can_change_room_list(self, room_id: str, user: UserID):
+ async def check_can_change_room_list(self, room_id: str, user: UserID) -> bool:
"""Determine whether the user is allowed to edit the room's entry in the
published room list.
@@ -552,11 +554,11 @@ class Auth:
return user_level >= send_level
@staticmethod
- def has_access_token(request: Request):
+ def has_access_token(request: Request) -> bool:
"""Checks if the request has an access_token.
Returns:
- bool: False if no access_token was given, True otherwise.
+ False if no access_token was given, True otherwise.
"""
# This will always be set by the time Twisted calls us.
assert request.args is not None
@@ -566,13 +568,13 @@ class Auth:
return bool(query_params) or bool(auth_headers)
@staticmethod
- def get_access_token_from_request(request: Request):
+ def get_access_token_from_request(request: Request) -> str:
"""Extracts the access_token from the request.
Args:
request: The http request.
Returns:
- unicode: The access_token
+ The access_token
Raises:
MissingClientTokenError: If there isn't a single access_token in the
request
@@ -647,5 +649,5 @@ class Auth:
% (user_id, room_id),
)
- def check_auth_blocking(self, *args, **kwargs):
- return self._auth_blocking.check_auth_blocking(*args, **kwargs)
+ async def check_auth_blocking(self, *args, **kwargs) -> None:
+ await self._auth_blocking.check_auth_blocking(*args, **kwargs)
diff --git a/synapse/api/auth_blocking.py b/synapse/api/auth_blocking.py
index d8088f52..e6bced93 100644
--- a/synapse/api/auth_blocking.py
+++ b/synapse/api/auth_blocking.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,18 +13,21 @@
# limitations under the License.
import logging
-from typing import Optional
+from typing import TYPE_CHECKING, Optional
from synapse.api.constants import LimitBlockingTypes, UserTypes
from synapse.api.errors import Codes, ResourceLimitError
from synapse.config.server import is_threepid_reserved
from synapse.types import Requester
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
logger = logging.getLogger(__name__)
class AuthBlocking:
- def __init__(self, hs):
+ def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastore()
self._server_notices_mxid = hs.config.server_notices_mxid
@@ -44,7 +46,7 @@ class AuthBlocking:
threepid: Optional[dict] = None,
user_type: Optional[str] = None,
requester: Optional[Requester] = None,
- ):
+ ) -> None:
"""Checks if the user should be rejected for some external reason,
such as monthly active user limiting or global disable flag
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 8f37d2cf..936b6534 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
@@ -18,6 +17,9 @@
"""Contains constants from the specification."""
+# the max size of a (canonical-json-encoded) event
+MAX_PDU_SIZE = 65536
+
# the "depth" field on events is limited to 2**63 - 1
MAX_DEPTH = 2 ** 63 - 1
@@ -59,6 +61,8 @@ class JoinRules:
KNOCK = "knock"
INVITE = "invite"
PRIVATE = "private"
+ # As defined for MSC3083.
+ MSC3083_RESTRICTED = "restricted"
class LoginType:
@@ -71,6 +75,11 @@ class LoginType:
DUMMY = "m.login.dummy"
+# This is used in the `type` parameter for /register when called by
+# an appservice to register a new user.
+APP_SERVICE_REGISTRATION_TYPE = "m.login.application_service"
+
+
class EventTypes:
Member = "m.room.member"
Create = "m.room.create"
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index 2a789ea3..0231c790 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
index 5caf336f..ce49a0ad 100644
--- a/synapse/api/filtering.py
+++ b/synapse/api/filtering.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
diff --git a/synapse/api/presence.py b/synapse/api/presence.py
index b9a8e294..a3bf0348 100644
--- a/synapse/api/presence.py
+++ b/synapse/api/presence.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py
index c3f07bc1..2244b8a3 100644
--- a/synapse/api/ratelimiting.py
+++ b/synapse/api/ratelimiting.py
@@ -17,6 +17,7 @@ from collections import OrderedDict
from typing import Hashable, Optional, Tuple
from synapse.api.errors import LimitExceededError
+from synapse.storage.databases.main import DataStore
from synapse.types import Requester
from synapse.util import Clock
@@ -31,10 +32,13 @@ class Ratelimiter:
burst_count: How many actions that can be performed before being limited.
"""
- def __init__(self, clock: Clock, rate_hz: float, burst_count: int):
+ def __init__(
+ self, store: DataStore, clock: Clock, rate_hz: float, burst_count: int
+ ):
self.clock = clock
self.rate_hz = rate_hz
self.burst_count = burst_count
+ self.store = store
# A ordered dictionary keeping track of actions, when they were last
# performed and how often. Each entry is a mapping from a key of arbitrary type
@@ -46,45 +50,10 @@ class Ratelimiter:
OrderedDict()
) # type: OrderedDict[Hashable, Tuple[float, int, float]]
- def can_requester_do_action(
- self,
- requester: Requester,
- rate_hz: Optional[float] = None,
- burst_count: Optional[int] = None,
- update: bool = True,
- _time_now_s: Optional[int] = None,
- ) -> Tuple[bool, float]:
- """Can the requester perform the action?
-
- Args:
- requester: The requester to key off when rate limiting. The user property
- will be used.
- rate_hz: The long term number of actions that can be performed in a second.
- Overrides the value set during instantiation if set.
- burst_count: How many actions that can be performed before being limited.
- Overrides the value set during instantiation if set.
- update: Whether to count this check as performing the action
- _time_now_s: The current time. Optional, defaults to the current time according
- to self.clock. Only used by tests.
-
- Returns:
- A tuple containing:
- * A bool indicating if they can perform the action now
- * The reactor timestamp for when the action can be performed next.
- -1 if rate_hz is less than or equal to zero
- """
- # Disable rate limiting of users belonging to any AS that is configured
- # not to be rate limited in its registration file (rate_limited: true|false).
- if requester.app_service and not requester.app_service.is_rate_limited():
- return True, -1.0
-
- return self.can_do_action(
- requester.user.to_string(), rate_hz, burst_count, update, _time_now_s
- )
-
- def can_do_action(
+ async def can_do_action(
self,
- key: Hashable,
+ requester: Optional[Requester],
+ key: Optional[Hashable] = None,
rate_hz: Optional[float] = None,
burst_count: Optional[int] = None,
update: bool = True,
@@ -92,9 +61,16 @@ class Ratelimiter:
) -> Tuple[bool, float]:
"""Can the entity (e.g. user or IP address) perform the action?
+ Checks if the user has ratelimiting disabled in the database by looking
+ for null/zero values in the `ratelimit_override` table. (Non-zero
+ values aren't honoured, as they're specific to the event sending
+ ratelimiter, rather than all ratelimiters)
+
Args:
- key: The key we should use when rate limiting. Can be a user ID
- (when sending events), an IP address, etc.
+ requester: The requester that is doing the action, if any. Used to check
+ if the user has ratelimits disabled in the database.
+ key: An arbitrary key used to classify an action. Defaults to the
+ requester's user ID.
rate_hz: The long term number of actions that can be performed in a second.
Overrides the value set during instantiation if set.
burst_count: How many actions can be performed before being limited.
@@ -109,6 +85,30 @@ class Ratelimiter:
* The reactor timestamp for when the action can be performed next.
-1 if rate_hz is less than or equal to zero
"""
+ if key is None:
+ if not requester:
+ raise ValueError("Must supply at least one of `requester` or `key`")
+
+ key = requester.user.to_string()
+
+ if requester:
+ # Disable rate limiting of users belonging to any AS that is configured
+ # not to be rate limited in its registration file (rate_limited: true|false).
+ if requester.app_service and not requester.app_service.is_rate_limited():
+ return True, -1.0
+
+ # Check if ratelimiting has been disabled for the user.
+ #
+ # Note that we don't use the returned rate/burst count, as the table
+ # is specifically for the event sending ratelimiter. Instead, we
+ # only use it to (somewhat cheekily) infer whether the user should
+ # be subject to any rate limiting or not.
+ override = await self.store.get_ratelimit_for_user(
+ requester.authenticated_entity
+ )
+ if override and not override.messages_per_second:
+ return True, -1.0
+
# Override default values if set
time_now_s = _time_now_s if _time_now_s is not None else self.clock.time()
rate_hz = rate_hz if rate_hz is not None else self.rate_hz
@@ -175,9 +175,10 @@ class Ratelimiter:
else:
del self.actions[key]
- def ratelimit(
+ async def ratelimit(
self,
- key: Hashable,
+ requester: Optional[Requester],
+ key: Optional[Hashable] = None,
rate_hz: Optional[float] = None,
burst_count: Optional[int] = None,
update: bool = True,
@@ -185,8 +186,16 @@ class Ratelimiter:
):
"""Checks if an action can be performed. If not, raises a LimitExceededError
+ Checks if the user has ratelimiting disabled in the database by looking
+ for null/zero values in the `ratelimit_override` table. (Non-zero
+ values aren't honoured, as they're specific to the event sending
+ ratelimiter, rather than all ratelimiters)
+
Args:
- key: An arbitrary key used to classify an action
+ requester: The requester that is doing the action, if any. Used to check
+ if the user has ratelimits disabled.
+ key: An arbitrary key used to classify an action. Defaults to the
+ requester's user ID.
rate_hz: The long term number of actions that can be performed in a second.
Overrides the value set during instantiation if set.
burst_count: How many actions can be performed before being limited.
@@ -201,7 +210,8 @@ class Ratelimiter:
"""
time_now_s = _time_now_s if _time_now_s is not None else self.clock.time()
- allowed, time_allowed = self.can_do_action(
+ allowed, time_allowed = await self.can_do_action(
+ requester,
key,
rate_hz=rate_hz,
burst_count=burst_count,
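Taken together, the Ratelimiter now takes an optional Requester, defaults the key to the requester's user ID, consults the database for per-user overrides, and so becomes async on both entry points. A minimal sketch of the new call pattern, assuming a HomeServer-like `hs` accessor and arbitrary rate values:

    async def check_action(hs, requester):
        limiter = Ratelimiter(
            store=hs.get_datastore(),  # needed for the ratelimit_override lookup
            clock=hs.get_clock(),
            rate_hz=0.17,              # illustrative values
            burst_count=3,
        )
        # `key` defaults to the requester's user ID. Appservice users marked
        # rate_limited: false, and users with a null/zero override row,
        # short-circuit to (True, -1.0).
        allowed, next_allowed_ts = await limiter.can_do_action(requester)
        if not allowed:
            ...
        # Or raise LimitExceededError on breach instead of returning a bool:
        await limiter.ratelimit(requester)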
diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py
index de2cc15d..c9f9596a 100644
--- a/synapse/api/room_versions.py
+++ b/synapse/api/room_versions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -57,7 +56,7 @@ class RoomVersion:
state_res = attr.ib(type=int) # one of the StateResolutionVersions
enforce_key_validity = attr.ib(type=bool)
- # bool: before MSC2261/MSC2432, m.room.aliases had special auth rules and redaction rules
+ # Before MSC2261/MSC2432, m.room.aliases had special auth rules and redaction rules
special_case_aliases_auth = attr.ib(type=bool)
# Strictly enforce canonicaljson, do not allow:
# * Integers outside the range of [-2 ^ 53 + 1, 2 ^ 53 - 1]
@@ -69,6 +68,8 @@ class RoomVersion:
limit_notifications_power_levels = attr.ib(type=bool)
# MSC2174/MSC2176: Apply updated redaction rules algorithm.
msc2176_redaction_rules = attr.ib(type=bool)
+ # MSC3083: Support the 'restricted' join_rule.
+ msc3083_join_rules = attr.ib(type=bool)
class RoomVersions:
@@ -82,6 +83,7 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
+ msc3083_join_rules=False,
)
V2 = RoomVersion(
"2",
@@ -93,6 +95,7 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
+ msc3083_join_rules=False,
)
V3 = RoomVersion(
"3",
@@ -104,6 +107,7 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
+ msc3083_join_rules=False,
)
V4 = RoomVersion(
"4",
@@ -115,6 +119,7 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
+ msc3083_join_rules=False,
)
V5 = RoomVersion(
"5",
@@ -126,6 +131,7 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
+ msc3083_join_rules=False,
)
V6 = RoomVersion(
"6",
@@ -137,6 +143,7 @@ class RoomVersions:
strict_canonicaljson=True,
limit_notifications_power_levels=True,
msc2176_redaction_rules=False,
+ msc3083_join_rules=False,
)
MSC2176 = RoomVersion(
"org.matrix.msc2176",
@@ -148,6 +155,19 @@ class RoomVersions:
strict_canonicaljson=True,
limit_notifications_power_levels=True,
msc2176_redaction_rules=True,
+ msc3083_join_rules=False,
+ )
+ MSC3083 = RoomVersion(
+ "org.matrix.msc3083",
+ RoomDisposition.UNSTABLE,
+ EventFormatVersions.V3,
+ StateResolutionVersions.V2,
+ enforce_key_validity=True,
+ special_case_aliases_auth=False,
+ strict_canonicaljson=True,
+ limit_notifications_power_levels=True,
+ msc2176_redaction_rules=False,
+ msc3083_join_rules=True,
)
@@ -162,4 +182,5 @@ KNOWN_ROOM_VERSIONS = {
RoomVersions.V6,
RoomVersions.MSC2176,
)
+ # Note that we do not include MSC3083 here unless it is enabled in the config.
} # type: Dict[str, RoomVersion]
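Downstream code can now branch on the room version's capability flag rather than string-matching version identifiers. A small sketch; note that, per the comment above, the MSC3083 version is only present in KNOWN_ROOM_VERSIONS when enabled:

    from synapse.api.room_versions import KNOWN_ROOM_VERSIONS

    def supports_restricted_joins(room_version_id: str) -> bool:
        # Unknown (or not-enabled experimental) versions simply aren't listed.
        room_version = KNOWN_ROOM_VERSIONS.get(room_version_id)
        return room_version is not None and room_version.msc3083_join_rules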
diff --git a/synapse/api/urls.py b/synapse/api/urls.py
index 6379c86d..4b1f213c 100644
--- a/synapse/api/urls.py
+++ b/synapse/api/urls.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py
index d1a2cd5e..f9940491 100644
--- a/synapse/app/__init__.py
+++ b/synapse/app/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 3912c899..638e01c1 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
# Copyright 2019-2021 The Matrix.org Foundation C.I.C
#
@@ -31,9 +30,10 @@ from twisted.internet import defer, error, reactor
from twisted.protocols.tls import TLSMemoryBIOFactory
import synapse
+from synapse.api.constants import MAX_PDU_SIZE
from synapse.app import check_bind_error
from synapse.app.phone_stats_home import start_phone_stats_home
-from synapse.config.server import ListenerConfig
+from synapse.config.homeserver import HomeServerConfig
from synapse.crypto import context_factory
from synapse.logging.context import PreserveLoggingContext
from synapse.metrics.background_process_metrics import wrap_as_background_process
@@ -289,7 +289,7 @@ def refresh_certificate(hs):
logger.info("Context factories updated.")
-async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerConfig]):
+async def start(hs: "synapse.server.HomeServer"):
"""
Start a Synapse server or worker.
@@ -301,7 +301,6 @@ async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerCon
Args:
hs: homeserver instance
- listeners: Listener configuration ('listeners' in homeserver.yaml)
"""
# Set up the SIGHUP machinery.
if hasattr(signal, "SIGHUP"):
@@ -337,7 +336,7 @@ async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerCon
synapse.logging.opentracing.init_tracer(hs) # type: ignore[attr-defined] # noqa
# It is now safe to start your Synapse.
- hs.start_listening(listeners)
+ hs.start_listening()
hs.get_datastore().db_pool.start_profiling()
hs.get_pusherpool().start()
@@ -531,3 +530,25 @@ def sdnotify(state):
# this is a bit surprising, since we don't expect to have a NOTIFY_SOCKET
# unless systemd is expecting us to notify it.
logger.warning("Unable to send notification to systemd: %s", e)
+
+
+def max_request_body_size(config: HomeServerConfig) -> int:
+ """Get a suitable maximum size for incoming HTTP requests"""
+
+ # Other than media uploads, the biggest request we expect to see is a fully-loaded
+ # /federation/v1/send request.
+ #
+ # The main thing in such a request is up to 50 PDUs, and up to 100 EDUs. PDUs are
+ # limited to 65536 bytes (possibly slightly more if the sender didn't use canonical
+ # json encoding); there is no specced limit to EDUs (see
+ # https://github.com/matrix-org/matrix-doc/issues/3121).
+ #
+ # In short, we somewhat arbitrarily limit requests to 200 * 64K (about 12.5M)
+ #
+ max_request_size = 200 * MAX_PDU_SIZE
+
+ # if we have a media repo enabled, we may need to allow larger uploads than that
+ if config.media.can_load_media_repo:
+ max_request_size = max(max_request_size, config.media.max_upload_size)
+
+ return max_request_size
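As a sanity check on the arithmetic in the comment: 200 * 64 KiB is exactly 12.5 MiB, and the media override only ever raises the cap, never lowers it. A sketch, with MAX_PDU_SIZE assumed to be 65536 as stated above and a hypothetical 50M upload limit:

    MAX_PDU_SIZE = 65536                   # assumed value from synapse.api.constants
    cap = 200 * MAX_PDU_SIZE               # 13_107_200 bytes == 12.5 MiB
    assert cap == 13_107_200

    max_upload_size = 50 * 1024 * 1024     # hypothetical configured upload limit
    effective = max(cap, max_upload_size)  # the larger of the two wins: 52_428_800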
diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py
index 9f99651a..68ae19c9 100644
--- a/synapse/app/admin_cmd.py
+++ b/synapse/app/admin_cmd.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2019 Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -71,12 +70,6 @@ class AdminCmdSlavedStore(
class AdminCmdServer(HomeServer):
DATASTORE_CLASS = AdminCmdSlavedStore
- def _listen_http(self, listener_config):
- pass
-
- def start_listening(self, listeners):
- pass
-
async def export_data_command(hs, args):
"""Export data for a user.
@@ -233,7 +226,7 @@ def start(config_options):
async def run():
with LoggingContext("command"):
- _base.start(ss, [])
+ _base.start(ss)
await args.func(ss, args)
_base.start_worker_reactor(
diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py
index add43147..2d50060f 100644
--- a/synapse/app/appservice.py
+++ b/synapse/app/appservice.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index add43147..2d50060f 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py
index e9c098c4..57af28f1 100644
--- a/synapse/app/event_creator.py
+++ b/synapse/app/event_creator.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index add43147..2d50060f 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py
index add43147..2d50060f 100644
--- a/synapse/app/federation_sender.py
+++ b/synapse/app/federation_sender.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index add43147..2d50060f 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py
index 3df2aa5c..1a15ceee 100644
--- a/synapse/app/generic_worker.py
+++ b/synapse/app/generic_worker.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
@@ -14,12 +13,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import contextlib
import logging
import sys
-from typing import Dict, Iterable, Optional, Set
-
-from typing_extensions import ContextManager
+from typing import Dict, Optional
from twisted.internet import address
from twisted.web.resource import IResource
@@ -36,29 +32,18 @@ from synapse.api.urls import (
SERVER_KEY_V2_PREFIX,
)
from synapse.app import _base
-from synapse.app._base import register_start
+from synapse.app._base import max_request_body_size, register_start
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
from synapse.config.logger import setup_logging
from synapse.config.server import ListenerConfig
-from synapse.federation import send_queue
from synapse.federation.transport.server import TransportLayerServer
-from synapse.handlers.presence import (
- BasePresenceHandler,
- PresenceState,
- get_interested_parties,
-)
from synapse.http.server import JsonResource, OptionsResource
from synapse.http.servlet import RestServlet, parse_json_object_from_request
from synapse.http.site import SynapseSite
from synapse.logging.context import LoggingContext
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
-from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
-from synapse.replication.http.presence import (
- ReplicationBumpPresenceActiveTime,
- ReplicationPresenceSetState,
-)
from synapse.replication.slave.storage._base import BaseSlavedStore
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
@@ -70,7 +55,6 @@ from synapse.replication.slave.storage.events import SlavedEventStore
from synapse.replication.slave.storage.filtering import SlavedFilteringStore
from synapse.replication.slave.storage.groups import SlavedGroupServerStore
from synapse.replication.slave.storage.keys import SlavedKeyStore
-from synapse.replication.slave.storage.presence import SlavedPresenceStore
from synapse.replication.slave.storage.profile import SlavedProfileStore
from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
from synapse.replication.slave.storage.pushers import SlavedPusherStore
@@ -78,21 +62,8 @@ from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
from synapse.replication.slave.storage.room import RoomStore
from synapse.replication.slave.storage.transactions import SlavedTransactionStore
-from synapse.replication.tcp.client import ReplicationDataHandler
-from synapse.replication.tcp.commands import ClearUserSyncsCommand
-from synapse.replication.tcp.streams import (
- AccountDataStream,
- DeviceListsStream,
- GroupServerStream,
- PresenceStream,
- PushersStream,
- PushRulesStream,
- ReceiptsStream,
- TagAccountDataStream,
- ToDeviceStream,
-)
from synapse.rest.admin import register_servlets_for_media_repo
-from synapse.rest.client.v1 import events, login, room
+from synapse.rest.client.v1 import events, login, presence, room
from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet
from synapse.rest.client.v1.profile import (
ProfileAvatarURLRestServlet,
@@ -129,7 +100,7 @@ from synapse.rest.client.versions import VersionsRestServlet
from synapse.rest.health import HealthResource
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
-from synapse.server import HomeServer, cache_in_self
+from synapse.server import HomeServer
from synapse.storage.databases.main.censor_events import CensorEventsStore
from synapse.storage.databases.main.client_ips import ClientIpWorkerStore
from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyStore
@@ -138,40 +109,18 @@ from synapse.storage.databases.main.metrics import ServerMetricsStore
from synapse.storage.databases.main.monthly_active_users import (
MonthlyActiveUsersWorkerStore,
)
-from synapse.storage.databases.main.presence import UserPresenceState
+from synapse.storage.databases.main.presence import PresenceStore
from synapse.storage.databases.main.search import SearchWorkerStore
from synapse.storage.databases.main.stats import StatsStore
from synapse.storage.databases.main.transactions import TransactionWorkerStore
from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore
from synapse.storage.databases.main.user_directory import UserDirectoryStore
-from synapse.types import ReadReceipt
-from synapse.util.async_helpers import Linearizer
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.versionstring import get_version_string
logger = logging.getLogger("synapse.app.generic_worker")
-class PresenceStatusStubServlet(RestServlet):
- """If presence is disabled this servlet can be used to stub out setting
- presence status.
- """
-
- PATTERNS = client_patterns("/presence/(?P<user_id>[^/]*)/status")
-
- def __init__(self, hs):
- super().__init__()
- self.auth = hs.get_auth()
-
- async def on_GET(self, request, user_id):
- await self.auth.get_user_by_req(request)
- return 200, {"presence": "offline"}
-
- async def on_PUT(self, request, user_id):
- await self.auth.get_user_by_req(request)
- return 200, {}
-
-
class KeyUploadServlet(RestServlet):
"""An implementation of the `KeyUploadServlet` that responds to read only
requests, but otherwise proxies through to the master instance.
@@ -265,213 +214,6 @@ class KeyUploadServlet(RestServlet):
return 200, {"one_time_key_counts": result}
-class _NullContextManager(ContextManager[None]):
- """A context manager which does nothing."""
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- pass
-
-
-UPDATE_SYNCING_USERS_MS = 10 * 1000
-
-
-class GenericWorkerPresence(BasePresenceHandler):
- def __init__(self, hs):
- super().__init__(hs)
- self.hs = hs
- self.is_mine_id = hs.is_mine_id
-
- self._presence_enabled = hs.config.use_presence
-
- # The number of ongoing syncs on this process, by user id.
- # Empty if _presence_enabled is false.
- self._user_to_num_current_syncs = {} # type: Dict[str, int]
-
- self.notifier = hs.get_notifier()
- self.instance_id = hs.get_instance_id()
-
- # user_id -> last_sync_ms. Lists the users that have stopped syncing
- # but we haven't notified the master of that yet
- self.users_going_offline = {}
-
- self._bump_active_client = ReplicationBumpPresenceActiveTime.make_client(hs)
- self._set_state_client = ReplicationPresenceSetState.make_client(hs)
-
- self._send_stop_syncing_loop = self.clock.looping_call(
- self.send_stop_syncing, UPDATE_SYNCING_USERS_MS
- )
-
- self._busy_presence_enabled = hs.config.experimental.msc3026_enabled
-
- hs.get_reactor().addSystemEventTrigger(
- "before",
- "shutdown",
- run_as_background_process,
- "generic_presence.on_shutdown",
- self._on_shutdown,
- )
-
- def _on_shutdown(self):
- if self._presence_enabled:
- self.hs.get_tcp_replication().send_command(
- ClearUserSyncsCommand(self.instance_id)
- )
-
- def send_user_sync(self, user_id, is_syncing, last_sync_ms):
- if self._presence_enabled:
- self.hs.get_tcp_replication().send_user_sync(
- self.instance_id, user_id, is_syncing, last_sync_ms
- )
-
- def mark_as_coming_online(self, user_id):
- """A user has started syncing. Send a UserSync to the master, unless they
- had recently stopped syncing.
-
- Args:
- user_id (str)
- """
- going_offline = self.users_going_offline.pop(user_id, None)
- if not going_offline:
- # Safe to skip because we haven't yet told the master they were offline
- self.send_user_sync(user_id, True, self.clock.time_msec())
-
- def mark_as_going_offline(self, user_id):
- """A user has stopped syncing. We wait before notifying the master as
- its likely they'll come back soon. This allows us to avoid sending
- a stopped syncing immediately followed by a started syncing notification
- to the master
-
- Args:
- user_id (str)
- """
- self.users_going_offline[user_id] = self.clock.time_msec()
-
- def send_stop_syncing(self):
- """Check if there are any users who have stopped syncing a while ago
- and haven't come back yet. If there are poke the master about them.
- """
- now = self.clock.time_msec()
- for user_id, last_sync_ms in list(self.users_going_offline.items()):
- if now - last_sync_ms > UPDATE_SYNCING_USERS_MS:
- self.users_going_offline.pop(user_id, None)
- self.send_user_sync(user_id, False, last_sync_ms)
-
- async def user_syncing(
- self, user_id: str, affect_presence: bool
- ) -> ContextManager[None]:
- """Record that a user is syncing.
-
- Called by the sync and events servlets to record that a user has connected to
- this worker and is waiting for some events.
- """
- if not affect_presence or not self._presence_enabled:
- return _NullContextManager()
-
- curr_sync = self._user_to_num_current_syncs.get(user_id, 0)
- self._user_to_num_current_syncs[user_id] = curr_sync + 1
-
- # If we went from no in flight sync to some, notify replication
- if self._user_to_num_current_syncs[user_id] == 1:
- self.mark_as_coming_online(user_id)
-
- def _end():
- # We check that the user_id is in user_to_num_current_syncs because
- # user_to_num_current_syncs may have been cleared if we are
- # shutting down.
- if user_id in self._user_to_num_current_syncs:
- self._user_to_num_current_syncs[user_id] -= 1
-
- # If we went from one in flight sync to non, notify replication
- if self._user_to_num_current_syncs[user_id] == 0:
- self.mark_as_going_offline(user_id)
-
- @contextlib.contextmanager
- def _user_syncing():
- try:
- yield
- finally:
- _end()
-
- return _user_syncing()
-
- async def notify_from_replication(self, states, stream_id):
- parties = await get_interested_parties(self.store, states)
- room_ids_to_states, users_to_states = parties
-
- self.notifier.on_new_event(
- "presence_key",
- stream_id,
- rooms=room_ids_to_states.keys(),
- users=users_to_states.keys(),
- )
-
- async def process_replication_rows(self, token, rows):
- states = [
- UserPresenceState(
- row.user_id,
- row.state,
- row.last_active_ts,
- row.last_federation_update_ts,
- row.last_user_sync_ts,
- row.status_msg,
- row.currently_active,
- )
- for row in rows
- ]
-
- for state in states:
- self.user_to_current_state[state.user_id] = state
-
- stream_id = token
- await self.notify_from_replication(states, stream_id)
-
- def get_currently_syncing_users_for_replication(self) -> Iterable[str]:
- return [
- user_id
- for user_id, count in self._user_to_num_current_syncs.items()
- if count > 0
- ]
-
- async def set_state(self, target_user, state, ignore_status_msg=False):
- """Set the presence state of the user."""
- presence = state["presence"]
-
- valid_presence = (
- PresenceState.ONLINE,
- PresenceState.UNAVAILABLE,
- PresenceState.OFFLINE,
- PresenceState.BUSY,
- )
-
- if presence not in valid_presence or (
- presence == PresenceState.BUSY and not self._busy_presence_enabled
- ):
- raise SynapseError(400, "Invalid presence state")
-
- user_id = target_user.to_string()
-
- # If presence is disabled, no-op
- if not self.hs.config.use_presence:
- return
-
- # Proxy request to master
- await self._set_state_client(
- user_id=user_id, state=state, ignore_status_msg=ignore_status_msg
- )
-
- async def bump_presence_active_time(self, user):
- """We've seen the user do something that indicates they're interacting
- with the app.
- """
- # If presence is disabled, no-op
- if not self.hs.config.use_presence:
- return
-
- # Proxy request to master
- user_id = user.to_string()
- await self._bump_active_client(user_id=user_id)
-
-
class GenericWorkerSlavedStore(
# FIXME(#3714): We need to add UserDirectoryStore as we write directly
# rather than going via the correct worker.
@@ -479,6 +221,7 @@ class GenericWorkerSlavedStore(
StatsStore,
UIAuthWorkerStore,
EndToEndRoomKeyStore,
+ PresenceStore,
SlavedDeviceInboxStore,
SlavedDeviceStore,
SlavedReceiptsStore,
@@ -497,7 +240,6 @@ class GenericWorkerSlavedStore(
SlavedTransactionStore,
SlavedProfileStore,
SlavedClientIpStore,
- SlavedPresenceStore,
SlavedFilteringStore,
MonthlyActiveUsersWorkerStore,
MediaRepositoryStore,
@@ -565,10 +307,7 @@ class GenericWorkerServer(HomeServer):
user_directory.register_servlets(self, resource)
- # If presence is disabled, use the stub servlet that does
- # not allow sending presence
- if not self.config.use_presence:
- PresenceStatusStubServlet(self).register(resource)
+ presence.register_servlets(self, resource)
groups.register_servlets(self, resource)
@@ -628,14 +367,16 @@ class GenericWorkerServer(HomeServer):
listener_config,
root_resource,
self.version_string,
+ max_request_body_size=max_request_body_size(self.config),
+ reactor=self.get_reactor(),
),
reactor=self.get_reactor(),
)
logger.info("Synapse worker now listening on port %d", port)
- def start_listening(self, listeners: Iterable[ListenerConfig]):
- for listener in listeners:
+ def start_listening(self):
+ for listener in self.config.worker_listeners:
if listener.type == "http":
self._listen_http(listener)
elif listener.type == "manhole":
@@ -643,7 +384,7 @@ class GenericWorkerServer(HomeServer):
listener.bind_addresses, listener.port, manhole_globals={"hs": self}
)
elif listener.type == "metrics":
- if not self.get_config().enable_metrics:
+ if not self.config.enable_metrics:
logger.warning(
(
"Metrics listener configured, but "
@@ -657,234 +398,6 @@ class GenericWorkerServer(HomeServer):
self.get_tcp_replication().start_replication(self)
- @cache_in_self
- def get_replication_data_handler(self):
- return GenericWorkerReplicationHandler(self)
-
- @cache_in_self
- def get_presence_handler(self):
- return GenericWorkerPresence(self)
-
-
-class GenericWorkerReplicationHandler(ReplicationDataHandler):
- def __init__(self, hs):
- super().__init__(hs)
-
- self.store = hs.get_datastore()
- self.presence_handler = hs.get_presence_handler() # type: GenericWorkerPresence
- self.notifier = hs.get_notifier()
-
- self.notify_pushers = hs.config.start_pushers
- self.pusher_pool = hs.get_pusherpool()
-
- self.send_handler = None # type: Optional[FederationSenderHandler]
- if hs.config.send_federation:
- self.send_handler = FederationSenderHandler(hs)
-
- async def on_rdata(self, stream_name, instance_name, token, rows):
- await super().on_rdata(stream_name, instance_name, token, rows)
- await self._process_and_notify(stream_name, instance_name, token, rows)
-
- async def _process_and_notify(self, stream_name, instance_name, token, rows):
- try:
- if self.send_handler:
- await self.send_handler.process_replication_rows(
- stream_name, token, rows
- )
-
- if stream_name == PushRulesStream.NAME:
- self.notifier.on_new_event(
- "push_rules_key", token, users=[row.user_id for row in rows]
- )
- elif stream_name in (AccountDataStream.NAME, TagAccountDataStream.NAME):
- self.notifier.on_new_event(
- "account_data_key", token, users=[row.user_id for row in rows]
- )
- elif stream_name == ReceiptsStream.NAME:
- self.notifier.on_new_event(
- "receipt_key", token, rooms=[row.room_id for row in rows]
- )
- await self.pusher_pool.on_new_receipts(
- token, token, {row.room_id for row in rows}
- )
- elif stream_name == ToDeviceStream.NAME:
- entities = [row.entity for row in rows if row.entity.startswith("@")]
- if entities:
- self.notifier.on_new_event("to_device_key", token, users=entities)
- elif stream_name == DeviceListsStream.NAME:
- all_room_ids = set() # type: Set[str]
- for row in rows:
- if row.entity.startswith("@"):
- room_ids = await self.store.get_rooms_for_user(row.entity)
- all_room_ids.update(room_ids)
- self.notifier.on_new_event("device_list_key", token, rooms=all_room_ids)
- elif stream_name == PresenceStream.NAME:
- await self.presence_handler.process_replication_rows(token, rows)
- elif stream_name == GroupServerStream.NAME:
- self.notifier.on_new_event(
- "groups_key", token, users=[row.user_id for row in rows]
- )
- elif stream_name == PushersStream.NAME:
- for row in rows:
- if row.deleted:
- self.stop_pusher(row.user_id, row.app_id, row.pushkey)
- else:
- await self.start_pusher(row.user_id, row.app_id, row.pushkey)
- except Exception:
- logger.exception("Error processing replication")
-
- async def on_position(self, stream_name: str, instance_name: str, token: int):
- await super().on_position(stream_name, instance_name, token)
- # Also call on_rdata to ensure that stream positions are properly reset.
- await self.on_rdata(stream_name, instance_name, token, [])
-
- def stop_pusher(self, user_id, app_id, pushkey):
- if not self.notify_pushers:
- return
-
- key = "%s:%s" % (app_id, pushkey)
- pushers_for_user = self.pusher_pool.pushers.get(user_id, {})
- pusher = pushers_for_user.pop(key, None)
- if pusher is None:
- return
- logger.info("Stopping pusher %r / %r", user_id, key)
- pusher.on_stop()
-
- async def start_pusher(self, user_id, app_id, pushkey):
- if not self.notify_pushers:
- return
-
- key = "%s:%s" % (app_id, pushkey)
- logger.info("Starting pusher %r / %r", user_id, key)
- return await self.pusher_pool.start_pusher_by_id(app_id, pushkey, user_id)
-
- def on_remote_server_up(self, server: str):
- """Called when get a new REMOTE_SERVER_UP command."""
-
- # Let's wake up the transaction queue for the server in case we have
- # pending stuff to send to it.
- if self.send_handler:
- self.send_handler.wake_destination(server)
-
-
-class FederationSenderHandler:
- """Processes the fedration replication stream
-
- This class is only instantiate on the worker responsible for sending outbound
- federation transactions. It receives rows from the replication stream and forwards
- the appropriate entries to the FederationSender class.
- """
-
- def __init__(self, hs: GenericWorkerServer):
- self.store = hs.get_datastore()
- self._is_mine_id = hs.is_mine_id
- self.federation_sender = hs.get_federation_sender()
- self._hs = hs
-
- # Stores the latest position in the federation stream we've gotten up
- # to. This is always set before we use it.
- self.federation_position = None
-
- self._fed_position_linearizer = Linearizer(name="_fed_position_linearizer")
-
- def wake_destination(self, server: str):
- self.federation_sender.wake_destination(server)
-
- async def process_replication_rows(self, stream_name, token, rows):
- # The federation stream contains things that we want to send out, e.g.
- # presence, typing, etc.
- if stream_name == "federation":
- send_queue.process_rows_for_federation(self.federation_sender, rows)
- await self.update_token(token)
-
- # ... and when new receipts happen
- elif stream_name == ReceiptsStream.NAME:
- await self._on_new_receipts(rows)
-
- # ... as well as device updates and messages
- elif stream_name == DeviceListsStream.NAME:
- # The entities are either user IDs (starting with '@') whose devices
- # have changed, or remote servers that we need to tell about
- # changes.
- hosts = {row.entity for row in rows if not row.entity.startswith("@")}
- for host in hosts:
- self.federation_sender.send_device_messages(host)
-
- elif stream_name == ToDeviceStream.NAME:
- # The to_device stream includes stuff to be pushed to both local
- # clients and remote servers, so we ignore entities that start with
- # '@' (since they'll be local users rather than destinations).
- hosts = {row.entity for row in rows if not row.entity.startswith("@")}
- for host in hosts:
- self.federation_sender.send_device_messages(host)
-
- async def _on_new_receipts(self, rows):
- """
- Args:
- rows (Iterable[synapse.replication.tcp.streams.ReceiptsStream.ReceiptsStreamRow]):
- new receipts to be processed
- """
- for receipt in rows:
- # we only want to send on receipts for our own users
- if not self._is_mine_id(receipt.user_id):
- continue
- receipt_info = ReadReceipt(
- receipt.room_id,
- receipt.receipt_type,
- receipt.user_id,
- [receipt.event_id],
- receipt.data,
- )
- await self.federation_sender.send_read_receipt(receipt_info)
-
- async def update_token(self, token):
- """Update the record of where we have processed to in the federation stream.
-
- Called after we have processed a an update received over replication. Sends
- a FEDERATION_ACK back to the master, and stores the token that we have processed
- in `federation_stream_position` so that we can restart where we left off.
- """
- self.federation_position = token
-
- # We save and send the ACK to master asynchronously, so we don't block
- # processing on persistence. We don't need to do this operation for
- # every single RDATA we receive, we just need to do it periodically.
-
- if self._fed_position_linearizer.is_queued(None):
- # There is already a task queued up to save and send the token, so
- # no need to queue up another task.
- return
-
- run_as_background_process("_save_and_send_ack", self._save_and_send_ack)
-
- async def _save_and_send_ack(self):
- """Save the current federation position in the database and send an ACK
- to master with where we're up to.
- """
- try:
- # We linearize here to ensure we don't have races updating the token
- #
- # XXX this appears to be redundant, since the ReplicationCommandHandler
- # has a linearizer which ensures that we only process one line of
- # replication data at a time. Should we remove it, or is it doing useful
- # service for robustness? Or could we replace it with an assertion that
- # we're not being re-entered?
-
- with (await self._fed_position_linearizer.queue(None)):
- # We persist and ack the same position, so we take a copy of it
- # here as otherwise it can get modified from underneath us.
- current_position = self.federation_position
-
- await self.store.update_federation_out_pos(
- "federation", current_position
- )
-
- # We ACK this token over replication so that the master can drop
- # its in memory queues
- self._hs.get_tcp_replication().send_federation_ack(current_position)
- except Exception:
- logger.exception("Error updating federation stream position")
-
def start(config_options):
try:
@@ -956,7 +469,7 @@ def start(config_options):
# streams. Will no-op if no streams can be written to by this worker.
hs.get_replication_streamer()
- register_start(_base.start, hs, config.worker_listeners)
+ register_start(_base.start, hs)
_base.start_worker_reactor("synapse-generic-worker", config)
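The net effect for startup is that listener configuration is no longer threaded through call sites: start() takes only the HomeServer, and each start_listening() implementation reads its own config. A before/after sketch of the worker call site:

    # Old: listeners passed explicitly alongside the HomeServer.
    # register_start(_base.start, hs, config.worker_listeners)

    # New: start_listening() pulls self.config.worker_listeners (or
    # self.config.server.listeners on the main process) itself.
    register_start(_base.start, hs)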
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 3bfe9d50..8e78134b 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
#
@@ -18,7 +17,7 @@
import logging
import os
import sys
-from typing import Iterable, Iterator
+from typing import Iterator
from twisted.internet import reactor
from twisted.web.resource import EncodingResourceWrapper, IResource
@@ -37,7 +36,13 @@ from synapse.api.urls import (
WEB_CLIENT_PREFIX,
)
from synapse.app import _base
-from synapse.app._base import listen_ssl, listen_tcp, quit_with_error, register_start
+from synapse.app._base import (
+ listen_ssl,
+ listen_tcp,
+ max_request_body_size,
+ quit_with_error,
+ register_start,
+)
from synapse.config._base import ConfigError
from synapse.config.emailconfig import ThreepidBehaviour
from synapse.config.homeserver import HomeServerConfig
@@ -127,19 +132,21 @@ class SynapseHomeServer(HomeServer):
else:
root_resource = OptionsResource()
- root_resource = create_resource_tree(resources, root_resource)
+ site = SynapseSite(
+ "synapse.access.%s.%s" % ("https" if tls else "http", site_tag),
+ site_tag,
+ listener_config,
+ create_resource_tree(resources, root_resource),
+ self.version_string,
+ max_request_body_size=max_request_body_size(self.config),
+ reactor=self.get_reactor(),
+ )
if tls:
ports = listen_ssl(
bind_addresses,
port,
- SynapseSite(
- "synapse.access.https.%s" % (site_tag,),
- site_tag,
- listener_config,
- root_resource,
- self.version_string,
- ),
+ site,
self.tls_server_context_factory,
reactor=self.get_reactor(),
)
@@ -149,13 +156,7 @@ class SynapseHomeServer(HomeServer):
ports = listen_tcp(
bind_addresses,
port,
- SynapseSite(
- "synapse.access.http.%s" % (site_tag,),
- site_tag,
- listener_config,
- root_resource,
- self.version_string,
- ),
+ site,
reactor=self.get_reactor(),
)
logger.info("Synapse now listening on TCP port %d", port)
@@ -192,7 +193,7 @@ class SynapseHomeServer(HomeServer):
}
)
- if self.get_config().threepid_behaviour_email == ThreepidBehaviour.LOCAL:
+ if self.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
from synapse.rest.synapse.client.password_reset import (
PasswordResetSubmitTokenResource,
)
@@ -231,7 +232,7 @@ class SynapseHomeServer(HomeServer):
)
if name in ["media", "federation", "client"]:
- if self.get_config().enable_media_repo:
+ if self.config.enable_media_repo:
media_repo = self.get_media_repository_resource()
resources.update(
{MEDIA_PREFIX: media_repo, LEGACY_MEDIA_PREFIX: media_repo}
@@ -245,7 +246,7 @@ class SynapseHomeServer(HomeServer):
resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
if name == "webclient":
- webclient_loc = self.get_config().web_client_location
+ webclient_loc = self.config.web_client_location
if webclient_loc is None:
logger.warning(
@@ -266,7 +267,7 @@ class SynapseHomeServer(HomeServer):
# https://twistedmatrix.com/trac/ticket/7678
resources[WEB_CLIENT_PREFIX] = File(webclient_loc)
- if name == "metrics" and self.get_config().enable_metrics:
+ if name == "metrics" and self.config.enable_metrics:
resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
if name == "replication":
@@ -274,18 +275,18 @@ class SynapseHomeServer(HomeServer):
return resources
- def start_listening(self, listeners: Iterable[ListenerConfig]):
- config = self.get_config()
-
- if config.redis_enabled:
+ def start_listening(self):
+ if self.config.redis_enabled:
# If redis is enabled we connect via the replication command handler
# in the same way as the workers (since we're effectively a client
# rather than a server).
self.get_tcp_replication().start_replication(self)
- for listener in listeners:
+ for listener in self.config.server.listeners:
if listener.type == "http":
- self._listening_services.extend(self._listener_http(config, listener))
+ self._listening_services.extend(
+ self._listener_http(self.config, listener)
+ )
elif listener.type == "manhole":
_base.listen_manhole(
listener.bind_addresses, listener.port, manhole_globals={"hs": self}
@@ -299,7 +300,7 @@ class SynapseHomeServer(HomeServer):
for s in services:
reactor.addSystemEventTrigger("before", "shutdown", s.stopListening)
elif listener.type == "metrics":
- if not self.get_config().enable_metrics:
+ if not self.config.enable_metrics:
logger.warning(
(
"Metrics listener configured, but "
@@ -413,7 +414,7 @@ def setup(config_options):
# Loading the provider metadata also ensures the provider config is valid.
await oidc.load_metadata()
- await _base.start(hs, config.listeners)
+ await _base.start(hs)
hs.get_datastore().db_pool.updates.start_doing_background_updates()
diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py
index add43147..2d50060f 100644
--- a/synapse/app/media_repository.py
+++ b/synapse/app/media_repository.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py
index add43147..2d50060f 100644
--- a/synapse/app/pusher.py
+++ b/synapse/app/pusher.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py
index add43147..2d50060f 100644
--- a/synapse/app/synchrotron.py
+++ b/synapse/app/synchrotron.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py
index 503d44f6..a368efb3 100644
--- a/synapse/app/user_dir.py
+++ b/synapse/app/user_dir.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py
index 0bfc5e44..6504c6bd 100644
--- a/synapse/appservice/__init__.py
+++ b/synapse/appservice/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py
index 9d3bbe3b..fe04d7a6 100644
--- a/synapse/appservice/api.py
+++ b/synapse/appservice/api.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py
index 366c476f..6a2ce99b 100644
--- a/synapse/appservice/scheduler.py
+++ b/synapse/appservice/scheduler.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -49,7 +48,7 @@ This is all tied together by the AppServiceScheduler which DIs the required
components.
"""
import logging
-from typing import List
+from typing import List, Optional
from synapse.appservice import ApplicationService, ApplicationServiceState
from synapse.events import EventBase
@@ -191,11 +190,11 @@ class _TransactionController:
self,
service: ApplicationService,
events: List[EventBase],
- ephemeral: List[JsonDict] = [],
+ ephemeral: Optional[List[JsonDict]] = None,
):
try:
txn = await self.store.create_appservice_txn(
- service=service, events=events, ephemeral=ephemeral
+ service=service, events=events, ephemeral=ephemeral or []
)
service_is_up = await self._is_service_up(service)
if service_is_up:
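This signature change avoids Python's shared-mutable-default pitfall: a default `[]` is evaluated once at function definition time and reused across every call. A self-contained illustration of why `Optional[...] = None` plus `ephemeral or []` is the safe pattern:

    from typing import List, Optional

    def broken(item: int, acc: List[int] = []) -> List[int]:
        acc.append(item)            # mutates the single shared default list
        return acc

    assert broken(1) == [1]
    assert broken(2) == [1, 2]      # surprising: state leaked between calls

    def fixed(item: int, acc: Optional[List[int]] = None) -> List[int]:
        acc = acc if acc is not None else []  # fresh list per call
        acc.append(item)
        return acc

    assert fixed(1) == [1]
    assert fixed(2) == [2]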
diff --git a/synapse/config/__init__.py b/synapse/config/__init__.py
index 1e76e955..d2f88915 100644
--- a/synapse/config/__init__.py
+++ b/synapse/config/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/__main__.py b/synapse/config/__main__.py
index 65043d5b..b5b6735a 100644
--- a/synapse/config/__main__.py
+++ b/synapse/config/__main__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/_base.py b/synapse/config/_base.py
index ba9cd63c..08e2c2c5 100644
--- a/synapse/config/_base.py
+++ b/synapse/config/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi
index e896fd34..ff9abbc2 100644
--- a/synapse/config/_base.pyi
+++ b/synapse/config/_base.pyi
@@ -1,21 +1,22 @@
from typing import Any, Iterable, List, Optional
from synapse.config import (
+ account_validity,
api,
appservice,
auth,
captcha,
cas,
- consent_config,
+ consent,
database,
emailconfig,
experimental,
groups,
- jwt_config,
+ jwt,
key,
logger,
metrics,
- oidc_config,
+ oidc,
password_auth_providers,
push,
ratelimiting,
@@ -23,9 +24,9 @@ from synapse.config import (
registration,
repository,
room_directory,
- saml2_config,
+ saml2,
server,
- server_notices_config,
+ server_notices,
spam_checker,
sso,
stats,
@@ -59,15 +60,16 @@ class RootConfig:
captcha: captcha.CaptchaConfig
voip: voip.VoipConfig
registration: registration.RegistrationConfig
+ account_validity: account_validity.AccountValidityConfig
metrics: metrics.MetricsConfig
api: api.ApiConfig
appservice: appservice.AppServiceConfig
key: key.KeyConfig
- saml2: saml2_config.SAML2Config
+ saml2: saml2.SAML2Config
cas: cas.CasConfig
sso: sso.SSOConfig
- oidc: oidc_config.OIDCConfig
- jwt: jwt_config.JWTConfig
+ oidc: oidc.OIDCConfig
+ jwt: jwt.JWTConfig
auth: auth.AuthConfig
email: emailconfig.EmailConfig
worker: workers.WorkerConfig
@@ -76,9 +78,9 @@ class RootConfig:
spamchecker: spam_checker.SpamCheckerConfig
groups: groups.GroupsConfig
userdirectory: user_directory.UserDirectoryConfig
- consent: consent_config.ConsentConfig
+ consent: consent.ConsentConfig
stats: stats.StatsConfig
- servernotices: server_notices_config.ServerNoticesConfig
+ servernotices: server_notices.ServerNoticesConfig
roomdirectory: room_directory.RoomDirectoryConfig
thirdpartyrules: third_party_event_rules.ThirdPartyRulesConfig
tracer: tracer.TracerConfig
diff --git a/synapse/config/_util.py b/synapse/config/_util.py
index 8fce7f6b..3edb4b71 100644
--- a/synapse/config/_util.py
+++ b/synapse/config/_util.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/account_validity.py b/synapse/config/account_validity.py
new file mode 100644
index 00000000..c58a7d95
--- /dev/null
+++ b/synapse/config/account_validity.py
@@ -0,0 +1,165 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from synapse.config._base import Config, ConfigError
+
+
+class AccountValidityConfig(Config):
+ section = "account_validity"
+
+ def read_config(self, config, **kwargs):
+ account_validity_config = config.get("account_validity") or {}
+ self.account_validity_enabled = account_validity_config.get("enabled", False)
+ self.account_validity_renew_by_email_enabled = (
+ "renew_at" in account_validity_config
+ )
+
+ if self.account_validity_enabled:
+ if "period" in account_validity_config:
+ self.account_validity_period = self.parse_duration(
+ account_validity_config["period"]
+ )
+ else:
+ raise ConfigError("'period' is required when using account validity")
+
+ if "renew_at" in account_validity_config:
+ self.account_validity_renew_at = self.parse_duration(
+ account_validity_config["renew_at"]
+ )
+
+ if "renew_email_subject" in account_validity_config:
+ self.account_validity_renew_email_subject = account_validity_config[
+ "renew_email_subject"
+ ]
+ else:
+ self.account_validity_renew_email_subject = "Renew your %(app)s account"
+
+ self.account_validity_startup_job_max_delta = (
+ self.account_validity_period * 10.0 / 100.0
+ )
+
+ if self.account_validity_renew_by_email_enabled:
+ if not self.public_baseurl:
+ raise ConfigError("Can't send renewal emails without 'public_baseurl'")
+
+ # Load account validity templates.
+ account_validity_template_dir = account_validity_config.get("template_dir")
+
+ account_renewed_template_filename = account_validity_config.get(
+ "account_renewed_html_path", "account_renewed.html"
+ )
+ invalid_token_template_filename = account_validity_config.get(
+ "invalid_token_html_path", "invalid_token.html"
+ )
+
+ # Read and store template content
+ (
+ self.account_validity_account_renewed_template,
+ self.account_validity_account_previously_renewed_template,
+ self.account_validity_invalid_token_template,
+ ) = self.read_templates(
+ [
+ account_renewed_template_filename,
+ "account_previously_renewed.html",
+ invalid_token_template_filename,
+ ],
+ account_validity_template_dir,
+ )
+
+ def generate_config_section(self, **kwargs):
+ return """\
+ ## Account Validity ##
+
+ # Optional account validity configuration. This allows for accounts to be denied
+ # any request after a given period.
+ #
+ # Once this feature is enabled, Synapse will look for registered users without an
+ # expiration date at startup and will add one to every account it finds, using the
+ # current settings at that time.
+ # This means that, if a validity period is set and Synapse is restarted, each such
+ # account gets an expiration date derived from the validity period current at that
+ # time. If the validity period later changes and Synapse is restarted again,
+ # existing expiration dates won't be updated unless the accounts are manually
+ # renewed. The date is randomly selected within the range
+ # [now + period - d ; now + period], where d is equal to 10% of the validity period.
+ #
+ account_validity:
+ # The account validity feature is disabled by default. Uncomment the
+ # following line to enable it.
+ #
+ #enabled: true
+
+ # The period after which an account is valid after its registration. When
+ # renewing the account, its validity period will be extended by this amount
+ # of time. This parameter is required when using the account validity
+ # feature.
+ #
+ #period: 6w
+
+ # The amount of time before an account's expiry date at which Synapse will
+ # send an email to the account's email address with a renewal link. By
+ # default, no such emails are sent.
+ #
+ # If you enable this setting, you will also need to fill out the 'email' and
+ # 'public_baseurl' configuration sections.
+ #
+ #renew_at: 1w
+
+ # The subject of the email sent out with the renewal link. '%(app)s' can be
+ # used as a placeholder for the 'app_name' parameter from the 'email'
+ # section.
+ #
+ # Note that the placeholder must be written '%(app)s', including the
+ # trailing 's'.
+ #
+ # If this is not set, a default value is used.
+ #
+ #renew_email_subject: "Renew your %(app)s account"
+
+ # Directory in which Synapse will try to find templates for the HTML files to
+ # serve to the user when trying to renew an account. If not set, default
+ # templates from within the Synapse package will be used.
+ #
+ # The currently available templates are:
+ #
+ # * account_renewed.html: Displayed to the user after they have successfully
+ # renewed their account.
+ #
+ # * account_previously_renewed.html: Displayed to the user if they attempt to
+ # renew their account with a token that is valid, but that has already
+ # been used. In this case the account is not renewed again.
+ #
+ # * invalid_token.html: Displayed to the user when they try to renew an account
+ # with an unknown or invalid renewal token.
+ #
+ # See https://github.com/matrix-org/synapse/tree/master/synapse/res/templates for
+ # default template contents.
+ #
+ # The file names of some of these templates can be configured below for legacy
+ # reasons.
+ #
+ #template_dir: "res/templates"
+
+ # A custom file name for the 'account_renewed.html' template.
+ #
+ # If not set, the file is assumed to be named "account_renewed.html".
+ #
+ #account_renewed_html_path: "account_renewed.html"
+
+ # A custom file name for the 'invalid_token.html' template.
+ #
+ # If not set, the file is assumed to be named "invalid_token.html".
+ #
+ #invalid_token_html_path: "invalid_token.html"
+ """
diff --git a/synapse/config/api.py b/synapse/config/api.py
index 74cd53a8..55c038c0 100644
--- a/synapse/config/api.py
+++ b/synapse/config/api.py
@@ -1,4 +1,4 @@
-# Copyright 2015, 2016 OpenMarket Ltd
+# Copyright 2015-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,38 +12,131 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import logging
+from typing import Iterable
+
from synapse.api.constants import EventTypes
+from synapse.config._base import Config, ConfigError
+from synapse.config._util import validate_config
+from synapse.types import JsonDict
-from ._base import Config
+logger = logging.getLogger(__name__)
class ApiConfig(Config):
section = "api"
- def read_config(self, config, **kwargs):
- self.room_invite_state_types = config.get(
- "room_invite_state_types",
- [
- EventTypes.JoinRules,
- EventTypes.CanonicalAlias,
- EventTypes.RoomAvatar,
- EventTypes.RoomEncryption,
- EventTypes.Name,
- ],
+ def read_config(self, config: JsonDict, **kwargs):
+ validate_config(_MAIN_SCHEMA, config, ())
+ self.room_prejoin_state = list(self._get_prejoin_state_types(config))
+
+ def generate_config_section(cls, **kwargs) -> str:
+ formatted_default_state_types = "\n".join(
+ " # - %s" % (t,) for t in _DEFAULT_PREJOIN_STATE_TYPES
)
- def generate_config_section(cls, **kwargs):
return """\
## API Configuration ##
- # A list of event types that will be included in the room_invite_state
+ # Controls for the state that is shared with users who receive an invite
+ # to a room
#
- #room_invite_state_types:
- # - "{JoinRules}"
- # - "{CanonicalAlias}"
- # - "{RoomAvatar}"
- # - "{RoomEncryption}"
- # - "{Name}"
- """.format(
- **vars(EventTypes)
- )
+ room_prejoin_state:
+ # By default, the following state event types are shared with users who
+ # receive invites to the room:
+ #
+%(formatted_default_state_types)s
+ #
+ # Uncomment the following to disable these defaults (so that only the event
+ # types listed in 'additional_event_types' are shared). Defaults to 'false'.
+ #
+ #disable_default_event_types: true
+
+ # Additional state event types to share with users when they are invited
+ # to a room.
+ #
+ # By default, this list is empty (so only the default event types are shared).
+ #
+ #additional_event_types:
+ # - org.example.custom.event.type
+ """ % {
+ "formatted_default_state_types": formatted_default_state_types
+ }
+
+ def _get_prejoin_state_types(self, config: JsonDict) -> Iterable[str]:
+ """Get the event types to include in the prejoin state
+
+ Parses the config and returns an iterable of the event types to be included.
+ """
+ room_prejoin_state_config = config.get("room_prejoin_state") or {}
+
+ # backwards-compatibility support for room_invite_state_types
+ if "room_invite_state_types" in config:
+ # if both "room_invite_state_types" and "room_prejoin_state" are set, then
+ # we don't really know what to do.
+ if room_prejoin_state_config:
+ raise ConfigError(
+ "Can't specify both 'room_invite_state_types' and 'room_prejoin_state' "
+ "in config"
+ )
+
+ logger.warning(_ROOM_INVITE_STATE_TYPES_WARNING)
+
+ yield from config["room_invite_state_types"]
+ return
+
+ if not room_prejoin_state_config.get("disable_default_event_types"):
+ yield from _DEFAULT_PREJOIN_STATE_TYPES
+
+ if self.spaces_enabled:
+ # MSC1772 suggests adding m.room.create to the prejoin state
+ yield EventTypes.Create
+
+ yield from room_prejoin_state_config.get("additional_event_types", [])
+
+
+_ROOM_INVITE_STATE_TYPES_WARNING = """\
+WARNING: The 'room_invite_state_types' configuration setting is now deprecated,
+and replaced with 'room_prejoin_state'. New features may not work correctly
+unless 'room_invite_state_types' is removed. See the sample configuration file for
+details of 'room_prejoin_state'.
+--------------------------------------------------------------------------------
+"""
+
+_DEFAULT_PREJOIN_STATE_TYPES = [
+ EventTypes.JoinRules,
+ EventTypes.CanonicalAlias,
+ EventTypes.RoomAvatar,
+ EventTypes.RoomEncryption,
+ EventTypes.Name,
+]
+
+
+# room_prejoin_state can either be None (as it is in the default config), or
+# an object containing other config settings
+_ROOM_PREJOIN_STATE_CONFIG_SCHEMA = {
+ "oneOf": [
+ {
+ "type": "object",
+ "properties": {
+ "disable_default_event_types": {"type": "boolean"},
+ "additional_event_types": {
+ "type": "array",
+ "items": {"type": "string"},
+ },
+ },
+ },
+ {"type": "null"},
+ ]
+}
+
+# the legacy room_invite_state_types setting
+_ROOM_INVITE_STATE_TYPES_SCHEMA = {"type": "array", "items": {"type": "string"}}
+
+_MAIN_SCHEMA = {
+ "type": "object",
+ "properties": {
+ "room_prejoin_state": _ROOM_PREJOIN_STATE_CONFIG_SCHEMA,
+ "room_invite_state_types": _ROOM_INVITE_STATE_TYPES_SCHEMA,
+ },
+}
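For reference, a hedged homeserver.yaml sketch of the new setting, matching the schema above (the custom event type is illustrative, not from this diff):

    room_prejoin_state:
      disable_default_event_types: false
      additional_event_types:
        - org.example.custom.event.type

With these values the five default event types are shared with invitees, plus the one additional type; setting disable_default_event_types to true would share only the additional types.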
diff --git a/synapse/config/auth.py b/synapse/config/auth.py
index 9aabaadf..e10d641a 100644
--- a/synapse/config/auth.py
+++ b/synapse/config/auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/config/cache.py b/synapse/config/cache.py
index 4e8abbf8..41b9b3f5 100644
--- a/synapse/config/cache.py
+++ b/synapse/config/cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/cas.py b/synapse/config/cas.py
index dbf50859..901f4123 100644
--- a/synapse/config/cas.py
+++ b/synapse/config/cas.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/consent_config.py b/synapse/config/consent.py
index c47f364b..30d07cc2 100644
--- a/synapse/config/consent_config.py
+++ b/synapse/config/consent.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/database.py b/synapse/config/database.py
index e7889b9c..79a02706 100644
--- a/synapse/config/database.py
+++ b/synapse/config/database.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py
index 52505ac5..5564d7d0 100644
--- a/synapse/config/emailconfig.py
+++ b/synapse/config/emailconfig.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -300,7 +299,7 @@ class EmailConfig(Config):
"client_base_url", email_config.get("riot_base_url", None)
)
- if self.account_validity.renew_by_email_enabled:
+ if self.account_validity_renew_by_email_enabled:
expiry_template_html = email_config.get(
"expiry_template_html", "notice_expiry.html"
)
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
index 86f4d9af..a693fba8 100644
--- a/synapse/config/experimental.py
+++ b/synapse/config/experimental.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
from synapse.config._base import Config
from synapse.types import JsonDict
@@ -27,7 +27,11 @@ class ExperimentalConfig(Config):
# MSC2858 (multiple SSO identity providers)
self.msc2858_enabled = experimental.get("msc2858_enabled", False) # type: bool
- # Spaces (MSC1772, MSC2946, etc)
+
+ # Spaces (MSC1772, MSC2946, MSC3083, etc)
self.spaces_enabled = experimental.get("spaces_enabled", False) # type: bool
+ if self.spaces_enabled:
+ KNOWN_ROOM_VERSIONS[RoomVersions.MSC3083.identifier] = RoomVersions.MSC3083
+
# MSC3026 (busy presence state)
self.msc3026_enabled = experimental.get("msc3026_enabled", False) # type: bool
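A minimal sketch of how these flags are turned on in homeserver.yaml; the key names follow the experimental.get() calls above, and the top-level section name is assumed from the surrounding config conventions:

    experimental_features:
      spaces_enabled: true    # per this diff, also registers the MSC3083 room version
      msc3026_enabled: false

Enabling spaces_enabled mutates KNOWN_ROOM_VERSIONS at config-load time, which is why the RoomVersions import is added above.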
diff --git a/synapse/config/federation.py b/synapse/config/federation.py
index 55e4db54..090ba047 100644
--- a/synapse/config/federation.py
+++ b/synapse/config/federation.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/groups.py b/synapse/config/groups.py
index 7b7860ea..15c2e64b 100644
--- a/synapse/config/groups.py
+++ b/synapse/config/groups.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/homeserver.py b/synapse/config/homeserver.py
index 64a2429f..c23b66c8 100644
--- a/synapse/config/homeserver.py
+++ b/synapse/config/homeserver.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
@@ -13,25 +12,25 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
from ._base import RootConfig
+from .account_validity import AccountValidityConfig
from .api import ApiConfig
from .appservice import AppServiceConfig
from .auth import AuthConfig
from .cache import CacheConfig
from .captcha import CaptchaConfig
from .cas import CasConfig
-from .consent_config import ConsentConfig
+from .consent import ConsentConfig
from .database import DatabaseConfig
from .emailconfig import EmailConfig
from .experimental import ExperimentalConfig
from .federation import FederationConfig
from .groups import GroupsConfig
-from .jwt_config import JWTConfig
+from .jwt import JWTConfig
from .key import KeyConfig
from .logger import LoggingConfig
from .metrics import MetricsConfig
-from .oidc_config import OIDCConfig
+from .oidc import OIDCConfig
from .password_auth_providers import PasswordAuthProviderConfig
from .push import PushConfig
from .ratelimiting import RatelimitConfig
@@ -40,9 +39,9 @@ from .registration import RegistrationConfig
from .repository import ContentRepositoryConfig
from .room import RoomConfig
from .room_directory import RoomDirectoryConfig
-from .saml2_config import SAML2Config
+from .saml2 import SAML2Config
from .server import ServerConfig
-from .server_notices_config import ServerNoticesConfig
+from .server_notices import ServerNoticesConfig
from .spam_checker import SpamCheckerConfig
from .sso import SSOConfig
from .stats import StatsConfig
@@ -69,6 +68,7 @@ class HomeServerConfig(RootConfig):
CaptchaConfig,
VoipConfig,
RegistrationConfig,
+ AccountValidityConfig,
MetricsConfig,
ApiConfig,
AppServiceConfig,
diff --git a/synapse/config/jwt_config.py b/synapse/config/jwt.py
index f30330ab..9e07e730 100644
--- a/synapse/config/jwt_config.py
+++ b/synapse/config/jwt.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015 Niklas Riekenbrauck
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/key.py b/synapse/config/key.py
index 350ff1d6..94a90630 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 999aecce..813076df 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -32,7 +31,6 @@ from twisted.logger import (
)
import synapse
-from synapse.app import _base as appbase
from synapse.logging._structured import setup_structured_logging
from synapse.logging.context import LoggingContextFilter
from synapse.logging.filter import MetadataFilter
@@ -319,6 +317,8 @@ def setup_logging(
# Perform one-time logging configuration.
_setup_stdlib_logging(config, log_config_path, logBeginner=logBeginner)
# Add a SIGHUP handler to reload the logging configuration, if one is available.
+ from synapse.app import _base as appbase
+
appbase.register_sighup(_reload_logging_config, log_config_path)
# Log immediately so we can grep backwards.
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index 2b289f42..7ac82edb 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/config/oidc_config.py b/synapse/config/oidc.py
index 05733ec4..ea0abf5a 100644
--- a/synapse/config/oidc_config.py
+++ b/synapse/config/oidc.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Quentin Gliech
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
@@ -15,20 +14,23 @@
# limitations under the License.
from collections import Counter
-from typing import Iterable, List, Mapping, Optional, Tuple, Type
+from typing import Collection, Iterable, List, Mapping, Optional, Tuple, Type
import attr
from synapse.config._util import validate_config
from synapse.config.sso import SsoAttributeRequirement
from synapse.python_dependencies import DependencyException, check_requirements
-from synapse.types import Collection, JsonDict
+from synapse.types import JsonDict
from synapse.util.module_loader import load_module
from synapse.util.stringutils import parse_and_validate_mxc_uri
from ._base import Config, ConfigError, read_file
-DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.oidc_handler.JinjaOidcMappingProvider"
+DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.oidc.JinjaOidcMappingProvider"
+# The module that JinjaOidcMappingProvider is in was renamed; we want to
+# handle both the old and new names transparently.
+LEGACY_USER_MAPPING_PROVIDER = "synapse.handlers.oidc_handler.JinjaOidcMappingProvider"
class OIDCConfig(Config):
@@ -404,6 +406,8 @@ def _parse_oidc_config_dict(
"""
ump_config = oidc_config.get("user_mapping_provider", {})
ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
+ if ump_config.get("module") == LEGACY_USER_MAPPING_PROVIDER:
+ ump_config["module"] = DEFAULT_USER_MAPPING_PROVIDER
ump_config.setdefault("config", {})
(
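In practice, a deployment whose config still names the old module keeps working unchanged. A hedged YAML sketch (other required provider fields omitted):

    oidc_providers:
      - idp_id: example
        user_mapping_provider:
          # Legacy path; rewritten at load time to
          # synapse.handlers.oidc.JinjaOidcMappingProvider.
          module: synapse.handlers.oidc_handler.JinjaOidcMappingProvider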
diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py
index 85d07c4f..1cf69734 100644
--- a/synapse/config/password_auth_providers.py
+++ b/synapse/config/password_auth_providers.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 Openmarket
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/push.py b/synapse/config/push.py
index 7831a2ef..6ef8491c 100644
--- a/synapse/config/push.py
+++ b/synapse/config/push.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 New Vector Ltd
#
diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py
index 3f3997f4..7a8d5851 100644
--- a/synapse/config/ratelimiting.py
+++ b/synapse/config/ratelimiting.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Dict
+from typing import Dict, Optional
from ._base import Config
@@ -21,8 +21,10 @@ class RateLimitConfig:
def __init__(
self,
config: Dict[str, float],
- defaults={"per_second": 0.17, "burst_count": 3.0},
+ defaults: Optional[Dict[str, float]] = None,
):
+ defaults = defaults or {"per_second": 0.17, "burst_count": 3.0}
+
self.per_second = config.get("per_second", defaults["per_second"])
self.burst_count = int(config.get("burst_count", defaults["burst_count"]))
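The switch from a dict literal default to `defaults: Optional[...] = None` avoids Python's shared-mutable-default pitfall. A self-contained illustration (not Synapse code):

    def record(x, acc=[]):   # one list object is shared by every call
        acc.append(x)
        return acc

    record(1)  # [1]
    record(2)  # [1, 2]: state leaked from the first call

    def record_fixed(x, acc=None):   # the idiom this diff adopts
        acc = acc or []
        acc.append(x)
        return acc

RateLimitConfig never mutates its defaults dict, so the change here is largely defensive, but it removes the hazardous pattern.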
diff --git a/synapse/config/redis.py b/synapse/config/redis.py
index 13733023..33104af7 100644
--- a/synapse/config/redis.py
+++ b/synapse/config/redis.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index ead007ba..e6f52b4f 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,74 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
-
-import pkg_resources
-
from synapse.api.constants import RoomCreationPreset
from synapse.config._base import Config, ConfigError
from synapse.types import RoomAlias, UserID
from synapse.util.stringutils import random_string_with_symbols, strtobool
-class AccountValidityConfig(Config):
- section = "accountvalidity"
-
- def __init__(self, config, synapse_config):
- if config is None:
- return
- super().__init__()
- self.enabled = config.get("enabled", False)
- self.renew_by_email_enabled = "renew_at" in config
-
- if self.enabled:
- if "period" in config:
- self.period = self.parse_duration(config["period"])
- else:
- raise ConfigError("'period' is required when using account validity")
-
- if "renew_at" in config:
- self.renew_at = self.parse_duration(config["renew_at"])
-
- if "renew_email_subject" in config:
- self.renew_email_subject = config["renew_email_subject"]
- else:
- self.renew_email_subject = "Renew your %(app)s account"
-
- self.startup_job_max_delta = self.period * 10.0 / 100.0
-
- if self.renew_by_email_enabled:
- if "public_baseurl" not in synapse_config:
- raise ConfigError("Can't send renewal emails without 'public_baseurl'")
-
- template_dir = config.get("template_dir")
-
- if not template_dir:
- template_dir = pkg_resources.resource_filename("synapse", "res/templates")
-
- if "account_renewed_html_path" in config:
- file_path = os.path.join(template_dir, config["account_renewed_html_path"])
-
- self.account_renewed_html_content = self.read_file(
- file_path, "account_validity.account_renewed_html_path"
- )
- else:
- self.account_renewed_html_content = (
- "<html><body>Your account has been successfully renewed.</body><html>"
- )
-
- if "invalid_token_html_path" in config:
- file_path = os.path.join(template_dir, config["invalid_token_html_path"])
-
- self.invalid_token_html_content = self.read_file(
- file_path, "account_validity.invalid_token_html_path"
- )
- else:
- self.invalid_token_html_content = (
- "<html><body>Invalid renewal token.</body><html>"
- )
-
-
class RegistrationConfig(Config):
section = "registration"
@@ -93,10 +30,6 @@ class RegistrationConfig(Config):
str(config["disable_registration"])
)
- self.account_validity = AccountValidityConfig(
- config.get("account_validity") or {}, config
- )
-
self.registrations_require_3pid = config.get("registrations_require_3pid", [])
self.allowed_local_3pids = config.get("allowed_local_3pids", [])
self.enable_3pid_lookup = config.get("enable_3pid_lookup", True)
@@ -208,69 +141,6 @@ class RegistrationConfig(Config):
#
#enable_registration: false
- # Optional account validity configuration. This allows for accounts to be denied
- # any request after a given period.
- #
- # Once this feature is enabled, Synapse will look for registered users without an
- # expiration date at startup and will add one to every account it found using the
- # current settings at that time.
- # This means that, if a validity period is set, and Synapse is restarted (it will
- # then derive an expiration date from the current validity period), and some time
- # after that the validity period changes and Synapse is restarted, the users'
- # expiration dates won't be updated unless their account is manually renewed. This
- # date will be randomly selected within a range [now + period - d ; now + period],
- # where d is equal to 10%% of the validity period.
- #
- account_validity:
- # The account validity feature is disabled by default. Uncomment the
- # following line to enable it.
- #
- #enabled: true
-
- # The period after which an account is valid after its registration. When
- # renewing the account, its validity period will be extended by this amount
- # of time. This parameter is required when using the account validity
- # feature.
- #
- #period: 6w
-
- # The amount of time before an account's expiry date at which Synapse will
- # send an email to the account's email address with a renewal link. By
- # default, no such emails are sent.
- #
- # If you enable this setting, you will also need to fill out the 'email' and
- # 'public_baseurl' configuration sections.
- #
- #renew_at: 1w
-
- # The subject of the email sent out with the renewal link. '%%(app)s' can be
- # used as a placeholder for the 'app_name' parameter from the 'email'
- # section.
- #
- # Note that the placeholder must be written '%%(app)s', including the
- # trailing 's'.
- #
- # If this is not set, a default value is used.
- #
- #renew_email_subject: "Renew your %%(app)s account"
-
- # Directory in which Synapse will try to find templates for the HTML files to
- # serve to the user when trying to renew an account. If not set, default
- # templates from within the Synapse package will be used.
- #
- #template_dir: "res/templates"
-
- # File within 'template_dir' giving the HTML to be displayed to the user after
- # they successfully renewed their account. If not set, default text is used.
- #
- #account_renewed_html_path: "account_renewed.html"
-
- # File within 'template_dir' giving the HTML to be displayed when the user
- # tries to renew an account with an invalid renewal token. If not set,
- # default text is used.
- #
- #invalid_token_html_path: "invalid_token.html"
-
# Time that a user's session remains valid for, after they log in.
#
# Note that this is not currently compatible with guest logins.
@@ -298,9 +168,9 @@ class RegistrationConfig(Config):
#
#allowed_local_3pids:
# - medium: email
- # pattern: '.*@matrix\\.org'
+ # pattern: '^[^@]+@matrix\\.org$'
# - medium: email
- # pattern: '.*@vector\\.im'
+ # pattern: '^[^@]+@vector\\.im$'
# - medium: msisdn
# pattern: '\\+44'
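The tightened patterns matter because (on the assumption that these constraints are checked with `re.match`, which anchors only at the start of the string) the old wildcards also matched addresses where the domain is merely a prefix of the real one:

    import re

    # Old pattern: accepts look-alike domains.
    bool(re.match(r'.*@matrix\.org', 'user@matrix.org.evil.example'))       # True
    # New pattern: one local part, exact domain, anchored at both ends.
    bool(re.match(r'^[^@]+@matrix\.org$', 'user@matrix.org.evil.example'))  # False
    bool(re.match(r'^[^@]+@matrix\.org$', 'user@matrix.org'))               # True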
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 061c4ec8..c78a83ab 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -71,6 +70,7 @@ def parse_thumbnail_requirements(thumbnail_sizes):
jpeg_thumbnail = ThumbnailRequirement(width, height, method, "image/jpeg")
png_thumbnail = ThumbnailRequirement(width, height, method, "image/png")
requirements.setdefault("image/jpeg", []).append(jpeg_thumbnail)
+ requirements.setdefault("image/jpg", []).append(jpeg_thumbnail)
requirements.setdefault("image/webp", []).append(jpeg_thumbnail)
requirements.setdefault("image/gif", []).append(png_thumbnail)
requirements.setdefault("image/png", []).append(png_thumbnail)
diff --git a/synapse/config/room.py b/synapse/config/room.py
index 692d7a19..d889d90d 100644
--- a/synapse/config/room.py
+++ b/synapse/config/room.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py
index 2dd719c3..56981cac 100644
--- a/synapse/config/room_directory.py
+++ b/synapse/config/room_directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2.py
index 6db9cb5c..3d1218c8 100644
--- a/synapse/config/saml2_config.py
+++ b/synapse/config/saml2.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
@@ -26,7 +25,10 @@ from ._util import validate_config
logger = logging.getLogger(__name__)
-DEFAULT_USER_MAPPING_PROVIDER = (
+DEFAULT_USER_MAPPING_PROVIDER = "synapse.handlers.saml.DefaultSamlMappingProvider"
+# The module that DefaultSamlMappingProvider is in was renamed; we want to
+# handle both the old and new names transparently.
+LEGACY_USER_MAPPING_PROVIDER = (
"synapse.handlers.saml_handler.DefaultSamlMappingProvider"
)
@@ -98,6 +100,8 @@ class SAML2Config(Config):
# Use the default user mapping provider if not set
ump_dict.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
+ if ump_dict.get("module") == LEGACY_USER_MAPPING_PROVIDER:
+ ump_dict["module"] = DEFAULT_USER_MAPPING_PROVIDER
# Ensure a config is present
ump_dict["config"] = ump_dict.get("config") or {}
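This mirrors the OIDC change above: configs that still name the old module are redirected transparently. A hedged YAML sketch:

    saml2_config:
      user_mapping_provider:
        # Legacy name, still accepted and mapped to
        # synapse.handlers.saml.DefaultSamlMappingProvider.
        module: synapse.handlers.saml_handler.DefaultSamlMappingProvider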
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 5f8910b6..21ca7b33 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -27,6 +26,7 @@ import yaml
from netaddr import AddrFormatError, IPNetwork, IPSet
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
+from synapse.util.module_loader import load_module
from synapse.util.stringutils import parse_and_validate_server_name
from ._base import Config, ConfigError
@@ -235,10 +235,27 @@ class ServerConfig(Config):
self.print_pidfile = config.get("print_pidfile")
self.user_agent_suffix = config.get("user_agent_suffix")
self.use_frozen_dicts = config.get("use_frozen_dicts", False)
+
self.public_baseurl = config.get("public_baseurl")
+ if self.public_baseurl is not None:
+ if self.public_baseurl[-1] != "/":
+ self.public_baseurl += "/"
# Whether to enable user presence.
- self.use_presence = config.get("use_presence", True)
+ presence_config = config.get("presence") or {}
+ self.use_presence = presence_config.get("enabled")
+ if self.use_presence is None:
+ self.use_presence = config.get("use_presence", True)
+
+ # Custom presence router module
+ self.presence_router_module_class = None
+ self.presence_router_config = None
+ presence_router_config = presence_config.get("presence_router")
+ if presence_router_config:
+ (
+ self.presence_router_module_class,
+ self.presence_router_config,
+ ) = load_module(presence_router_config, ("presence", "presence_router"))
# Whether to update the user directory or not. This should be set to
# false only if we are updating the user directory in a worker
@@ -394,10 +411,6 @@ class ServerConfig(Config):
config_path=("federation_ip_range_blacklist",),
)
- if self.public_baseurl is not None:
- if self.public_baseurl[-1] != "/":
- self.public_baseurl += "/"
-
# (undocumented) option for torturing the worker-mode replication a bit,
# for testing. The value defines the number of milliseconds to pause before
# sending out any replication updates.
@@ -834,9 +847,28 @@ class ServerConfig(Config):
#
#soft_file_limit: 0
- # Set to false to disable presence tracking on this homeserver.
+ # Presence tracking allows users to see the state (e.g. online/offline)
+ # of other local and remote users.
#
- #use_presence: false
+ presence:
+ # Uncomment to disable presence tracking on this homeserver. This option
+ # replaces the previous top-level 'use_presence' option.
+ #
+ #enabled: false
+
+ # Presence routers are third-party modules that can specify additional logic
+ # for where presence updates from users are routed.
+ #
+ presence_router:
+ # The custom module's class. Uncomment to use a custom presence router module.
+ #
+ #module: "my_custom_router.PresenceRouter"
+
+ # Configuration options of the custom module. Refer to your module's
+ # documentation for available options.
+ #
+ #config:
+ # example_option: 'something'
# Whether to require authentication to retrieve profile data (avatars,
# display names) of other users through the client API. Defaults to
diff --git a/synapse/config/server_notices_config.py b/synapse/config/server_notices.py
index 57f69dc8..48bf3241 100644
--- a/synapse/config/server_notices_config.py
+++ b/synapse/config/server_notices.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py
index 3d05abc1..447ba330 100644
--- a/synapse/config/spam_checker.py
+++ b/synapse/config/spam_checker.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/sso.py b/synapse/config/sso.py
index 243cc681..af645c93 100644
--- a/synapse/config/sso.py
+++ b/synapse/config/sso.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/stats.py b/synapse/config/stats.py
index 2258329a..3d44b512 100644
--- a/synapse/config/stats.py
+++ b/synapse/config/stats.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/third_party_event_rules.py b/synapse/config/third_party_event_rules.py
index c04e1c4e..f502ff53 100644
--- a/synapse/config/third_party_event_rules.py
+++ b/synapse/config/third_party_event_rules.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/tls.py b/synapse/config/tls.py
index ad37b93c..7df4e4c3 100644
--- a/synapse/config/tls.py
+++ b/synapse/config/tls.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,7 +17,7 @@ import os
import warnings
from datetime import datetime
from hashlib import sha256
-from typing import List, Optional
+from typing import List, Optional, Pattern
from unpaddedbase64 import encode_base64
@@ -125,7 +124,7 @@ class TlsConfig(Config):
fed_whitelist_entries = []
# Support globs (*) in whitelist values
- self.federation_certificate_verification_whitelist = [] # type: List[str]
+ self.federation_certificate_verification_whitelist = [] # type: List[Pattern]
for entry in fed_whitelist_entries:
try:
entry_regex = glob_to_regex(entry.encode("ascii").decode("ascii"))
@@ -270,7 +269,7 @@ class TlsConfig(Config):
tls_certificate_path,
tls_private_key_path,
acme_domain,
- **kwargs
+ **kwargs,
):
"""If the acme_domain is specified acme will be enabled.
If the TLS paths are not specified the default will be certs in the
diff --git a/synapse/config/tracer.py b/synapse/config/tracer.py
index 727a1e70..db22b5b1 100644
--- a/synapse/config/tracer.py
+++ b/synapse/config/tracer.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/user_directory.py b/synapse/config/user_directory.py
index 8d05ef17..4cbf79ee 100644
--- a/synapse/config/user_directory.py
+++ b/synapse/config/user_directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
index ac92375a..46263020 100644
--- a/synapse/config/workers.py
+++ b/synapse/config/workers.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -65,6 +64,14 @@ class WriterLocations:
Attributes:
events: The instances that write to the event and backfill streams.
typing: The instance that writes to the typing stream.
+ to_device: The instances that write to the to_device stream. Currently
+ can only be a single instance.
+ account_data: The instances that write to the account data streams. Currently
+ can only be a single instance.
+ receipts: The instances that write to the receipts stream. Currently
+ can only be a single instance.
+ presence: The instances that write to the presence stream. Currently
+ can only be a single instance.
"""
events = attr.ib(
@@ -86,6 +93,11 @@ class WriterLocations:
type=List[str],
converter=_instance_to_list_converter,
)
+ presence = attr.ib(
+ default=["master"],
+ type=List[str],
+ converter=_instance_to_list_converter,
+ )
class WorkerConfig(Config):
@@ -189,7 +201,14 @@ class WorkerConfig(Config):
# Check that the configured writers for the various streams also appear in
# `instance_map`.
- for stream in ("events", "typing", "to_device", "account_data", "receipts"):
+ for stream in (
+ "events",
+ "typing",
+ "to_device",
+ "account_data",
+ "receipts",
+ "presence",
+ ):
instances = _instance_to_list_converter(getattr(self.writers, stream))
for instance in instances:
if instance != "master" and instance not in self.instance_map:
@@ -216,6 +235,11 @@ class WorkerConfig(Config):
if len(self.writers.events) == 0:
raise ConfigError("Must specify at least one instance to handle `events`.")
+ if len(self.writers.presence) != 1:
+ raise ConfigError(
+ "Must only specify one instance to handle `presence` messages."
+ )
+
self.events_shard_config = RoutableShardedWorkerHandlingConfig(
self.writers.events
)
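A hedged sketch of the corresponding homeserver.yaml, assuming the usual stream_writers/instance_map layout (the worker name and port are illustrative):

    stream_writers:
      presence: presence_writer    # must be exactly one instance
    instance_map:
      presence_writer:
        host: localhost
        port: 9034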
diff --git a/synapse/crypto/__init__.py b/synapse/crypto/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/synapse/crypto/__init__.py
+++ b/synapse/crypto/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py
index 8fb116ae..0f2b632e 100644
--- a/synapse/crypto/event_signing.py
+++ b/synapse/crypto/event_signing.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
#
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index d5fb5151..5f18ef77 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017, 2018 New Vector Ltd
#
@@ -502,7 +501,7 @@ class StoreKeyFetcher(KeyFetcher):
class BaseV2KeyFetcher(KeyFetcher):
def __init__(self, hs: "HomeServer"):
self.store = hs.get_datastore()
- self.config = hs.get_config()
+ self.config = hs.config
async def process_v2_response(
self, from_server: str, response_json: JsonDict, time_added_ms: int
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index 91ad5b3d..70c55656 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
@@ -15,14 +14,14 @@
# limitations under the License.
import logging
-from typing import List, Optional, Set, Tuple
+from typing import Any, Dict, List, Optional, Set, Tuple
from canonicaljson import encode_canonical_json
from signedjson.key import decode_verify_key_bytes
from signedjson.sign import SignatureVerifyException, verify_signed_json
from unpaddedbase64 import decode_base64
-from synapse.api.constants import EventTypes, JoinRules, Membership
+from synapse.api.constants import MAX_PDU_SIZE, EventTypes, JoinRules, Membership
from synapse.api.errors import AuthError, EventSizeError, SynapseError
from synapse.api.room_versions import (
KNOWN_ROOM_VERSIONS,
@@ -162,7 +161,7 @@ def check(
logger.debug("Auth events: %s", [a.event_id for a in auth_events.values()])
if event.type == EventTypes.Member:
- _is_membership_change_allowed(event, auth_events)
+ _is_membership_change_allowed(room_version_obj, event, auth_events)
logger.debug("Allowing! %s", event)
return
@@ -206,7 +205,7 @@ def _check_size_limits(event: EventBase) -> None:
too_big("type")
if len(event.event_id) > 255:
too_big("event_id")
- if len(encode_canonical_json(event.get_pdu_json())) > 65536:
+ if len(encode_canonical_json(event.get_pdu_json())) > MAX_PDU_SIZE:
too_big("event")
@@ -220,8 +219,19 @@ def _can_federate(event: EventBase, auth_events: StateMap[EventBase]) -> bool:
def _is_membership_change_allowed(
- event: EventBase, auth_events: StateMap[EventBase]
+ room_version: RoomVersion, event: EventBase, auth_events: StateMap[EventBase]
) -> None:
+ """
+ Confirms that the event which changes membership is an allowed change.
+
+ Args:
+ room_version: The version of the room.
+ event: The event to check.
+ auth_events: The current auth events of the room.
+
+ Raises:
+ AuthError if the event is not allowed.
+ """
membership = event.content["membership"]
# Check if this is the room creator joining:
@@ -315,14 +325,19 @@ def _is_membership_change_allowed(
if user_level < invite_level:
raise AuthError(403, "You don't have permission to invite users")
elif Membership.JOIN == membership:
- # Joins are valid iff caller == target and they were:
- # invited: They are accepting the invitation
- # joined: It's a NOOP
+ # Joins are valid iff caller == target and:
+ # * They are not banned.
+ # * They are accepting a previously sent invitation.
+ # * They are already joined (it's a NOOP).
+ # * The room is public or restricted.
if event.user_id != target_user_id:
raise AuthError(403, "Cannot force another user to join.")
elif target_banned:
raise AuthError(403, "You are banned from this room")
- elif join_rule == JoinRules.PUBLIC:
+ elif join_rule == JoinRules.PUBLIC or (
+ room_version.msc3083_join_rules
+ and join_rule == JoinRules.MSC3083_RESTRICTED
+ ):
pass
elif join_rule == JoinRules.INVITE:
if not caller_in_room and not caller_invited:
@@ -655,7 +670,7 @@ def _verify_third_party_invite(event: EventBase, auth_events: StateMap[EventBase
public_key = public_key_object["public_key"]
try:
for server, signature_block in signed["signatures"].items():
- for key_name, encoded_signature in signature_block.items():
+ for key_name in signature_block.keys():
if not key_name.startswith("ed25519:"):
continue
verify_key = decode_verify_key_bytes(
@@ -673,7 +688,7 @@ def _verify_third_party_invite(event: EventBase, auth_events: StateMap[EventBase
return False
-def get_public_keys(invite_event):
+def get_public_keys(invite_event: EventBase) -> List[Dict[str, Any]]:
public_keys = []
if "public_key" in invite_event.content:
o = {"public_key": invite_event.content["public_key"]}
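Condensing the new join branch above into a standalone predicate (literal join-rule strings are used for clarity; the diff itself uses JoinRules.PUBLIC and JoinRules.MSC3083_RESTRICTED):

    def join_allowed(join_rule: str, msc3083_join_rules: bool) -> bool:
        # Public rooms always admit joins; rooms on an MSC3083-capable
        # version also admit joins under the new "restricted" rule. Bans
        # and invite-only rooms are handled by the surrounding branches.
        return join_rule == "public" or (
            msc3083_join_rules and join_rule == "restricted"
        )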
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 8f6b955d..c8b52cbc 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
@@ -330,9 +329,11 @@ class FrozenEvent(EventBase):
self,
event_dict: JsonDict,
room_version: RoomVersion,
- internal_metadata_dict: JsonDict = {},
+ internal_metadata_dict: Optional[JsonDict] = None,
rejected_reason: Optional[str] = None,
):
+ internal_metadata_dict = internal_metadata_dict or {}
+
event_dict = dict(event_dict)
# Signatures is a dict of dicts, and this is faster than doing a
@@ -386,9 +387,11 @@ class FrozenEventV2(EventBase):
self,
event_dict: JsonDict,
room_version: RoomVersion,
- internal_metadata_dict: JsonDict = {},
+ internal_metadata_dict: Optional[JsonDict] = None,
rejected_reason: Optional[str] = None,
):
+ internal_metadata_dict = internal_metadata_dict or {}
+
event_dict = dict(event_dict)
# Signatures is a dict of dicts, and this is faster than doing a
@@ -507,9 +510,11 @@ def _event_type_from_format_version(format_version: int) -> Type[EventBase]:
def make_event_from_dict(
event_dict: JsonDict,
room_version: RoomVersion = RoomVersions.V1,
- internal_metadata_dict: JsonDict = {},
+ internal_metadata_dict: Optional[JsonDict] = None,
rejected_reason: Optional[str] = None,
) -> EventBase:
"""Construct an EventBase from the given event dict"""
event_type = _event_type_from_format_version(room_version.event_format)
- return event_type(event_dict, room_version, internal_metadata_dict, rejected_reason)
+ return event_type(
+ event_dict, room_version, internal_metadata_dict or {}, rejected_reason
+ )
diff --git a/synapse/events/builder.py b/synapse/events/builder.py
index c1c0426f..5793553a 100644
--- a/synapse/events/builder.py
+++ b/synapse/events/builder.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/events/presence_router.py b/synapse/events/presence_router.py
new file mode 100644
index 00000000..6c37c8a7
--- /dev/null
+++ b/synapse/events/presence_router.py
@@ -0,0 +1,103 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import TYPE_CHECKING, Dict, Iterable, Set, Union
+
+from synapse.api.presence import UserPresenceState
+
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
+
+class PresenceRouter:
+ """
+ A module that the homeserver will call upon to help route user presence updates to
+ additional destinations. If a custom presence router is configured, calls will be
+ passed to that instead.
+ """
+
+ ALL_USERS = "ALL"
+
+ def __init__(self, hs: "HomeServer"):
+ self.custom_presence_router = None
+
+ # Check whether a custom presence router module has been configured
+ if hs.config.presence_router_module_class:
+ # Initialise the module
+ self.custom_presence_router = hs.config.presence_router_module_class(
+ config=hs.config.presence_router_config, module_api=hs.get_module_api()
+ )
+
+ # Ensure the module has implemented the required methods
+ required_methods = ["get_users_for_states", "get_interested_users"]
+ for method_name in required_methods:
+ if not hasattr(self.custom_presence_router, method_name):
+ raise Exception(
+ "PresenceRouter module '%s' must implement all required methods: %s"
+ % (
+ hs.config.presence_router_module_class.__name__,
+ ", ".join(required_methods),
+ )
+ )
+
+ async def get_users_for_states(
+ self,
+ state_updates: Iterable[UserPresenceState],
+ ) -> Dict[str, Set[UserPresenceState]]:
+ """
+ Given an iterable of user presence updates, determine where each one
+ needs to go.
+
+ Args:
+ state_updates: An iterable of user presence state updates.
+
+ Returns:
+ A dictionary of user_id -> set of UserPresenceState, indicating which
+ presence updates each user should receive.
+ """
+ if self.custom_presence_router is not None:
+ # Ask the custom module
+ return await self.custom_presence_router.get_users_for_states(
+ state_updates=state_updates
+ )
+
+ # Don't include any extra destinations for presence updates
+ return {}
+
+ async def get_interested_users(self, user_id: str) -> Union[Set[str], str]:
+ """
+ Retrieve the set of users that `user_id` is interested in receiving the
+ presence of. This is in addition to those they share a room with.
+ Optionally, the object PresenceRouter.ALL_USERS can be returned to indicate
+ that this user should receive all incoming local and remote presence updates.
+
+ Note that this method will only be called for local users, but can return users
+ that are local or remote.
+
+ Args:
+ user_id: A user requesting presence updates.
+
+ Returns:
+ A set of user IDs to return presence updates for, or ALL_USERS to return all
+ known updates.
+ """
+ if self.custom_presence_router is not None:
+ # Ask the custom module for interested users
+ return await self.custom_presence_router.get_interested_users(
+ user_id=user_id
+ )
+
+ # A custom presence router is not defined.
+ # Don't report any additional interested users
+ return set()
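To make the contract concrete, a minimal sketch of a custom module implementing the two required methods (the module name, config key and routing policy are invented for illustration):

    from typing import Dict, Iterable, Set, Union

    from synapse.api.presence import UserPresenceState


    class ExamplePresenceRouter:
        def __init__(self, config, module_api):
            # A single hypothetical user who should see everyone's presence.
            self._watcher = config.get("watcher", "@admin:example.org")

        async def get_users_for_states(
            self, state_updates: Iterable[UserPresenceState]
        ) -> Dict[str, Set[UserPresenceState]]:
            # Route every presence update to the watcher as well.
            return {self._watcher: set(state_updates)}

        async def get_interested_users(self, user_id: str) -> Union[Set[str], str]:
            # The watcher wants all updates; everyone else gets no extras.
            return "ALL" if user_id == self._watcher else set()

Such a module would be enabled via the presence.presence_router block added to the sample config in server.py earlier in this changeset.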
diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py
index 7295df74..f8d898c3 100644
--- a/synapse/events/snapshot.py
+++ b/synapse/events/snapshot.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py
index a9185987..7118d5f5 100644
--- a/synapse/events/spamcheck.py
+++ b/synapse/events/spamcheck.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
@@ -16,12 +15,11 @@
import inspect
import logging
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Collection, Dict, List, Optional, Tuple, Union
from synapse.rest.media.v1._base import FileInfo
from synapse.rest.media.v1.media_storage import ReadableFileWrapper
from synapse.spam_checker_api import RegistrationBehaviour
-from synapse.types import Collection
from synapse.util.async_helpers import maybe_awaitable
if TYPE_CHECKING:
diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py
index 9767d239..f7944fd8 100644
--- a/synapse/events/third_party_rules.py
+++ b/synapse/events/third_party_rules.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index 0f8a3b5a..7d7cd9aa 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/events/validator.py b/synapse/events/validator.py
index f8f3b1a3..fa6987d7 100644
--- a/synapse/events/validator.py
+++ b/synapse/events/validator.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/federation/__init__.py b/synapse/federation/__init__.py
index f5f0bdfc..46300cba 100644
--- a/synapse/federation/__init__.py
+++ b/synapse/federation/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index 38373752..949dcd46 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index afdb5bf2..a5b6a611 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -102,7 +101,7 @@ class FederationClient(FederationBase):
max_len=1000,
expiry_ms=120 * 1000,
reset_expiry_on_get=False,
- )
+ ) # type: ExpiringCache[str, EventBase]
def _clear_tried_cache(self):
"""Clear pdu_destination_tried cache"""
@@ -452,6 +451,28 @@ class FederationClient(FederationBase):
return signed_auth
+ def _is_unknown_endpoint(
+ self, e: HttpResponseException, synapse_error: Optional[SynapseError] = None
+ ) -> bool:
+ """
+ Returns true if the response was due to an endpoint being unimplemented.
+
+ Args:
+ e: The error response received from the remote server.
+ synapse_error: The above error converted to a SynapseError. This is
+ automatically generated if not provided.
+
+ """
+ if synapse_error is None:
+ synapse_error = e.to_synapse_error()
+ # There is no good way to detect an "unknown" endpoint.
+ #
+ # Dendrite returns a 404 (with no body); Synapse returns a 400
+ # with M_UNRECOGNISED.
+ return e.code == 404 or (
+ e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED
+ )
+
async def _try_destination_list(
self,
description: str,
@@ -469,9 +490,9 @@ class FederationClient(FederationBase):
callback: Function to run for each server. Passed a single
argument: the server_name to try.
- If the callback raises a CodeMessageException with a 300/400 code,
- attempts to perform the operation stop immediately and the exception is
- reraised.
+ If the callback raises a CodeMessageException with a 300/400 code or
+ an UnsupportedRoomVersionError, attempts to perform the operation
+ stop immediately and the exception is reraised.
Otherwise, if the callback raises an Exception the error is logged and the
next server tried. Normally the stacktrace is logged but this is
@@ -493,8 +514,7 @@ class FederationClient(FederationBase):
continue
try:
- res = await callback(destination)
- return res
+ return await callback(destination)
except InvalidResponseError as e:
logger.warning("Failed to %s via %s: %s", description, destination, e)
except UnsupportedRoomVersionError:
@@ -503,17 +523,15 @@ class FederationClient(FederationBase):
synapse_error = e.to_synapse_error()
failover = False
+ # Fail over on an internal server error, or if the destination
+ # doesn't implement the endpoint for some reason.
if 500 <= e.code < 600:
failover = True
- elif failover_on_unknown_endpoint:
- # there is no good way to detect an "unknown" endpoint. Dendrite
- # returns a 404 (with no body); synapse returns a 400
- # with M_UNRECOGNISED.
- if e.code == 404 or (
- e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED
- ):
- failover = True
+ elif failover_on_unknown_endpoint and self._is_unknown_endpoint(
+ e, synapse_error
+ ):
+ failover = True
if not failover:
raise synapse_error from e
@@ -571,9 +589,8 @@ class FederationClient(FederationBase):
UnsupportedRoomVersionError: if remote responds with
a room version we don't understand.
- SynapseError: if the chosen remote server returns a 300/400 code.
-
- RuntimeError: if no servers were reachable.
+ SynapseError: if the chosen remote server returns a 300/400 code, or
+ no servers successfully handle the request.
"""
valid_memberships = {Membership.JOIN, Membership.LEAVE}
if membership not in valid_memberships:
@@ -643,9 +660,8 @@ class FederationClient(FederationBase):
``auth_chain``.
Raises:
- SynapseError: if the chosen remote server returns a 300/400 code.
-
- RuntimeError: if no servers were reachable.
+ SynapseError: if the chosen remote server returns a 300/400 code, or
+ no servers successfully handle the request.
"""
async def send_request(destination) -> Dict[str, Any]:
@@ -674,7 +690,7 @@ class FederationClient(FederationBase):
if create_event is None:
# If the state doesn't have a create event then the room is
# invalid, and it would fail auth checks anyway.
- raise SynapseError(400, "No create event in state")
+ raise InvalidResponseError("No create event in state")
# the room version should be sane.
create_room_version = create_event.content.get(
@@ -747,16 +763,11 @@ class FederationClient(FederationBase):
content=pdu.get_pdu_json(time_now),
)
except HttpResponseException as e:
- if e.code in [400, 404]:
- err = e.to_synapse_error()
-
- # If we receive an error response that isn't a generic error, or an
- # unrecognised endpoint error, we assume that the remote understands
- # the v2 invite API and this is a legitimate error.
- if err.errcode not in [Codes.UNKNOWN, Codes.UNRECOGNIZED]:
- raise err
- else:
- raise e.to_synapse_error()
+ # If an error is received that is due to an unrecognised endpoint,
+ # fall back to the v1 endpoint. Otherwise consider it a legitimate error
+ # and raise.
+ if not self._is_unknown_endpoint(e):
+ raise
logger.debug("Couldn't send_join with the v2 API, falling back to the v1 API")
@@ -803,6 +814,11 @@ class FederationClient(FederationBase):
Returns:
The event as a dict as returned by the remote server
+
+ Raises:
+ SynapseError: if the remote server returns an error or if the server
+ only supports the v1 endpoint and a room version other than "1"
+ or "2" is requested.
"""
time_now = self._clock.time_msec()
@@ -818,28 +834,19 @@ class FederationClient(FederationBase):
},
)
except HttpResponseException as e:
- if e.code in [400, 404]:
- err = e.to_synapse_error()
-
- # If we receive an error response that isn't a generic error, we
- # assume that the remote understands the v2 invite API and this
- # is a legitimate error.
- if err.errcode != Codes.UNKNOWN:
- raise err
-
- # Otherwise, we assume that the remote server doesn't understand
- # the v2 invite API. That's ok provided the room uses old-style event
- # IDs.
+ # If an error is received that is due to an unrecognised endpoint,
+ # fall back to the v1 endpoint if the room uses old-style event IDs.
+ # Otherwise consider it a legitimate error and raise.
+ err = e.to_synapse_error()
+ if self._is_unknown_endpoint(e, err):
if room_version.event_format != EventFormatVersions.V1:
raise SynapseError(
400,
"User's homeserver does not support this room version",
Codes.UNSUPPORTED_ROOM_VERSION,
)
- elif e.code in (403, 429):
- raise e.to_synapse_error()
else:
- raise
+ raise err
# Didn't work, try v1 API.
# Note the v1 API returns a tuple of `(200, content)`
@@ -866,9 +873,8 @@ class FederationClient(FederationBase):
pdu: event to be sent
Raises:
- SynapseError if the chosen remote server returns a 300/400 code.
-
- RuntimeError if no servers were reachable.
+ SynapseError: if the chosen remote server returns a 300/400 code, or
+ no servers successfully handle the request.
"""
async def send_request(destination: str) -> None:
@@ -890,16 +896,11 @@ class FederationClient(FederationBase):
content=pdu.get_pdu_json(time_now),
)
except HttpResponseException as e:
- if e.code in [400, 404]:
- err = e.to_synapse_error()
-
- # If we receive an error response that isn't a generic error, or an
- # unrecognised endpoint error, we assume that the remote understands
- # the v2 invite API and this is a legitimate error.
- if err.errcode not in [Codes.UNKNOWN, Codes.UNRECOGNIZED]:
- raise err
- else:
- raise e.to_synapse_error()
+ # If an error is received that is due to an unrecognised endpoint,
+ # fall back to the v1 endpoint. Otherwise consider it a legitimate error
+ # and raise.
+ if not self._is_unknown_endpoint(e):
+ raise
logger.debug("Couldn't send_leave with the v2 API, falling back to the v1 API")
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index d84e3620..b729a692 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2019 Matrix.org Federation C.I.C
@@ -137,7 +136,7 @@ class FederationServer(FederationBase):
) # type: ResponseCache[Tuple[str, str]]
self._federation_metrics_domains = (
- hs.get_config().federation.federation_metrics_domains
+ hs.config.federation.federation_metrics_domains
)
async def on_backfill_request(
@@ -739,22 +738,20 @@ class FederationServer(FederationBase):
await self.handler.on_receive_pdu(origin, pdu, sent_to_us_directly=True)
- def __str__(self):
+ def __str__(self) -> str:
return "<ReplicationLayer(%s)>" % self.server_name
async def exchange_third_party_invite(
self, sender_user_id: str, target_user_id: str, room_id: str, signed: Dict
- ):
- ret = await self.handler.exchange_third_party_invite(
+ ) -> None:
+ await self.handler.exchange_third_party_invite(
sender_user_id, target_user_id, room_id, signed
)
- return ret
- async def on_exchange_third_party_invite_request(self, event_dict: Dict):
- ret = await self.handler.on_exchange_third_party_invite_request(event_dict)
- return ret
+ async def on_exchange_third_party_invite_request(self, event_dict: Dict) -> None:
+ await self.handler.on_exchange_third_party_invite_request(event_dict)
- async def check_server_matches_acl(self, server_name: str, room_id: str):
+ async def check_server_matches_acl(self, server_name: str, room_id: str) -> None:
"""Check if the given server is allowed by the server ACLs in the room
Args:
@@ -870,6 +867,7 @@ class FederationHandlerRegistry:
# A rate limiter for incoming room key requests per origin.
self._room_key_request_rate_limiter = Ratelimiter(
+ store=hs.get_datastore(),
clock=self.clock,
rate_hz=self.config.rc_key_requests.per_second,
burst_count=self.config.rc_key_requests.burst_count,
@@ -877,7 +875,7 @@ class FederationHandlerRegistry:
def register_edu_handler(
self, edu_type: str, handler: Callable[[str, JsonDict], Awaitable[None]]
- ):
+ ) -> None:
"""Sets the handler callable that will be used to handle an incoming
federation EDU of the given type.
@@ -896,7 +894,7 @@ class FederationHandlerRegistry:
def register_query_handler(
self, query_type: str, handler: Callable[[dict], Awaitable[JsonDict]]
- ):
+ ) -> None:
"""Sets the handler callable that will be used to handle an incoming
federation query of the given type.
@@ -914,15 +912,17 @@ class FederationHandlerRegistry:
self.query_handlers[query_type] = handler
- def register_instance_for_edu(self, edu_type: str, instance_name: str):
+ def register_instance_for_edu(self, edu_type: str, instance_name: str) -> None:
"""Register that the EDU handler is on a different instance than master."""
self._edu_type_to_instance[edu_type] = [instance_name]
- def register_instances_for_edu(self, edu_type: str, instance_names: List[str]):
+ def register_instances_for_edu(
+ self, edu_type: str, instance_names: List[str]
+ ) -> None:
"""Register that the EDU handler is on multiple instances."""
self._edu_type_to_instance[edu_type] = instance_names
- async def on_edu(self, edu_type: str, origin: str, content: dict):
+ async def on_edu(self, edu_type: str, origin: str, content: dict) -> None:
if not self.config.use_presence and edu_type == EduTypes.Presence:
return
@@ -930,7 +930,9 @@ class FederationHandlerRegistry:
# the limit, drop them.
if (
edu_type == EduTypes.RoomKeyRequest
- and not self._room_key_request_rate_limiter.can_do_action(origin)
+ and not await self._room_key_request_rate_limiter.can_do_action(
+ None, origin
+ )
):
return
diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py
index ce5fc758..2f9c9bc2 100644
--- a/synapse/federation/persistence.py
+++ b/synapse/federation/persistence.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py
index 0c18c49a..65d76ea9 100644
--- a/synapse/federation/send_queue.py
+++ b/synapse/federation/send_queue.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -77,9 +76,6 @@ class FederationRemoteSendQueue(AbstractFederationSender):
# Pending presence map user_id -> UserPresenceState
self.presence_map = {} # type: Dict[str, UserPresenceState]
- # Stream position -> list[user_id]
- self.presence_changed = SortedDict() # type: SortedDict[int, List[str]]
-
# Stores the destinations we need to explicitly send presence to about a
# given user.
# Stream position -> (user_id, destinations)
@@ -97,7 +93,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
self.edus = SortedDict() # type: SortedDict[int, Edu]
- # stream ID for the next entry into presence_changed/keyed_edu_changed/edus.
+ # stream ID for the next entry into keyed_edu_changed/edus.
self.pos = 1
# map from stream ID to the time that stream entry was generated, so that we
@@ -118,7 +114,6 @@ class FederationRemoteSendQueue(AbstractFederationSender):
for queue_name in [
"presence_map",
- "presence_changed",
"keyed_edu",
"keyed_edu_changed",
"edus",
@@ -156,23 +151,12 @@ class FederationRemoteSendQueue(AbstractFederationSender):
"""Clear all the queues from before a given position"""
with Measure(self.clock, "send_queue._clear"):
# Delete things out of presence maps
- keys = self.presence_changed.keys()
- i = self.presence_changed.bisect_left(position_to_delete)
- for key in keys[:i]:
- del self.presence_changed[key]
-
- user_ids = {
- user_id for uids in self.presence_changed.values() for user_id in uids
- }
-
keys = self.presence_destinations.keys()
i = self.presence_destinations.bisect_left(position_to_delete)
for key in keys[:i]:
del self.presence_destinations[key]
- user_ids.update(
- user_id for user_id, _ in self.presence_destinations.values()
- )
+ user_ids = {user_id for user_id, _ in self.presence_destinations.values()}
to_del = [
user_id for user_id in self.presence_map if user_id not in user_ids
@@ -245,23 +229,6 @@ class FederationRemoteSendQueue(AbstractFederationSender):
"""
# nothing to do here: the replication listener will handle it.
- def send_presence(self, states: List[UserPresenceState]) -> None:
- """As per FederationSender
-
- Args:
- states
- """
- pos = self._next_pos()
-
- # We only want to send presence for our own users, so lets always just
- # filter here just in case.
- local_states = [s for s in states if self.is_mine_id(s.user_id)]
-
- self.presence_map.update({state.user_id: state for state in local_states})
- self.presence_changed[pos] = [state.user_id for state in local_states]
-
- self.notifier.on_new_replication_data()
-
def send_presence_to_destinations(
self, states: Iterable[UserPresenceState], destinations: Iterable[str]
) -> None:
@@ -326,18 +293,6 @@ class FederationRemoteSendQueue(AbstractFederationSender):
# of the federation stream.
rows = [] # type: List[Tuple[int, BaseFederationRow]]
- # Fetch changed presence
- i = self.presence_changed.bisect_right(from_token)
- j = self.presence_changed.bisect_right(to_token) + 1
- dest_user_ids = [
- (pos, user_id)
- for pos, user_id_list in self.presence_changed.items()[i:j]
- for user_id in user_id_list
- ]
-
- for (key, user_id) in dest_user_ids:
- rows.append((key, PresenceRow(state=self.presence_map[user_id])))
-
# Fetch presence to send to destinations
i = self.presence_destinations.bisect_right(from_token)
j = self.presence_destinations.bisect_right(to_token) + 1
@@ -428,22 +383,6 @@ class BaseFederationRow:
raise NotImplementedError()
-class PresenceRow(
- BaseFederationRow, namedtuple("PresenceRow", ("state",)) # UserPresenceState
-):
- TypeId = "p"
-
- @staticmethod
- def from_data(data):
- return PresenceRow(state=UserPresenceState.from_dict(data))
-
- def to_data(self):
- return self.state.as_dict()
-
- def add_to_buffer(self, buff):
- buff.presence.append(self.state)
-
-
class PresenceDestinationsRow(
BaseFederationRow,
namedtuple(
@@ -507,7 +446,6 @@ class EduRow(BaseFederationRow, namedtuple("EduRow", ("edu",))): # Edu
_rowtypes = (
- PresenceRow,
PresenceDestinationsRow,
KeyedEduRow,
EduRow,
@@ -519,7 +457,6 @@ TypeToRow = {Row.TypeId: Row for Row in _rowtypes}
ParsedFederationStreamData = namedtuple(
"ParsedFederationStreamData",
(
- "presence", # list(UserPresenceState)
"presence_destinations", # list of tuples of UserPresenceState and destinations
"keyed_edus", # dict of destination -> { key -> Edu }
"edus", # dict of destination -> [Edu]
@@ -544,7 +481,6 @@ def process_rows_for_federation(
# them into the appropriate collection and then send them off.
buff = ParsedFederationStreamData(
- presence=[],
presence_destinations=[],
keyed_edus={},
edus={},
@@ -560,18 +496,15 @@ def process_rows_for_federation(
parsed_row = RowType.from_data(row.data)
parsed_row.add_to_buffer(buff)
- if buff.presence:
- transaction_queue.send_presence(buff.presence)
-
for state, destinations in buff.presence_destinations:
transaction_queue.send_presence_to_destinations(
states=[state], destinations=destinations
)
- for destination, edu_map in buff.keyed_edus.items():
+ for edu_map in buff.keyed_edus.values():
for key, edu in edu_map.items():
transaction_queue.send_edu(edu, key)
- for destination, edu_list in buff.edus.items():
+ for edu_list in buff.edus.values():
for edu in edu_list:
transaction_queue.send_edu(edu, None)
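
The pruning logic retained in _clear leans on sortedcontainers: bisect_left finds the first stream position at or after the cut-off, and slicing the key view yields a plain list, so earlier entries can be deleted while iterating. A standalone sketch of the same pattern:

    from sortedcontainers import SortedDict

    def clear_before(queue: SortedDict, position_to_delete: int) -> None:
        keys = queue.keys()
        i = queue.bisect_left(position_to_delete)
        for key in keys[:i]:  # the slice is a concrete list, safe to delete under
            del queue[key]

    q = SortedDict({1: "a", 3: "b", 7: "c"})
    clear_before(q, 5)
    assert dict(q) == {7: "c"}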
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py
index 8babb1eb..deb40f46 100644
--- a/synapse/federation/sender/__init__.py
+++ b/synapse/federation/sender/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -27,12 +26,7 @@ from synapse.events import EventBase
from synapse.federation.sender.per_destination_queue import PerDestinationQueue
from synapse.federation.sender.transaction_manager import TransactionManager
from synapse.federation.units import Edu
-from synapse.handlers.presence import get_interested_remotes
-from synapse.logging.context import (
- make_deferred_yieldable,
- preserve_fn,
- run_in_background,
-)
+from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.metrics import (
LaterGauge,
event_processing_loop_counter,
@@ -41,9 +35,10 @@ from synapse.metrics import (
)
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import JsonDict, ReadReceipt, RoomStreamToken
-from synapse.util.metrics import Measure, measure_func
+from synapse.util.metrics import Measure
if TYPE_CHECKING:
+ from synapse.events.presence_router import PresenceRouter
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -86,15 +81,6 @@ class AbstractFederationSender(metaclass=abc.ABCMeta):
raise NotImplementedError()
@abc.abstractmethod
- def send_presence(self, states: List[UserPresenceState]) -> None:
- """Send the new presence states to the appropriate destinations.
-
- This actually queues up the presence states ready for sending and
- triggers a background task to process them and send out the transactions.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
def send_presence_to_destinations(
self, states: Iterable[UserPresenceState], destinations: Iterable[str]
) -> None:
@@ -162,6 +148,7 @@ class FederationSender(AbstractFederationSender):
self.clock = hs.get_clock()
self.is_mine_id = hs.is_mine_id
+ self._presence_router = None # type: Optional[PresenceRouter]
self._transaction_manager = TransactionManager(hs)
self._instance_name = hs.get_instance_name()
@@ -181,11 +168,6 @@ class FederationSender(AbstractFederationSender):
),
)
- # Map of user_id -> UserPresenceState for all the pending presence
- # to be sent out by user_id. Entries here get processed and put in
- # pending_presence_by_dest
- self.pending_presence = {} # type: Dict[str, UserPresenceState]
-
LaterGauge(
"synapse_federation_transaction_queue_pending_pdus",
"",
@@ -206,8 +188,6 @@ class FederationSender(AbstractFederationSender):
self._is_processing = False
self._last_poked_id = -1
- self._processing_pending_presence = False
-
# map from room_id to a set of PerDestinationQueues which we believe are
# awaiting a call to flush_read_receipts_for_room. The presence of an entry
# here for a given room means that we are rate-limiting RR flushes to that room,
@@ -517,48 +497,6 @@ class FederationSender(AbstractFederationSender):
for queue in queues:
queue.flush_read_receipts_for_room(room_id)
- @preserve_fn # the caller should not yield on this
- async def send_presence(self, states: List[UserPresenceState]) -> None:
- """Send the new presence states to the appropriate destinations.
-
- This actually queues up the presence states ready for sending and
- triggers a background task to process them and send out the transactions.
- """
- if not self.hs.config.use_presence:
- # No-op if presence is disabled.
- return
-
- # First we queue up the new presence by user ID, so multiple presence
- # updates in quick succession are correctly handled.
- # We only want to send presence for our own users, so lets always just
- # filter here just in case.
- self.pending_presence.update(
- {state.user_id: state for state in states if self.is_mine_id(state.user_id)}
- )
-
- # We then handle the new pending presence in batches, first figuring
- # out the destinations we need to send each state to and then poking it
- # to attempt a new transaction. We linearize this so that we don't
- # accidentally mess up the ordering and send multiple presence updates
- # in the wrong order
- if self._processing_pending_presence:
- return
-
- self._processing_pending_presence = True
- try:
- while True:
- states_map = self.pending_presence
- self.pending_presence = {}
-
- if not states_map:
- break
-
- await self._process_presence_inner(list(states_map.values()))
- except Exception:
- logger.exception("Error sending presence states to servers")
- finally:
- self._processing_pending_presence = False
-
def send_presence_to_destinations(
self, states: Iterable[UserPresenceState], destinations: Iterable[str]
) -> None:
@@ -570,6 +508,10 @@ class FederationSender(AbstractFederationSender):
# No-op if presence is disabled.
return
+ # Ensure we only send out presence states for local users.
+ for state in states:
+ assert self.is_mine_id(state.user_id)
+
for destination in destinations:
if destination == self.server_name:
continue
@@ -579,25 +521,6 @@ class FederationSender(AbstractFederationSender):
continue
self._get_per_destination_queue(destination).send_presence(states)
- @measure_func("txnqueue._process_presence")
- async def _process_presence_inner(self, states: List[UserPresenceState]) -> None:
- """Given a list of states populate self.pending_presence_by_dest and
- poke to send a new transaction to each destination
- """
- hosts_and_states = await get_interested_remotes(self.store, states, self.state)
-
- for destinations, states in hosts_and_states:
- for destination in destinations:
- if destination == self.server_name:
- continue
-
- if not self._federation_shard_config.should_handle(
- self._instance_name, destination
- ):
- continue
-
- self._get_per_destination_queue(destination).send_presence(states)
-
def build_and_send_edu(
self,
destination: str,
@@ -717,16 +640,18 @@ class FederationSender(AbstractFederationSender):
self._catchup_after_startup_timer = None
break
+ last_processed = destinations_to_wake[-1]
+
destinations_to_wake = [
d
for d in destinations_to_wake
if self._federation_shard_config.should_handle(self._instance_name, d)
]
- for last_processed in destinations_to_wake:
+ for destination in destinations_to_wake:
logger.info(
"Destination %s has outstanding catch-up, waking up.",
- last_processed,
+ destination,
)
- self.wake_destination(last_processed)
+ self.wake_destination(destination)
await self.clock.sleep(CATCH_UP_STARTUP_INTERVAL_SEC)
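
The final hunk untangles a pagination bug: the resume cursor must be taken from the unfiltered batch before the per-shard filter runs, otherwise a batch handled entirely by other shards would never advance the loop. A standalone sketch of the corrected shape (stand-in names, not the real FederationSender):

    def wake_in_batches(fetch_batch, should_handle, wake, batch_size=100):
        last_processed = None
        while True:
            batch = fetch_batch(after=last_processed, limit=batch_size)
            if not batch:
                break
            # Advance the cursor past the whole raw batch, even if this
            # shard handles none of its entries.
            last_processed = batch[-1]
            for destination in batch:
                if should_handle(destination):
                    wake(destination)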
diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py
index 89df9a61..3b053ebc 100644
--- a/synapse/federation/sender/per_destination_queue.py
+++ b/synapse/federation/sender/per_destination_queue.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
#
@@ -29,6 +28,7 @@ from synapse.api.presence import UserPresenceState
from synapse.events import EventBase
from synapse.federation.units import Edu
from synapse.handlers.presence import format_user_presence_state
+from synapse.logging.opentracing import SynapseTags, set_tag
from synapse.metrics import sent_transactions_counter
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import ReadReceipt
@@ -557,6 +557,13 @@ class PerDestinationQueue:
contents, stream_id = await self._store.get_new_device_msgs_for_remote(
self._destination, last_device_stream_id, to_device_stream_id, limit
)
+ for content in contents:
+ message_id = content.get("message_id")
+ if not message_id:
+ continue
+
+ set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)
+
edus = [
Edu(
origin=self._server_name,
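
The new loop tags the active tracing span with the id of each outgoing to-device message, so a message can be followed across servers. A toy illustration of the tagging behaviour (FakeSpan is a stand-in, not the opentracing API; note a span keeps one value per tag key, so later ids overwrite earlier ones):

    class FakeSpan:
        def __init__(self):
            self.tags = {}

        def set_tag(self, key, value):
            self.tags[key] = value

    def tag_message_ids(span, contents):
        for content in contents:
            message_id = content.get("message_id")
            if not message_id:
                continue
            span.set_tag("to_device.message_id", message_id)

    span = FakeSpan()
    tag_message_ids(span, [{"message_id": "abc"}, {}, {"message_id": "def"}])
    assert span.tags["to_device.message_id"] == "def"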
diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py
index 07b740c2..72a63583 100644
--- a/synapse/federation/sender/transaction_manager.py
+++ b/synapse/federation/sender/transaction_manager.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -57,7 +56,7 @@ class TransactionManager:
self._transport_layer = hs.get_federation_transport_client()
self._federation_metrics_domains = (
- hs.get_config().federation.federation_metrics_domains
+ hs.config.federation.federation_metrics_domains
)
# HACK to get unique tx id
diff --git a/synapse/federation/transport/__init__.py b/synapse/federation/transport/__init__.py
index 5db733af..3c9a0f69 100644
--- a/synapse/federation/transport/__init__.py
+++ b/synapse/federation/transport/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index 6aee47c4..ada322a8 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 84e39c5a..a3759bdd 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -425,13 +424,9 @@ class FederationSendServlet(BaseFederationServlet):
logger.exception(e)
return 400, {"error": "Invalid transaction"}
- try:
- code, response = await self.handler.on_incoming_transaction(
- origin, transaction_data
- )
- except Exception:
- logger.exception("on_incoming_transaction failed")
- raise
+ code, response = await self.handler.on_incoming_transaction(
+ origin, transaction_data
+ )
return code, response
@@ -620,8 +615,8 @@ class FederationThirdPartyInviteExchangeServlet(BaseFederationServlet):
PATH = "/exchange_third_party_invite/(?P<room_id>[^/]*)"
async def on_PUT(self, origin, content, query, room_id):
- content = await self.handler.on_exchange_third_party_invite_request(content)
- return 200, content
+ await self.handler.on_exchange_third_party_invite_request(content)
+ return 200, {}
class FederationClientKeysQueryServlet(BaseFederationServlet):
diff --git a/synapse/federation/units.py b/synapse/federation/units.py
index b662c426..c83a2619 100644
--- a/synapse/federation/units.py
+++ b/synapse/federation/units.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,6 +17,7 @@ server protocol.
"""
import logging
+from typing import Optional
import attr
@@ -98,7 +98,7 @@ class Transaction(JsonEncodedObject):
"pdus",
]
- def __init__(self, transaction_id=None, pdus=[], **kwargs):
+ def __init__(self, transaction_id=None, pdus: Optional[list] = None, **kwargs):
"""If we include a list of pdus then we decode then as PDU's
automatically.
"""
@@ -107,7 +107,7 @@ class Transaction(JsonEncodedObject):
if "edus" in kwargs and not kwargs["edus"]:
del kwargs["edus"]
- super().__init__(transaction_id=transaction_id, pdus=pdus, **kwargs)
+ super().__init__(transaction_id=transaction_id, pdus=pdus or [], **kwargs)
@staticmethod
def create_new(pdus, **kwargs):
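
The Transaction change above is the classic mutable-default-argument fix: a literal [] in a signature is created once, at definition time, and then shared by every call that omits the argument. A short demonstration of the failure mode and the None-sentinel idiom adopted here:

    def broken(pdus=[]):
        pdus.append("pdu")       # mutates the single shared default list
        return pdus

    def fixed(pdus=None):
        pdus = pdus or []        # fresh list on every call
        pdus.append("pdu")
        return pdus

    assert broken() == ["pdu"]
    assert broken() == ["pdu", "pdu"]   # state leaked between calls
    assert fixed() == ["pdu"]
    assert fixed() == ["pdu"]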
diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py
index 368c4470..d2fc8be5 100644
--- a/synapse/groups/attestations.py
+++ b/synapse/groups/attestations.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/groups/groups_server.py b/synapse/groups/groups_server.py
index 4b16a4ac..a06d060e 100644
--- a/synapse/groups/groups_server.py
+++ b/synapse/groups/groups_server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2019 Michael Telatynski <7t3chguy@gmail.com>
diff --git a/synapse/handlers/__init__.py b/synapse/handlers/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/synapse/handlers/__init__.py
+++ b/synapse/handlers/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py
index aade2c4a..d800e169 100644
--- a/synapse/handlers/_base.py
+++ b/synapse/handlers/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -49,7 +48,7 @@ class BaseHandler:
# The rate_hz and burst_count are overridden on a per-user basis
self.request_ratelimiter = Ratelimiter(
- clock=self.clock, rate_hz=0, burst_count=0
+ store=self.store, clock=self.clock, rate_hz=0, burst_count=0
)
self._rc_message = self.hs.config.rc_message
@@ -57,6 +56,7 @@ class BaseHandler:
# by the presence of rate limits in the config
if self.hs.config.rc_admin_redaction:
self.admin_redaction_ratelimiter = Ratelimiter(
+ store=self.store,
clock=self.clock,
rate_hz=self.hs.config.rc_admin_redaction.per_second,
burst_count=self.hs.config.rc_admin_redaction.burst_count,
@@ -91,11 +91,6 @@ class BaseHandler:
if app_service is not None:
return # do not ratelimit app service senders
- # Disable rate limiting of users belonging to any AS that is configured
- # not to be rate limited in its registration file (rate_limited: true|false).
- if requester.app_service and not requester.app_service.is_rate_limited():
- return
-
messages_per_second = self._rc_message.per_second
burst_count = self._rc_message.burst_count
@@ -113,11 +108,11 @@ class BaseHandler:
if is_admin_redaction and self.admin_redaction_ratelimiter:
# If we have separate config for admin redactions, use a separate
# ratelimiter so as not to have user_ids clash
- self.admin_redaction_ratelimiter.ratelimit(user_id, update=update)
+ await self.admin_redaction_ratelimiter.ratelimit(requester, update=update)
else:
# Override rate and burst count per-user
- self.request_ratelimiter.ratelimit(
- user_id,
+ await self.request_ratelimiter.ratelimit(
+ requester,
rate_hz=messages_per_second,
burst_count=burst_count,
update=update,
diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py
index 1ce6d697..affb54e0 100644
--- a/synapse/handlers/account_data.py
+++ b/synapse/handlers/account_data.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index d781bb25..5b927f10 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,7 +17,7 @@ import email.utils
import logging
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
-from typing import TYPE_CHECKING, List
+from typing import TYPE_CHECKING, List, Optional, Tuple
from synapse.api.errors import StoreError, SynapseError
from synapse.logging.context import make_deferred_yieldable
@@ -40,28 +39,44 @@ class AccountValidityHandler:
self.sendmail = self.hs.get_sendmail()
self.clock = self.hs.get_clock()
- self._account_validity = self.hs.config.account_validity
+ self._account_validity_enabled = (
+ hs.config.account_validity.account_validity_enabled
+ )
+ self._account_validity_renew_by_email_enabled = (
+ hs.config.account_validity.account_validity_renew_by_email_enabled
+ )
+
+ self._account_validity_period = None
+ if self._account_validity_enabled:
+ self._account_validity_period = (
+ hs.config.account_validity.account_validity_period
+ )
if (
- self._account_validity.enabled
- and self._account_validity.renew_by_email_enabled
+ self._account_validity_enabled
+ and self._account_validity_renew_by_email_enabled
):
# Don't do email-specific configuration if renewal by email is disabled.
- self._template_html = self.config.account_validity_template_html
- self._template_text = self.config.account_validity_template_text
+ self._template_html = (
+ hs.config.account_validity.account_validity_template_html
+ )
+ self._template_text = (
+ hs.config.account_validity.account_validity_template_text
+ )
+ account_validity_renew_email_subject = (
+ hs.config.account_validity.account_validity_renew_email_subject
+ )
try:
- app_name = self.hs.config.email_app_name
+ app_name = hs.config.email_app_name
- self._subject = self._account_validity.renew_email_subject % {
- "app": app_name
- }
+ self._subject = account_validity_renew_email_subject % {"app": app_name}
- self._from_string = self.hs.config.email_notif_from % {"app": app_name}
+ self._from_string = hs.config.email_notif_from % {"app": app_name}
except Exception:
# If substitution failed, fall back to the bare strings.
- self._subject = self._account_validity.renew_email_subject
- self._from_string = self.hs.config.email_notif_from
+ self._subject = account_validity_renew_email_subject
+ self._from_string = hs.config.email_notif_from
self._raw_from = email.utils.parseaddr(self._from_string)[1]
@@ -221,47 +236,87 @@ class AccountValidityHandler:
attempts += 1
raise StoreError(500, "Couldn't generate a unique string as refresh string.")
- async def renew_account(self, renewal_token: str) -> bool:
+ async def renew_account(self, renewal_token: str) -> Tuple[bool, bool, int]:
"""Renews the account attached to a given renewal token by pushing back the
expiration date by the current validity period in the server's configuration.
+ If it turns out that the token is valid but has already been used, then the
+ token is considered stale. A token is stale if the 'token_used_ts_ms' db column
+ is non-null.
+
Args:
renewal_token: Token sent with the renewal request.
Returns:
- Whether the provided token is valid.
+ A tuple containing:
+ * A bool representing whether the token is valid and unused.
+ * A bool which is `True` if the token is valid, but stale.
+ * An int representing the user's expiry timestamp as milliseconds since the
+ epoch, or 0 if the token was invalid.
"""
try:
- user_id = await self.store.get_user_from_renewal_token(renewal_token)
+ (
+ user_id,
+ current_expiration_ts,
+ token_used_ts,
+ ) = await self.store.get_user_from_renewal_token(renewal_token)
except StoreError:
- return False
+ return False, False, 0
+
+ # Check whether this token has already been used.
+ if token_used_ts:
+ logger.info(
+ "User '%s' attempted to use previously used token '%s' to renew account",
+ user_id,
+ renewal_token,
+ )
+ return False, True, current_expiration_ts
logger.debug("Renewing an account for user %s", user_id)
- await self.renew_account_for_user(user_id)
- return True
+ # Renew the account. Pass the renewal_token here so that it is not cleared.
+ # We want to keep the token around in case the user attempts to renew their
+ # account with the same token twice (clicking the email link twice).
+ #
+ # In that case, the token will be accepted, but the account's expiration ts
+ # will remain unchanged.
+ new_expiration_ts = await self.renew_account_for_user(
+ user_id, renewal_token=renewal_token
+ )
+
+ return True, False, new_expiration_ts
async def renew_account_for_user(
- self, user_id: str, expiration_ts: int = None, email_sent: bool = False
+ self,
+ user_id: str,
+ expiration_ts: Optional[int] = None,
+ email_sent: bool = False,
+ renewal_token: Optional[str] = None,
) -> int:
"""Renews the account attached to a given user by pushing back the
expiration date by the current validity period in the server's
configuration.
Args:
- renewal_token: Token sent with the renewal request.
+ user_id: The ID of the user to renew.
expiration_ts: New expiration date. Defaults to now + validity period.
- email_sen: Whether an email has been sent for this validity period.
- Defaults to False.
+ email_sent: Whether an email has been sent for this validity period.
+ renewal_token: Token sent with the renewal request. The user's token
+ will be cleared if this is None.
Returns:
New expiration date for this account, as a timestamp in
milliseconds since epoch.
"""
+ now = self.clock.time_msec()
if expiration_ts is None:
- expiration_ts = self.clock.time_msec() + self._account_validity.period
+ expiration_ts = now + self._account_validity_period
await self.store.set_account_validity_for_user(
- user_id=user_id, expiration_ts=expiration_ts, email_sent=email_sent
+ user_id=user_id,
+ expiration_ts=expiration_ts,
+ email_sent=email_sent,
+ renewal_token=renewal_token,
+ token_used_ts=now,
)
return expiration_ts
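
Callers now receive a three-field result instead of a bare bool, which lets them distinguish a stale (already-used) token from an invalid one. A hypothetical caller showing how the tuple is meant to be unpacked (the real consumer is a REST servlet elsewhere in the tree):

    async def describe_renewal(handler, renewal_token: str) -> str:
        valid, stale, expiration_ts = await handler.renew_account(renewal_token)
        if valid:
            return "Account renewed; expires at %d ms since the epoch." % expiration_ts
        if stale:
            # The token was already used: the account stays renewed, but
            # the expiry is not pushed back a second time.
            return "Token already used; account expires at %d." % expiration_ts
        return "Invalid renewal token."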
diff --git a/synapse/handlers/acme.py b/synapse/handlers/acme.py
index 2a25af62..16ab93f5 100644
--- a/synapse/handlers/acme.py
+++ b/synapse/handlers/acme.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/acme_issuing_service.py b/synapse/handlers/acme_issuing_service.py
index ae2a9dd9..a972d3fa 100644
--- a/synapse/handlers/acme_issuing_service.py
+++ b/synapse/handlers/acme_issuing_service.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py
index c494de49..f72ded03 100644
--- a/synapse/handlers/admin.py
+++ b/synapse/handlers/admin.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index 996f9e5d..177310f0 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import TYPE_CHECKING, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Collection, Dict, List, Optional, Union
from prometheus_client import Counter
@@ -34,7 +33,7 @@ from synapse.metrics.background_process_metrics import (
wrap_as_background_process,
)
from synapse.storage.databases.main.directory import RoomAliasMapping
-from synapse.types import Collection, JsonDict, RoomAlias, RoomStreamToken, UserID
+from synapse.types import JsonDict, RoomAlias, RoomStreamToken, UserID
from synapse.util.metrics import Measure
if TYPE_CHECKING:
@@ -182,7 +181,7 @@ class ApplicationServicesHandler:
self,
stream_key: str,
new_token: Optional[int],
- users: Collection[Union[str, UserID]] = [],
+ users: Optional[Collection[Union[str, UserID]]] = None,
):
"""This is called by the notifier in the background
when an ephemeral event is handled by the homeserver.
@@ -215,7 +214,7 @@ class ApplicationServicesHandler:
# We only start a new background process if necessary rather than
# optimistically (to cut down on overhead).
self._notify_interested_services_ephemeral(
- services, stream_key, new_token, users
+ services, stream_key, new_token, users or []
)
@wrap_as_background_process("notify_interested_services_ephemeral")
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index d537ea81..36f2450e 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2019 - 2020 The Matrix.org Foundation C.I.C.
@@ -238,6 +237,7 @@ class AuthHandler(BaseHandler):
# Ratelimiter for failed auth during UIA. Uses same ratelimit config
# as per `rc_login.failed_attempts`.
self._failed_uia_attempts_ratelimiter = Ratelimiter(
+ store=self.store,
clock=self.clock,
rate_hz=self.hs.config.rc_login_failed_attempts.per_second,
burst_count=self.hs.config.rc_login_failed_attempts.burst_count,
@@ -248,6 +248,7 @@ class AuthHandler(BaseHandler):
# Ratelimiter for failed /login attempts
self._failed_login_attempts_ratelimiter = Ratelimiter(
+ store=self.store,
clock=hs.get_clock(),
rate_hz=self.hs.config.rc_login_failed_attempts.per_second,
burst_count=self.hs.config.rc_login_failed_attempts.burst_count,
@@ -352,7 +353,7 @@ class AuthHandler(BaseHandler):
requester_user_id = requester.user.to_string()
# Check if we should be ratelimited due to too many previous failed attempts
- self._failed_uia_attempts_ratelimiter.ratelimit(requester_user_id, update=False)
+ await self._failed_uia_attempts_ratelimiter.ratelimit(requester, update=False)
# build a list of supported flows
supported_ui_auth_types = await self._get_available_ui_auth_types(
@@ -373,7 +374,9 @@ class AuthHandler(BaseHandler):
)
except LoginError:
# Update the ratelimiter to say we failed (`can_do_action` doesn't raise).
- self._failed_uia_attempts_ratelimiter.can_do_action(requester_user_id)
+ await self._failed_uia_attempts_ratelimiter.can_do_action(
+ requester,
+ )
raise
# find the completed login type
@@ -982,8 +985,8 @@ class AuthHandler(BaseHandler):
# We also apply account rate limiting using the 3PID as a key, as
# otherwise using 3PID bypasses the ratelimiting based on user ID.
if ratelimit:
- self._failed_login_attempts_ratelimiter.ratelimit(
- (medium, address), update=False
+ await self._failed_login_attempts_ratelimiter.ratelimit(
+ None, (medium, address), update=False
)
# Check for login providers that support 3pid login types
@@ -1016,8 +1019,8 @@ class AuthHandler(BaseHandler):
# this code path, which is fine as then the per-user ratelimit
# will kick in below.
if ratelimit:
- self._failed_login_attempts_ratelimiter.can_do_action(
- (medium, address)
+ await self._failed_login_attempts_ratelimiter.can_do_action(
+ None, (medium, address)
)
raise LoginError(403, "", errcode=Codes.FORBIDDEN)
@@ -1039,8 +1042,8 @@ class AuthHandler(BaseHandler):
# Check if we've hit the failed ratelimit (but don't update it)
if ratelimit:
- self._failed_login_attempts_ratelimiter.ratelimit(
- qualified_user_id.lower(), update=False
+ await self._failed_login_attempts_ratelimiter.ratelimit(
+ None, qualified_user_id.lower(), update=False
)
try:
@@ -1051,8 +1054,8 @@ class AuthHandler(BaseHandler):
# exception and masking the LoginError. The actual ratelimiting
# should have happened above.
if ratelimit:
- self._failed_login_attempts_ratelimiter.can_do_action(
- qualified_user_id.lower()
+ await self._failed_login_attempts_ratelimiter.can_do_action(
+ None, qualified_user_id.lower()
)
raise
@@ -1245,7 +1248,7 @@ class AuthHandler(BaseHandler):
# see if any of our auth providers want to know about this
for provider in self.password_providers:
- for token, token_id, device_id in tokens_and_devices:
+ for token, _, device_id in tokens_and_devices:
await provider.on_logged_out(
user_id=user_id, device_id=device_id, access_token=token
)
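
The login paths above follow a check-then-record pattern: ratelimit(..., update=False) peeks at the bucket without consuming a token, and only a failed attempt is charged afterwards via can_do_action (which records but never raises). A sketch of that pattern, with limiter standing in for the failed-login ratelimiter:

    class LoginFailure(Exception):
        pass

    async def attempt_login(limiter, key, do_login):
        # Reject up front if the key is already over the limit, without
        # charging the bucket for what may be a successful attempt.
        await limiter.ratelimit(None, key, update=False)
        try:
            return await do_login()
        except LoginFailure:
            await limiter.can_do_action(None, key)  # charge the failure
            raise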
diff --git a/synapse/handlers/cas_handler.py b/synapse/handlers/cas.py
index 5060936f..7346ccfe 100644
--- a/synapse/handlers/cas_handler.py
+++ b/synapse/handlers/cas.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py
index 2bcd8f54..45d2404d 100644
--- a/synapse/handlers/deactivate_account.py
+++ b/synapse/handlers/deactivate_account.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017, 2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
@@ -50,7 +49,9 @@ class DeactivateAccountHandler(BaseHandler):
if hs.config.run_background_tasks:
hs.get_reactor().callWhenRunning(self._start_user_parting)
- self._account_validity_enabled = hs.config.account_validity.enabled
+ self._account_validity_enabled = (
+ hs.config.account_validity.account_validity_enabled
+ )
async def deactivate_account(
self,
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index 54293d0b..95bdc590 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
# Copyright 2019,2020 The Matrix.org Foundation C.I.C.
@@ -15,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Collection, Dict, Iterable, List, Optional, Set, Tuple
from synapse.api import errors
from synapse.api.constants import EventTypes
@@ -29,7 +28,6 @@ from synapse.api.errors import (
from synapse.logging.opentracing import log_kv, set_tag, trace
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import (
- Collection,
JsonDict,
StreamToken,
UserID,
@@ -157,8 +155,7 @@ class DeviceWorkerHandler(BaseHandler):
# The user may have left the room
# TODO: Check if they actually did or if we were just invited.
if room_id not in room_ids:
- for key, event_id in current_state_ids.items():
- etype, state_key = key
+ for etype, state_key in current_state_ids.keys():
if etype != EventTypes.Member:
continue
possibly_left.add(state_key)
@@ -180,8 +177,7 @@ class DeviceWorkerHandler(BaseHandler):
log_kv(
{"event": "encountered empty previous state", "room_id": room_id}
)
- for key, event_id in current_state_ids.items():
- etype, state_key = key
+ for etype, state_key in current_state_ids.keys():
if etype != EventTypes.Member:
continue
possibly_changed.add(state_key)
@@ -199,8 +195,7 @@ class DeviceWorkerHandler(BaseHandler):
for state_dict in prev_state_ids.values():
member_event = state_dict.get((EventTypes.Member, user_id), None)
if not member_event or member_event != current_member_id:
- for key, event_id in current_state_ids.items():
- etype, state_key = key
+ for etype, state_key in current_state_ids.keys():
if etype != EventTypes.Member:
continue
possibly_changed.add(state_key)
@@ -631,7 +626,7 @@ class DeviceListUpdater:
max_len=10000,
expiry_ms=30 * 60 * 1000,
iterable=True,
- )
+ ) # type: ExpiringCache[str, Set[str]]
# Attempt to resync out of sync device lists every 30s.
self._resync_retry_in_progress = False
@@ -715,7 +710,7 @@ class DeviceListUpdater:
# This can happen since we batch updates
return
- for device_id, stream_id, prev_ids, content in pending_updates:
+ for device_id, stream_id, prev_ids, _ in pending_updates:
logger.debug(
"Handling update %r/%r, ID: %r, prev: %r ",
user_id,
@@ -741,7 +736,7 @@ class DeviceListUpdater:
else:
# Simply update the single device, since we know that is the only
# change (because of the single prev_id matching the current cache)
- for device_id, stream_id, prev_ids, content in pending_updates:
+ for device_id, stream_id, _, content in pending_updates:
await self.store.update_remote_device_list_cache_entry(
user_id, device_id, content, stream_id
)
@@ -760,7 +755,7 @@ class DeviceListUpdater:
"""Given a list of updates for a user figure out if we need to do a full
resync, or whether we have enough data that we can just apply the delta.
"""
- seen_updates = self._seen_updates.get(user_id, set())
+ seen_updates = self._seen_updates.get(user_id, set()) # type: Set[str]
extremity = await self.store.get_device_list_last_stream_id_for_remote(user_id)
@@ -930,6 +925,10 @@ class DeviceListUpdater:
else:
cached_devices = await self.store.get_cached_devices_for_user(user_id)
if cached_devices == {d["device_id"]: d for d in devices}:
+ logger.info(
+ "Skipping device list resync for %s, as our cache matches already",
+ user_id,
+ )
devices = []
ignore_devices = True
@@ -945,6 +944,9 @@ class DeviceListUpdater:
await self.store.update_remote_device_list_cache(
user_id, devices, stream_id
)
+ # mark the cache as valid, whether or not we actually processed any device
+ # list updates.
+ await self.store.mark_remote_user_device_cache_as_valid(user_id)
device_ids = [device["device_id"] for device in devices]
# Handle cross-signing keys.
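
Several loops in this file now unpack the state-map keys directly: a room's current state is keyed by (event_type, state_key) pairs, and the event ids (the values) are unused when collecting membership state keys. A minimal illustration:

    EVENT_TYPE_MEMBER = "m.room.member"

    current_state_ids = {
        ("m.room.create", ""): "$create",
        (EVENT_TYPE_MEMBER, "@alice:example.com"): "$alice_join",
        (EVENT_TYPE_MEMBER, "@bob:example.com"): "$bob_join",
    }

    possibly_changed = {
        state_key
        for etype, state_key in current_state_ids.keys()
        if etype == EVENT_TYPE_MEMBER
    }
    assert possibly_changed == {"@alice:example.com", "@bob:example.com"}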
diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py
index eb547743..c5d631de 100644
--- a/synapse/handlers/devicemessage.py
+++ b/synapse/handlers/devicemessage.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,10 +20,10 @@ from synapse.api.errors import SynapseError
from synapse.api.ratelimiting import Ratelimiter
from synapse.logging.context import run_in_background
from synapse.logging.opentracing import (
+ SynapseTags,
get_active_span_text_map,
log_kv,
set_tag,
- start_active_span,
)
from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet
from synapse.types import JsonDict, Requester, UserID, get_domain_from_id
@@ -81,6 +80,7 @@ class DeviceMessageHandler:
)
self._ratelimiter = Ratelimiter(
+ store=self.store,
clock=hs.get_clock(),
rate_hz=hs.config.rc_key_requests.per_second,
burst_count=hs.config.rc_key_requests.burst_count,
@@ -182,7 +182,10 @@ class DeviceMessageHandler:
) -> None:
sender_user_id = requester.user.to_string()
- set_tag("number_of_messages", len(messages))
+ message_id = random_string(16)
+ set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)
+
+ log_kv({"number_of_to_device_messages": len(messages)})
set_tag("sender", sender_user_id)
local_messages = {}
remote_messages = {} # type: Dict[str, Dict[str, Dict[str, JsonDict]]]
@@ -191,8 +194,8 @@ class DeviceMessageHandler:
if (
message_type == EduTypes.RoomKeyRequest
and user_id != sender_user_id
- and self._ratelimiter.can_do_action(
- (sender_user_id, requester.device_id)
+ and await self._ratelimiter.can_do_action(
+ requester, (sender_user_id, requester.device_id)
)
):
continue
@@ -204,32 +207,35 @@ class DeviceMessageHandler:
"content": message_content,
"type": message_type,
"sender": sender_user_id,
+ "message_id": message_id,
}
for device_id, message_content in by_device.items()
}
if messages_by_device:
local_messages[user_id] = messages_by_device
+ log_kv(
+ {
+ "user_id": user_id,
+ "device_id": list(messages_by_device),
+ }
+ )
else:
destination = get_domain_from_id(user_id)
remote_messages.setdefault(destination, {})[user_id] = by_device
- message_id = random_string(16)
-
context = get_active_span_text_map()
remote_edu_contents = {}
for destination, messages in remote_messages.items():
- with start_active_span("to_device_for_user"):
- set_tag("destination", destination)
- remote_edu_contents[destination] = {
- "messages": messages,
- "sender": sender_user_id,
- "type": message_type,
- "message_id": message_id,
- "org.matrix.opentracing_context": json_encoder.encode(context),
- }
+ log_kv({"destination": destination})
+ remote_edu_contents[destination] = {
+ "messages": messages,
+ "sender": sender_user_id,
+ "type": message_type,
+ "message_id": message_id,
+ "org.matrix.opentracing_context": json_encoder.encode(context),
+ }
- log_kv({"local_messages": local_messages})
stream_id = await self.store.add_messages_to_device_inbox(
local_messages, remote_edu_contents
)
@@ -238,7 +244,6 @@ class DeviceMessageHandler:
"to_device_key", stream_id, users=local_messages.keys()
)
- log_kv({"remote_messages": remote_messages})
if self.federation_sender:
for destination in remote_messages.keys():
# Enqueue a new federation transaction to send the new
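
The handler fans each message out by recipient: users on this homeserver go straight to their device inboxes, while everyone else is grouped by destination server for federation. A self-contained sketch of the split (with a toy get_domain_from_id; synapse imports the real one from synapse.types):

    def get_domain_from_id(user_id: str) -> str:
        return user_id.split(":", 1)[1]

    def split_messages(messages_by_user: dict, local_server: str):
        local, remote = {}, {}
        for user_id, by_device in messages_by_user.items():
            domain = get_domain_from_id(user_id)
            if domain == local_server:
                local[user_id] = by_device
            else:
                remote.setdefault(domain, {})[user_id] = by_device
        return local, remote

    local, remote = split_messages(
        {"@a:hs1": {"DEV1": {}}, "@b:hs2": {"DEV2": {}}}, "hs1"
    )
    assert "@a:hs1" in local and "hs2" in remote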
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index abcf8635..90932316 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 2ad9b6d9..97448780 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -38,7 +37,6 @@ from synapse.types import (
)
from synapse.util import json_decoder, unwrapFirstError
from synapse.util.async_helpers import Linearizer
-from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.retryutils import NotRetryingDestination
if TYPE_CHECKING:
@@ -1008,7 +1006,7 @@ class E2eKeysHandler:
return signature_list, failures
async def _get_e2e_cross_signing_verify_key(
- self, user_id: str, key_type: str, from_user_id: str = None
+ self, user_id: str, key_type: str, from_user_id: Optional[str] = None
) -> Tuple[JsonDict, str, VerifyKey]:
"""Fetch locally or remotely query for a cross-signing public key.
@@ -1292,17 +1290,6 @@ class SigningKeyEduUpdater:
# user_id -> list of updates waiting to be handled.
self._pending_updates = {} # type: Dict[str, List[Tuple[JsonDict, JsonDict]]]
- # Recently seen stream ids. We don't bother keeping these in the DB,
- # but they're useful to have them about to reduce the number of spurious
- # resyncs.
- self._seen_updates = ExpiringCache(
- cache_name="signing_key_update_edu",
- clock=self.clock,
- max_len=10000,
- expiry_ms=30 * 60 * 1000,
- iterable=True,
- )
-
async def incoming_signing_key_update(
self, origin: str, edu_content: JsonDict
) -> None:
diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py
index a910d246..31742236 100644
--- a/synapse/handlers/e2e_room_keys.py
+++ b/synapse/handlers/e2e_room_keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017, 2018 New Vector Ltd
# Copyright 2019 Matrix.org Foundation C.I.C.
#
diff --git a/synapse/handlers/event_auth.py b/synapse/handlers/event_auth.py
new file mode 100644
index 00000000..eff639f4
--- /dev/null
+++ b/synapse/handlers/event_auth.py
@@ -0,0 +1,86 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import TYPE_CHECKING
+
+from synapse.api.constants import EventTypes, JoinRules
+from synapse.api.room_versions import RoomVersion
+from synapse.types import StateMap
+
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
+
+class EventAuthHandler:
+ """
+ This class contains methods for authenticating events added to room graphs.
+ """
+
+ def __init__(self, hs: "HomeServer"):
+ self._store = hs.get_datastore()
+
+ async def can_join_without_invite(
+ self, state_ids: StateMap[str], room_version: RoomVersion, user_id: str
+ ) -> bool:
+ """
+ Check whether a user can join a room without an invite.
+
+ When joining a room with restricted joined rules (as defined in MSC3083),
+ the membership of spaces must be checked during join.
+
+ Args:
+ state_ids: The state of the room as it currently is.
+ room_version: The room version of the room being joined.
+ user_id: The user joining the room.
+
+ Returns:
+ True if the user can join the room, false otherwise.
+ """
+ # This only applies to room versions which support the new join rule.
+ if not room_version.msc3083_join_rules:
+ return True
+
+ # If there's no join rule, then it defaults to invite (so this doesn't apply).
+ join_rules_event_id = state_ids.get((EventTypes.JoinRules, ""), None)
+ if not join_rules_event_id:
+ return True
+
+ # If the join rule is not restricted, this doesn't apply.
+ join_rules_event = await self._store.get_event(join_rules_event_id)
+ if join_rules_event.content.get("join_rule") != JoinRules.MSC3083_RESTRICTED:
+ return True
+
+ # If allowed is of the wrong form, then only allow invited users.
+ allowed_spaces = join_rules_event.content.get("allow", [])
+ if not isinstance(allowed_spaces, list):
+ return False
+
+ # Get the list of joined rooms and see if there's an overlap.
+ joined_rooms = await self._store.get_rooms_for_user(user_id)
+
+ # Pull out the space room IDs; invalid entries are filtered out.
+ for space in allowed_spaces:
+ if not isinstance(space, dict):
+ continue
+
+ space_id = space.get("space")
+ if not isinstance(space_id, str):
+ continue
+
+ # The user was joined to one of the specified spaces, so they can
+ # join this room!
+ if space_id in joined_rooms:
+ return True
+
+ # The user was not in any of the required spaces.
+ return False
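
For reference, a sketch of the kind of join-rules content this method inspects (field names follow the code above; the literal join_rule value is an assumption, standing in for JoinRules.MSC3083_RESTRICTED):

    join_rules_content = {
        "join_rule": "restricted",  # assumed value of JoinRules.MSC3083_RESTRICTED
        "allow": [
            {"space": "!space1:example.com"},
            {"space": "!space2:example.com"},
        ],
    }

    joined_rooms = {"!space2:example.com"}
    allowed_spaces = join_rules_content.get("allow", [])
    can_join = any(
        isinstance(s, dict) and s.get("space") in joined_rooms
        for s in allowed_spaces
    )
    assert can_join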
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index f46cab73..d82144d7 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 598a66f7..9d867aaf 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -21,7 +20,17 @@ import itertools
import logging
from collections.abc import Container
from http import HTTPStatus
-from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Tuple, Union
+from typing import (
+ TYPE_CHECKING,
+ Dict,
+ Iterable,
+ List,
+ Optional,
+ Sequence,
+ Set,
+ Tuple,
+ Union,
+)
import attr
from signedjson.key import decode_verify_key_bytes
@@ -94,7 +103,7 @@ logger = logging.getLogger(__name__)
@attr.s(slots=True)
class _NewEventInfo:
- """Holds information about a received event, ready for passing to _handle_new_events
+ """Holds information about a received event, ready for passing to _auth_and_persist_events
Attributes:
event: the received event
@@ -137,6 +146,7 @@ class FederationHandler(BaseHandler):
self.is_mine_id = hs.is_mine_id
self.spam_checker = hs.get_spam_checker()
self.event_creation_handler = hs.get_event_creation_handler()
+ self._event_auth_handler = hs.get_event_auth_handler()
self._message_handler = hs.get_message_handler()
self._server_notices_mxid = hs.config.server_notices_mxid
self.config = hs.config
@@ -171,15 +181,17 @@ class FederationHandler(BaseHandler):
self._ephemeral_messages_enabled = hs.config.enable_ephemeral_messages
- async def on_receive_pdu(self, origin, pdu, sent_to_us_directly=False) -> None:
+ async def on_receive_pdu(
+ self, origin: str, pdu: EventBase, sent_to_us_directly: bool = False
+ ) -> None:
"""Process a PDU received via a federation /send/ transaction, or
via backfill of missing prev_events
Args:
- origin (str): server which initiated the /send/ transaction. Will
+ origin: server which initiated the /send/ transaction. Will
be used to fetch missing events or state.
- pdu (FrozenEvent): received PDU
- sent_to_us_directly (bool): True if this event was pushed to us; False if
+ pdu: received PDU
+ sent_to_us_directly: True if this event was pushed to us; False if
we pulled it as the result of a missing prev_event.
"""
@@ -411,13 +423,15 @@ class FederationHandler(BaseHandler):
await self._process_received_pdu(origin, pdu, state=state)
- async def _get_missing_events_for_pdu(self, origin, pdu, prevs, min_depth):
+ async def _get_missing_events_for_pdu(
+ self, origin: str, pdu: EventBase, prevs: Set[str], min_depth: int
+ ) -> None:
"""
Args:
- origin (str): Origin of the pdu. Will be called to get the missing events
+ origin: Origin of the pdu. Will be called to get the missing events
pdu: received pdu
- prevs (set(str)): List of event ids which we are missing
- min_depth (int): Minimum depth of events to return.
+ prevs: List of event ids which we are missing
+ min_depth: Minimum depth of events to return.
"""
room_id = pdu.room_id
@@ -778,7 +792,7 @@ class FederationHandler(BaseHandler):
origin: str,
event: EventBase,
state: Optional[Iterable[EventBase]],
- ):
+ ) -> None:
"""Called when we have a new pdu. We need to do auth checks and put it
through the StateHandler.
@@ -794,7 +808,10 @@ class FederationHandler(BaseHandler):
logger.debug("Processing event: %s", event)
try:
- await self._handle_new_event(origin, event, state=state)
+ context = await self.state_handler.compute_event_context(
+ event, old_state=state
+ )
+ await self._auth_and_persist_event(origin, event, context, state=state)
except AuthError as e:
raise FederationError("ERROR", e.code, e.msg, affected=event.event_id)
@@ -887,7 +904,9 @@ class FederationHandler(BaseHandler):
logger.exception("Failed to resync device for %s", sender)
@log_function
- async def backfill(self, dest, room_id, limit, extremities):
+ async def backfill(
+ self, dest: str, room_id: str, limit: int, extremities: List[str]
+ ) -> List[EventBase]:
"""Trigger a backfill request to `dest` for the given `room_id`
This will attempt to get more events from the remote. If the other side
@@ -995,7 +1014,9 @@ class FederationHandler(BaseHandler):
)
if ev_infos:
- await self._handle_new_events(dest, room_id, ev_infos, backfilled=True)
+ await self._auth_and_persist_events(
+ dest, room_id, ev_infos, backfilled=True
+ )
# Step 2: Persist the rest of the events in the chunk one by one
events.sort(key=lambda e: e.depth)
@@ -1008,10 +1029,12 @@ class FederationHandler(BaseHandler):
# non-outliers
assert not event.internal_metadata.is_outlier()
+ context = await self.state_handler.compute_event_context(event)
+
# We store these one at a time since each event depends on the
# previous to work out the state.
# TODO: We can probably do something more clever here.
- await self._handle_new_event(dest, event, backfilled=True)
+ await self._auth_and_persist_event(dest, event, context, backfilled=True)
return events
@@ -1142,16 +1165,15 @@ class FederationHandler(BaseHandler):
curr_state = await self.state_handler.get_current_state(room_id)
- def get_domains_from_state(state):
+ def get_domains_from_state(state: StateMap[EventBase]) -> List[Tuple[str, int]]:
"""Get joined domains from state
Args:
- state (dict[tuple, FrozenEvent]): State map from type/state
- key to event.
+ state: State map from type/state key to event.
Returns:
- list[tuple[str, int]]: Returns a list of servers with the
- lowest depth of their joins. Sorted by lowest depth first.
+ Returns a list of servers with the lowest depth of their joins.
+ Sorted by lowest depth first.
"""
joined_users = [
(state_key, int(event.depth))
@@ -1179,7 +1201,7 @@ class FederationHandler(BaseHandler):
domain for domain, depth in curr_domains if domain != self.server_name
]
- async def try_backfill(domains):
+ async def try_backfill(domains: List[str]) -> bool:
# TODO: Should we try multiple of these at a time?
for dom in domains:
try:
@@ -1258,21 +1280,25 @@ class FederationHandler(BaseHandler):
}
for e_id, _ in sorted_extremeties_tuple:
- likely_domains = get_domains_from_state(states[e_id])
+ likely_extremeties_domains = get_domains_from_state(states[e_id])
success = await try_backfill(
- [dom for dom, _ in likely_domains if dom not in tried_domains]
+ [
+ dom
+ for dom, _ in likely_extremeties_domains
+ if dom not in tried_domains
+ ]
)
if success:
return True
- tried_domains.update(dom for dom, _ in likely_domains)
+ tried_domains.update(dom for dom, _ in likely_extremeties_domains)
return False
async def _get_events_and_persist(
self, destination: str, room_id: str, events: Iterable[str]
- ):
+ ) -> None:
"""Fetch the given events from a server, and persist them as outliers.
This function *does not* recursively get missing auth events of the
@@ -1342,13 +1368,13 @@ class FederationHandler(BaseHandler):
event_infos.append(_NewEventInfo(event, None, auth))
- await self._handle_new_events(
+ await self._auth_and_persist_events(
destination,
room_id,
event_infos,
)
- def _sanity_check_event(self, ev):
+ def _sanity_check_event(self, ev: EventBase) -> None:
"""
Do some early sanity checks of a received event
@@ -1357,9 +1383,7 @@ class FederationHandler(BaseHandler):
or cascade of event fetches.
Args:
- ev (synapse.events.EventBase): event to be checked
-
- Returns: None
+ ev: event to be checked
Raises:
SynapseError if the event does not pass muster
@@ -1380,7 +1404,7 @@ class FederationHandler(BaseHandler):
)
raise SynapseError(HTTPStatus.BAD_REQUEST, "Too many auth_events")
- async def send_invite(self, target_host, event):
+ async def send_invite(self, target_host: str, event: EventBase) -> EventBase:
"""Sends the invite to the remote server for signing.
Invites must be signed by the invitee's server before distribution.
@@ -1528,12 +1552,13 @@ class FederationHandler(BaseHandler):
run_in_background(self._handle_queued_pdus, room_queue)
- async def _handle_queued_pdus(self, room_queue):
+ async def _handle_queued_pdus(
+ self, room_queue: List[Tuple[EventBase, str]]
+ ) -> None:
"""Process PDUs which got queued up while we were busy send_joining.
Args:
- room_queue (list[FrozenEvent, str]): list of PDUs to be processed
- and the servers that sent them
+ room_queue: list of PDUs to be processed and the servers that sent them
"""
for p, origin in room_queue:
try:
@@ -1612,7 +1637,7 @@ class FederationHandler(BaseHandler):
return event
- async def on_send_join_request(self, origin, pdu):
+ async def on_send_join_request(self, origin: str, pdu: EventBase) -> JsonDict:
"""We have received a join event for a room. Fully process it and
respond with the current state and auth chains.
"""
@@ -1649,16 +1674,47 @@ class FederationHandler(BaseHandler):
# would introduce the danger of backwards-compatibility problems.
event.internal_metadata.send_on_behalf_of = origin
- context = await self._handle_new_event(origin, event)
+ # Calculate the event context.
+ context = await self.state_handler.compute_event_context(event)
+
+ # Get the state before the new event.
+ prev_state_ids = await context.get_prev_state_ids()
+
+ # Check if the user is already in the room or invited to the room.
+ user_id = event.state_key
+ prev_member_event_id = prev_state_ids.get((EventTypes.Member, user_id), None)
+ newly_joined = True
+ user_is_invited = False
+ if prev_member_event_id:
+ prev_member_event = await self.store.get_event(prev_member_event_id)
+ newly_joined = prev_member_event.membership != Membership.JOIN
+ user_is_invited = prev_member_event.membership == Membership.INVITE
+
+ # If the member is not already in the room, and not invited, check if
+ # they should be allowed access via membership in a space.
+ if (
+ newly_joined
+ and not user_is_invited
+ and not await self._event_auth_handler.can_join_without_invite(
+ prev_state_ids,
+ event.room_version,
+ user_id,
+ )
+ ):
+ raise AuthError(
+ 403,
+ "You do not belong to any of the required spaces to join this room.",
+ )
+
+ # Persist the event.
+ await self._auth_and_persist_event(origin, event, context)
logger.debug(
- "on_send_join_request: After _handle_new_event: %s, sigs: %s",
+ "on_send_join_request: After _auth_and_persist_event: %s, sigs: %s",
event.event_id,
event.signatures,
)
- prev_state_ids = await context.get_prev_state_ids()
-
state_ids = list(prev_state_ids.values())
auth_chain = await self.store.get_auth_chain(event.room_id, state_ids)
@@ -1668,7 +1724,7 @@ class FederationHandler(BaseHandler):
async def on_invite_request(
self, origin: str, event: EventBase, room_version: RoomVersion
- ):
+ ) -> EventBase:
"""We've got an invite event. Process and persist it. Sign it.
Respond with the now signed event.
@@ -1711,7 +1767,7 @@ class FederationHandler(BaseHandler):
member_handler = self.hs.get_room_member_handler()
# We don't rate limit based on room ID, as that should be done by
# sending server.
- member_handler.ratelimit_invite(None, event.state_key)
+ await member_handler.ratelimit_invite(None, None, event.state_key)
# keep a record of the room version, if we don't yet know it.
# (this may get overwritten if we later get a different room version in a
@@ -1772,7 +1828,7 @@ class FederationHandler(BaseHandler):
room_id: str,
user_id: str,
membership: str,
- content: JsonDict = {},
+ content: JsonDict,
params: Optional[Dict[str, Union[str, Iterable[str]]]] = None,
) -> Tuple[str, EventBase, RoomVersion]:
(
@@ -1841,7 +1897,7 @@ class FederationHandler(BaseHandler):
return event
- async def on_send_leave_request(self, origin, pdu):
+ async def on_send_leave_request(self, origin: str, pdu: EventBase) -> None:
""" We have received a leave event for a room. Fully process it."""
event = pdu
@@ -1861,10 +1917,11 @@ class FederationHandler(BaseHandler):
event.internal_metadata.outlier = False
- await self._handle_new_event(origin, event)
+ context = await self.state_handler.compute_event_context(event)
+ await self._auth_and_persist_event(origin, event, context)
logger.debug(
- "on_send_leave_request: After _handle_new_event: %s, sigs: %s",
+ "on_send_leave_request: After _auth_and_persist_event: %s, sigs: %s",
event.event_id,
event.signatures,
)
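Both the join and leave paths now follow the same two-step shape that replaces _handle_new_event: compute the event context first (so the caller can inspect the pre-event state, as the join handler does above), then hand it to _auth_and_persist_event. A minimal sketch of the shared pattern:

    context = await self.state_handler.compute_event_context(event)
    # ... the caller may consult context.get_prev_state_ids() here ...
    await self._auth_and_persist_event(origin, event, context)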
@@ -1969,14 +2026,47 @@ class FederationHandler(BaseHandler):
else:
return None
- async def get_min_depth_for_context(self, context):
+ async def get_min_depth_for_context(self, context: str) -> int:
return await self.store.get_min_depth(context)
- async def _handle_new_event(
- self, origin, event, state=None, auth_events=None, backfilled=False
- ):
- context = await self._prep_event(
- origin, event, state=state, auth_events=auth_events, backfilled=backfilled
+ async def _auth_and_persist_event(
+ self,
+ origin: str,
+ event: EventBase,
+ context: EventContext,
+ state: Optional[Iterable[EventBase]] = None,
+ auth_events: Optional[MutableStateMap[EventBase]] = None,
+ backfilled: bool = False,
+ ) -> None:
+ """
+ Process an event by performing auth checks and then persisting to the database.
+
+ Args:
+ origin: The host the event originates from.
+ event: The event itself.
+ context:
+ The event context.
+
+ NB that this function potentially modifies it.
+ state:
+ The state events used to check the event for soft-fail. If this is
+ not provided the current state events will be used.
+ auth_events:
+ Map from (event_type, state_key) to event
+
+ Normally this is our calculated auth_events, based on the state of the
+ room at the event's position in the DAG, though occasionally (eg if the
+ event is an outlier) it may be the auth events claimed by the remote
+ server.
+ backfilled: True if the event was backfilled.
+ """
+ context = await self._check_event_auth(
+ origin,
+ event,
+ context,
+ state=state,
+ auth_events=auth_events,
+ backfilled=backfilled,
)
try:
@@ -1998,9 +2088,7 @@ class FederationHandler(BaseHandler):
)
raise
- return context
-
- async def _handle_new_events(
+ async def _auth_and_persist_events(
self,
origin: str,
room_id: str,
@@ -2018,9 +2106,13 @@ class FederationHandler(BaseHandler):
async def prep(ev_info: _NewEventInfo):
event = ev_info.event
with nested_logging_context(suffix=event.event_id):
- res = await self._prep_event(
+ res = await self.state_handler.compute_event_context(
+ event, old_state=ev_info.state
+ )
+ res = await self._check_event_auth(
origin,
event,
+ res,
state=ev_info.state,
auth_events=ev_info.auth_events,
backfilled=backfilled,
@@ -2155,49 +2247,6 @@ class FederationHandler(BaseHandler):
room_id, [(event, new_event_context)]
)
- async def _prep_event(
- self,
- origin: str,
- event: EventBase,
- state: Optional[Iterable[EventBase]],
- auth_events: Optional[MutableStateMap[EventBase]],
- backfilled: bool,
- ) -> EventContext:
- context = await self.state_handler.compute_event_context(event, old_state=state)
-
- if not auth_events:
- prev_state_ids = await context.get_prev_state_ids()
- auth_events_ids = self.auth.compute_auth_events(
- event, prev_state_ids, for_verification=True
- )
- auth_events_x = await self.store.get_events(auth_events_ids)
- auth_events = {(e.type, e.state_key): e for e in auth_events_x.values()}
-
- # This is a hack to fix some old rooms where the initial join event
- # didn't reference the create event in its auth events.
- if event.type == EventTypes.Member and not event.auth_event_ids():
- if len(event.prev_event_ids()) == 1 and event.depth < 5:
- c = await self.store.get_event(
- event.prev_event_ids()[0], allow_none=True
- )
- if c and c.type == EventTypes.Create:
- auth_events[(c.type, c.state_key)] = c
-
- context = await self.do_auth(origin, event, context, auth_events=auth_events)
-
- if not context.rejected:
- await self._check_for_soft_fail(event, state, backfilled)
-
- if event.type == EventTypes.GuestAccess and not context.rejected:
- await self.maybe_kick_guest_users(event)
-
- # If we are going to send this event over federation we precaclculate
- # the joined hosts.
- if event.internal_metadata.get_send_on_behalf_of():
- await self.event_creation_handler.cache_joined_hosts_for_event(event)
-
- return context
-
async def _check_for_soft_fail(
self, event: EventBase, state: Optional[Iterable[EventBase]], backfilled: bool
) -> None:
@@ -2280,40 +2329,14 @@ class FederationHandler(BaseHandler):
logger.warning("Soft-failing %r because %s", event, e)
event.internal_metadata.soft_failed = True
- async def on_query_auth(
- self, origin, event_id, room_id, remote_auth_chain, rejects, missing
- ):
- in_room = await self.auth.check_host_in_room(room_id, origin)
- if not in_room:
- raise AuthError(403, "Host not in room.")
-
- event = await self.store.get_event(event_id, check_room_id=room_id)
-
- # Just go through and process each event in `remote_auth_chain`. We
- # don't want to fall into the trap of `missing` being wrong.
- for e in remote_auth_chain:
- try:
- await self._handle_new_event(origin, e)
- except AuthError:
- pass
-
- # Now get the current auth_chain for the event.
- local_auth_chain = await self.store.get_auth_chain(
- room_id, list(event.auth_event_ids()), include_given=True
- )
-
- # TODO: Check if we would now reject event_id. If so we need to tell
- # everyone.
-
- ret = await self.construct_auth_difference(local_auth_chain, remote_auth_chain)
-
- logger.debug("on_query_auth returning: %s", ret)
-
- return ret
-
async def on_get_missing_events(
- self, origin, room_id, earliest_events, latest_events, limit
- ):
+ self,
+ origin: str,
+ room_id: str,
+ earliest_events: List[str],
+ latest_events: List[str],
+ limit: int,
+ ) -> List[EventBase]:
in_room = await self.auth.check_host_in_room(room_id, origin)
if not in_room:
raise AuthError(403, "Host not in room.")
@@ -2334,19 +2357,28 @@ class FederationHandler(BaseHandler):
return missing_events
- async def do_auth(
+ async def _check_event_auth(
self,
origin: str,
event: EventBase,
context: EventContext,
- auth_events: MutableStateMap[EventBase],
+ state: Optional[Iterable[EventBase]],
+ auth_events: Optional[MutableStateMap[EventBase]],
+ backfilled: bool,
) -> EventContext:
"""
+ Checks whether an event should be rejected (for failing auth checks).
Args:
- origin:
- event:
+ origin: The host the event originates from.
+ event: The event itself.
context:
+ The event context.
+
+ NB that this function potentially modifies it.
+ state:
+ The state events used to check the event for soft-fail. If this is
+ not provided the current state events will be used.
auth_events:
Map from (event_type, state_key) to event
@@ -2356,12 +2388,34 @@ class FederationHandler(BaseHandler):
server.
Also NB that this function adds entries to it.
+
+ If this is not provided, it is calculated from the previous state IDs.
+ backfilled: True if the event was backfilled.
+
Returns:
- updated context object
+ The updated context object.
"""
room_version = await self.store.get_room_version_id(event.room_id)
room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
+ if not auth_events:
+ prev_state_ids = await context.get_prev_state_ids()
+ auth_events_ids = self.auth.compute_auth_events(
+ event, prev_state_ids, for_verification=True
+ )
+ auth_events_x = await self.store.get_events(auth_events_ids)
+ auth_events = {(e.type, e.state_key): e for e in auth_events_x.values()}
+
+ # This is a hack to fix some old rooms where the initial join event
+ # didn't reference the create event in its auth events.
+ if event.type == EventTypes.Member and not event.auth_event_ids():
+ if len(event.prev_event_ids()) == 1 and event.depth < 5:
+ c = await self.store.get_event(
+ event.prev_event_ids()[0], allow_none=True
+ )
+ if c and c.type == EventTypes.Create:
+ auth_events[(c.type, c.state_key)] = c
+
try:
context = await self._update_auth_events_and_context_for_auth(
origin, event, context, auth_events
@@ -2383,6 +2437,17 @@ class FederationHandler(BaseHandler):
logger.warning("Failed auth resolution for %r because %s", event, e)
context.rejected = RejectedReason.AUTH_ERROR
+ if not context.rejected:
+ await self._check_for_soft_fail(event, state, backfilled)
+
+ if event.type == EventTypes.GuestAccess and not context.rejected:
+ await self.maybe_kick_guest_users(event)
+
+ # If we are going to send this event over federation we precalculate
+ # the joined hosts.
+ if event.internal_metadata.get_send_on_behalf_of():
+ await self.event_creation_handler.cache_joined_hosts_for_event(event)
+
return context
async def _update_auth_events_and_context_for_auth(
@@ -2392,7 +2457,7 @@ class FederationHandler(BaseHandler):
context: EventContext,
auth_events: MutableStateMap[EventBase],
) -> EventContext:
- """Helper for do_auth. See there for docs.
+ """Helper for _check_event_auth. See there for docs.
Checks whether a given event has the expected auth events. If it
doesn't then we talk to the remote server to compare state to see if
@@ -2472,9 +2537,14 @@ class FederationHandler(BaseHandler):
e.internal_metadata.outlier = True
logger.debug(
- "do_auth %s missing_auth: %s", event.event_id, e.event_id
+ "_check_event_auth %s missing_auth: %s",
+ event.event_id,
+ e.event_id,
+ )
+ context = await self.state_handler.compute_event_context(e)
+ await self._auth_and_persist_event(
+ origin, e, context, auth_events=auth
)
- await self._handle_new_event(origin, e, auth_events=auth)
if e.event_id in event_auth_events:
auth_events[(e.type, e.state_key)] = e
@@ -2617,8 +2687,8 @@ class FederationHandler(BaseHandler):
assumes that we have already processed all events in remote_auth
Params:
- local_auth (list)
- remote_auth (list)
+ local_auth
+ remote_auth
Returns:
dict
@@ -2742,8 +2812,8 @@ class FederationHandler(BaseHandler):
@log_function
async def exchange_third_party_invite(
- self, sender_user_id, target_user_id, room_id, signed
- ):
+ self, sender_user_id: str, target_user_id: str, room_id: str, signed: JsonDict
+ ) -> None:
third_party_invite = {"signed": signed}
event_dict = {
@@ -2835,8 +2905,12 @@ class FederationHandler(BaseHandler):
await member_handler.send_membership_event(None, event, context)
async def add_display_name_to_third_party_invite(
- self, room_version, event_dict, event, context
- ):
+ self,
+ room_version: str,
+ event_dict: JsonDict,
+ event: EventBase,
+ context: EventContext,
+ ) -> Tuple[EventBase, EventContext]:
key = (
EventTypes.ThirdPartyInvite,
event.content["third_party_invite"]["signed"]["token"],
@@ -2872,13 +2946,13 @@ class FederationHandler(BaseHandler):
EventValidator().validate_new(event, self.config)
return (event, context)
- async def _check_signature(self, event, context):
+ async def _check_signature(self, event: EventBase, context: EventContext) -> None:
"""
Checks that the signature in the event is consistent with its invite.
Args:
- event (Event): The m.room.member event to check
- context (EventContext):
+ event: The m.room.member event to check
+ context:
Raises:
AuthError: if signature didn't match any keys, or key has been
@@ -2908,7 +2982,7 @@ class FederationHandler(BaseHandler):
try:
# for each sig on the third_party_invite block of the actual invite
for server, signature_block in signed["signatures"].items():
- for key_name, encoded_signature in signature_block.items():
+ for key_name in signature_block.keys():
if not key_name.startswith("ed25519:"):
continue
@@ -2964,13 +3038,13 @@ class FederationHandler(BaseHandler):
raise last_exception
- async def _check_key_revocation(self, public_key, url):
+ async def _check_key_revocation(self, public_key: str, url: str) -> None:
"""
Checks whether public_key has been revoked.
Args:
- public_key (str): base-64 encoded public key.
- url (str): Key revocation URL.
+ public_key: base-64 encoded public key.
+ url: Key revocation URL.
Raises:
AuthError: if the key has been revoked.
diff --git a/synapse/handlers/groups_local.py b/synapse/handlers/groups_local.py
index a41ca5df..157f2ff2 100644
--- a/synapse/handlers/groups_local.py
+++ b/synapse/handlers/groups_local.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index 5f346f6d..0b3b1fad 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018 New Vector Ltd
@@ -16,7 +15,6 @@
# limitations under the License.
"""Utilities for interacting with Identity Servers"""
-
import logging
import urllib.parse
from typing import Awaitable, Callable, Dict, List, Optional, Tuple
@@ -35,7 +33,11 @@ from synapse.http.site import SynapseRequest
from synapse.types import JsonDict, Requester
from synapse.util import json_decoder
from synapse.util.hash import sha256_and_url_safe_base64
-from synapse.util.stringutils import assert_valid_client_secret, random_string
+from synapse.util.stringutils import (
+ assert_valid_client_secret,
+ random_string,
+ valid_id_server_location,
+)
from ._base import BaseHandler
@@ -61,17 +63,19 @@ class IdentityHandler(BaseHandler):
# Ratelimiters for `/requestToken` endpoints.
self._3pid_validation_ratelimiter_ip = Ratelimiter(
+ store=self.store,
clock=hs.get_clock(),
rate_hz=hs.config.ratelimiting.rc_3pid_validation.per_second,
burst_count=hs.config.ratelimiting.rc_3pid_validation.burst_count,
)
self._3pid_validation_ratelimiter_address = Ratelimiter(
+ store=self.store,
clock=hs.get_clock(),
rate_hz=hs.config.ratelimiting.rc_3pid_validation.per_second,
burst_count=hs.config.ratelimiting.rc_3pid_validation.burst_count,
)
- def ratelimit_request_token_requests(
+ async def ratelimit_request_token_requests(
self,
request: SynapseRequest,
medium: str,
@@ -85,8 +89,12 @@ class IdentityHandler(BaseHandler):
address: The actual threepid ID, e.g. the phone number or email address
"""
- self._3pid_validation_ratelimiter_ip.ratelimit((medium, request.getClientIP()))
- self._3pid_validation_ratelimiter_address.ratelimit((medium, address))
+ await self._3pid_validation_ratelimiter_ip.ratelimit(
+ None, (medium, request.getClientIP())
+ )
+ await self._3pid_validation_ratelimiter_address.ratelimit(
+ None, (medium, address)
+ )
async def threepid_from_creds(
self, id_server: str, creds: Dict[str, str]
@@ -167,6 +175,11 @@ class IdentityHandler(BaseHandler):
server with, if necessary. Required if use_v2 is true
use_v2: Whether to use v2 Identity Service API endpoints. Defaults to True
+ Raises:
+ SynapseError: On any of the following conditions
+ - the supplied id_server is not a valid identity server name
+ - we failed to contact the supplied identity server
+
Returns:
The response from the identity server
"""
@@ -176,6 +189,12 @@ class IdentityHandler(BaseHandler):
if id_access_token is None:
use_v2 = False
+ if not valid_id_server_location(id_server):
+ raise SynapseError(
+ 400,
+ "id_server must be a valid hostname with optional port and path components",
+ )
+
# Decide which API endpoint URLs to use
headers = {}
bind_data = {"sid": sid, "client_secret": client_secret, "mxid": mxid}
@@ -264,12 +283,21 @@ class IdentityHandler(BaseHandler):
id_server: Identity server to unbind from
Raises:
- SynapseError: If we failed to contact the identity server
+ SynapseError: On any of the following conditions
+ - the supplied id_server is not a valid identity server name
+ - we failed to contact the supplied identity server
Returns:
True on success, otherwise False if the identity
server doesn't support unbinding
"""
+
+ if not valid_id_server_location(id_server):
+ raise SynapseError(
+ 400,
+ "id_server must be a valid hostname with optional port and path components",
+ )
+
url = "https://%s/_matrix/identity/api/v1/3pid/unbind" % (id_server,)
url_bytes = "/_matrix/identity/api/v1/3pid/unbind".encode("ascii")
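The new guard rejects id_server values that are not plausible server locations before any URL is built from them. valid_id_server_location itself lives in synapse.util.stringutils and is not shown in this diff; purely as an illustration of the guard's intent (not the actual implementation), such a validator might look like:

    from urllib.parse import urlsplit

    def looks_like_id_server_location(id_server: str) -> bool:
        # Treat the value as the authority-plus-path portion of a URL:
        # a hostname, an optional port, an optional path -- nothing else.
        parts = urlsplit("//" + id_server)
        return bool(parts.hostname) and not parts.query and not parts.fragment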
diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py
index 13f81522..76242865 100644
--- a/synapse/handlers/initial_sync.py
+++ b/synapse/handlers/initial_sync.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 1b7c065b..ec8eb216 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -137,7 +136,7 @@ class MessageHandler:
self,
user_id: str,
room_id: str,
- state_filter: StateFilter = StateFilter.all(),
+ state_filter: Optional[StateFilter] = None,
at_token: Optional[StreamToken] = None,
is_guest: bool = False,
) -> List[dict]:
@@ -164,6 +163,8 @@ class MessageHandler:
AuthError (403) if the user doesn't have permission to view
members of this room.
"""
+ state_filter = state_filter or StateFilter.all()
+
if at_token:
# FIXME this claims to get the state at a stream position, but
# get_recent_events_for_room operates by topo ordering. This therefore
@@ -385,7 +386,7 @@ class EventCreationHandler:
self._events_shard_config = self.config.worker.events_shard_config
self._instance_name = hs.get_instance_name()
- self.room_invite_state_types = self.hs.config.room_invite_state_types
+ self.room_invite_state_types = self.hs.config.api.room_prejoin_state
self.membership_types_to_include_profile_data_in = (
{Membership.JOIN, Membership.INVITE}
@@ -874,7 +875,7 @@ class EventCreationHandler:
event: EventBase,
context: EventContext,
ratelimit: bool = True,
- extra_users: List[UserID] = [],
+ extra_users: Optional[List[UserID]] = None,
ignore_shadow_ban: bool = False,
) -> EventBase:
"""Processes a new event.
@@ -902,6 +903,7 @@ class EventCreationHandler:
Raises:
ShadowBanError if the requester has been shadow-banned.
"""
+ extra_users = extra_users or []
# we don't apply shadow-banning to membership events here. Invites are blocked
# higher up the stack, and we allow shadow-banned users to send join and leave
@@ -1071,7 +1073,7 @@ class EventCreationHandler:
event: EventBase,
context: EventContext,
ratelimit: bool = True,
- extra_users: List[UserID] = [],
+ extra_users: Optional[List[UserID]] = None,
) -> EventBase:
"""Called when we have fully built the event, have already
calculated the push actions for the event, and checked auth.
@@ -1083,6 +1085,8 @@ class EventCreationHandler:
it was de-duplicated (e.g. because we had already persisted an
event with the same transaction ID.)
"""
+ extra_users = extra_users or []
+
assert self.storage.persistence is not None
assert self._events_shard_config.should_handle(
self._instance_name, event.room_id
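The message.py changes above all replace mutable default arguments -- a classic Python pitfall, since the default list is created once at definition time and shared across calls -- with Optional[...] = None plus an or-fallback in the body. The general shape, as a standalone (hypothetical) illustration:

    from typing import List, Optional

    def notify(event_id: str, extra_users: Optional[List[str]] = None) -> None:
        # A fresh list per call; an `extra_users: List[str] = []` default
        # would be created once and shared by every invocation.
        extra_users = extra_users or []
        for user in extra_users:
            print(f"notifying {user} about {event_id}")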
diff --git a/synapse/handlers/oidc_handler.py b/synapse/handlers/oidc.py
index 6624212d..ee6e41c0 100644
--- a/synapse/handlers/oidc_handler.py
+++ b/synapse/handlers/oidc.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Quentin Gliech
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
@@ -16,7 +15,7 @@
import inspect
import logging
from typing import TYPE_CHECKING, Dict, Generic, List, Optional, TypeVar, Union
-from urllib.parse import urlencode
+from urllib.parse import urlencode, urlparse
import attr
import pymacaroons
@@ -38,10 +37,7 @@ from twisted.web.client import readBody
from twisted.web.http_headers import Headers
from synapse.config import ConfigError
-from synapse.config.oidc_config import (
- OidcProviderClientSecretJwtKey,
- OidcProviderConfig,
-)
+from synapse.config.oidc import OidcProviderClientSecretJwtKey, OidcProviderConfig
from synapse.handlers.sso import MappingException, UserAttributes
from synapse.http.site import SynapseRequest
from synapse.logging.context import make_deferred_yieldable
@@ -72,8 +68,8 @@ logger = logging.getLogger(__name__)
#
# Here we have the names of the cookies, and the options we use to set them.
_SESSION_COOKIES = [
- (b"oidc_session", b"Path=/_synapse/client/oidc; HttpOnly; Secure; SameSite=None"),
- (b"oidc_session_no_samesite", b"Path=/_synapse/client/oidc; HttpOnly"),
+ (b"oidc_session", b"HttpOnly; Secure; SameSite=None"),
+ (b"oidc_session_no_samesite", b"HttpOnly"),
]
#: A token exchanged from the token endpoint, as per RFC6749 sec 5.1. and
@@ -283,6 +279,13 @@ class OidcProvider:
self._config = provider
self._callback_url = hs.config.oidc_callback_url # type: str
+ # Calculate the prefix for OIDC callback paths based on the public_baseurl.
+ # We'll insert this into the Path= parameter of any session cookies we set.
+ public_baseurl_path = urlparse(hs.config.server.public_baseurl).path
+ self._callback_path_prefix = (
+ public_baseurl_path.encode("utf-8") + b"_synapse/client/oidc"
+ )
+
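public_baseurl is normalised elsewhere to end in a trailing slash, which this concatenation appears to rely on. A worked example with a hypothetical base URL:

    from urllib.parse import urlparse

    public_baseurl = "https://matrix.example.com/prefix/"
    path = urlparse(public_baseurl).path               # "/prefix/"
    prefix = path.encode("utf-8") + b"_synapse/client/oidc"
    assert prefix == b"/prefix/_synapse/client/oidc"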
self._oidc_attribute_requirements = provider.attribute_requirements
self._scopes = provider.scopes
self._user_profile_method = provider.user_profile_method
@@ -783,8 +786,13 @@ class OidcProvider:
for cookie_name, options in _SESSION_COOKIES:
request.cookies.append(
- b"%s=%s; Max-Age=3600; %s"
- % (cookie_name, cookie.encode("utf-8"), options)
+ b"%s=%s; Max-Age=3600; Path=%s; %s"
+ % (
+ cookie_name,
+ cookie.encode("utf-8"),
+ self._callback_path_prefix,
+ options,
+ )
)
metadata = await self.load_metadata()
@@ -961,6 +969,11 @@ class OidcProvider:
# and attempt to match it.
attributes = await oidc_response_to_user_attributes(failures=0)
+ if attributes.localpart is None:
+ # If no localpart is returned then we will generate one, so
+ # there is no need to search for existing users.
+ return None
+
user_id = UserID(attributes.localpart, self._server_name).to_string()
users = await self._store.get_users_by_id_case_insensitive(user_id)
if users:
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 66dc886c..1e1186c2 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
# Copyright 2017 - 2018 New Vector Ltd
#
diff --git a/synapse/handlers/password_policy.py b/synapse/handlers/password_policy.py
index 92cefa11..cd21efdc 100644
--- a/synapse/handlers/password_policy.py
+++ b/synapse/handlers/password_policy.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index da92feac..12df35f2 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
@@ -23,9 +22,22 @@ The methods that define policy are:
- should_notify
"""
import abc
+import contextlib
import logging
+from bisect import bisect
from contextlib import contextmanager
-from typing import TYPE_CHECKING, Dict, Iterable, List, Set, Tuple
+from typing import (
+ TYPE_CHECKING,
+ Collection,
+ Dict,
+ FrozenSet,
+ Iterable,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Union,
+)
from prometheus_client import Counter
from typing_extensions import ContextManager
@@ -34,15 +46,22 @@ import synapse.metrics
from synapse.api.constants import EventTypes, Membership, PresenceState
from synapse.api.errors import SynapseError
from synapse.api.presence import UserPresenceState
+from synapse.events.presence_router import PresenceRouter
from synapse.logging.context import run_in_background
from synapse.logging.utils import log_function
from synapse.metrics import LaterGauge
from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.state import StateHandler
+from synapse.replication.http.presence import (
+ ReplicationBumpPresenceActiveTime,
+ ReplicationPresenceSetState,
+)
+from synapse.replication.http.streams import ReplicationGetStreamUpdates
+from synapse.replication.tcp.commands import ClearUserSyncsCommand
+from synapse.replication.tcp.streams import PresenceFederationStream, PresenceStream
from synapse.storage.databases.main import DataStore
-from synapse.types import Collection, JsonDict, UserID, get_domain_from_id
+from synapse.types import JsonDict, UserID, get_domain_from_id
from synapse.util.async_helpers import Linearizer
-from synapse.util.caches.descriptors import cached
+from synapse.util.caches.descriptors import _CacheContext, cached
from synapse.util.metrics import Measure
from synapse.util.wheel_timer import WheelTimer
@@ -94,15 +113,29 @@ FEDERATION_PING_INTERVAL = 25 * 60 * 1000
# are dead.
EXTERNAL_PROCESS_EXPIRY = 5 * 60 * 1000
+# Delay before a worker tells the presence handler that a user has stopped
+# syncing.
+UPDATE_SYNCING_USERS_MS = 10 * 1000
+
assert LAST_ACTIVE_GRANULARITY < IDLE_TIMER
class BasePresenceHandler(abc.ABC):
- """Parts of the PresenceHandler that are shared between workers and master"""
+ """Parts of the PresenceHandler that are shared between workers and presence
+ writer"""
def __init__(self, hs: "HomeServer"):
self.clock = hs.get_clock()
self.store = hs.get_datastore()
+ self.presence_router = hs.get_presence_router()
+ self.state = hs.get_state_handler()
+ self.is_mine_id = hs.is_mine_id
+
+ self._federation = None
+ if hs.should_send_federation():
+ self._federation = hs.get_federation_sender()
+
+ self._federation_queue = PresenceFederationQueue(hs, self)
self._busy_presence_enabled = hs.config.experimental.msc3026_enabled
@@ -198,17 +231,305 @@ class BasePresenceHandler(abc.ABC):
with the app.
"""
+ async def update_external_syncs_row(
+ self, process_id, user_id, is_syncing, sync_time_msec
+ ):
+ """Update the syncing users for an external process as a delta.
+
+ This is a no-op when presence is handled by a different worker.
+
+ Args:
+ process_id (str): An identifier for the process the users are
+ syncing against. This allows synapse to process updates
+ as user start and stop syncing against a given process.
+ user_id (str): The user who has started or stopped syncing
+ is_syncing (bool): Whether or not the user is now syncing
+ sync_time_msec (int): Time in ms when the user was last syncing
+ """
+ pass
+
+ async def update_external_syncs_clear(self, process_id):
+ """Marks all users that had been marked as syncing by a given process
+ as offline.
+
+ Used when the process has stopped/disappeared.
+
+ This is a no-op when presence is handled by a different worker.
+ """
+ pass
+
+ async def process_replication_rows(
+ self, stream_name: str, instance_name: str, token: int, rows: list
+ ):
+ """Process streams received over replication."""
+ await self._federation_queue.process_replication_rows(
+ stream_name, instance_name, token, rows
+ )
+
+ def get_federation_queue(self) -> "PresenceFederationQueue":
+ """Get the presence federation queue."""
+ return self._federation_queue
+
+ async def maybe_send_presence_to_interested_destinations(
+ self, states: List[UserPresenceState]
+ ):
+ """If this instance is a federation sender, send the states to all
+ destinations that are interested. Filters out any states for remote
+ users.
+ """
+
+ if not self._federation:
+ return
+
+ states = [s for s in states if self.is_mine_id(s.user_id)]
+
+ if not states:
+ return
+
+ hosts_and_states = await get_interested_remotes(
+ self.store,
+ self.presence_router,
+ states,
+ )
+
+ for destinations, states in hosts_and_states:
+ self._federation.send_presence_to_destinations(states, destinations)
+
+
+class _NullContextManager(ContextManager[None]):
+ """A context manager which does nothing."""
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
+
+class WorkerPresenceHandler(BasePresenceHandler):
+ def __init__(self, hs):
+ super().__init__(hs)
+ self.hs = hs
+
+ self._presence_writer_instance = hs.config.worker.writers.presence[0]
+
+ self._presence_enabled = hs.config.use_presence
+
+ # Route presence EDUs to the right worker
+ hs.get_federation_registry().register_instances_for_edu(
+ "m.presence",
+ hs.config.worker.writers.presence,
+ )
+
+ # The number of ongoing syncs on this process, by user id.
+ # Empty if _presence_enabled is false.
+ self._user_to_num_current_syncs = {} # type: Dict[str, int]
+
+ self.notifier = hs.get_notifier()
+ self.instance_id = hs.get_instance_id()
+
+ # user_id -> last_sync_ms. Lists the users that have stopped syncing but
+ # we haven't notified the presence writer of that yet
+ self.users_going_offline = {}
+
+ self._bump_active_client = ReplicationBumpPresenceActiveTime.make_client(hs)
+ self._set_state_client = ReplicationPresenceSetState.make_client(hs)
+
+ self._send_stop_syncing_loop = self.clock.looping_call(
+ self.send_stop_syncing, UPDATE_SYNCING_USERS_MS
+ )
+
+ self._busy_presence_enabled = hs.config.experimental.msc3026_enabled
+
+ hs.get_reactor().addSystemEventTrigger(
+ "before",
+ "shutdown",
+ run_as_background_process,
+ "generic_presence.on_shutdown",
+ self._on_shutdown,
+ )
+
+ def _on_shutdown(self):
+ if self._presence_enabled:
+ self.hs.get_tcp_replication().send_command(
+ ClearUserSyncsCommand(self.instance_id)
+ )
+
+ def send_user_sync(self, user_id, is_syncing, last_sync_ms):
+ if self._presence_enabled:
+ self.hs.get_tcp_replication().send_user_sync(
+ self.instance_id, user_id, is_syncing, last_sync_ms
+ )
+
+ def mark_as_coming_online(self, user_id):
+ """A user has started syncing. Send a UserSync to the presence writer,
+ unless they had recently stopped syncing.
+
+ Args:
+ user_id (str)
+ """
+ going_offline = self.users_going_offline.pop(user_id, None)
+ if not going_offline:
+ # Safe to skip because we haven't yet told the presence writer they
+ # were offline
+ self.send_user_sync(user_id, True, self.clock.time_msec())
+
+ def mark_as_going_offline(self, user_id):
+ """A user has stopped syncing. We wait before notifying the presence
+ writer as its likely they'll come back soon. This allows us to avoid
+ sending a stopped syncing immediately followed by a started syncing
+ notification to the presence writer
+
+ Args:
+ user_id (str)
+ """
+ self.users_going_offline[user_id] = self.clock.time_msec()
+
+ def send_stop_syncing(self):
+ """Check if there are any users who have stopped syncing a while ago and
+ haven't come back yet. If there are, poke the presence writer about them.
+ """
+ now = self.clock.time_msec()
+ for user_id, last_sync_ms in list(self.users_going_offline.items()):
+ if now - last_sync_ms > UPDATE_SYNCING_USERS_MS:
+ self.users_going_offline.pop(user_id, None)
+ self.send_user_sync(user_id, False, last_sync_ms)
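A worked example of the debounce, under the 10-second UPDATE_SYNCING_USERS_MS window defined above:

    # t=0s  user stops syncing   -> users_going_offline[user_id] = 0
    # t=4s  user resumes         -> mark_as_coming_online pops the entry and,
    #                               since it was present, sends no UserSync
    # Only a gap longer than UPDATE_SYNCING_USERS_MS survives until
    # send_stop_syncing fires and a UserSync(is_syncing=False) goes out.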
+
+ async def user_syncing(
+ self, user_id: str, affect_presence: bool
+ ) -> ContextManager[None]:
+ """Record that a user is syncing.
+
+ Called by the sync and events servlets to record that a user has connected to
+ this worker and is waiting for some events.
+ """
+ if not affect_presence or not self._presence_enabled:
+ return _NullContextManager()
+
+ curr_sync = self._user_to_num_current_syncs.get(user_id, 0)
+ self._user_to_num_current_syncs[user_id] = curr_sync + 1
+
+ # If we went from no in-flight syncs to some, notify replication
+ if self._user_to_num_current_syncs[user_id] == 1:
+ self.mark_as_coming_online(user_id)
+
+ def _end():
+ # We check that the user_id is in user_to_num_current_syncs because
+ # user_to_num_current_syncs may have been cleared if we are
+ # shutting down.
+ if user_id in self._user_to_num_current_syncs:
+ self._user_to_num_current_syncs[user_id] -= 1
+
+ # If we went from one in-flight sync to none, notify replication
+ if self._user_to_num_current_syncs[user_id] == 0:
+ self.mark_as_going_offline(user_id)
+
+ @contextlib.contextmanager
+ def _user_syncing():
+ try:
+ yield
+ finally:
+ _end()
+
+ return _user_syncing()
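As the docstring notes, the sync and events servlets wrap their wait in the returned context manager. A simplified usage sketch (presence_handler and user_id assumed in scope):

    context = await presence_handler.user_syncing(user_id, affect_presence=True)
    with context:
        ...  # wait for events; the user counts as syncing for the duration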
+
+ async def notify_from_replication(self, states, stream_id):
+ parties = await get_interested_parties(self.store, self.presence_router, states)
+ room_ids_to_states, users_to_states = parties
+
+ self.notifier.on_new_event(
+ "presence_key",
+ stream_id,
+ rooms=room_ids_to_states.keys(),
+ users=users_to_states.keys(),
+ )
+
+ # If this is a federation sender, notify about presence updates.
+ await self.maybe_send_presence_to_interested_destinations(states)
+
+ async def process_replication_rows(
+ self, stream_name: str, instance_name: str, token: int, rows: list
+ ):
+ await super().process_replication_rows(stream_name, instance_name, token, rows)
+
+ if stream_name != PresenceStream.NAME:
+ return
+
+ states = [
+ UserPresenceState(
+ row.user_id,
+ row.state,
+ row.last_active_ts,
+ row.last_federation_update_ts,
+ row.last_user_sync_ts,
+ row.status_msg,
+ row.currently_active,
+ )
+ for row in rows
+ ]
+
+ for state in states:
+ self.user_to_current_state[state.user_id] = state
+
+ stream_id = token
+ await self.notify_from_replication(states, stream_id)
+
+ def get_currently_syncing_users_for_replication(self) -> Iterable[str]:
+ return [
+ user_id
+ for user_id, count in self._user_to_num_current_syncs.items()
+ if count > 0
+ ]
+
+ async def set_state(self, target_user, state, ignore_status_msg=False):
+ """Set the presence state of the user."""
+ presence = state["presence"]
+
+ valid_presence = (
+ PresenceState.ONLINE,
+ PresenceState.UNAVAILABLE,
+ PresenceState.OFFLINE,
+ PresenceState.BUSY,
+ )
+
+ if presence not in valid_presence or (
+ presence == PresenceState.BUSY and not self._busy_presence_enabled
+ ):
+ raise SynapseError(400, "Invalid presence state")
+
+ user_id = target_user.to_string()
+
+ # If presence is disabled, no-op
+ if not self.hs.config.use_presence:
+ return
+
+ # Proxy request to instance that writes presence
+ await self._set_state_client(
+ instance_name=self._presence_writer_instance,
+ user_id=user_id,
+ state=state,
+ ignore_status_msg=ignore_status_msg,
+ )
+
+ async def bump_presence_active_time(self, user):
+ """We've seen the user do something that indicates they're interacting
+ with the app.
+ """
+ # If presence is disabled, no-op
+ if not self.hs.config.use_presence:
+ return
+
+ # Proxy request to instance that writes presence
+ user_id = user.to_string()
+ await self._bump_active_client(
+ instance_name=self._presence_writer_instance, user_id=user_id
+ )
+
class PresenceHandler(BasePresenceHandler):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.hs = hs
- self.is_mine_id = hs.is_mine_id
self.server_name = hs.hostname
self.wheel_timer = WheelTimer()
self.notifier = hs.get_notifier()
- self.federation = hs.get_federation_sender()
- self.state = hs.get_state_handler()
self._presence_enabled = hs.config.use_presence
federation_registry = hs.get_federation_registry()
@@ -415,6 +736,13 @@ class PresenceHandler(BasePresenceHandler):
self.unpersisted_users_changes |= {s.user_id for s in new_states}
self.unpersisted_users_changes -= set(to_notify.keys())
+ # Check if we need to resend any presence states to remote hosts. We
+ # only do this for states that haven't been updated in a while to
+ # ensure that the remote host doesn't time the presence state out.
+ #
+ # Note that since these are states that have *not* been updated,
+ # they won't get sent down the normal presence replication stream,
+ # and so we have to explicitly send them via the federation stream.
to_federation_ping = {
user_id: state
for user_id, state in to_federation_ping.items()
@@ -423,7 +751,16 @@ class PresenceHandler(BasePresenceHandler):
if to_federation_ping:
federation_presence_out_counter.inc(len(to_federation_ping))
- self._push_to_remotes(to_federation_ping.values())
+ hosts_and_states = await get_interested_remotes(
+ self.store,
+ self.presence_router,
+ list(to_federation_ping.values()),
+ )
+
+ for destinations, states in hosts_and_states:
+ self._federation_queue.send_presence_to_destinations(
+ states, destinations
+ )
async def _handle_timeouts(self):
"""Checks the presence of users that have timed out and updates as
@@ -653,7 +990,7 @@ class PresenceHandler(BasePresenceHandler):
"""
stream_id, max_token = await self.store.update_presence(states)
- parties = await get_interested_parties(self.store, states)
+ parties = await get_interested_parties(self.store, self.presence_router, states)
room_ids_to_states, users_to_states = parties
self.notifier.on_new_event(
@@ -663,15 +1000,10 @@ class PresenceHandler(BasePresenceHandler):
users=[UserID.from_string(u) for u in users_to_states],
)
- self._push_to_remotes(states)
-
- def _push_to_remotes(self, states):
- """Sends state updates to remote servers.
-
- Args:
- states (list(UserPresenceState))
- """
- self.federation.send_presence(states)
+ # We only want to poke the local federation sender, if any, as other
+ # workers will receive the presence updates via the presence replication
+ # stream (which is updated by `store.update_presence`).
+ await self.maybe_send_presence_to_interested_destinations(states)
async def incoming_presence(self, origin, content):
"""Called when we receive a `m.presence` EDU from a remote server."""
@@ -909,7 +1241,7 @@ class PresenceHandler(BasePresenceHandler):
# Send out user presence updates for each destination
for destination, user_state_set in presence_destinations.items():
- self.federation.send_presence_to_destinations(
+ self._federation_queue.send_presence_to_destinations(
destinations=[destination], states=user_state_set
)
@@ -1041,10 +1373,14 @@ class PresenceEventSource:
#
# Presence -> Notifier -> PresenceEventSource -> Presence
#
+ # Same with get_module_api, get_presence_router
+ #
+ # AuthHandler -> Notifier -> PresenceEventSource -> ModuleApi -> AuthHandler
self.get_presence_handler = hs.get_presence_handler
+ self.get_module_api = hs.get_module_api
+ self.get_presence_router = hs.get_presence_router
self.clock = hs.get_clock()
self.store = hs.get_datastore()
- self.state = hs.get_state_handler()
@log_function
async def get_new_events(
@@ -1054,8 +1390,8 @@ class PresenceEventSource:
room_ids=None,
include_offline=True,
explicit_room_id=None,
- **kwargs
- ):
+ **kwargs,
+ ) -> Tuple[List[UserPresenceState], int]:
# The process for getting presence events are:
# 1. Get the rooms the user is in.
# 2. Get the list of user in the rooms.
@@ -1068,7 +1404,17 @@ class PresenceEventSource:
# We don't try and limit the presence updates by the current token, as
# sending down the rare duplicate is not a concern.
+ user_id = user.to_string()
+ stream_change_cache = self.store.presence_stream_cache
+
with Measure(self.clock, "presence.get_new_events"):
+ if user_id in self.get_module_api()._send_full_presence_to_local_users:
+ # This user has been specified by a module to receive all current, online
+ # user presence. Removing from_key and setting include_offline to false
+ # will effectively do this.
+ from_key = None
+ include_offline = False
+
if from_key is not None:
from_key = int(from_key)
@@ -1091,59 +1437,209 @@ class PresenceEventSource:
# doesn't return. C.f. #5503.
return [], max_token
- presence = self.get_presence_handler()
- stream_change_cache = self.store.presence_stream_cache
-
+ # Figure out which other users this user should receive updates for
users_interested_in = await self._get_interested_in(user, explicit_room_id)
- user_ids_changed = set() # type: Collection[str]
- changed = None
- if from_key:
- changed = stream_change_cache.get_all_entities_changed(from_key)
+ # We have a set of users that we're interested in the presence of. We want to
+ # cross-reference that with the users that have actually changed their presence.
- if changed is not None and len(changed) < 500:
- assert isinstance(user_ids_changed, set)
+ # Check whether this user should see all user updates
- # For small deltas, its quicker to get all changes and then
- # work out if we share a room or they're in our presence list
- get_updates_counter.labels("stream").inc()
- for other_user_id in changed:
- if other_user_id in users_interested_in:
- user_ids_changed.add(other_user_id)
- else:
- # Too many possible updates. Find all users we can see and check
- # if any of them have changed.
- get_updates_counter.labels("full").inc()
+ if users_interested_in == PresenceRouter.ALL_USERS:
+ # Provide presence state for all users
+ presence_updates = await self._filter_all_presence_updates_for_user(
+ user_id, include_offline, from_key
+ )
- if from_key:
- user_ids_changed = stream_change_cache.get_entities_changed(
- users_interested_in, from_key
+ # Remove the user from the list of users to receive all presence
+ if user_id in self.get_module_api()._send_full_presence_to_local_users:
+ self.get_module_api()._send_full_presence_to_local_users.remove(
+ user_id
)
+
+ return presence_updates, max_token
+
+ # Make mypy happy. users_interested_in should now be a set
+ assert not isinstance(users_interested_in, str)
+
+ # The set of users that we're interested in and that have had a presence update.
+ # We'll actually pull the presence updates for these users at the end.
+ interested_and_updated_users = (
+ set()
+ ) # type: Union[Set[str], FrozenSet[str]]
+
+ if from_key:
+ # First get all users that have had a presence update
+ updated_users = stream_change_cache.get_all_entities_changed(from_key)
+
+ # Cross-reference users we're interested in with those that have had updates.
+ # Use a slightly-optimised method for processing smaller sets of updates.
+ if updated_users is not None and len(updated_users) < 500:
+ # For small deltas, it's quicker to get all changes and then
+ # cross-reference with the users we're interested in
+ get_updates_counter.labels("stream").inc()
+ for other_user_id in updated_users:
+ if other_user_id in users_interested_in:
+ # mypy thinks this variable could be a FrozenSet as it's possibly set
+ # to one in the `get_entities_changed` call below, and `add()` is not a
+ # method on a FrozenSet. That doesn't affect us here though, as
+ # `interested_and_updated_users` is clearly a set() above.
+ interested_and_updated_users.add(other_user_id) # type: ignore
else:
- user_ids_changed = users_interested_in
+ # Too many possible updates. Find all users we can see and check
+ # if any of them have changed.
+ get_updates_counter.labels("full").inc()
+
+ interested_and_updated_users = (
+ stream_change_cache.get_entities_changed(
+ users_interested_in, from_key
+ )
+ )
+ else:
+ # No from_key has been specified. Return the presence for all users
+ # this user is interested in
+ interested_and_updated_users = users_interested_in
+
+ # Retrieve the current presence state for each user
+ users_to_state = await self.get_presence_handler().current_state_for_users(
+ interested_and_updated_users
+ )
+ presence_updates = list(users_to_state.values())
+
+ # Remove the user from the list of users to receive all presence
+ if user_id in self.get_module_api()._send_full_presence_to_local_users:
+ self.get_module_api()._send_full_presence_to_local_users.remove(user_id)
+
+ if not include_offline:
+ # Filter out offline presence states
+ presence_updates = self._filter_offline_presence_state(presence_updates)
- updates = await presence.current_state_for_users(user_ids_changed)
+ return presence_updates, max_token
- if include_offline:
- return (list(updates.values()), max_token)
+ async def _filter_all_presence_updates_for_user(
+ self,
+ user_id: str,
+ include_offline: bool,
+ from_key: Optional[int] = None,
+ ) -> List[UserPresenceState]:
+ """
+ Computes the presence updates a user should receive.
+
+ First pulls presence updates from the database. Then consults PresenceRouter
+ for whether any updates should be excluded by user ID.
+
+ Args:
+ user_id: The User ID of the user to compute presence updates for.
+ include_offline: Whether to include offline presence states in the results.
+ from_key: The minimum stream ID of updates to pull from the database
+ before filtering.
+
+ Returns:
+ A list of presence states for the given user to receive.
+ """
+ if from_key:
+ # Only return updates since the last sync
+ updated_users = self.store.presence_stream_cache.get_all_entities_changed(
+ from_key
+ )
+ if not updated_users:
+ updated_users = []
+
+ # Get the actual presence update for each change
+ users_to_state = await self.get_presence_handler().current_state_for_users(
+ updated_users
+ )
+ presence_updates = list(users_to_state.values())
+
+ if not include_offline:
+ # Filter out offline states
+ presence_updates = self._filter_offline_presence_state(presence_updates)
else:
- return (
- [s for s in updates.values() if s.state != PresenceState.OFFLINE],
- max_token,
+ users_to_state = await self.store.get_presence_for_all_users(
+ include_offline=include_offline
)
+ presence_updates = list(users_to_state.values())
+
+ # TODO: This feels wildly inefficient, and it's unfortunate we need to ask the
+ # module for information on a number of users when we then only take the info
+ # for a single user
+
+ # Filter through the presence router
+ users_to_state_set = await self.get_presence_router().get_users_for_states(
+ presence_updates
+ )
+
+ # We only want the mapping for the syncing user
+ presence_updates = list(users_to_state_set[user_id])
+
+ # Return presence information for all users
+ return presence_updates
+
+ def _filter_offline_presence_state(
+ self, presence_updates: Iterable[UserPresenceState]
+ ) -> List[UserPresenceState]:
+ """Given an iterable containing user presence updates, return a list with any offline
+ presence states removed.
+
+ Args:
+ presence_updates: Presence states to filter
+
+ Returns:
+ A new list with any offline presence states removed.
+ """
+ return [
+ update
+ for update in presence_updates
+ if update.state != PresenceState.OFFLINE
+ ]
+
def get_current_key(self):
return self.store.get_current_presence_token()
@cached(num_args=2, cache_context=True)
- async def _get_interested_in(self, user, explicit_room_id, cache_context):
+ async def _get_interested_in(
+ self,
+ user: UserID,
+ explicit_room_id: Optional[str] = None,
+ cache_context: Optional[_CacheContext] = None,
+ ) -> Union[Set[str], str]:
"""Returns the set of users that the given user should see presence
- updates for
+ updates for.
+
+ Args:
+ user: The user to retrieve presence updates for.
+ explicit_room_id: If provided, the users in this room are also included.
+
+ Returns:
+ A set of user IDs to return presence updates for, or "ALL" to return all
+ known updates.
"""
user_id = user.to_string()
users_interested_in = set()
users_interested_in.add(user_id) # So that we receive our own presence
+ # cache_context isn't likely to ever be None due to the @cached decorator,
+ # but we can't have a non-optional argument after the optional argument
+ # explicit_room_id either. Assert cache_context is not None so we can use it
+ # without mypy complaining.
+ assert cache_context
+
+ # Check with the presence router whether we should poll additional users for
+ # their presence information
+ additional_users = await self.get_presence_router().get_interested_users(
+ user.to_string()
+ )
+ if additional_users == PresenceRouter.ALL_USERS:
+ # If the module requested that this user see the presence updates of *all*
+ # users, then simply return that instead of calculating what rooms this
+ # user shares
+ return PresenceRouter.ALL_USERS
+
+ # Add the additional users from the router
+ users_interested_in.update(additional_users)
+
+ # Find the users who share a room with this user
users_who_share_room = await self.store.get_users_who_share_room_with_user(
user_id, on_invalidate=cache_context.invalidate
)
@@ -1314,14 +1810,15 @@ def handle_update(prev_state, new_state, is_mine, wheel_timer, now):
async def get_interested_parties(
- store: DataStore, states: List[UserPresenceState]
+ store: DataStore, presence_router: PresenceRouter, states: List[UserPresenceState]
) -> Tuple[Dict[str, List[UserPresenceState]], Dict[str, List[UserPresenceState]]]:
"""Given a list of states return which entities (rooms, users)
are interested in the given states.
Args:
- store
- states
+ store: The homeserver's data store.
+ presence_router: A module for augmenting the destinations for presence updates.
+ states: A list of incoming user presence updates.
Returns:
A 2-tuple of `(room_ids_to_states, users_to_states)`,
@@ -1337,11 +1834,21 @@ async def get_interested_parties(
# Always notify self
users_to_states.setdefault(state.user_id, []).append(state)
+ # Ask a presence routing module for any additional parties if one
+ # is loaded.
+ router_users_to_states = await presence_router.get_users_for_states(states)
+
+ # Update the dictionaries with additional destinations and state to send
+ for user_id, user_states in router_users_to_states.items():
+ users_to_states.setdefault(user_id, []).extend(user_states)
+
return room_ids_to_states, users_to_states
async def get_interested_remotes(
- store: DataStore, states: List[UserPresenceState], state_handler: StateHandler
+ store: DataStore,
+ presence_router: PresenceRouter,
+ states: List[UserPresenceState],
) -> List[Tuple[Collection[str], List[UserPresenceState]]]:
"""Given a list of presence states figure out which remote servers
should be sent which.
@@ -1349,9 +1856,9 @@ async def get_interested_remotes(
All the presence states should be for local users only.
Args:
- store
- states
- state_handler
+ store: The homeserver's data store.
+ presence_router: A module for augmenting the destinations for presence updates.
+ states: A list of incoming user presence updates.
Returns:
A list of 2-tuples of destinations and states, where for
@@ -1363,10 +1870,13 @@ async def get_interested_remotes(
# First we look up the rooms each user is in (as well as any explicit
# subscriptions), then for each distinct room we look up the remote
# hosts in those rooms.
- room_ids_to_states, users_to_states = await get_interested_parties(store, states)
+ room_ids_to_states, users_to_states = await get_interested_parties(
+ store, presence_router, states
+ )
for room_id, states in room_ids_to_states.items():
- hosts = await state_handler.get_current_hosts_in_room(room_id)
+ user_ids = await store.get_users_in_room(room_id)
+ hosts = {get_domain_from_id(user_id) for user_id in user_ids}
hosts_and_states.append((hosts, states))
for user_id, states in users_to_states.items():
@@ -1374,3 +1884,220 @@ async def get_interested_remotes(
hosts_and_states.append(([host], states))
return hosts_and_states
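The room-to-hosts step no longer consults the state handler: destinations are now derived directly from the joined members' user IDs. The same step in isolation (store and room_id assumed in scope):

    user_ids = await store.get_users_in_room(room_id)
    hosts = {get_domain_from_id(user_id) for user_id in user_ids}
    # e.g. "@alice:example.com" contributes "example.com"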
+
+
+class PresenceFederationQueue:
+ """Handles sending ad hoc presence updates over federation, which are *not*
+ due to state updates (that get handled via the presence stream), e.g.
+ federation pings and sending existing present states to newly joined hosts.
+
+ Only the last N minutes of updates are queued, so if a federation sender
+ instance is down for longer than that, some updates will be dropped. This
+ is OK as presence is ephemeral, and so it will self-correct eventually.
+
+ On workers the class tracks the last received position of the stream from
+ replication, and handles querying for missed updates over HTTP replication,
+ c.f. `get_current_token` and `get_replication_rows`.
+ """
+
+ # How long to keep entries in the queue for. Workers that are down for
+ # longer than this duration will miss out on older updates.
+ _KEEP_ITEMS_IN_QUEUE_FOR_MS = 5 * 60 * 1000
+
+ # How often to check if we can expire entries from the queue.
+ _CLEAR_ITEMS_EVERY_MS = 60 * 1000
+
+ def __init__(self, hs: "HomeServer", presence_handler: BasePresenceHandler):
+ self._clock = hs.get_clock()
+ self._notifier = hs.get_notifier()
+ self._instance_name = hs.get_instance_name()
+ self._presence_handler = presence_handler
+ self._repl_client = ReplicationGetStreamUpdates.make_client(hs)
+
+ # Should we keep a queue of recent presence updates? We only bother if
+ # another process may be handling federation sending.
+ self._queue_presence_updates = True
+
+ # Whether this instance is a presence writer.
+ self._presence_writer = self._instance_name in hs.config.worker.writers.presence
+
+ # The FederationSender instance, if this process sends federation traffic directly.
+ self._federation = None
+
+ if hs.should_send_federation():
+ self._federation = hs.get_federation_sender()
+
+ # We don't bother queuing up presence states if only this instance
+ # is sending federation.
+ if hs.config.worker.federation_shard_config.instances == [
+ self._instance_name
+ ]:
+ self._queue_presence_updates = False
+
+ # The queue of recently queued updates as tuples of: `(timestamp,
+ # stream_id, destinations, user_ids)`. We don't store the full states
+ # for efficiency, and remote workers will already have the full states
+ # cached.
+ self._queue = [] # type: List[Tuple[int, int, Collection[str], Set[str]]]
+
+ self._next_id = 1
+
+ # Map from instance name to current token
+ self._current_tokens = {} # type: Dict[str, int]
+
+ if self._queue_presence_updates:
+ self._clock.looping_call(self._clear_queue, self._CLEAR_ITEMS_EVERY_MS)
+
+ def _clear_queue(self):
+ """Clear out older entries from the queue."""
+ clear_before = self._clock.time_msec() - self._KEEP_ITEMS_IN_QUEUE_FOR_MS
+
+ # The queue is sorted by timestamp, so we can bisect to find the right
+ # place to purge before. Note that we are searching using a 1-tuple with
+ # the time, which does The Right Thing since the queue is a tuple where
+ # the first item is a timestamp.
+ index = bisect(self._queue, (clear_before,))
+ self._queue = self._queue[index:]
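Why the 1-tuple search key works: tuples compare element-wise, and a shorter tuple sorts before any longer tuple that shares its prefix, so bisect lands exactly on the first entry whose timestamp is >= clear_before. A toy demonstration:

    from bisect import bisect

    queue = [(10, 1, (), set()), (20, 2, (), set()), (30, 3, (), set())]
    idx = bisect(queue, (20,))    # (20,) < (20, 2, ...), so idx == 1
    assert queue[idx:] == [(20, 2, (), set()), (30, 3, (), set())]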
+
+ def send_presence_to_destinations(
+ self, states: Collection[UserPresenceState], destinations: Collection[str]
+ ) -> None:
+ """Send the presence states to the given destinations.
+
+ Will forward to the local federation sender (if there is one) and queue
+ to send over replication (if there are other federation sender instances).
+
+ Must only be called on the presence writer process.
+ """
+
+ # This should only be called on a presence writer.
+ assert self._presence_writer
+
+ if self._federation:
+ self._federation.send_presence_to_destinations(
+ states=states,
+ destinations=destinations,
+ )
+
+ if not self._queue_presence_updates:
+ return
+
+ now = self._clock.time_msec()
+
+ stream_id = self._next_id
+ self._next_id += 1
+
+ self._queue.append((now, stream_id, destinations, {s.user_id for s in states}))
+
+ self._notifier.notify_replication()
+
+ def get_current_token(self, instance_name: str) -> int:
+ """Get the current position of the stream.
+
+ On workers this returns the last stream ID received from replication.
+ """
+ if instance_name == self._instance_name:
+ return self._next_id - 1
+ else:
+ return self._current_tokens.get(instance_name, 0)
+
+ async def get_replication_rows(
+ self,
+ instance_name: str,
+ from_token: int,
+ upto_token: int,
+ target_row_count: int,
+ ) -> Tuple[List[Tuple[int, Tuple[str, str]]], int, bool]:
+ """Get all the updates between the two tokens.
+
+ We return rows in the form of `(destination, user_id)` to keep the size
+ of each row bounded (rather than returning the sets in a row).
+
+ On workers this will query the presence writer process via HTTP replication.
+ """
+ if instance_name != self._instance_name:
+ # If not local we query over http replication from the presence
+ # writer
+ result = await self._repl_client(
+ instance_name=instance_name,
+ stream_name=PresenceFederationStream.NAME,
+ from_token=from_token,
+ upto_token=upto_token,
+ )
+ return result["updates"], result["upto_token"], result["limited"]
+
+ # If the from_token is the current token then there's nothing to return
+ # and we can trivially no-op.
+ if from_token == self._next_id - 1:
+ return [], upto_token, False
+
+ # We can find the correct position in the queue by noting that there is
+ # exactly one entry per stream ID, and that the last entry has an ID of
+ # `self._next_id - 1`, so we can count backwards from the end.
+ #
+ # Since we are returning all states in the range `from_token < stream_id
+ # <= upto_token` we look for the index with a `stream_id` of `from_token
+ # + 1`.
+ #
+ # Since the start of the queue is periodically truncated we need to
+ # handle the case where `from_token` stream ID has already been dropped.
+ start_idx = max(from_token + 1 - self._next_id, -len(self._queue))
+
+ to_send = [] # type: List[Tuple[int, Tuple[str, str]]]
+ limited = False
+ new_id = upto_token
+ for _, stream_id, destinations, user_ids in self._queue[start_idx:]:
+ if stream_id <= from_token:
+ # Paranoia check that we are actually only sending states that
+ # have stream_id strictly greater than from_token. We should
+ # never hit this.
+ logger.warning(
+ "Tried returning presence federation stream ID: %d less than from_token: %d (next_id: %d, len: %d)",
+ stream_id,
+ from_token,
+ self._next_id,
+ len(self._queue),
+ )
+ continue
+
+ if stream_id > upto_token:
+ break
+
+ new_id = stream_id
+
+ to_send.extend(
+ (stream_id, (destination, user_id))
+ for destination in destinations
+ for user_id in user_ids
+ )
+
+ if len(to_send) > target_row_count:
+ limited = True
+ break
+
+ return to_send, new_id, limited
+
+ async def process_replication_rows(
+ self, stream_name: str, instance_name: str, token: int, rows: list
+ ):
+ if stream_name != PresenceFederationStream.NAME:
+ return
+
+ # We keep track of the current tokens (so that we can catch up with anything we missed after a disconnect)
+ self._current_tokens[instance_name] = token
+
+ # If we're a federation sender we pull out the presence states to send
+ # and forward them on.
+ if not self._federation:
+ return
+
+ hosts_to_users = {} # type: Dict[str, Set[str]]
+ for row in rows:
+ hosts_to_users.setdefault(row.destination, set()).add(row.user_id)
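+ # e.g. rows ("example.com", "@a:test") and ("example.com", "@b:test")
+ # collapse to {"example.com": {"@a:test", "@b:test"}} (illustrative).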
+
+ for host, user_ids in hosts_to_users.items():
+ states = await self._presence_handler.current_state_for_users(user_ids)
+ self._federation.send_presence_to_destinations(
+ states=states.values(),
+ destinations=[host],
+ )
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index a755363c..05b4a97b 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py
index a54fe196..c679a830 100644
--- a/synapse/handlers/read_marker.py
+++ b/synapse/handlers/read_marker.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index dbfe9bfa..f782d9db 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 0fc2bf15..007fb128 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -169,7 +168,7 @@ class RegistrationHandler(BaseHandler):
user_type: Optional[str] = None,
default_display_name: Optional[str] = None,
address: Optional[str] = None,
- bind_emails: Iterable[str] = [],
+ bind_emails: Optional[Iterable[str]] = None,
by_admin: bool = False,
user_agent_ips: Optional[List[Tuple[str, str]]] = None,
auth_provider_id: Optional[str] = None,
@@ -204,7 +203,9 @@ class RegistrationHandler(BaseHandler):
Raises:
SynapseError if there was a problem registering.
"""
- self.check_registration_ratelimit(address)
+ bind_emails = bind_emails or []
+
+ await self.check_registration_ratelimit(address)
result = await self.spam_checker.check_registration_for_spam(
threepid,
@@ -583,7 +584,7 @@ class RegistrationHandler(BaseHandler):
errcode=Codes.EXCLUSIVE,
)
- def check_registration_ratelimit(self, address: Optional[str]) -> None:
+ async def check_registration_ratelimit(self, address: Optional[str]) -> None:
"""A simple helper method to check whether the registration rate limit has been hit
for a given IP address
@@ -597,7 +598,7 @@ class RegistrationHandler(BaseHandler):
if not address:
return
- self.ratelimiter.ratelimit(address)
+ await self.ratelimiter.ratelimit(None, address)
async def register_with_store(
self,
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 4b3d0d72..5a888b79 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py
index 924b81db..141c9c04 100644
--- a/synapse/handlers/room_list.py
+++ b/synapse/handlers/room_list.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 4d20ed83..2c5bada1 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016-2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -64,6 +63,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
self.profile_handler = hs.get_profile_handler()
self.event_creation_handler = hs.get_event_creation_handler()
self.account_data_handler = hs.get_account_data_handler()
+ self.event_auth_handler = hs.get_event_auth_handler()
self.member_linearizer = Linearizer(name="member")
@@ -75,22 +75,26 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
self.allow_per_room_profiles = self.config.allow_per_room_profiles
self._join_rate_limiter_local = Ratelimiter(
+ store=self.store,
clock=self.clock,
rate_hz=hs.config.ratelimiting.rc_joins_local.per_second,
burst_count=hs.config.ratelimiting.rc_joins_local.burst_count,
)
self._join_rate_limiter_remote = Ratelimiter(
+ store=self.store,
clock=self.clock,
rate_hz=hs.config.ratelimiting.rc_joins_remote.per_second,
burst_count=hs.config.ratelimiting.rc_joins_remote.burst_count,
)
self._invites_per_room_limiter = Ratelimiter(
+ store=self.store,
clock=self.clock,
rate_hz=hs.config.ratelimiting.rc_invites_per_room.per_second,
burst_count=hs.config.ratelimiting.rc_invites_per_room.burst_count,
)
self._invites_per_user_limiter = Ratelimiter(
+ store=self.store,
clock=self.clock,
rate_hz=hs.config.ratelimiting.rc_invites_per_user.per_second,
burst_count=hs.config.ratelimiting.rc_invites_per_user.burst_count,
@@ -159,15 +163,20 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
async def forget(self, user: UserID, room_id: str) -> None:
raise NotImplementedError()
- def ratelimit_invite(self, room_id: Optional[str], invitee_user_id: str):
+ async def ratelimit_invite(
+ self,
+ requester: Optional[Requester],
+ room_id: Optional[str],
+ invitee_user_id: str,
+ ):
"""Ratelimit invites by room and by target user.
If room ID is missing then we just rate limit by target user.
"""
if room_id:
- self._invites_per_room_limiter.ratelimit(room_id)
+ await self._invites_per_room_limiter.ratelimit(requester, room_id)
- self._invites_per_user_limiter.ratelimit(invitee_user_id)
+ await self._invites_per_user_limiter.ratelimit(requester, invitee_user_id)
async def _local_membership_update(
self,
@@ -226,9 +235,25 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
if event.membership == Membership.JOIN:
newly_joined = True
+ user_is_invited = False
if prev_member_event_id:
prev_member_event = await self.store.get_event(prev_member_event_id)
newly_joined = prev_member_event.membership != Membership.JOIN
+ user_is_invited = prev_member_event.membership == Membership.INVITE
+
+ # If the member is not already in the room and is not accepting an invite,
+ # check if they should be allowed access via membership in a space.
+ if (
+ newly_joined
+ and not user_is_invited
+ and not await self.event_auth_handler.can_join_without_invite(
+ prev_state_ids, event.room_version, user_id
+ )
+ ):
+ raise AuthError(
+ 403,
+ "You do not belong to any of the required spaces to join this room.",
+ )
# Only rate-limit if the user actually joined the room, otherwise we'll end
# up blocking profile updates.
@@ -237,7 +262,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
(
allowed,
time_allowed,
- ) = self._join_rate_limiter_local.can_requester_do_action(requester)
+ ) = await self._join_rate_limiter_local.can_do_action(requester)
if not allowed:
raise LimitExceededError(
@@ -421,9 +446,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
if effective_membership_state == Membership.INVITE:
target_id = target.to_string()
if ratelimit:
- # Don't ratelimit application services.
- if not requester.app_service or requester.app_service.is_rate_limited():
- self.ratelimit_invite(room_id, target_id)
+ await self.ratelimit_invite(requester, room_id, target_id)
# block any attempts to invite the server notices mxid
if target_id == self._server_notices_mxid:
@@ -534,7 +557,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
(
allowed,
time_allowed,
- ) = self._join_rate_limiter_remote.can_requester_do_action(
+ ) = await self._join_rate_limiter_remote.can_do_action(
requester,
)
diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py
index 3a90fc0c..3e89dd23 100644
--- a/synapse/handlers/room_member_worker.py
+++ b/synapse/handlers/room_member_worker.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/saml_handler.py b/synapse/handlers/saml.py
index ec2ba11c..80ba65b9 100644
--- a/synapse/handlers/saml_handler.py
+++ b/synapse/handlers/saml.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index d742dfbd..4e718d3f 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py
index f98a338e..a63fac82 100644
--- a/synapse/handlers/set_password.py
+++ b/synapse/handlers/set_password.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/space_summary.py b/synapse/handlers/space_summary.py
index 5d941896..01e3e050 100644
--- a/synapse/handlers/space_summary.py
+++ b/synapse/handlers/space_summary.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py
index 415b1c2d..044ff06d 100644
--- a/synapse/handlers/sso.py
+++ b/synapse/handlers/sso.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,6 +18,7 @@ from typing import (
Any,
Awaitable,
Callable,
+ Collection,
Dict,
Iterable,
List,
@@ -41,7 +41,7 @@ from synapse.handlers.ui_auth import UIAuthSessionDataConstants
from synapse.http import get_request_user_agent
from synapse.http.server import respond_with_html, respond_with_redirect
from synapse.http.site import SynapseRequest
-from synapse.types import Collection, JsonDict, UserID, contains_invalid_mxid_characters
+from synapse.types import JsonDict, UserID, contains_invalid_mxid_characters
from synapse.util.async_helpers import Linearizer
from synapse.util.stringutils import random_string
diff --git a/synapse/handlers/state_deltas.py b/synapse/handlers/state_deltas.py
index ee8f87e5..077c7c06 100644
--- a/synapse/handlers/state_deltas.py
+++ b/synapse/handlers/state_deltas.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py
index 8730f99d..383e3402 100644
--- a/synapse/handlers/stats.py
+++ b/synapse/handlers/stats.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index ee607e6e..a9a3ee05 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018, 2019 New Vector Ltd
#
@@ -15,7 +14,17 @@
# limitations under the License.
import itertools
import logging
-from typing import TYPE_CHECKING, Any, Dict, FrozenSet, List, Optional, Set, Tuple
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Collection,
+ Dict,
+ FrozenSet,
+ List,
+ Optional,
+ Set,
+ Tuple,
+)
import attr
from prometheus_client import Counter
@@ -24,11 +33,11 @@ from synapse.api.constants import AccountDataTypes, EventTypes, Membership
from synapse.api.filtering import FilterCollection
from synapse.events import EventBase
from synapse.logging.context import current_context
+from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, start_active_span
from synapse.push.clientformat import format_push_rules_for_user
from synapse.storage.roommember import MemberSummary
from synapse.storage.state import StateFilter
from synapse.types import (
- Collection,
JsonDict,
MutableStateMap,
Requester,
@@ -251,13 +260,13 @@ class SyncHandler:
self.storage = hs.get_storage()
self.state_store = self.storage.state
- # ExpiringCache((User, Device)) -> LruCache(state_key => event_id)
+ # ExpiringCache((User, Device)) -> LruCache(user_id => event_id)
self.lazy_loaded_members_cache = ExpiringCache(
"lazy_loaded_members_cache",
self.clock,
max_len=0,
expiry_ms=LAZY_LOADED_MEMBERS_CACHE_MAX_AGE,
- )
+ ) # type: ExpiringCache[Tuple[str, Optional[str]], LruCache[str, str]]
async def wait_for_sync_for_user(
self,
@@ -340,7 +349,14 @@ class SyncHandler:
full_state: bool = False,
) -> SyncResult:
"""Get the sync for client needed to match what the server has now."""
- return await self.generate_sync_result(sync_config, since_token, full_state)
+ with start_active_span("current_sync_for_user"):
+ log_kv({"since_token": since_token})
+ sync_result = await self.generate_sync_result(
+ sync_config, since_token, full_state
+ )
+
+ set_tag(SynapseTags.SYNC_RESULT, bool(sync_result))
+ return sync_result
async def push_rules_for_user(self, user: UserID) -> JsonDict:
user_id = user.to_string()
@@ -540,7 +556,7 @@ class SyncHandler:
)
async def get_state_after_event(
- self, event: EventBase, state_filter: StateFilter = StateFilter.all()
+ self, event: EventBase, state_filter: Optional[StateFilter] = None
) -> StateMap[str]:
"""
Get the room state after the given event
@@ -550,7 +566,7 @@ class SyncHandler:
state_filter: The state filter used to fetch state from the database.
"""
state_ids = await self.state_store.get_state_ids_for_event(
- event.event_id, state_filter=state_filter
+ event.event_id, state_filter=state_filter or StateFilter.all()
)
if event.is_state():
state_ids = dict(state_ids)
@@ -561,7 +577,7 @@ class SyncHandler:
self,
room_id: str,
stream_position: StreamToken,
- state_filter: StateFilter = StateFilter.all(),
+ state_filter: Optional[StateFilter] = None,
) -> StateMap[str]:
"""Get the room state at a particular stream position
@@ -581,7 +597,7 @@ class SyncHandler:
if last_events:
last_event = last_events[-1]
state = await self.get_state_after_event(
- last_event, state_filter=state_filter
+ last_event, state_filter=state_filter or StateFilter.all()
)
else:
@@ -725,8 +741,10 @@ class SyncHandler:
def get_lazy_loaded_members_cache(
self, cache_key: Tuple[str, Optional[str]]
- ) -> LruCache:
- cache = self.lazy_loaded_members_cache.get(cache_key)
+ ) -> LruCache[str, str]:
+ cache = self.lazy_loaded_members_cache.get(
+ cache_key
+ ) # type: Optional[LruCache[str, str]]
if cache is None:
logger.debug("creating LruCache for %r", cache_key)
cache = LruCache(LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE)
@@ -964,6 +982,7 @@ class SyncHandler:
# to query up to a given point.
# Always use the `now_token` in `SyncResultBuilder`
now_token = self.event_sources.get_current_token()
+ log_kv({"now_token": now_token})
logger.debug(
"Calculating sync response for %r between %s and %s",
@@ -1225,6 +1244,13 @@ class SyncHandler:
user_id, device_id, since_stream_id, now_token.to_device_key
)
+ for message in messages:
+ # We pop here as we shouldn't be sending the message ID down
+ # `/sync`
+ message_id = message.pop("message_id", None)
+ if message_id:
+ set_tag(SynapseTags.TO_DEVICE_MESSAGE_ID, message_id)
+
logger.debug(
"Returning %d to-device messages between %d and %d (current token: %d)",
len(messages),
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index 096d199f..e22393ad 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,7 +18,10 @@ from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple
from synapse.api.errors import AuthError, ShadowBanError, SynapseError
from synapse.appservice import ApplicationService
-from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.metrics.background_process_metrics import (
+ run_as_background_process,
+ wrap_as_background_process,
+)
from synapse.replication.tcp.streams import TypingStream
from synapse.types import JsonDict, Requester, UserID, get_domain_from_id
from synapse.util.caches.stream_change_cache import StreamChangeCache
@@ -86,6 +88,7 @@ class FollowerTypingHandler:
self._member_last_federation_poke = {}
self.wheel_timer = WheelTimer(bucket_size=5000)
+ @wrap_as_background_process("typing._handle_timeouts")
def _handle_timeouts(self) -> None:
logger.debug("Checking for typing timeouts")
diff --git a/synapse/handlers/ui_auth/__init__.py b/synapse/handlers/ui_auth/__init__.py
index a68d5e79..4c3b669f 100644
--- a/synapse/handlers/ui_auth/__init__.py
+++ b/synapse/handlers/ui_auth/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/ui_auth/checkers.py b/synapse/handlers/ui_auth/checkers.py
index 3d66bf30..0eeb7c03 100644
--- a/synapse/handlers/ui_auth/checkers.py
+++ b/synapse/handlers/ui_auth/checkers.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py
index b121286d..dacc4f30 100644
--- a/synapse/handlers/user_directory.py
+++ b/synapse/handlers/user_directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -45,7 +44,6 @@ class UserDirectoryHandler(StateDeltasHandler):
super().__init__(hs)
self.store = hs.get_datastore()
- self.state = hs.get_state_handler()
self.server_name = hs.hostname
self.clock = hs.get_clock()
self.notifier = hs.get_notifier()
@@ -303,10 +301,12 @@ class UserDirectoryHandler(StateDeltasHandler):
# ignore the change
return
- users_with_profile = await self.state.get_current_users_in_room(room_id)
+ other_users_in_room_with_profiles = (
+ await self.store.get_users_in_room_with_profiles(room_id)
+ )
# Remove every user from the sharing tables for that room.
- for user_id in users_with_profile.keys():
+ for user_id in other_users_in_room_with_profiles.keys():
await self.store.remove_user_who_share_room(user_id, room_id)
# Then, re-add them to the tables.
@@ -315,7 +315,7 @@ class UserDirectoryHandler(StateDeltasHandler):
# which when ran over an entire room, will result in the same values
# being added multiple times. The batching upserts shouldn't make this
# too bad, though.
- for user_id, profile in users_with_profile.items():
+ for user_id, profile in other_users_in_room_with_profiles.items():
await self._handle_new_user(room_id, user_id, profile)
async def _handle_new_user(
@@ -337,7 +337,7 @@ class UserDirectoryHandler(StateDeltasHandler):
room_id
)
# Now we update users who share rooms with users.
- users_with_profile = await self.state.get_current_users_in_room(room_id)
+ other_users_in_room = await self.store.get_users_in_room(room_id)
if is_public:
await self.store.add_users_in_public_rooms(room_id, (user_id,))
@@ -353,14 +353,14 @@ class UserDirectoryHandler(StateDeltasHandler):
# We don't care about appservice users.
if not is_appservice:
- for other_user_id in users_with_profile:
+ for other_user_id in other_users_in_room:
if user_id == other_user_id:
continue
to_insert.add((user_id, other_user_id))
# Next we need to update for every local user in the room
- for other_user_id in users_with_profile:
+ for other_user_id in other_users_in_room:
if user_id == other_user_id:
continue
diff --git a/synapse/http/__init__.py b/synapse/http/__init__.py
index 142b007d..ed4671b7 100644
--- a/synapse/http/__init__.py
+++ b/synapse/http/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/http/additional_resource.py b/synapse/http/additional_resource.py
index 479746c9..55ea97a0 100644
--- a/synapse/http/additional_resource.py
+++ b/synapse/http/additional_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/http/client.py b/synapse/http/client.py
index a0caba84..5f40f16e 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
@@ -34,6 +33,7 @@ import treq
from canonicaljson import encode_canonical_json
from netaddr import AddrFormatError, IPAddress, IPSet
from prometheus_client import Counter
+from typing_extensions import Protocol
from zope.interface import implementer, provider
from OpenSSL import SSL
@@ -297,7 +297,7 @@ class SimpleHttpClient:
def __init__(
self,
hs: "HomeServer",
- treq_args: Dict[str, Any] = {},
+ treq_args: Optional[Dict[str, Any]] = None,
ip_whitelist: Optional[IPSet] = None,
ip_blacklist: Optional[IPSet] = None,
use_proxy: bool = False,
@@ -317,7 +317,7 @@ class SimpleHttpClient:
self._ip_whitelist = ip_whitelist
self._ip_blacklist = ip_blacklist
- self._extra_treq_args = treq_args
+ self._extra_treq_args = treq_args or {}
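+ # (a mutable default argument such as `treq_args: Dict = {}` would be
+ # shared between every instance, hence the None default and the fresh
+ # dict substituted here)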
self.user_agent = hs.version_string
self.clock = hs.get_clock()
@@ -590,7 +590,7 @@ class SimpleHttpClient:
uri: str,
json_body: Any,
args: Optional[QueryParams] = None,
- headers: RawHeaders = None,
+ headers: Optional[RawHeaders] = None,
) -> Any:
"""Puts some json to the given URI.
@@ -755,6 +755,16 @@ def _timeout_to_request_timed_out_error(f: Failure):
return f
+class ByteWriteable(Protocol):
+ """The type of object which must be passed into read_body_with_max_size.
+
+ Typically this is a file object.
+ """
+
+ def write(self, data: bytes) -> int:
+ pass
+
+
class BodyExceededMaxSize(Exception):
"""The maximum allowed size of the HTTP body was exceeded."""
@@ -791,7 +801,7 @@ class _ReadBodyWithMaxSizeProtocol(protocol.Protocol):
transport = None # type: Optional[ITCPTransport]
def __init__(
- self, stream: BinaryIO, deferred: defer.Deferred, max_size: Optional[int]
+ self, stream: ByteWriteable, deferred: defer.Deferred, max_size: Optional[int]
):
self.stream = stream
self.deferred = deferred
@@ -831,7 +841,7 @@ class _ReadBodyWithMaxSizeProtocol(protocol.Protocol):
def read_body_with_max_size(
- response: IResponse, stream: BinaryIO, max_size: Optional[int]
+ response: IResponse, stream: ByteWriteable, max_size: Optional[int]
) -> defer.Deferred:
"""
Read a HTTP response body to a file-object. Optionally enforcing a maximum file size.
diff --git a/synapse/http/connectproxyclient.py b/synapse/http/connectproxyclient.py
index b797e3ce..17e1c5ab 100644
--- a/synapse/http/connectproxyclient.py
+++ b/synapse/http/connectproxyclient.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/http/federation/__init__.py b/synapse/http/federation/__init__.py
index 1453d045..743fb990 100644
--- a/synapse/http/federation/__init__.py
+++ b/synapse/http/federation/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py
index 5935a125..95077020 100644
--- a/synapse/http/federation/matrix_federation_agent.py
+++ b/synapse/http/federation/matrix_federation_agent.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/http/federation/srv_resolver.py b/synapse/http/federation/srv_resolver.py
index d9620032..b8ed4ec9 100644
--- a/synapse/http/federation/srv_resolver.py
+++ b/synapse/http/federation/srv_resolver.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
#
diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py
index ce4079f1..20d39a4e 100644
--- a/synapse/http/federation/well_known_resolver.py
+++ b/synapse/http/federation/well_known_resolver.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 5f01ebd3..bb837b7b 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014-2016 OpenMarket Ltd
-# Copyright 2018 New Vector Ltd
+# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,11 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import cgi
+import codecs
import logging
import random
import sys
+import typing
import urllib.parse
-from io import BytesIO
+from io import BytesIO, StringIO
from typing import Callable, Dict, List, Optional, Tuple, Union
import attr
@@ -73,6 +73,9 @@ incoming_responses_counter = Counter(
"synapse_http_matrixfederationclient_responses", "", ["method", "code"]
)
+ # a federation response can be rather large (e.g. a big `state_ids` response is 50M or so), so we
+# need a generous limit here.
+MAX_RESPONSE_SIZE = 100 * 1024 * 1024
MAX_LONG_RETRIES = 10
MAX_SHORT_RETRIES = 3
@@ -168,12 +171,27 @@ async def _handle_json_response(
try:
check_content_type_is_json(response.headers)
- # Use the custom JSON decoder (partially re-implements treq.json_content).
- d = treq.text_content(response, encoding="utf-8")
- d.addCallback(json_decoder.decode)
+ buf = StringIO()
+ d = read_body_with_max_size(response, BinaryIOWrapper(buf), MAX_RESPONSE_SIZE)
d = timeout_deferred(d, timeout=timeout_sec, reactor=reactor)
+ def parse(_len: int):
+ return json_decoder.decode(buf.getvalue())
+
+ d.addCallback(parse)
+
body = await make_deferred_yieldable(d)
+ except BodyExceededMaxSize as e:
+ # The response was too big.
+ logger.warning(
+ "{%s} [%s] JSON response exceeded max size %i - %s %s",
+ request.txn_id,
+ request.destination,
+ MAX_RESPONSE_SIZE,
+ request.method,
+ request.uri.decode("ascii"),
+ )
+ raise RequestSendFailed(e, can_retry=False) from e
except ValueError as e:
# The JSON content was invalid.
logger.warning(
@@ -219,6 +237,18 @@ async def _handle_json_response(
return body
+class BinaryIOWrapper:
+ """A wrapper for a TextIO which converts from bytes on the fly."""
+
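+ # The incremental decoder means a multi-byte character split across
+ # two chunks decodes correctly, e.g. (illustrative):
+ #
+ # w = BinaryIOWrapper(StringIO())
+ # w.write(b"\xc3") # first byte of "é": nothing decoded yet, returns 1
+ # w.write(b"\xa9") # second byte: "é" is now written to the StringIO
+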
+ def __init__(self, file: typing.TextIO, encoding="utf-8", errors="strict"):
+ self.decoder = codecs.getincrementaldecoder(encoding)(errors)
+ self.file = file
+
+ def write(self, b: Union[bytes, bytearray]) -> int:
+ self.file.write(self.decoder.decode(b))
+ return len(b)
+
+
class MatrixFederationHttpClient:
"""HTTP client used to talk to other homeservers over the federation
protocol. Send client certificates and signs requests.
@@ -272,7 +302,7 @@ class MatrixFederationHttpClient:
self,
request: MatrixFederationRequest,
try_trailing_slash_on_400: bool = False,
- **send_request_args
+ **send_request_args,
) -> IResponse:
"""Wrapper for _send_request which can optionally retry the request
upon receiving a combination of a 400 HTTP response code and a
diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py
index 16ec8500..7dfae8b7 100644
--- a/synapse/http/proxyagent.py
+++ b/synapse/http/proxyagent.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -27,7 +26,7 @@ from twisted.python.failure import Failure
from twisted.web.client import URI, BrowserLikePolicyForHTTPS, _AgentBase
from twisted.web.error import SchemeNotSupported
from twisted.web.http_headers import Headers
-from twisted.web.iweb import IAgent
+from twisted.web.iweb import IAgent, IPolicyForHTTPS
from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint
@@ -88,12 +87,14 @@ class ProxyAgent(_AgentBase):
self,
reactor,
proxy_reactor=None,
- contextFactory=BrowserLikePolicyForHTTPS(),
+ contextFactory: Optional[IPolicyForHTTPS] = None,
connectTimeout=None,
bindAddress=None,
pool=None,
use_proxy=False,
):
+ contextFactory = contextFactory or BrowserLikePolicyForHTTPS()
+
_AgentBase.__init__(self, reactor, pool)
if proxy_reactor is None:
diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py
index 0ec5d941..602f93c4 100644
--- a/synapse/http/request_metrics.py
+++ b/synapse/http/request_metrics.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/http/server.py b/synapse/http/server.py
index fa892608..845651e6 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
index 0e637f47..31897546 100644
--- a/synapse/http/servlet.py
+++ b/synapse/http/servlet.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/http/site.py b/synapse/http/site.py
index 47754aff..671fd3fb 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -14,19 +14,24 @@
import contextlib
import logging
import time
-from typing import Optional, Type, Union
+from typing import Optional, Tuple, Union
import attr
from zope.interface import implementer
-from twisted.internet.interfaces import IAddress
+from twisted.internet.interfaces import IAddress, IReactorTime
from twisted.python.failure import Failure
+from twisted.web.resource import IResource
from twisted.web.server import Request, Site
from synapse.config.server import ListenerConfig
from synapse.http import get_request_user_agent, redact_uri
from synapse.http.request_metrics import RequestMetrics, requests_counter
-from synapse.logging.context import LoggingContext, PreserveLoggingContext
+from synapse.logging.context import (
+ ContextRequest,
+ LoggingContext,
+ PreserveLoggingContext,
+)
from synapse.types import Requester
logger = logging.getLogger(__name__)
@@ -45,6 +50,7 @@ class SynapseRequest(Request):
* Redaction of access_token query-params in __repr__
* Logging at start and end
* Metrics to record CPU, wallclock and DB time by endpoint.
+ * A limit on the size of requests which will be accepted
It also provides a method `processing`, which returns a context manager. If this
method is called, the request won't be logged until the context manager is closed;
@@ -55,15 +61,16 @@ class SynapseRequest(Request):
logcontext: the log context for this request
"""
- def __init__(self, channel, *args, **kw):
+ def __init__(self, channel, *args, max_request_body_size=1024, **kw):
Request.__init__(self, channel, *args, **kw)
+ self._max_request_body_size = max_request_body_size
self.site = channel.site # type: SynapseSite
self._channel = channel # this is used by the tests
self.start_time = 0.0
# The requester, if authenticated. For federation requests this is the
# server name, for client requests this is the Requester object.
- self.requester = None # type: Optional[Union[Requester, str]]
+ self._requester = None # type: Optional[Union[Requester, str]]
# we can't yet create the logcontext, as we don't know the method.
self.logcontext = None # type: Optional[LoggingContext]
@@ -93,6 +100,43 @@ class SynapseRequest(Request):
self.site.site_tag,
)
+ def handleContentChunk(self, data):
+ # we should have a `content` by now.
+ assert self.content, "handleContentChunk() called before gotLength()"
+ if self.content.tell() + len(data) > self._max_request_body_size:
+ logger.warning(
+ "Aborting connection from %s because the request exceeds maximum size",
+ self.client,
+ )
+ self.transport.abortConnection()
+ return
+ super().handleContentChunk(data)
+
+ @property
+ def requester(self) -> Optional[Union[Requester, str]]:
+ return self._requester
+
+ @requester.setter
+ def requester(self, value: Union[Requester, str]) -> None:
+ # Store the requester, and update some properties based on it.
+
+ # This should only be called once.
+ assert self._requester is None
+
+ self._requester = value
+
+ # A logging context should exist by now (and have a ContextRequest).
+ assert self.logcontext is not None
+ assert self.logcontext.request is not None
+
+ (
+ requester,
+ authenticated_entity,
+ ) = self.get_authenticated_entity()
+ self.logcontext.request.requester = requester
+ # If there's no authenticated entity, it was the requester.
+ self.logcontext.request.authenticated_entity = authenticated_entity or requester
+
def get_request_id(self):
return "%s-%i" % (self.get_method(), self.request_seq)
@@ -126,13 +170,60 @@ class SynapseRequest(Request):
return self.method.decode("ascii")
return method
+ def get_authenticated_entity(self) -> Tuple[Optional[str], Optional[str]]:
+ """
+ Get the "authenticated" entity of the request, which might be the user
+ performing the action, or a user being puppeted by a server admin.
+
+ Returns:
+ A tuple:
+ The first item is a string representing the user making the request.
+
+ The second item is a string or None representing the user who
+ authenticated when making this request. See
+ Requester.authenticated_entity.
+ """
+ # Convert the requester into a string that we can log
+ if isinstance(self._requester, str):
+ return self._requester, None
+ elif isinstance(self._requester, Requester):
+ requester = self._requester.user.to_string()
+ authenticated_entity = self._requester.authenticated_entity
+
+ # If this is a request where the target user doesn't match the user who
+ # authenticated (e.g. an admin is puppeting a user) then we return both.
+ if self._requester.user.to_string() != authenticated_entity:
+ return requester, authenticated_entity
+
+ return requester, None
+ elif self._requester is not None:
+ # This shouldn't happen, but we log it so we don't lose information
+ # and can see that we're doing something wrong.
+ return repr(self._requester), None # type: ignore[unreachable]
+
+ return None, None
+
def render(self, resrc):
# this is called once a Resource has been found to serve the request; in our
# case the Resource in question will normally be a JsonResource.
# create a LogContext for this request
request_id = self.get_request_id()
- self.logcontext = LoggingContext(request_id, request=request_id)
+ self.logcontext = LoggingContext(
+ request_id,
+ request=ContextRequest(
+ request_id=request_id,
+ ip_address=self.getClientIP(),
+ site_tag=self.site.site_tag,
+ # The requester is going to be unknown at this point.
+ requester=None,
+ authenticated_entity=None,
+ method=self.get_method(),
+ url=self.get_redacted_uri(),
+ protocol=self.clientproto.decode("ascii", errors="replace"),
+ user_agent=get_request_user_agent(self),
+ ),
+ )
# override the Server header which is set by twisted
self.setHeader("Server", self.site.server_version_string)
@@ -277,25 +368,6 @@ class SynapseRequest(Request):
# to the client (nb may be negative)
response_send_time = self.finish_time - self._processing_finished_time
- # Convert the requester into a string that we can log
- authenticated_entity = None
- if isinstance(self.requester, str):
- authenticated_entity = self.requester
- elif isinstance(self.requester, Requester):
- authenticated_entity = self.requester.authenticated_entity
-
- # If this is a request where the target user doesn't match the user who
- # authenticated (e.g. and admin is puppetting a user) then we log both.
- if self.requester.user.to_string() != authenticated_entity:
- authenticated_entity = "{},{}".format(
- authenticated_entity,
- self.requester.user.to_string(),
- )
- elif self.requester is not None:
- # This shouldn't happen, but we log it so we don't lose information
- # and can see that we're doing something wrong.
- authenticated_entity = repr(self.requester) # type: ignore[unreachable]
-
user_agent = get_request_user_agent(self, "-")
code = str(self.code)
@@ -305,6 +377,13 @@ class SynapseRequest(Request):
code += "!"
log_level = logging.INFO if self._should_log_request() else logging.DEBUG
+
+ # If this is a request where the target user doesn't match the user who
+ # authenticated (e.g. an admin is puppeting a user) then we log both.
+ requester, authenticated_entity = self.get_authenticated_entity()
+ if authenticated_entity:
+ requester = "{}.{}".format(authenticated_entity, requester)
+
self.site.access_logger.log(
log_level,
"%s - %s - {%s}"
@@ -312,7 +391,7 @@ class SynapseRequest(Request):
' %sB %s "%s %s %s" "%s" [%d dbevts]',
self.getClientIP(),
self.site.site_tag,
- authenticated_entity,
+ requester,
processing_time,
response_send_time,
usage.ru_utime,
@@ -421,29 +500,55 @@ class _XForwardedForAddress:
class SynapseSite(Site):
"""
- Subclass of a twisted http Site that does access logging with python's
- standard logging
+ Synapse-specific twisted http Site
+
+ This does two main things.
+
+ First, it replaces the requestFactory in use so that we build SynapseRequests
+ instead of regular t.w.server.Requests. All of the constructor params are really
+ just parameters for SynapseRequest.
+
+ Second, it inhibits the log() method called by Request.finish, since SynapseRequest
+ does its own logging.
"""
def __init__(
self,
- logger_name,
- site_tag,
+ logger_name: str,
+ site_tag: str,
config: ListenerConfig,
- resource,
+ resource: IResource,
server_version_string,
- *args,
- **kwargs
+ max_request_body_size: int,
+ reactor: IReactorTime,
):
- Site.__init__(self, resource, *args, **kwargs)
+ """
+
+ Args:
+ logger_name: The name of the logger to use for access logs.
+ site_tag: A tag to use for this site - mostly in access logs.
+ config: Configuration for the HTTP listener corresponding to this site
+ resource: The base of the resource tree to be used for serving requests on
+ this site
+ server_version_string: A string to present for the Server header
+ max_request_body_size: Maximum request body length to allow before
+ dropping the connection
+ reactor: reactor to be used to manage connection timeouts
+ """
+ Site.__init__(self, resource, reactor=reactor)
self.site_tag = site_tag
assert config.http_options is not None
proxied = config.http_options.x_forwarded
- self.requestFactory = (
- XForwardedForRequest if proxied else SynapseRequest
- ) # type: Type[Request]
+ request_class = XForwardedForRequest if proxied else SynapseRequest
+
+ def request_factory(channel, queued) -> Request:
+ return request_class(
+ channel, max_request_body_size=max_request_body_size, queued=queued
+ )
+
+ self.requestFactory = request_factory # type: ignore
self.access_logger = logging.getLogger(logger_name)
self.server_version_string = server_version_string.encode("ascii")
diff --git a/synapse/logging/__init__.py b/synapse/logging/__init__.py
index b28b7b2e..e00969f8 100644
--- a/synapse/logging/__init__.py
+++ b/synapse/logging/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/logging/_remote.py b/synapse/logging/_remote.py
index 643492ce..c515690b 100644
--- a/synapse/logging/_remote.py
+++ b/synapse/logging/_remote.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -227,11 +226,11 @@ class RemoteHandler(logging.Handler):
old_buffer = self._buffer
self._buffer = deque()
- for i in range(buffer_split):
+ for _ in range(buffer_split):
self._buffer.append(old_buffer.popleft())
end_buffer = []
- for i in range(buffer_split):
+ for _ in range(buffer_split):
end_buffer.append(old_buffer.pop())
self._buffer.extend(reversed(end_buffer))
diff --git a/synapse/logging/_structured.py b/synapse/logging/_structured.py
index 3e054f61..c7a971a9 100644
--- a/synapse/logging/_structured.py
+++ b/synapse/logging/_structured.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/logging/_terse_json.py b/synapse/logging/_terse_json.py
index 2fbf5549..8002a250 100644
--- a/synapse/logging/_terse_json.py
+++ b/synapse/logging/_terse_json.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/logging/context.py b/synapse/logging/context.py
index 03cf3c2b..7fc11a9a 100644
--- a/synapse/logging/context.py
+++ b/synapse/logging/context.py
@@ -22,7 +22,6 @@ them.
See doc/log_contexts.rst for details on how this works.
"""
-
import inspect
import logging
import threading
@@ -30,6 +29,7 @@ import types
import warnings
from typing import TYPE_CHECKING, Optional, Tuple, TypeVar, Union
+import attr
from typing_extensions import Literal
from twisted.internet import defer, threads
@@ -181,6 +181,29 @@ class ContextResourceUsage:
return res
+@attr.s(slots=True)
+class ContextRequest:
+ """
+ A bundle of attributes from the SynapseRequest object.
+
+ This exists to:
+
+ * Avoid a cycle between LoggingContext and SynapseRequest.
+ * Be a single variable that can be passed from parent LoggingContexts to
+ their children.
+ """
+
+ request_id = attr.ib(type=str)
+ ip_address = attr.ib(type=str)
+ site_tag = attr.ib(type=str)
+ requester = attr.ib(type=Optional[str])
+ authenticated_entity = attr.ib(type=Optional[str])
+ method = attr.ib(type=str)
+ url = attr.ib(type=str)
+ protocol = attr.ib(type=str)
+ user_agent = attr.ib(type=str)
+
+
LoggingContextOrSentinel = Union["LoggingContext", "_Sentinel"]
@@ -235,7 +258,8 @@ class LoggingContext:
child to the parent
Args:
- name (str): Name for the context for debugging.
+ name: Name for the context for logging. If this is omitted, it is
+ inherited from the parent context.
parent_context (LoggingContext|None): The parent of the new context
"""
@@ -256,10 +280,9 @@ class LoggingContext:
self,
name: Optional[str] = None,
parent_context: "Optional[LoggingContext]" = None,
- request: Optional[str] = None,
+ request: Optional[ContextRequest] = None,
) -> None:
self.previous_context = current_context()
- self.name = name
# track the resources used by this context so far
self._resource_usage = ContextResourceUsage()
@@ -281,16 +304,27 @@ class LoggingContext:
self.parent_context = parent_context
if self.parent_context is not None:
- self.parent_context.copy_to(self)
+ # we track the current request_id
+ self.request = self.parent_context.request
+
+ # we also track the current scope:
+ self.scope = self.parent_context.scope
if request is not None:
# the request param overrides the request from the parent context
self.request = request
+ # if we don't have a `name`, but do have a parent context, use its name.
+ if self.parent_context and name is None:
+ name = str(self.parent_context)
+ if name is None:
+ raise ValueError(
+ "LoggingContext must be given either a name or a parent context"
+ )
+ self.name = name
+
def __str__(self) -> str:
- if self.request:
- return str(self.request)
- return "%s@%x" % (self.name, id(self))
+ return self.name
@classmethod
def current_context(cls) -> LoggingContextOrSentinel:
@@ -556,8 +590,23 @@ class LoggingContextFilter(logging.Filter):
# we end up in a death spiral of infinite loops, so let's check, for
# robustness' sake.
if context is not None:
- # Logging is interested in the request.
- record.request = context.request # type: ignore
+ # Logging is interested in the request ID. Note that for backwards
+ # compatibility this is stored as the "request" on the record.
+ record.request = str(context) # type: ignore
+
+ # Add some data from the HTTP request.
+ request = context.request
+ if request is None:
+ return True
+
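+ # Expose the request attributes so that logging format strings can
+ # reference them, e.g. "%(ip_address)s %(method)s %(url)s"
+ # (an illustrative format string).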
+ record.ip_address = request.ip_address # type: ignore
+ record.site_tag = request.site_tag # type: ignore
+ record.requester = request.requester # type: ignore
+ record.authenticated_entity = request.authenticated_entity # type: ignore
+ record.method = request.method # type: ignore
+ record.url = request.url # type: ignore
+ record.protocol = request.protocol # type: ignore
+ record.user_agent = request.user_agent # type: ignore
return True
@@ -630,8 +679,8 @@ def set_current_context(context: LoggingContextOrSentinel) -> LoggingContextOrSe
def nested_logging_context(suffix: str) -> LoggingContext:
"""Creates a new logging context as a child of another.
- The nested logging context will have a 'request' made up of the parent context's
- request, plus the given suffix.
+ The nested logging context will have a 'name' made up of the parent context's
+ name, plus the given suffix.
CPU/db usage stats will be added to the parent context's on exit.
@@ -641,7 +690,7 @@ def nested_logging_context(suffix: str) -> LoggingContext:
# ... do stuff
Args:
- suffix: suffix to add to the parent context's 'request'.
+ suffix: suffix to add to the parent context's 'name'.
Returns:
LoggingContext: new logging context.
@@ -652,12 +701,14 @@ def nested_logging_context(suffix: str) -> LoggingContext:
"Starting nested logging context from sentinel context: metrics will be lost"
)
parent_context = None
- prefix = ""
else:
assert isinstance(curr_context, LoggingContext)
parent_context = curr_context
- prefix = str(parent_context.request)
- return LoggingContext(parent_context=parent_context, request=prefix + "-" + suffix)
+ prefix = str(curr_context)
+ return LoggingContext(
+ prefix + "-" + suffix,
+ parent_context=parent_context,
+ )
def preserve_fn(f):
@@ -847,7 +898,7 @@ def defer_to_threadpool(reactor, threadpool, f, *args, **kwargs):
parent_context = curr_context
def g():
- with LoggingContext(parent_context=parent_context):
+ with LoggingContext(str(curr_context), parent_context=parent_context):
return f(*args, **kwargs)
return make_deferred_yieldable(threads.deferToThreadPool(reactor, threadpool, g))
diff --git a/synapse/logging/filter.py b/synapse/logging/filter.py
index 1baf8dd6..ed51a472 100644
--- a/synapse/logging/filter.py
+++ b/synapse/logging/filter.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/logging/formatter.py b/synapse/logging/formatter.py
index 11f60a77..c0f12ecd 100644
--- a/synapse/logging/formatter.py
+++ b/synapse/logging/formatter.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index aa146e8b..fba2fa39 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -259,6 +258,14 @@ except ImportError:
logger = logging.getLogger(__name__)
+class SynapseTags:
+ # The message ID of any to_device message processed
+ TO_DEVICE_MESSAGE_ID = "to_device.message_id"
+
+ # Whether the sync response has new data to be returned to the client.
+ SYNC_RESULT = "sync.new_data"
+
+
# Block everything by default
# A regex which matches the server_names to expose traces for.
# None means 'block everything'.
@@ -478,7 +485,7 @@ def start_active_span_from_request(
def start_active_span_from_edu(
edu_content,
operation_name,
- references=[],
+ references: Optional[list] = None,
tags=None,
start_time=None,
ignore_active_span=False,
@@ -493,6 +500,7 @@ def start_active_span_from_edu(
For the other args see opentracing.tracer
"""
+ references = references or []
if opentracing is None:
return noop_context_manager()
diff --git a/synapse/logging/scopecontextmanager.py b/synapse/logging/scopecontextmanager.py
index 7b9c6574..b1e8e08f 100644
--- a/synapse/logging/scopecontextmanager.py
+++ b/synapse/logging/scopecontextmanager.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/logging/utils.py b/synapse/logging/utils.py
index fd3543ab..08895e72 100644
--- a/synapse/logging/utils.py
+++ b/synapse/logging/utils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index 3b499efc..31b7b3c2 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -214,7 +213,12 @@ class GaugeBucketCollector:
Prometheus, and optimise for that case.
"""
- __slots__ = ("_name", "_documentation", "_bucket_bounds", "_metric")
+ __slots__ = (
+ "_name",
+ "_documentation",
+ "_bucket_bounds",
+ "_metric",
+ )
def __init__(
self,
@@ -242,11 +246,16 @@ class GaugeBucketCollector:
if self._bucket_bounds[-1] != float("inf"):
self._bucket_bounds.append(float("inf"))
- self._metric = self._values_to_metric([])
+ # We initially set this to None. We won't report metrics until
+ # this has been initialised after a successful data update.
+ self._metric = None # type: Optional[GaugeHistogramMetricFamily]
+
registry.register(self)
def collect(self):
- yield self._metric
+ # Don't report metrics unless we've already collected some data
+ if self._metric is not None:
+ yield self._metric
def update_data(self, values: Iterable[float]):
"""Update the data to be reported by the metric
diff --git a/synapse/metrics/_exposition.py b/synapse/metrics/_exposition.py
index 71320a14..8002be56 100644
--- a/synapse/metrics/_exposition.py
+++ b/synapse/metrics/_exposition.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015-2019 Prometheus Python Client Developers
# Copyright 2019 Matrix.org Foundation C.I.C.
#
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
index b56986d8..714caf84 100644
--- a/synapse/metrics/background_process_metrics.py
+++ b/synapse/metrics/background_process_metrics.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,7 +15,7 @@
import logging
import threading
from functools import wraps
-from typing import TYPE_CHECKING, Dict, Optional, Set
+from typing import TYPE_CHECKING, Dict, Optional, Set, Union
from prometheus_client.core import REGISTRY, Counter, Gauge
@@ -199,11 +198,11 @@ def run_as_background_process(desc: str, func, *args, bg_start_span=True, **kwar
_background_process_start_count.labels(desc).inc()
_background_process_in_flight_count.labels(desc).inc()
- with BackgroundProcessLoggingContext(desc, "%s-%i" % (desc, count)) as context:
+ with BackgroundProcessLoggingContext(desc, count) as context:
try:
ctx = noop_context_manager()
if bg_start_span:
- ctx = start_active_span(desc, tags={"request_id": context.request})
+ ctx = start_active_span(desc, tags={"request_id": str(context)})
with ctx:
return await maybe_awaitable(func(*args, **kwargs))
except Exception:
@@ -244,9 +243,20 @@ class BackgroundProcessLoggingContext(LoggingContext):
__slots__ = ["_proc"]
- def __init__(self, name: str, request: Optional[str] = None):
- super().__init__(name, request=request)
+ def __init__(self, name: str, instance_id: Optional[Union[int, str]] = None):
+ """
+ Args:
+ name: The name of the background process. Each distinct `name` gets a
+ separate prometheus time series.
+
+ instance_id: an identifier to add to `name` to distinguish this instance of
+ the named background process in the logs. If this is `None`, one is
+ made up based on id(self).
+ """
+ if instance_id is None:
+ instance_id = id(self)
+ super().__init__("%s-%s" % (name, instance_id))
self._proc = _BackgroundProcess(name, self)
def start(self, rusage: "Optional[resource._RUsage]"):
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py
index 781e02fb..a1a2b9ae 100644
--- a/synapse/module_api/__init__.py
+++ b/synapse/module_api/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
@@ -14,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import TYPE_CHECKING, Any, Generator, Iterable, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Generator, Iterable, List, Optional, Tuple
from twisted.internet import defer
@@ -50,11 +49,21 @@ class ModuleApi:
self._auth = hs.get_auth()
self._auth_handler = auth_handler
self._server_name = hs.hostname
+ self._presence_stream = hs.get_event_sources().sources["presence"]
+ self._state = hs.get_state_handler()
# We expose these as properties below in order to attach a helpful docstring.
self._http_client = hs.get_simple_http_client() # type: SimpleHttpClient
self._public_room_list_manager = PublicRoomListManager(hs)
+ # The next time these users sync, they will receive the current presence
+ # state of all local users. Users are added by send_local_online_presence_to,
+ # and removed after a successful sync.
+ #
+ # We make this a private variable to deter modules from accessing it directly,
+ # though other classes in Synapse will still do so.
+ self._send_full_presence_to_local_users = set()
+
@property
def http_client(self):
"""Allows making outbound HTTP requests to remote resources.
@@ -118,7 +127,7 @@ class ModuleApi:
return defer.ensureDeferred(self._auth_handler.check_user_exists(user_id))
@defer.inlineCallbacks
- def register(self, localpart, displayname=None, emails=[]):
+ def register(self, localpart, displayname=None, emails: Optional[List[str]] = None):
"""Registers a new user with given localpart and optional displayname, emails.
Also returns an access token for the new user.
@@ -138,11 +147,13 @@ class ModuleApi:
logger.warning(
"Using deprecated ModuleApi.register which creates a dummy user device."
)
- user_id = yield self.register_user(localpart, displayname, emails)
+ user_id = yield self.register_user(localpart, displayname, emails or [])
_, access_token = yield self.register_device(user_id)
return user_id, access_token
- def register_user(self, localpart, displayname=None, emails=[]):
+ def register_user(
+ self, localpart, displayname=None, emails: Optional[List[str]] = None
+ ):
"""Registers a new user with given localpart and optional displayname, emails.
Args:
@@ -161,7 +172,7 @@ class ModuleApi:
self._hs.get_registration_handler().register_user(
localpart=localpart,
default_display_name=displayname,
- bind_emails=emails,
+ bind_emails=emails or [],
)
)
@@ -385,6 +396,49 @@ class ModuleApi:
return event
+ async def send_local_online_presence_to(self, users: Iterable[str]) -> None:
+ """
+ Forces the equivalent of a presence initial_sync for a set of local or remote
+ users. The users will receive presence for all currently online users that they
+ are considered interested in.
+
+ Updates to remote users will be sent immediately, whereas local users will receive
+ them on their next sync attempt.
+
+ Note that this method can only be run on the main or federation_sender worker
+ processes.
+ """
+ if not self._hs.should_send_federation():
+ raise Exception(
+ "send_local_online_presence_to can only be run "
+ "on processes that send federation",
+ )
+
+ for user in users:
+ if self._hs.is_mine_id(user):
+                # SyncHandler._generate_sync_entry_for_presence checks this
+                # set and calls presence_source.get_new_events with an empty
+                # `from_key` for any user in it, so that user receives all
+                # presence state on their next incremental sync.
+
+ # Force a presence initial_sync for this user next time
+ self._send_full_presence_to_local_users.add(user)
+ else:
+ # Retrieve presence state for currently online users that this user
+ # is considered interested in
+ presence_events, _ = await self._presence_stream.get_new_events(
+ UserID.from_string(user), from_key=None, include_offline=False
+ )
+
+ # Send to remote destinations.
+
+ # We pull out the presence handler here to break a cyclic
+ # dependency between the presence router and module API.
+ presence_handler = self._hs.get_presence_handler()
+ await presence_handler.maybe_send_presence_to_interested_destinations(
+ presence_events
+ )
+
class PublicRoomListManager:
"""Contains methods for adding to, removing from and querying whether a room
diff --git a/synapse/module_api/errors.py b/synapse/module_api/errors.py
index b1544177..d24864c5 100644
--- a/synapse/module_api/errors.py
+++ b/synapse/module_api/errors.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/notifier.py b/synapse/notifier.py
index 1374aae4..b9531007 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,6 +17,7 @@ from collections import namedtuple
from typing import (
Awaitable,
Callable,
+ Collection,
Dict,
Iterable,
List,
@@ -39,16 +39,11 @@ from synapse.api.errors import AuthError
from synapse.events import EventBase
from synapse.handlers.presence import format_user_presence_state
from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.opentracing import log_kv, start_active_span
from synapse.logging.utils import log_function
from synapse.metrics import LaterGauge
from synapse.streams.config import PaginationConfig
-from synapse.types import (
- Collection,
- PersistedEventPosition,
- RoomStreamToken,
- StreamToken,
- UserID,
-)
+from synapse.types import PersistedEventPosition, RoomStreamToken, StreamToken, UserID
from synapse.util.async_helpers import ObservableDeferred, timeout_deferred
from synapse.util.metrics import Measure
from synapse.visibility import filter_events_for_client
@@ -136,6 +131,15 @@ class _NotifierUserStream:
self.last_notified_ms = time_now_ms
noify_deferred = self.notify_deferred
+ log_kv(
+ {
+ "notify": self.user_id,
+ "stream": stream_key,
+ "stream_id": stream_id,
+ "listeners": self.count_listeners(),
+ }
+ )
+
users_woken_by_stream_counter.labels(stream_key).inc()
with PreserveLoggingContext():
@@ -266,7 +270,7 @@ class Notifier:
event: EventBase,
event_pos: PersistedEventPosition,
max_room_stream_token: RoomStreamToken,
- extra_users: Collection[UserID] = [],
+ extra_users: Optional[Collection[UserID]] = None,
):
"""Unwraps event and calls `on_new_room_event_args`."""
self.on_new_room_event_args(
@@ -276,7 +280,7 @@ class Notifier:
state_key=event.get("state_key"),
membership=event.content.get("membership"),
max_room_stream_token=max_room_stream_token,
- extra_users=extra_users,
+ extra_users=extra_users or [],
)
def on_new_room_event_args(
@@ -287,7 +291,7 @@ class Notifier:
membership: Optional[str],
event_pos: PersistedEventPosition,
max_room_stream_token: RoomStreamToken,
- extra_users: Collection[UserID] = [],
+ extra_users: Optional[Collection[UserID]] = None,
):
"""Used by handlers to inform the notifier something has happened
in the room, room event wise.
@@ -303,7 +307,7 @@ class Notifier:
self.pending_new_room_events.append(
_PendingRoomEventEntry(
event_pos=event_pos,
- extra_users=extra_users,
+ extra_users=extra_users or [],
room_id=room_id,
type=event_type,
state_key=state_key,
@@ -372,14 +376,14 @@ class Notifier:
self,
stream_key: str,
new_token: Union[int, RoomStreamToken],
- users: Collection[Union[str, UserID]] = [],
+ users: Optional[Collection[Union[str, UserID]]] = None,
):
try:
stream_token = None
if isinstance(new_token, int):
stream_token = new_token
self.appservice_handler.notify_interested_services_ephemeral(
- stream_key, stream_token, users
+ stream_key, stream_token, users or []
)
except Exception:
logger.exception("Error notifying application services of event")
@@ -394,16 +398,26 @@ class Notifier:
self,
stream_key: str,
new_token: Union[int, RoomStreamToken],
- users: Collection[Union[str, UserID]] = [],
- rooms: Collection[str] = [],
+ users: Optional[Collection[Union[str, UserID]]] = None,
+ rooms: Optional[Collection[str]] = None,
):
"""Used to inform listeners that something has happened event wise.
Will wake up all listeners for the given users and rooms.
"""
+ users = users or []
+ rooms = rooms or []
+
with Measure(self.clock, "on_new_event"):
user_streams = set()
+ log_kv(
+ {
+ "waking_up_explicit_users": len(users),
+ "waking_up_explicit_rooms": len(rooms),
+ }
+ )
+
for user in users:
user_stream = self.user_to_user_stream.get(str(user))
if user_stream is not None:
@@ -476,12 +490,34 @@ class Notifier:
(end_time - now) / 1000.0,
self.hs.get_reactor(),
)
- with PreserveLoggingContext():
- await listener.deferred
+
+ with start_active_span("wait_for_events.deferred"):
+ log_kv(
+ {
+ "wait_for_events": "sleep",
+ "token": prev_token,
+ }
+ )
+
+ with PreserveLoggingContext():
+ await listener.deferred
+
+ log_kv(
+ {
+ "wait_for_events": "woken",
+ "token": user_stream.current_token,
+ }
+ )
current_token = user_stream.current_token
result = await callback(prev_token, current_token)
+ log_kv(
+ {
+ "wait_for_events": "result",
+ "result": bool(result),
+ }
+ )
if result:
break
@@ -489,8 +525,10 @@ class Notifier:
# has happened between the old prev_token and the current_token
prev_token = current_token
except defer.TimeoutError:
+ log_kv({"wait_for_events": "timeout"})
break
except defer.CancelledError:
+ log_kv({"wait_for_events": "cancelled"})
break
if result is None:
@@ -507,7 +545,7 @@ class Notifier:
pagination_config: PaginationConfig,
timeout: int,
is_guest: bool = False,
- explicit_room_id: str = None,
+ explicit_room_id: Optional[str] = None,
) -> EventStreamResult:
"""For the given user and rooms, return any new events for them. If
        there are no new events, wait for up to `timeout` milliseconds for any
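
The recurring signature change in this file (and in `module_api` above) replaces mutable default arguments with an `Optional[...] = None` sentinel normalised via `or []`. A minimal, self-contained illustration of why the old `= []` defaults were risky:

    from typing import Collection, List, Optional

    def broken(users: List[str] = []) -> List[str]:
        # The default list is created once, at definition time, and is
        # shared between every call that omits the argument.
        users.append("@alice:example.org")
        return users

    broken()  # ['@alice:example.org']
    broken()  # ['@alice:example.org', '@alice:example.org'] - leaked state

    def fixed(users: Optional[Collection[str]] = None) -> List[str]:
        # The pattern adopted in this diff: a None sentinel, normalised inside.
        return list(users or [])
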
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py
index 9fc3da49..2c23afe8 100644
--- a/synapse/push/__init__.py
+++ b/synapse/push/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/push/action_generator.py b/synapse/push/action_generator.py
index 38a47a60..60758df0 100644
--- a/synapse/push/action_generator.py
+++ b/synapse/push/action_generator.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index 1897f591..350646f4 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
# Copyright 2017 New Vector Ltd
#
@@ -107,6 +106,10 @@ class BulkPushRuleEvaluator:
self.store = hs.get_datastore()
self.auth = hs.get_auth()
+ # Used by `RulesForRoom` to ensure only one thing mutates the cache at a
+ # time. Keyed off room_id.
+ self._rules_linearizer = Linearizer(name="rules_for_room")
+
self.room_push_rule_cache_metrics = register_cache(
"cache",
"room_push_rule_cache",
@@ -124,7 +127,16 @@ class BulkPushRuleEvaluator:
dict of user_id -> push_rules
"""
room_id = event.room_id
- rules_for_room = self._get_rules_for_room(room_id)
+
+ rules_for_room_data = self._get_rules_for_room(room_id)
+ rules_for_room = RulesForRoom(
+ hs=self.hs,
+ room_id=room_id,
+ rules_for_room_cache=self._get_rules_for_room.cache,
+ room_push_rule_cache_metrics=self.room_push_rule_cache_metrics,
+ linearizer=self._rules_linearizer,
+ cached_data=rules_for_room_data,
+ )
rules_by_user = await rules_for_room.get_rules(event, context)
@@ -143,17 +155,12 @@ class BulkPushRuleEvaluator:
return rules_by_user
@lru_cache()
- def _get_rules_for_room(self, room_id: str) -> "RulesForRoom":
- """Get the current RulesForRoom object for the given room id"""
- # It's important that RulesForRoom gets added to self._get_rules_for_room.cache
+ def _get_rules_for_room(self, room_id: str) -> "RulesForRoomData":
+ """Get the current RulesForRoomData object for the given room id"""
+ # It's important that the RulesForRoomData object gets added to self._get_rules_for_room.cache
# before any lookup methods get called on it as otherwise there may be
        # a race if invalidate_all gets called (which assumes it's in the cache)
- return RulesForRoom(
- self.hs,
- room_id,
- self._get_rules_for_room.cache,
- self.room_push_rule_cache_metrics,
- )
+ return RulesForRoomData()
async def _get_power_levels_and_sender_level(
self, event: EventBase, context: EventContext
@@ -283,11 +290,49 @@ def _condition_checker(
return True
+@attr.s(slots=True)
+class RulesForRoomData:
+ """The data stored in the cache by `RulesForRoom`.
+
+ We don't store `RulesForRoom` directly in the cache as we want our caches to
+ *only* include data, and not references to e.g. the data stores.
+ """
+
+ # event_id -> (user_id, state)
+ member_map = attr.ib(type=Dict[str, Tuple[str, str]], factory=dict)
+ # user_id -> rules
+ rules_by_user = attr.ib(type=Dict[str, List[Dict[str, dict]]], factory=dict)
+
+ # The last state group we updated the caches for. If the state_group of
+ # a new event comes along, we know that we can just return the cached
+ # result.
+ # On invalidation of the rules themselves (if the user changes them),
+ # we invalidate everything and set state_group to `object()`
+ state_group = attr.ib(type=Union[object, int], factory=object)
+
+ # A sequence number to keep track of when we're allowed to update the
+ # cache. We bump the sequence number when we invalidate the cache. If
+    # the sequence number changes while we're calculating stuff, we should
+    # not update the cache with it.
+ sequence = attr.ib(type=int, default=0)
+
+ # A cache of user_ids that we *know* aren't interesting, e.g. user_ids
+ # owned by AS's, or remote users, etc. (I.e. users we will never need to
+ # calculate push for)
+ # These never need to be invalidated as we will never set up push for
+ # them.
+ uninteresting_user_set = attr.ib(type=Set[str], factory=set)
+
+
class RulesForRoom:
"""Caches push rules for users in a room.
This efficiently handles users joining/leaving the room by not invalidating
the entire cache for the room.
+
+ A new instance is constructed for each call to
+ `BulkPushRuleEvaluator._get_rules_for_event`, with the cached data from
+ previous calls passed in.
"""
def __init__(
@@ -296,6 +341,8 @@ class RulesForRoom:
room_id: str,
rules_for_room_cache: LruCache,
room_push_rule_cache_metrics: CacheMetric,
+ linearizer: Linearizer,
+ cached_data: RulesForRoomData,
):
"""
Args:
@@ -304,38 +351,21 @@ class RulesForRoom:
            rules_for_room_cache: The cache object that caches these
                RulesForRoomData objects.
room_push_rule_cache_metrics: The metrics object
+ linearizer: The linearizer used to ensure only one thing mutates
+ the cache at a time. Keyed off room_id
+ cached_data: Cached data from previous calls to `self.get_rules`,
+ can be mutated.
"""
self.room_id = room_id
self.is_mine_id = hs.is_mine_id
self.store = hs.get_datastore()
self.room_push_rule_cache_metrics = room_push_rule_cache_metrics
- self.linearizer = Linearizer(name="rules_for_room")
-
- # event_id -> (user_id, state)
- self.member_map = {} # type: Dict[str, Tuple[str, str]]
- # user_id -> rules
- self.rules_by_user = {} # type: Dict[str, List[Dict[str, dict]]]
-
- # The last state group we updated the caches for. If the state_group of
- # a new event comes along, we know that we can just return the cached
- # result.
- # On invalidation of the rules themselves (if the user changes them),
- # we invalidate everything and set state_group to `object()`
- self.state_group = object()
-
- # A sequence number to keep track of when we're allowed to update the
- # cache. We bump the sequence number when we invalidate the cache. If
- # the sequence number changes while we're calculating stuff we should
- # not update the cache with it.
- self.sequence = 0
-
- # A cache of user_ids that we *know* aren't interesting, e.g. user_ids
- # owned by AS's, or remote users, etc. (I.e. users we will never need to
- # calculate push for)
- # These never need to be invalidated as we will never set up push for
- # them.
- self.uninteresting_user_set = set() # type: Set[str]
+ # Used to ensure only one thing mutates the cache at a time. Keyed off
+ # room_id.
+ self.linearizer = linearizer
+
+ self.data = cached_data
# We need to be clever on the invalidating caches callbacks, as
# otherwise the invalidation callback holds a reference to the object,
@@ -353,25 +383,25 @@ class RulesForRoom:
"""
state_group = context.state_group
- if state_group and self.state_group == state_group:
+ if state_group and self.data.state_group == state_group:
logger.debug("Using cached rules for %r", self.room_id)
self.room_push_rule_cache_metrics.inc_hits()
- return self.rules_by_user
+ return self.data.rules_by_user
- with (await self.linearizer.queue(())):
- if state_group and self.state_group == state_group:
+ with (await self.linearizer.queue(self.room_id)):
+ if state_group and self.data.state_group == state_group:
logger.debug("Using cached rules for %r", self.room_id)
self.room_push_rule_cache_metrics.inc_hits()
- return self.rules_by_user
+ return self.data.rules_by_user
self.room_push_rule_cache_metrics.inc_misses()
ret_rules_by_user = {}
missing_member_event_ids = {}
- if state_group and self.state_group == context.prev_group:
+ if state_group and self.data.state_group == context.prev_group:
# If we have a simple delta then we can reuse most of the previous
# results.
- ret_rules_by_user = self.rules_by_user
+ ret_rules_by_user = self.data.rules_by_user
current_state_ids = context.delta_ids
push_rules_delta_state_cache_metric.inc_hits()
@@ -394,24 +424,24 @@ class RulesForRoom:
if typ != EventTypes.Member:
continue
- if user_id in self.uninteresting_user_set:
+ if user_id in self.data.uninteresting_user_set:
continue
if not self.is_mine_id(user_id):
- self.uninteresting_user_set.add(user_id)
+ self.data.uninteresting_user_set.add(user_id)
continue
if self.store.get_if_app_services_interested_in_user(user_id):
- self.uninteresting_user_set.add(user_id)
+ self.data.uninteresting_user_set.add(user_id)
continue
event_id = current_state_ids[key]
- res = self.member_map.get(event_id, None)
+ res = self.data.member_map.get(event_id, None)
if res:
user_id, state = res
if state == Membership.JOIN:
- rules = self.rules_by_user.get(user_id, None)
+ rules = self.data.rules_by_user.get(user_id, None)
if rules:
ret_rules_by_user[user_id] = rules
continue
@@ -431,7 +461,7 @@ class RulesForRoom:
else:
                # The push rules didn't change, but let's update the cache anyway
self.update_cache(
- self.sequence,
+ self.data.sequence,
members={}, # There were no membership changes
rules_by_user=ret_rules_by_user,
state_group=state_group,
@@ -462,7 +492,7 @@ class RulesForRoom:
for. Used when updating the cache.
event: The event we are currently computing push rules for.
"""
- sequence = self.sequence
+ sequence = self.data.sequence
rows = await self.store.get_membership_from_event_ids(member_event_ids.values())
@@ -502,23 +532,11 @@ class RulesForRoom:
self.update_cache(sequence, members, ret_rules_by_user, state_group)
- def invalidate_all(self) -> None:
- # Note: Don't hand this function directly to an invalidation callback
- # as it keeps a reference to self and will stop this instance from being
- # GC'd if it gets dropped from the rules_to_user cache. Instead use
- # `self.invalidate_all_cb`
- logger.debug("Invalidating RulesForRoom for %r", self.room_id)
- self.sequence += 1
- self.state_group = object()
- self.member_map = {}
- self.rules_by_user = {}
- push_rules_invalidation_counter.inc()
-
def update_cache(self, sequence, members, rules_by_user, state_group) -> None:
- if sequence == self.sequence:
- self.member_map.update(members)
- self.rules_by_user = rules_by_user
- self.state_group = state_group
+ if sequence == self.data.sequence:
+ self.data.member_map.update(members)
+ self.data.rules_by_user = rules_by_user
+ self.data.state_group = state_group
@attr.attrs(slots=True, frozen=True)
@@ -536,6 +554,10 @@ class _Invalidation:
room_id = attr.ib(type=str)
def __call__(self) -> None:
- rules = self.cache.get(self.room_id, None, update_metrics=False)
- if rules:
- rules.invalidate_all()
+ rules_data = self.cache.get(self.room_id, None, update_metrics=False)
+ if rules_data:
+ rules_data.sequence += 1
+ rules_data.state_group = object()
+ rules_data.member_map = {}
+ rules_data.rules_by_user = {}
+ push_rules_invalidation_counter.inc()
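
The refactor above splits what used to be one object into `RulesForRoomData` (pure data, safe to keep in the LRU cache) and `RulesForRoom` (logic, rebuilt per call and handed the cached data), with a single `Linearizer` keyed by room ID guarding mutation. A simplified sketch of the pattern, using a plain dict in place of Synapse's `LruCache`:

    import attr

    @attr.s(slots=True)
    class RoomData:
        # Pure data only - no references to stores or the homeserver, so
        # the cache never pins heavyweight objects (the point of the split).
        rules_by_user = attr.ib(factory=dict)
        state_group = attr.ib(factory=object)
        sequence = attr.ib(default=0)

    class Evaluator:
        def __init__(self):
            self._cache = {}  # room_id -> RoomData; an LruCache in Synapse

        def data_for_room(self, room_id: str) -> RoomData:
            # The logic object would be constructed fresh around this data
            # for each event, then discarded; only RoomData persists.
            return self._cache.setdefault(room_id, RoomData())
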
diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py
index 0cadba76..2ee0ccd5 100644
--- a/synapse/push/clientformat.py
+++ b/synapse/push/clientformat.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py
index c0968dc7..99a18874 100644
--- a/synapse/push/emailpusher.py
+++ b/synapse/push/emailpusher.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,8 +19,9 @@ from twisted.internet.error import AlreadyCalled, AlreadyCancelled
from twisted.internet.interfaces import IDelayedCall
from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.push import Pusher, PusherConfig, ThrottleParams
+from synapse.push import Pusher, PusherConfig, PusherConfigException, ThrottleParams
from synapse.push.mailer import Mailer
+from synapse.util.threepids import validate_email
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -72,6 +72,12 @@ class EmailPusher(Pusher):
self._is_processing = False
+ # Make sure that the email is valid.
+ try:
+ validate_email(self.email)
+ except ValueError:
+ raise PusherConfigException("Invalid email")
+
def on_started(self, should_check_for_notifs: bool) -> None:
"""Called when this pusher has been started.
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py
index 26af5309..06bf5f8a 100644
--- a/synapse/push/httppusher.py
+++ b/synapse/push/httppusher.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 New Vector Ltd
#
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index 2e5161de..c4b43b0d 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/push/presentable_names.py b/synapse/push/presentable_names.py
index 04c2c148..41294139 100644
--- a/synapse/push/presentable_names.py
+++ b/synapse/push/presentable_names.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/push/push_rule_evaluator.py b/synapse/push/push_rule_evaluator.py
index ba1877ad..98b90a4f 100644
--- a/synapse/push/push_rule_evaluator.py
+++ b/synapse/push/push_rule_evaluator.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 New Vector Ltd
#
@@ -20,6 +19,7 @@ from typing import Any, Dict, List, Optional, Pattern, Tuple, Union
from synapse.events import EventBase
from synapse.types import UserID
+from synapse.util import glob_to_regex, re_word_boundary
from synapse.util.caches.lrucache import LruCache
logger = logging.getLogger(__name__)
@@ -184,7 +184,7 @@ class PushRuleEvaluatorForEvent:
r = regex_cache.get((display_name, False, True), None)
if not r:
r1 = re.escape(display_name)
- r1 = _re_word_boundary(r1)
+ r1 = re_word_boundary(r1)
r = re.compile(r1, flags=re.IGNORECASE)
regex_cache[(display_name, False, True)] = r
@@ -213,7 +213,7 @@ def _glob_matches(glob: str, value: str, word_boundary: bool = False) -> bool:
try:
r = regex_cache.get((glob, True, word_boundary), None)
if not r:
- r = _glob_to_re(glob, word_boundary)
+ r = glob_to_regex(glob, word_boundary)
regex_cache[(glob, True, word_boundary)] = r
return bool(r.search(value))
except re.error:
@@ -221,56 +221,6 @@ def _glob_matches(glob: str, value: str, word_boundary: bool = False) -> bool:
return False
-def _glob_to_re(glob: str, word_boundary: bool) -> Pattern:
- """Generates regex for a given glob.
-
- Args:
- glob
- word_boundary: Whether to match against word boundaries or entire string.
- """
- if IS_GLOB.search(glob):
- r = re.escape(glob)
-
- r = r.replace(r"\*", ".*?")
- r = r.replace(r"\?", ".")
-
- # handle [abc], [a-z] and [!a-z] style ranges.
- r = GLOB_REGEX.sub(
- lambda x: (
- "[%s%s]" % (x.group(1) and "^" or "", x.group(2).replace(r"\\\-", "-"))
- ),
- r,
- )
- if word_boundary:
- r = _re_word_boundary(r)
-
- return re.compile(r, flags=re.IGNORECASE)
- else:
- r = "^" + r + "$"
-
- return re.compile(r, flags=re.IGNORECASE)
- elif word_boundary:
- r = re.escape(glob)
- r = _re_word_boundary(r)
-
- return re.compile(r, flags=re.IGNORECASE)
- else:
- r = "^" + re.escape(glob) + "$"
- return re.compile(r, flags=re.IGNORECASE)
-
-
-def _re_word_boundary(r: str) -> str:
- """
- Adds word boundary characters to the start and end of an
- expression to require that the match occur as a whole word,
- but do so respecting the fact that strings starting or ending
- with non-word characters will change word boundaries.
- """
- # we can't use \b as it chokes on unicode. however \W seems to be okay
- # as shorthand for [^0-9A-Za-z_].
- return r"(^|\W)%s(\W|$)" % (r,)
-
-
def _flatten_dict(
d: Union[EventBase, dict],
prefix: Optional[List[str]] = None,
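
The helpers deleted above now live in `synapse.util` as `glob_to_regex` and `re_word_boundary`. Since the word-boundary pattern is visible in the removed code, its behaviour is easy to demonstrate standalone:

    import re

    def re_word_boundary(r: str) -> str:
        # Same pattern as the removed helper: \W rather than \b, because \b
        # chokes on unicode; \W is shorthand for [^0-9A-Za-z_].
        return r"(^|\W)%s(\W|$)" % (r,)

    pat = re.compile(re_word_boundary(re.escape("bob")), flags=re.IGNORECASE)
    assert pat.search("hi bob!")      # matches: surrounded by non-word chars
    assert not pat.search("bobsled")  # no match: "bob" is part of a word
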
diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py
index df341032..9c85200c 100644
--- a/synapse/push/push_tools.py
+++ b/synapse/push/push_tools.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py
index cb941278..c51938b8 100644
--- a/synapse/push/pusher.py
+++ b/synapse/push/pusher.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py
index 4c7f5fec..579fcdf4 100644
--- a/synapse/push/pusherpool.py
+++ b/synapse/push/pusherpool.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -63,7 +62,9 @@ class PusherPool:
self.store = self.hs.get_datastore()
self.clock = self.hs.get_clock()
- self._account_validity = hs.config.account_validity
+ self._account_validity_enabled = (
+ hs.config.account_validity.account_validity_enabled
+ )
# We shard the handling of push notifications by user ID.
self._pusher_shard_config = hs.config.push.pusher_shard_config
@@ -237,7 +238,7 @@ class PusherPool:
for u in users_affected:
# Don't push if the user account has expired
- if self._account_validity.enabled:
+ if self._account_validity_enabled:
expired = await self.store.is_account_expired(
u, self.clock.time_msec()
)
@@ -267,7 +268,7 @@ class PusherPool:
for u in users_affected:
# Don't push if the user account has expired
- if self._account_validity.enabled:
+ if self._account_validity_enabled:
expired = await self.store.is_account_expired(
u, self.clock.time_msec()
)
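
With account validity reduced to a single boolean, both notification paths above share the same expiry gate. Condensed into one hypothetical helper (the store and clock calls are those used in the diff; the surrounding loop bodies are elided):

    async def _filter_expired(self, users_affected):
        """Drop users whose accounts have expired before pushing."""
        live_users = []
        for u in users_affected:
            if self._account_validity_enabled:
                expired = await self.store.is_account_expired(
                    u, self.clock.time_msec()
                )
                if expired:
                    continue  # never push to an expired account
            live_users.append(u)
        return live_users
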
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index 2a1c925e..45a6b828 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -78,14 +78,15 @@ REQUIREMENTS = [
# we use attr.validators.deep_iterable, which arrived in 19.1.0 (Note:
# Fedora 31 only has 19.1, so if we want to upgrade we should wait until 33
# is out in November.)
- "attrs>=19.1.0",
+ # Note: 21.1.0 broke `/sync`, see #9936
+ "attrs>=19.1.0,!=21.1.0",
"netaddr>=0.7.18",
"Jinja2>=2.9",
"bleach>=1.4.3",
"typing-extensions>=3.7.4",
# We enforce that we have a `cryptography` version that bundles an `openssl`
# with the latest security patches.
- "cryptography>=3.4.7;python_version>='3.6'",
+ "cryptography>=3.4.7",
]
CONDITIONAL_REQUIREMENTS = {
@@ -100,14 +101,9 @@ CONDITIONAL_REQUIREMENTS = {
# that use the protocol, such as Let's Encrypt.
"acme": [
"txacme>=0.9.2",
- # txacme depends on eliot. Eliot 1.8.0 is incompatible with
- # python 3.5.2, as per https://github.com/itamarst/eliot/issues/418
- "eliot<1.8.0;python_version<'3.5.3'",
],
"saml2": [
- # pysaml2 6.4.0 is incompatible with Python 3.5 (see https://github.com/IdentityPython/pysaml2/issues/749)
- "pysaml2>=4.5.0,<6.4.0;python_version<'3.6'",
- "pysaml2>=4.5.0;python_version>='3.6'",
+ "pysaml2>=4.5.0",
],
"oidc": ["authlib>=0.14.0"],
# systemd-python is necessary for logging to the systemd journal via
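
The new `attrs` pin combines a minimum version with an exclusion of the single broken release. The `packaging` library (used here purely for illustration) evaluates such specifiers:

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=19.1.0,!=21.1.0")
    print("21.1.0" in spec)  # False - the release that broke /sync is skipped
    print("21.2.0" in spec)  # True  - later releases satisfy the pin again
    print("19.0.0" in spec)  # False - below the minimum
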
diff --git a/synapse/replication/__init__.py b/synapse/replication/__init__.py
index b7df13c9..f43a360a 100644
--- a/synapse/replication/__init__.py
+++ b/synapse/replication/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/__init__.py b/synapse/replication/http/__init__.py
index cb4a52db..ba8114ac 100644
--- a/synapse/replication/http/__init__.py
+++ b/synapse/replication/http/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py
index b7aa0c28..5685cf21 100644
--- a/synapse/replication/http/_base.py
+++ b/synapse/replication/http/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -159,7 +158,10 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
def make_client(cls, hs):
"""Create a client that makes requests.
- Returns a callable that accepts the same parameters as `_serialize_payload`.
+ Returns a callable that accepts the same parameters as
+ `_serialize_payload`, and also accepts an optional `instance_name`
+ parameter to specify which instance to hit (the instance must be in
+ the `instance_map` config).
"""
clock = hs.get_clock()
client = hs.get_simple_http_client()
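
As the amended docstring notes, the callable returned by `make_client` can now target a specific worker. A hedged usage sketch; the endpoint class and payload are invented, and only the `instance_name` keyword is documented above:

    async def notify_specific_worker(hs) -> None:
        # Hypothetical endpoint class; everything except `instance_name`
        # is invented for this sketch.
        client = SomeReplicationEndpoint.make_client(hs)
        # Target a named worker from the `instance_map` config rather than
        # the default (master) process:
        await client(instance_name="event_persister1", user_id="@alice:example.org")
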
diff --git a/synapse/replication/http/account_data.py b/synapse/replication/http/account_data.py
index 60899b6a..70e951af 100644
--- a/synapse/replication/http/account_data.py
+++ b/synapse/replication/http/account_data.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/devices.py b/synapse/replication/http/devices.py
index 807b85d2..5a5818ef 100644
--- a/synapse/replication/http/devices.py
+++ b/synapse/replication/http/devices.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py
index 82ea3b89..79cadb7b 100644
--- a/synapse/replication/http/federation.py
+++ b/synapse/replication/http/federation.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py
index 4ec1bfa6..c2e8c002 100644
--- a/synapse/replication/http/login.py
+++ b/synapse/replication/http/login.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py
index c10992ff..289a397d 100644
--- a/synapse/replication/http/membership.py
+++ b/synapse/replication/http/membership.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/presence.py b/synapse/replication/http/presence.py
index bc9aa82c..f2530762 100644
--- a/synapse/replication/http/presence.py
+++ b/synapse/replication/http/presence.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/push.py b/synapse/replication/http/push.py
index 054ed64d..139427cb 100644
--- a/synapse/replication/http/push.py
+++ b/synapse/replication/http/push.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/register.py b/synapse/replication/http/register.py
index d005f387..d6dd7242 100644
--- a/synapse/replication/http/register.py
+++ b/synapse/replication/http/register.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -77,7 +76,7 @@ class ReplicationRegisterServlet(ReplicationEndpoint):
async def _handle_request(self, request, user_id):
content = parse_json_object_from_request(request)
- self.registration_handler.check_registration_ratelimit(content["address"])
+ await self.registration_handler.check_registration_ratelimit(content["address"])
await self.registration_handler.register_with_store(
user_id=user_id,
diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py
index a4c5b442..fae5ffa4 100644
--- a/synapse/replication/http/send_event.py
+++ b/synapse/replication/http/send_event.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/http/streams.py b/synapse/replication/http/streams.py
index 309159e3..9afa147d 100644
--- a/synapse/replication/http/streams.py
+++ b/synapse/replication/http/streams.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/__init__.py b/synapse/replication/slave/__init__.py
index b7df13c9..f43a360a 100644
--- a/synapse/replication/slave/__init__.py
+++ b/synapse/replication/slave/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/__init__.py b/synapse/replication/slave/storage/__init__.py
index b7df13c9..f43a360a 100644
--- a/synapse/replication/slave/storage/__init__.py
+++ b/synapse/replication/slave/storage/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/_base.py b/synapse/replication/slave/storage/_base.py
index 693c9ab9..faa99387 100644
--- a/synapse/replication/slave/storage/_base.py
+++ b/synapse/replication/slave/storage/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/_slaved_id_tracker.py b/synapse/replication/slave/storage/_slaved_id_tracker.py
index 0d39a93e..2cb74890 100644
--- a/synapse/replication/slave/storage/_slaved_id_tracker.py
+++ b/synapse/replication/slave/storage/_slaved_id_tracker.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/account_data.py b/synapse/replication/slave/storage/account_data.py
index 21afe5f1..ee74ee7d 100644
--- a/synapse/replication/slave/storage/account_data.py
+++ b/synapse/replication/slave/storage/account_data.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/replication/slave/storage/appservice.py b/synapse/replication/slave/storage/appservice.py
index 0f8d7037..29f50c0a 100644
--- a/synapse/replication/slave/storage/appservice.py
+++ b/synapse/replication/slave/storage/appservice.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/replication/slave/storage/client_ips.py b/synapse/replication/slave/storage/client_ips.py
index 0f5b7ade..87309663 100644
--- a/synapse/replication/slave/storage/client_ips.py
+++ b/synapse/replication/slave/storage/client_ips.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/deviceinbox.py b/synapse/replication/slave/storage/deviceinbox.py
index 1260f6d1..e9407510 100644
--- a/synapse/replication/slave/storage/deviceinbox.py
+++ b/synapse/replication/slave/storage/deviceinbox.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/devices.py b/synapse/replication/slave/storage/devices.py
index e0d86240..70207420 100644
--- a/synapse/replication/slave/storage/devices.py
+++ b/synapse/replication/slave/storage/devices.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/directory.py b/synapse/replication/slave/storage/directory.py
index 1945bcf9..71fde0c9 100644
--- a/synapse/replication/slave/storage/directory.py
+++ b/synapse/replication/slave/storage/directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py
index fbffe6d8..d4d3f8c4 100644
--- a/synapse/replication/slave/storage/events.py
+++ b/synapse/replication/slave/storage/events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/replication/slave/storage/filtering.py b/synapse/replication/slave/storage/filtering.py
index 6a232528..37875bc9 100644
--- a/synapse/replication/slave/storage/filtering.py
+++ b/synapse/replication/slave/storage/filtering.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/groups.py b/synapse/replication/slave/storage/groups.py
index 30955bcb..e9bdc384 100644
--- a/synapse/replication/slave/storage/groups.py
+++ b/synapse/replication/slave/storage/groups.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/keys.py b/synapse/replication/slave/storage/keys.py
index 96157975..a00b38c5 100644
--- a/synapse/replication/slave/storage/keys.py
+++ b/synapse/replication/slave/storage/keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/presence.py b/synapse/replication/slave/storage/presence.py
deleted file mode 100644
index 55620c03..00000000
--- a/synapse/replication/slave/storage/presence.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2016 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from synapse.replication.tcp.streams import PresenceStream
-from synapse.storage import DataStore
-from synapse.storage.database import DatabasePool
-from synapse.storage.databases.main.presence import PresenceStore
-from synapse.util.caches.stream_change_cache import StreamChangeCache
-
-from ._base import BaseSlavedStore
-from ._slaved_id_tracker import SlavedIdTracker
-
-
-class SlavedPresenceStore(BaseSlavedStore):
- def __init__(self, database: DatabasePool, db_conn, hs):
- super().__init__(database, db_conn, hs)
- self._presence_id_gen = SlavedIdTracker(db_conn, "presence_stream", "stream_id")
-
- self._presence_on_startup = self._get_active_presence(db_conn) # type: ignore
-
- self.presence_stream_cache = StreamChangeCache(
- "PresenceStreamChangeCache", self._presence_id_gen.get_current_token()
- )
-
- _get_active_presence = DataStore._get_active_presence
- take_presence_startup_info = DataStore.take_presence_startup_info
- _get_presence_for_user = PresenceStore.__dict__["_get_presence_for_user"]
- get_presence_for_users = PresenceStore.__dict__["get_presence_for_users"]
-
- def get_current_presence_token(self):
- return self._presence_id_gen.get_current_token()
-
- def process_replication_rows(self, stream_name, instance_name, token, rows):
- if stream_name == PresenceStream.NAME:
- self._presence_id_gen.advance(instance_name, token)
- for row in rows:
- self.presence_stream_cache.entity_has_changed(row.user_id, token)
- self._get_presence_for_user.invalidate((row.user_id,))
- return super().process_replication_rows(stream_name, instance_name, token, rows)
diff --git a/synapse/replication/slave/storage/profile.py b/synapse/replication/slave/storage/profile.py
index f85b20a0..99f4a226 100644
--- a/synapse/replication/slave/storage/profile.py
+++ b/synapse/replication/slave/storage/profile.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/push_rule.py b/synapse/replication/slave/storage/push_rule.py
index de904c94..4d5f8628 100644
--- a/synapse/replication/slave/storage/push_rule.py
+++ b/synapse/replication/slave/storage/push_rule.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/replication/slave/storage/pushers.py b/synapse/replication/slave/storage/pushers.py
index 93161c3d..2672a2c9 100644
--- a/synapse/replication/slave/storage/pushers.py
+++ b/synapse/replication/slave/storage/pushers.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/replication/slave/storage/receipts.py b/synapse/replication/slave/storage/receipts.py
index 3dfdd996..3826b87d 100644
--- a/synapse/replication/slave/storage/receipts.py
+++ b/synapse/replication/slave/storage/receipts.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/replication/slave/storage/registration.py b/synapse/replication/slave/storage/registration.py
index a40f064e..5dae35a9 100644
--- a/synapse/replication/slave/storage/registration.py
+++ b/synapse/replication/slave/storage/registration.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/room.py b/synapse/replication/slave/storage/room.py
index 109ac6be..8cc6de3f 100644
--- a/synapse/replication/slave/storage/room.py
+++ b/synapse/replication/slave/storage/room.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/slave/storage/transactions.py b/synapse/replication/slave/storage/transactions.py
index 2091ac0d..a59e5439 100644
--- a/synapse/replication/slave/storage/transactions.py
+++ b/synapse/replication/slave/storage/transactions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/tcp/__init__.py b/synapse/replication/tcp/__init__.py
index 1b8718b1..1fa60af8 100644
--- a/synapse/replication/tcp/__init__.py
+++ b/synapse/replication/tcp/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py
index 3455839d..4f3c6a18 100644
--- a/synapse/replication/tcp/client.py
+++ b/synapse/replication/tcp/client.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,22 +14,35 @@
"""A replication client for use by synapse workers.
"""
import logging
-from typing import TYPE_CHECKING, Dict, List, Tuple
+from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple
from twisted.internet.defer import Deferred
from twisted.internet.protocol import ReconnectingClientFactory
from synapse.api.constants import EventTypes
+from synapse.federation import send_queue
+from synapse.federation.sender import FederationSender
from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
+from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol
-from synapse.replication.tcp.streams import TypingStream
+from synapse.replication.tcp.streams import (
+ AccountDataStream,
+ DeviceListsStream,
+ GroupServerStream,
+ PushersStream,
+ PushRulesStream,
+ ReceiptsStream,
+ TagAccountDataStream,
+ ToDeviceStream,
+ TypingStream,
+)
from synapse.replication.tcp.streams.events import (
EventsStream,
EventsStreamEventRow,
EventsStreamRow,
)
-from synapse.types import PersistedEventPosition, UserID
-from synapse.util.async_helpers import timeout_deferred
+from synapse.types import PersistedEventPosition, ReadReceipt, UserID
+from synapse.util.async_helpers import Linearizer, timeout_deferred
from synapse.util.metrics import Measure
if TYPE_CHECKING:
@@ -106,6 +118,14 @@ class ReplicationDataHandler:
self._instance_name = hs.get_instance_name()
self._typing_handler = hs.get_typing_handler()
+ self._notify_pushers = hs.config.start_pushers
+ self._pusher_pool = hs.get_pusherpool()
+ self._presence_handler = hs.get_presence_handler()
+
+ self.send_handler = None # type: Optional[FederationSenderHandler]
+ if hs.should_send_federation():
+ self.send_handler = FederationSenderHandler(hs)
+
# Map from stream to list of deferreds waiting for the stream to
# arrive at a particular position. The lists are sorted by stream position.
self._streams_to_waiters = {} # type: Dict[str, List[Tuple[int, Deferred]]]
@@ -126,13 +146,51 @@ class ReplicationDataHandler:
"""
self.store.process_replication_rows(stream_name, instance_name, token, rows)
+ if self.send_handler:
+ await self.send_handler.process_replication_rows(stream_name, token, rows)
+
if stream_name == TypingStream.NAME:
self._typing_handler.process_replication_rows(token, rows)
self.notifier.on_new_event(
"typing_key", token, rooms=[row.room_id for row in rows]
)
-
- if stream_name == EventsStream.NAME:
+ elif stream_name == PushRulesStream.NAME:
+ self.notifier.on_new_event(
+ "push_rules_key", token, users=[row.user_id for row in rows]
+ )
+ elif stream_name in (AccountDataStream.NAME, TagAccountDataStream.NAME):
+ self.notifier.on_new_event(
+ "account_data_key", token, users=[row.user_id for row in rows]
+ )
+ elif stream_name == ReceiptsStream.NAME:
+ self.notifier.on_new_event(
+ "receipt_key", token, rooms=[row.room_id for row in rows]
+ )
+ await self._pusher_pool.on_new_receipts(
+ token, token, {row.room_id for row in rows}
+ )
+ elif stream_name == ToDeviceStream.NAME:
+ entities = [row.entity for row in rows if row.entity.startswith("@")]
+ if entities:
+ self.notifier.on_new_event("to_device_key", token, users=entities)
+ elif stream_name == DeviceListsStream.NAME:
+ all_room_ids = set() # type: Set[str]
+ for row in rows:
+ if row.entity.startswith("@"):
+ room_ids = await self.store.get_rooms_for_user(row.entity)
+ all_room_ids.update(room_ids)
+ self.notifier.on_new_event("device_list_key", token, rooms=all_room_ids)
+ elif stream_name == GroupServerStream.NAME:
+ self.notifier.on_new_event(
+ "groups_key", token, users=[row.user_id for row in rows]
+ )
+ elif stream_name == PushersStream.NAME:
+ for row in rows:
+ if row.deleted:
+ self.stop_pusher(row.user_id, row.app_id, row.pushkey)
+ else:
+ await self.start_pusher(row.user_id, row.app_id, row.pushkey)
+ elif stream_name == EventsStream.NAME:
# We shouldn't get multiple rows per token for events stream, so
# we don't need to optimise this for multiple rows.
for row in rows:
@@ -160,6 +218,10 @@ class ReplicationDataHandler:
membership=row.data.membership,
)
+ await self._presence_handler.process_replication_rows(
+ stream_name, instance_name, token, rows
+ )
+
# Notify any waiting deferreds. The list is ordered by position so we
# just iterate through the list until we reach a position that is
# greater than the received row position.
@@ -191,7 +253,7 @@ class ReplicationDataHandler:
waiting_list[:] = waiting_list[index_of_first_deferred_not_called:]
async def on_position(self, stream_name: str, instance_name: str, token: int):
- self.store.process_replication_rows(stream_name, instance_name, token, [])
+ await self.on_rdata(stream_name, instance_name, token, [])
# We poke the generic "replication" notifier to wake anything up that
# may be streaming.
@@ -200,6 +262,11 @@ class ReplicationDataHandler:
def on_remote_server_up(self, server: str):
"""Called when get a new REMOTE_SERVER_UP command."""
+ # Let's wake up the transaction queue for the server in case we have
+ # pending stuff to send to it.
+ if self.send_handler:
+ self.send_handler.wake_destination(server)
+
async def wait_for_stream_position(
self, instance_name: str, stream_name: str, position: int
):
@@ -236,3 +303,153 @@ class ReplicationDataHandler:
logger.info(
"Finished waiting for repl stream %r to reach %s", stream_name, position
)
+
+ def stop_pusher(self, user_id, app_id, pushkey):
+ if not self._notify_pushers:
+ return
+
+ key = "%s:%s" % (app_id, pushkey)
+ pushers_for_user = self._pusher_pool.pushers.get(user_id, {})
+ pusher = pushers_for_user.pop(key, None)
+ if pusher is None:
+ return
+ logger.info("Stopping pusher %r / %r", user_id, key)
+ pusher.on_stop()
+
+ async def start_pusher(self, user_id, app_id, pushkey):
+ if not self._notify_pushers:
+ return
+
+ key = "%s:%s" % (app_id, pushkey)
+ logger.info("Starting pusher %r / %r", user_id, key)
+ return await self._pusher_pool.start_pusher_by_id(app_id, pushkey, user_id)
+
+
+class FederationSenderHandler:
+ """Processes the fedration replication stream
+
+ This class is only instantiate on the worker responsible for sending outbound
+ federation transactions. It receives rows from the replication stream and forwards
+ the appropriate entries to the FederationSender class.
+ """
+
+ def __init__(self, hs: "HomeServer"):
+ assert hs.should_send_federation()
+
+ self.store = hs.get_datastore()
+ self._is_mine_id = hs.is_mine_id
+ self._hs = hs
+
+ # We need to make a temporary value to ensure that mypy picks up the
+ # right type. We know we should have a federation sender instance since
+ # `should_send_federation` is True.
+ sender = hs.get_federation_sender()
+ assert isinstance(sender, FederationSender)
+ self.federation_sender = sender
+
+ # Stores the latest position in the federation stream we've gotten up
+ # to. This is always set before we use it.
+ self.federation_position = None # type: Optional[int]
+
+ self._fed_position_linearizer = Linearizer(name="_fed_position_linearizer")
+
+ def wake_destination(self, server: str):
+ self.federation_sender.wake_destination(server)
+
+ async def process_replication_rows(self, stream_name, token, rows):
+ # The federation stream contains things that we want to send out, e.g.
+ # presence, typing, etc.
+ if stream_name == "federation":
+ send_queue.process_rows_for_federation(self.federation_sender, rows)
+ await self.update_token(token)
+
+ # ... and when new receipts happen
+ elif stream_name == ReceiptsStream.NAME:
+ await self._on_new_receipts(rows)
+
+ # ... as well as device updates and messages
+ elif stream_name == DeviceListsStream.NAME:
+ # The entities are either user IDs (starting with '@') whose devices
+ # have changed, or remote servers that we need to tell about
+ # changes.
+ hosts = {row.entity for row in rows if not row.entity.startswith("@")}
+ for host in hosts:
+ self.federation_sender.send_device_messages(host)
+
+ elif stream_name == ToDeviceStream.NAME:
+ # The to_device stream includes stuff to be pushed to both local
+ # clients and remote servers, so we ignore entities that start with
+ # '@' (since they'll be local users rather than destinations).
+ hosts = {row.entity for row in rows if not row.entity.startswith("@")}
+ for host in hosts:
+ self.federation_sender.send_device_messages(host)
+
+ async def _on_new_receipts(self, rows):
+ """
+ Args:
+ rows (Iterable[synapse.replication.tcp.streams.ReceiptsStream.ReceiptsStreamRow]):
+ new receipts to be processed
+ """
+ for receipt in rows:
+ # we only want to send on receipts for our own users
+ if not self._is_mine_id(receipt.user_id):
+ continue
+ receipt_info = ReadReceipt(
+ receipt.room_id,
+ receipt.receipt_type,
+ receipt.user_id,
+ [receipt.event_id],
+ receipt.data,
+ )
+ await self.federation_sender.send_read_receipt(receipt_info)
+
+ async def update_token(self, token):
+ """Update the record of where we have processed to in the federation stream.
+
+        Called after we have processed an update received over replication. Sends
+ a FEDERATION_ACK back to the master, and stores the token that we have processed
+ in `federation_stream_position` so that we can restart where we left off.
+ """
+ self.federation_position = token
+
+ # We save and send the ACK to master asynchronously, so we don't block
+ # processing on persistence. We don't need to do this operation for
+        # every single RDATA we receive; we just need to do it periodically.
+
+ if self._fed_position_linearizer.is_queued(None):
+ # There is already a task queued up to save and send the token, so
+ # no need to queue up another task.
+ return
+
+ run_as_background_process("_save_and_send_ack", self._save_and_send_ack)
+
+ async def _save_and_send_ack(self):
+ """Save the current federation position in the database and send an ACK
+ to master with where we're up to.
+ """
+ # We should only be calling this once we've got a token.
+ assert self.federation_position is not None
+
+ try:
+ # We linearize here to ensure we don't have races updating the token
+ #
+ # XXX this appears to be redundant, since the ReplicationCommandHandler
+ # has a linearizer which ensures that we only process one line of
+ # replication data at a time. Should we remove it, or is it doing useful
+ # service for robustness? Or could we replace it with an assertion that
+ # we're not being re-entered?
+
+ with (await self._fed_position_linearizer.queue(None)):
+ # We persist and ack the same position, so we take a copy of it
+ # here as otherwise it can get modified from underneath us.
+ current_position = self.federation_position
+
+ await self.store.update_federation_out_pos(
+ "federation", current_position
+ )
+
+ # We ACK this token over replication so that the master can drop
+ # its in memory queues
+ self._hs.get_tcp_replication().send_federation_ack(current_position)
+ except Exception:
+ logger.exception("Error updating federation stream position")
diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py
index 8abed1f5..505d450e 100644
--- a/synapse/replication/tcp/commands.py
+++ b/synapse/replication/tcp/commands.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/tcp/external_cache.py b/synapse/replication/tcp/external_cache.py
index d89a36f2..1a3b051e 100644
--- a/synapse/replication/tcp/external_cache.py
+++ b/synapse/replication/tcp/external_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py
index a8894bea..7ced4c54 100644
--- a/synapse/replication/tcp/handler.py
+++ b/synapse/replication/tcp/handler.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
@@ -56,6 +55,8 @@ from synapse.replication.tcp.streams import (
CachesStream,
EventsStream,
FederationStream,
+ PresenceFederationStream,
+ PresenceStream,
ReceiptsStream,
Stream,
TagAccountDataStream,
@@ -100,6 +101,10 @@ class ReplicationCommandHandler:
self._instance_id = hs.get_instance_id()
self._instance_name = hs.get_instance_name()
+ self._is_presence_writer = (
+ hs.get_instance_name() in hs.config.worker.writers.presence
+ )
+
self._streams = {
stream.NAME: stream(hs) for stream in STREAMS_MAP.values()
} # type: Dict[str, Stream]
@@ -154,6 +159,14 @@ class ReplicationCommandHandler:
continue
+ if isinstance(stream, (PresenceStream, PresenceFederationStream)):
+ # Only add PresenceStream as a source on the instance in charge
+ # of presence.
+ if self._is_presence_writer:
+ self._streams_to_replicate.append(stream)
+
+ continue
+
# Only add any other streams if we're on master.
if hs.config.worker_app is not None:
continue
@@ -351,7 +364,7 @@ class ReplicationCommandHandler:
) -> Optional[Awaitable[None]]:
user_sync_counter.inc()
- if self._is_master:
+ if self._is_presence_writer:
return self._presence_handler.update_external_syncs_row(
cmd.instance_id, cmd.user_id, cmd.is_syncing, cmd.last_sync_ms
)
@@ -361,7 +374,7 @@ class ReplicationCommandHandler:
def on_CLEAR_USER_SYNC(
self, conn: IReplicationConnection, cmd: ClearUserSyncsCommand
) -> Optional[Awaitable[None]]:
- if self._is_master:
+ if self._is_presence_writer:
return self._presence_handler.update_external_syncs_clear(cmd.instance_id)
else:
return None
diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py
index e829add2..6e370536 100644
--- a/synapse/replication/tcp/protocol.py
+++ b/synapse/replication/tcp/protocol.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -50,7 +49,7 @@ import fcntl
import logging
import struct
from inspect import isawaitable
-from typing import TYPE_CHECKING, List, Optional
+from typing import TYPE_CHECKING, Collection, List, Optional
from prometheus_client import Counter
from zope.interface import Interface, implementer
@@ -77,7 +76,6 @@ from synapse.replication.tcp.commands import (
ServerCommand,
parse_command_from_line,
)
-from synapse.types import Collection
from synapse.util import Clock
from synapse.util.stringutils import random_string
@@ -184,8 +182,9 @@ class BaseReplicationStreamProtocol(LineOnlyReceiver):
# a logcontext which we use for processing incoming commands. We declare it as a
# background process so that the CPU stats get reported to prometheus.
- ctx_name = "replication-conn-%s" % self.conn_id
- self._logging_context = BackgroundProcessLoggingContext(ctx_name, ctx_name)
+ self._logging_context = BackgroundProcessLoggingContext(
+ "replication-conn", self.conn_id
+ )
def connectionMade(self):
logger.info("[%s] Connection established", self.id())
diff --git a/synapse/replication/tcp/redis.py b/synapse/replication/tcp/redis.py
index 2f4d407f..6a2c2655 100644
--- a/synapse/replication/tcp/redis.py
+++ b/synapse/replication/tcp/redis.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -60,7 +59,7 @@ class ConstantProperty(Generic[T, V]):
constant = attr.ib() # type: V
- def __get__(self, obj: Optional[T], objtype: Type[T] = None) -> V:
+ def __get__(self, obj: Optional[T], objtype: Optional[Type[T]] = None) -> V:
return self.constant
def __set__(self, obj: Optional[T], value: V):
diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py
index 2018f9f2..bd47d842 100644
--- a/synapse/replication/tcp/resource.py
+++ b/synapse/replication/tcp/resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/replication/tcp/streams/__init__.py b/synapse/replication/tcp/streams/__init__.py
index d1a61c33..4c0023c6 100644
--- a/synapse/replication/tcp/streams/__init__.py
+++ b/synapse/replication/tcp/streams/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2019 New Vector Ltd
#
@@ -31,6 +30,7 @@ from synapse.replication.tcp.streams._base import (
CachesStream,
DeviceListsStream,
GroupServerStream,
+ PresenceFederationStream,
PresenceStream,
PublicRoomsStream,
PushersStream,
@@ -51,6 +51,7 @@ STREAMS_MAP = {
EventsStream,
BackfillStream,
PresenceStream,
+ PresenceFederationStream,
TypingStream,
ReceiptsStream,
PushRulesStream,
@@ -72,6 +73,7 @@ __all__ = [
"Stream",
"BackfillStream",
"PresenceStream",
+ "PresenceFederationStream",
"TypingStream",
"ReceiptsStream",
"PushRulesStream",
diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py
index 3dfee767..b0382492 100644
--- a/synapse/replication/tcp/streams/_base.py
+++ b/synapse/replication/tcp/streams/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2019 New Vector Ltd
#
@@ -273,15 +272,22 @@ class PresenceStream(Stream):
NAME = "presence"
ROW_TYPE = PresenceStreamRow
- def __init__(self, hs):
+ def __init__(self, hs: "HomeServer"):
store = hs.get_datastore()
- if hs.config.worker_app is None:
- # on the master, query the presence handler
+ if hs.get_instance_name() in hs.config.worker.writers.presence:
+ # on the presence writer, query the presence handler
presence_handler = hs.get_presence_handler()
- update_function = presence_handler.get_all_presence_updates
+
+ from synapse.handlers.presence import PresenceHandler
+
+ assert isinstance(presence_handler, PresenceHandler)
+
+ update_function = (
+ presence_handler.get_all_presence_updates
+ ) # type: UpdateFunction
else:
- # Query master process
+ # Query presence writer process
update_function = make_http_update_function(hs, self.NAME)
super().__init__(
@@ -291,6 +297,30 @@ class PresenceStream(Stream):
)
+class PresenceFederationStream(Stream):
+ """A stream used to send ad hoc presence updates over federation.
+
+ Streams the remote destination and the user ID of the presence state to
+ send.
+ """
+
+ @attr.s(slots=True, auto_attribs=True)
+ class PresenceFederationStreamRow:
+ destination: str
+ user_id: str
+
+ NAME = "presence_federation"
+ ROW_TYPE = PresenceFederationStreamRow
+
+ def __init__(self, hs: "HomeServer"):
+ federation_queue = hs.get_presence_handler().get_federation_queue()
+ super().__init__(
+ hs.get_instance_name(),
+ federation_queue.get_current_token,
+ federation_queue.get_replication_rows,
+ )
+
+
class TypingStream(Stream):
TypingStreamRow = namedtuple(
"TypingStreamRow", ("room_id", "user_ids") # str # list(str)
diff --git a/synapse/replication/tcp/streams/events.py b/synapse/replication/tcp/streams/events.py
index fa5e37ba..e7e87bac 100644
--- a/synapse/replication/tcp/streams/events.py
+++ b/synapse/replication/tcp/streams/events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2019 New Vector Ltd
#
diff --git a/synapse/replication/tcp/streams/federation.py b/synapse/replication/tcp/streams/federation.py
index 9bb8e9e1..096a85d3 100644
--- a/synapse/replication/tcp/streams/federation.py
+++ b/synapse/replication/tcp/streams/federation.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2019 New Vector Ltd
#
diff --git a/synapse/res/templates/account_previously_renewed.html b/synapse/res/templates/account_previously_renewed.html
new file mode 100644
index 00000000..b751359b
--- /dev/null
+++ b/synapse/res/templates/account_previously_renewed.html
@@ -0,0 +1 @@
+<html><body>Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</body></html>
diff --git a/synapse/res/templates/account_renewed.html b/synapse/res/templates/account_renewed.html
index 894da030..e8c0f52f 100644
--- a/synapse/res/templates/account_renewed.html
+++ b/synapse/res/templates/account_renewed.html
@@ -1 +1 @@
-<html><body>Your account has been successfully renewed.</body><html>
+<html><body>Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</body></html>
diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py
index 40f5c32d..79d52d2d 100644
--- a/synapse/rest/__init__.py
+++ b/synapse/rest/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index 8457db1e..9cb9a9f6 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2020, 2021 The Matrix.org Foundation C.I.C.
@@ -54,6 +53,7 @@ from synapse.rest.admin.users import (
AccountValidityRenewServlet,
DeactivateAccountRestServlet,
PushersRestServlet,
+ RateLimitRestServlet,
ResetPasswordRestServlet,
SearchUsersRestServlet,
ShadowBanRestServlet,
@@ -62,7 +62,6 @@ from synapse.rest.admin.users import (
UserMembershipRestServlet,
UserRegisterServlet,
UserRestServletV2,
- UsersRestServlet,
UsersRestServletV2,
UserTokenRestServlet,
WhoisRestServlet,
@@ -240,6 +239,7 @@ def register_servlets(hs, http_server):
ShadowBanRestServlet(hs).register(http_server)
ForwardExtremitiesRestServlet(hs).register(http_server)
RoomEventContextServlet(hs).register(http_server)
+ RateLimitRestServlet(hs).register(http_server)
def register_servlets_for_client_rest_resource(hs, http_server):
@@ -248,7 +248,6 @@ def register_servlets_for_client_rest_resource(hs, http_server):
PurgeHistoryStatusRestServlet(hs).register(http_server)
DeactivateAccountRestServlet(hs).register(http_server)
PurgeHistoryRestServlet(hs).register(http_server)
- UsersRestServlet(hs).register(http_server)
ResetPasswordRestServlet(hs).register(http_server)
SearchUsersRestServlet(hs).register(http_server)
ShutdownRoomRestServlet(hs).register(http_server)
diff --git a/synapse/rest/admin/_base.py b/synapse/rest/admin/_base.py
index 7681e55b..f203f6fd 100644
--- a/synapse/rest/admin/_base.py
+++ b/synapse/rest/admin/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/admin/devices.py b/synapse/rest/admin/devices.py
index 5996de11..5715190a 100644
--- a/synapse/rest/admin/devices.py
+++ b/synapse/rest/admin/devices.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/admin/event_reports.py b/synapse/rest/admin/event_reports.py
index 381c3fe6..bbfcaf72 100644
--- a/synapse/rest/admin/event_reports.py
+++ b/synapse/rest/admin/event_reports.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/admin/groups.py b/synapse/rest/admin/groups.py
index ebc587aa..3b3ffde0 100644
--- a/synapse/rest/admin/groups.py
+++ b/synapse/rest/admin/groups.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py
index 40646ef2..24dd4611 100644
--- a/synapse/rest/admin/media.py
+++ b/synapse/rest/admin/media.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
#
diff --git a/synapse/rest/admin/purge_room_servlet.py b/synapse/rest/admin/purge_room_servlet.py
index 49966ee3..2365ff7a 100644
--- a/synapse/rest/admin/purge_room_servlet.py
+++ b/synapse/rest/admin/purge_room_servlet.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py
index cfe1bebb..d0cf1217 100644
--- a/synapse/rest/admin/rooms.py
+++ b/synapse/rest/admin/rooms.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/admin/server_notice_servlet.py b/synapse/rest/admin/server_notice_servlet.py
index f495666f..cc3ab585 100644
--- a/synapse/rest/admin/server_notice_servlet.py
+++ b/synapse/rest/admin/server_notice_servlet.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/admin/statistics.py b/synapse/rest/admin/statistics.py
index f2490e38..948de94c 100644
--- a/synapse/rest/admin/statistics.py
+++ b/synapse/rest/admin/statistics.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index 309bd277..8c9d21d3 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,6 +14,7 @@
import hashlib
import hmac
import logging
+import secrets
from http import HTTPStatus
from typing import TYPE_CHECKING, Dict, List, Optional, Tuple
@@ -36,6 +36,7 @@ from synapse.rest.admin._base import (
)
from synapse.rest.client.v2_alpha._base import client_patterns
from synapse.storage.databases.main.media_repository import MediaSortOrder
+from synapse.storage.databases.main.stats import UserSortOrder
from synapse.types import JsonDict, UserID
if TYPE_CHECKING:
@@ -44,29 +45,6 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
-class UsersRestServlet(RestServlet):
- PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)$")
-
- def __init__(self, hs: "HomeServer"):
- self.hs = hs
- self.store = hs.get_datastore()
- self.auth = hs.get_auth()
- self.admin_handler = hs.get_admin_handler()
-
- async def on_GET(
- self, request: SynapseRequest, user_id: str
- ) -> Tuple[int, List[JsonDict]]:
- target_user = UserID.from_string(user_id)
- await assert_requester_is_admin(self.auth, request)
-
- if not self.hs.is_mine(target_user):
- raise SynapseError(400, "Can only users a local user")
-
- ret = await self.store.get_users()
-
- return 200, ret
-
-
class UsersRestServletV2(RestServlet):
PATTERNS = admin_patterns("/users$", "v2")
@@ -117,8 +95,26 @@ class UsersRestServletV2(RestServlet):
guests = parse_boolean(request, "guests", default=True)
deactivated = parse_boolean(request, "deactivated", default=False)
+ order_by = parse_string(
+ request,
+ "order_by",
+ default=UserSortOrder.NAME.value,
+ allowed_values=(
+ UserSortOrder.NAME.value,
+ UserSortOrder.DISPLAYNAME.value,
+ UserSortOrder.GUEST.value,
+ UserSortOrder.ADMIN.value,
+ UserSortOrder.DEACTIVATED.value,
+ UserSortOrder.USER_TYPE.value,
+ UserSortOrder.AVATAR_URL.value,
+ UserSortOrder.SHADOW_BANNED.value,
+ ),
+ )
+
+ direction = parse_string(request, "dir", default="f", allowed_values=("f", "b"))
+
users, total = await self.store.get_users_paginate(
- start, limit, user_id, name, guests, deactivated
+ start, limit, user_id, name, guests, deactivated, order_by, direction
)
ret = {"users": users, "total": total}
if (start + limit) < total:
@@ -380,7 +376,7 @@ class UserRegisterServlet(RestServlet):
"""
self._clear_old_nonces()
- nonce = self.hs.get_secrets().token_hex(64)
+ nonce = secrets.token_hex(64)
self.nonces[nonce] = int(self.reactor.seconds())
return 200, {"nonce": nonce}
@@ -985,3 +981,114 @@ class ShadowBanRestServlet(RestServlet):
await self.store.set_shadow_banned(UserID.from_string(user_id), True)
return 200, {}
+
+
+class RateLimitRestServlet(RestServlet):
+ """An admin API to override ratelimiting for an user.
+
+ Example:
+ POST /_synapse/admin/v1/users/@test:example.com/override_ratelimit
+ {
+ "messages_per_second": 0,
+ "burst_count": 0
+ }
+ 200 OK
+ {
+ "messages_per_second": 0,
+ "burst_count": 0
+ }
+ """
+
+ PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)/override_ratelimit")
+
+ def __init__(self, hs: "HomeServer"):
+ self.hs = hs
+ self.store = hs.get_datastore()
+ self.auth = hs.get_auth()
+
+ async def on_GET(
+ self, request: SynapseRequest, user_id: str
+ ) -> Tuple[int, JsonDict]:
+ await assert_requester_is_admin(self.auth, request)
+
+ if not self.hs.is_mine_id(user_id):
+ raise SynapseError(400, "Can only lookup local users")
+
+ if not await self.store.get_user_by_id(user_id):
+ raise NotFoundError("User not found")
+
+ ratelimit = await self.store.get_ratelimit_for_user(user_id)
+
+ if ratelimit:
+ # Convert `null` to `0` for consistency;
+ # both values behave the same in the ratelimit handler.
+ ret = {
+ "messages_per_second": 0
+ if ratelimit.messages_per_second is None
+ else ratelimit.messages_per_second,
+ "burst_count": 0
+ if ratelimit.burst_count is None
+ else ratelimit.burst_count,
+ }
+ else:
+ ret = {}
+
+ return 200, ret
+
+ async def on_POST(
+ self, request: SynapseRequest, user_id: str
+ ) -> Tuple[int, JsonDict]:
+ await assert_requester_is_admin(self.auth, request)
+
+ if not self.hs.is_mine_id(user_id):
+ raise SynapseError(400, "Only local users can be ratelimited")
+
+ if not await self.store.get_user_by_id(user_id):
+ raise NotFoundError("User not found")
+
+ body = parse_json_object_from_request(request, allow_empty_body=True)
+
+ messages_per_second = body.get("messages_per_second", 0)
+ burst_count = body.get("burst_count", 0)
+
+ if not isinstance(messages_per_second, int) or messages_per_second < 0:
+ raise SynapseError(
+ 400,
+ "%r parameter must be a positive int" % (messages_per_second,),
+ errcode=Codes.INVALID_PARAM,
+ )
+
+ if not isinstance(burst_count, int) or burst_count < 0:
+ raise SynapseError(
+ 400,
+ "%r parameter must be a positive int" % (burst_count,),
+ errcode=Codes.INVALID_PARAM,
+ )
+
+ await self.store.set_ratelimit_for_user(
+ user_id, messages_per_second, burst_count
+ )
+ ratelimit = await self.store.get_ratelimit_for_user(user_id)
+ assert ratelimit is not None
+
+ ret = {
+ "messages_per_second": ratelimit.messages_per_second,
+ "burst_count": ratelimit.burst_count,
+ }
+
+ return 200, ret
+
+ async def on_DELETE(
+ self, request: SynapseRequest, user_id: str
+ ) -> Tuple[int, JsonDict]:
+ await assert_requester_is_admin(self.auth, request)
+
+ if not self.hs.is_mine_id(user_id):
+ raise SynapseError(400, "Only local users can be ratelimited")
+
+ if not await self.store.get_user_by_id(user_id):
+ raise NotFoundError("User not found")
+
+ await self.store.delete_ratelimit_for_user(user_id)
+
+ return 200, {}
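
A hedged sketch of exercising the new override_ratelimit endpoints from Python; the homeserver URL and admin token are placeholders, and the paths come from the servlet's PATTERNS:

    import requests

    BASE = "https://homeserver.example"  # placeholder
    URL = BASE + "/_synapse/admin/v1/users/@test:example.com/override_ratelimit"
    HEADERS = {"Authorization": "Bearer ADMIN_TOKEN_PLACEHOLDER"}

    # Set an override (values as in the servlet docstring above).
    print(requests.post(URL, json={"messages_per_second": 0, "burst_count": 0}, headers=HEADERS).json())
    # Read the current override back.
    print(requests.get(URL, headers=HEADERS).json())
    # Delete the override, restoring the server-wide defaults.
    print(requests.delete(URL, headers=HEADERS).status_code)
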
diff --git a/synapse/rest/client/__init__.py b/synapse/rest/client/__init__.py
index fe0ac3f8..629e2df7 100644
--- a/synapse/rest/client/__init__.py
+++ b/synapse/rest/client/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py
index 7be5c0fb..94ff3719 100644
--- a/synapse/rest/client/transactions.py
+++ b/synapse/rest/client/transactions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v1/__init__.py b/synapse/rest/client/v1/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/synapse/rest/client/v1/__init__.py
+++ b/synapse/rest/client/v1/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py
index e5af26b1..ae92a3df 100644
--- a/synapse/rest/client/v1/directory.py
+++ b/synapse/rest/client/v1/directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py
index 6de40782..ee745499 100644
--- a/synapse/rest/client/v1/events.py
+++ b/synapse/rest/client/v1/events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v1/initial_sync.py b/synapse/rest/client/v1/initial_sync.py
index 91da0ee5..bef1edc8 100644
--- a/synapse/rest/client/v1/initial_sync.py
+++ b/synapse/rest/client/v1/initial_sync.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py
index e4c352f5..42e709ec 100644
--- a/synapse/rest/client/v1/login.py
+++ b/synapse/rest/client/v1/login.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -74,11 +73,13 @@ class LoginRestServlet(RestServlet):
self._well_known_builder = WellKnownBuilder(hs)
self._address_ratelimiter = Ratelimiter(
+ store=hs.get_datastore(),
clock=hs.get_clock(),
rate_hz=self.hs.config.rc_login_address.per_second,
burst_count=self.hs.config.rc_login_address.burst_count,
)
self._account_ratelimiter = Ratelimiter(
+ store=hs.get_datastore(),
clock=hs.get_clock(),
rate_hz=self.hs.config.rc_login_account.per_second,
burst_count=self.hs.config.rc_login_account.burst_count,
@@ -141,20 +142,22 @@ class LoginRestServlet(RestServlet):
appservice = self.auth.get_appservice_by_req(request)
if appservice.is_rate_limited():
- self._address_ratelimiter.ratelimit(request.getClientIP())
+ await self._address_ratelimiter.ratelimit(
+ None, request.getClientIP()
+ )
result = await self._do_appservice_login(login_submission, appservice)
elif self.jwt_enabled and (
login_submission["type"] == LoginRestServlet.JWT_TYPE
or login_submission["type"] == LoginRestServlet.JWT_TYPE_DEPRECATED
):
- self._address_ratelimiter.ratelimit(request.getClientIP())
+ await self._address_ratelimiter.ratelimit(None, request.getClientIP())
result = await self._do_jwt_login(login_submission)
elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE:
- self._address_ratelimiter.ratelimit(request.getClientIP())
+ await self._address_ratelimiter.ratelimit(None, request.getClientIP())
result = await self._do_token_login(login_submission)
else:
- self._address_ratelimiter.ratelimit(request.getClientIP())
+ await self._address_ratelimiter.ratelimit(None, request.getClientIP())
result = await self._do_other_login(login_submission)
except KeyError:
raise SynapseError(400, "Missing JSON keys.")
@@ -258,7 +261,7 @@ class LoginRestServlet(RestServlet):
# too often. This happens here rather than before as we don't
# necessarily know the user before now.
if ratelimit:
- self._account_ratelimiter.ratelimit(user_id.lower())
+ await self._account_ratelimiter.ratelimit(None, user_id.lower())
if create_non_existent_users:
canonical_uid = await self.auth_handler.check_user_exists(user_id)
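
Ratelimiter construction now takes the datastore, and ratelimit() is awaited with a requester (or None) ahead of the key, as the login.py hunks show. A call-shape sketch under those assumptions:

    from synapse.api.ratelimiting import Ratelimiter

    async def limit_login_by_ip(hs, ip: str) -> None:
        # Sketch mirroring the hunks above; `hs` is a HomeServer.
        limiter = Ratelimiter(
            store=hs.get_datastore(),  # new: lets per-user overrides apply
            clock=hs.get_clock(),
            rate_hz=hs.config.rc_login_address.per_second,
            burst_count=hs.config.rc_login_address.burst_count,
        )
        # ratelimit() is now a coroutine; None means "no requester yet".
        await limiter.ratelimit(None, ip)
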
diff --git a/synapse/rest/client/v1/logout.py b/synapse/rest/client/v1/logout.py
index ad8cea49..5aa7908d 100644
--- a/synapse/rest/client/v1/logout.py
+++ b/synapse/rest/client/v1/logout.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py
index 23a529f8..2b24fe5a 100644
--- a/synapse/rest/client/v1/presence.py
+++ b/synapse/rest/client/v1/presence.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -36,10 +35,15 @@ class PresenceStatusRestServlet(RestServlet):
self.clock = hs.get_clock()
self.auth = hs.get_auth()
+ self._use_presence = hs.config.server.use_presence
+
async def on_GET(self, request, user_id):
requester = await self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)
+ if not self._use_presence:
+ return 200, {"presence": "offline"}
+
if requester.user != user:
allowed = await self.presence_handler.is_visible(
observed_user=user, observer_user=requester.user
@@ -81,7 +85,7 @@ class PresenceStatusRestServlet(RestServlet):
except Exception:
raise SynapseError(400, "Unable to parse state")
- if self.hs.config.use_presence:
+ if self._use_presence:
await self.presence_handler.set_state(user, state)
return 200, {}
diff --git a/synapse/rest/client/v1/profile.py b/synapse/rest/client/v1/profile.py
index 717c5f2b..f42f4b35 100644
--- a/synapse/rest/client/v1/profile.py
+++ b/synapse/rest/client/v1/profile.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py
index 241e5359..be29a0b3 100644
--- a/synapse/rest/client/v1/push_rule.py
+++ b/synapse/rest/client/v1/push_rule.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py
index 0c148a21..18102eca 100644
--- a/synapse/rest/client/v1/pusher.py
+++ b/synapse/rest/client/v1/pusher.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index 525efdf2..5cab4d3c 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/rest/client/v1/voip.py b/synapse/rest/client/v1/voip.py
index d07ca2c4..c780ffde 100644
--- a/synapse/rest/client/v1/voip.py
+++ b/synapse/rest/client/v1/voip.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/__init__.py b/synapse/rest/client/v2_alpha/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/synapse/rest/client/v2_alpha/__init__.py
+++ b/synapse/rest/client/v2_alpha/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/_base.py b/synapse/rest/client/v2_alpha/_base.py
index f016b4f1..0443f457 100644
--- a/synapse/rest/client/v2_alpha/_base.py
+++ b/synapse/rest/client/v2_alpha/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py
index c2ba790b..085561d3 100644
--- a/synapse/rest/client/v2_alpha/account.py
+++ b/synapse/rest/client/v2_alpha/account.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018 New Vector Ltd
@@ -40,7 +39,7 @@ from synapse.metrics import threepid_send_requests
from synapse.push.mailer import Mailer
from synapse.util.msisdn import phone_number_to_msisdn
from synapse.util.stringutils import assert_valid_client_secret, random_string
-from synapse.util.threepids import canonicalise_email, check_3pid_allowed
+from synapse.util.threepids import check_3pid_allowed, validate_email
from ._base import client_patterns, interactive_auth_handler
@@ -93,7 +92,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
# Stored in the database "foo@bar.com"
# User requests with "FOO@bar.com" would raise a Not Found error
try:
- email = canonicalise_email(body["email"])
+ email = validate_email(body["email"])
except ValueError as e:
raise SynapseError(400, str(e))
send_attempt = body["send_attempt"]
@@ -103,7 +102,9 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
# Raise if the provided next_link value isn't valid
assert_valid_next_link(self.hs, next_link)
- self.identity_handler.ratelimit_request_token_requests(request, "email", email)
+ await self.identity_handler.ratelimit_request_token_requests(
+ request, "email", email
+ )
# The email will be sent to the stored address.
# This avoids a potential account hijack by requesting a password reset to
@@ -246,7 +247,7 @@ class PasswordRestServlet(RestServlet):
# We store all email addresses canonicalised in the DB.
# (See add_threepid in synapse/handlers/auth.py)
try:
- threepid["address"] = canonicalise_email(threepid["address"])
+ threepid["address"] = validate_email(threepid["address"])
except ValueError as e:
raise SynapseError(400, str(e))
# if using email, we must know about the email they're authing with!
@@ -374,7 +375,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
# Otherwise the email will be sent to "FOO@bar.com" and stored as
# "foo@bar.com" in database.
try:
- email = canonicalise_email(body["email"])
+ email = validate_email(body["email"])
except ValueError as e:
raise SynapseError(400, str(e))
send_attempt = body["send_attempt"]
@@ -387,7 +388,9 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
Codes.THREEPID_DENIED,
)
- self.identity_handler.ratelimit_request_token_requests(request, "email", email)
+ await self.identity_handler.ratelimit_request_token_requests(
+ request, "email", email
+ )
if next_link:
# Raise if the provided next_link value isn't valid
@@ -468,7 +471,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
Codes.THREEPID_DENIED,
)
- self.identity_handler.ratelimit_request_token_requests(
+ await self.identity_handler.ratelimit_request_token_requests(
request, "msisdn", msisdn
)
diff --git a/synapse/rest/client/v2_alpha/account_data.py b/synapse/rest/client/v2_alpha/account_data.py
index 3f28c0bc..7517e930 100644
--- a/synapse/rest/client/v2_alpha/account_data.py
+++ b/synapse/rest/client/v2_alpha/account_data.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/account_validity.py b/synapse/rest/client/v2_alpha/account_validity.py
index bd7f9ae2..2d1ad3d3 100644
--- a/synapse/rest/client/v2_alpha/account_validity.py
+++ b/synapse/rest/client/v2_alpha/account_validity.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -37,24 +36,40 @@ class AccountValidityRenewServlet(RestServlet):
self.hs = hs
self.account_activity_handler = hs.get_account_validity_handler()
self.auth = hs.get_auth()
- self.success_html = hs.config.account_validity.account_renewed_html_content
- self.failure_html = hs.config.account_validity.invalid_token_html_content
+ self.account_renewed_template = (
+ hs.config.account_validity.account_validity_account_renewed_template
+ )
+ self.account_previously_renewed_template = (
+ hs.config.account_validity.account_validity_account_previously_renewed_template
+ )
+ self.invalid_token_template = (
+ hs.config.account_validity.account_validity_invalid_token_template
+ )
async def on_GET(self, request):
if b"token" not in request.args:
raise SynapseError(400, "Missing renewal token")
renewal_token = request.args[b"token"][0]
- token_valid = await self.account_activity_handler.renew_account(
+ (
+ token_valid,
+ token_stale,
+ expiration_ts,
+ ) = await self.account_activity_handler.renew_account(
renewal_token.decode("utf8")
)
if token_valid:
status_code = 200
- response = self.success_html
+ response = self.account_renewed_template.render(expiration_ts=expiration_ts)
+ elif token_stale:
+ status_code = 200
+ response = self.account_previously_renewed_template.render(
+ expiration_ts=expiration_ts
+ )
else:
status_code = 404
- response = self.failure_html
+ response = self.invalid_token_template.render(expiration_ts=expiration_ts)
respond_with_html(request, status_code, response)
@@ -72,10 +87,12 @@ class AccountValiditySendMailServlet(RestServlet):
self.hs = hs
self.account_activity_handler = hs.get_account_validity_handler()
self.auth = hs.get_auth()
- self.account_validity = self.hs.config.account_validity
+ self.account_validity_renew_by_email_enabled = (
+ hs.config.account_validity.account_validity_renew_by_email_enabled
+ )
async def on_POST(self, request):
- if not self.account_validity.renew_by_email_enabled:
+ if not self.account_validity_renew_by_email_enabled:
raise AuthError(
403, "Account renewal via email is disabled on this server."
)
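
The renewal endpoint now renders Jinja templates with expiration_ts in scope. For illustration, roughly what the format_ts filter used by the templates amounts to, assuming a milliseconds-since-epoch timestamp:

    import time

    def format_ts(ts_ms: int, fmt: str) -> str:
        # Assumption: expiration_ts is in ms, matching the templates' usage.
        return time.strftime(fmt, time.localtime(ts_ms / 1000))

    print("Your account is valid until %s." % format_ts(1717200000000, "%d-%m-%Y"))
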
diff --git a/synapse/rest/client/v2_alpha/auth.py b/synapse/rest/client/v2_alpha/auth.py
index 75ece1c9..6ea1b50a 100644
--- a/synapse/rest/client/v2_alpha/auth.py
+++ b/synapse/rest/client/v2_alpha/auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/capabilities.py b/synapse/rest/client/v2_alpha/capabilities.py
index 44ccf10e..6a240214 100644
--- a/synapse/rest/client/v2_alpha/capabilities.py
+++ b/synapse/rest/client/v2_alpha/capabilities.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/devices.py b/synapse/rest/client/v2_alpha/devices.py
index 3d07aadd..9af05f9b 100644
--- a/synapse/rest/client/v2_alpha/devices.py
+++ b/synapse/rest/client/v2_alpha/devices.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/rest/client/v2_alpha/filter.py b/synapse/rest/client/v2_alpha/filter.py
index 7cc69264..411667a9 100644
--- a/synapse/rest/client/v2_alpha/filter.py
+++ b/synapse/rest/client/v2_alpha/filter.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/groups.py b/synapse/rest/client/v2_alpha/groups.py
index 08fb6b2b..6285680c 100644
--- a/synapse/rest/client/v2_alpha/groups.py
+++ b/synapse/rest/client/v2_alpha/groups.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/rest/client/v2_alpha/keys.py b/synapse/rest/client/v2_alpha/keys.py
index f092e5b3..a57ccbb5 100644
--- a/synapse/rest/client/v2_alpha/keys.py
+++ b/synapse/rest/client/v2_alpha/keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
diff --git a/synapse/rest/client/v2_alpha/notifications.py b/synapse/rest/client/v2_alpha/notifications.py
index 87063ec8..0ede643c 100644
--- a/synapse/rest/client/v2_alpha/notifications.py
+++ b/synapse/rest/client/v2_alpha/notifications.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/openid.py b/synapse/rest/client/v2_alpha/openid.py
index 5b996e2d..d3322acc 100644
--- a/synapse/rest/client/v2_alpha/openid.py
+++ b/synapse/rest/client/v2_alpha/openid.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/password_policy.py b/synapse/rest/client/v2_alpha/password_policy.py
index 68b27ff2..a83927ae 100644
--- a/synapse/rest/client/v2_alpha/password_policy.py
+++ b/synapse/rest/client/v2_alpha/password_policy.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/read_marker.py b/synapse/rest/client/v2_alpha/read_marker.py
index 55c6688f..5988fa47 100644
--- a/synapse/rest/client/v2_alpha/read_marker.py
+++ b/synapse/rest/client/v2_alpha/read_marker.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/receipts.py b/synapse/rest/client/v2_alpha/receipts.py
index 6f7246a3..8cf4aebd 100644
--- a/synapse/rest/client/v2_alpha/receipts.py
+++ b/synapse/rest/client/v2_alpha/receipts.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py
index 8f68d8df..a30a5df1 100644
--- a/synapse/rest/client/v2_alpha/register.py
+++ b/synapse/rest/client/v2_alpha/register.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015 - 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
#
@@ -13,7 +12,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
import hmac
import logging
import random
@@ -22,7 +20,7 @@ from typing import List, Union
import synapse
import synapse.api.auth
import synapse.types
-from synapse.api.constants import LoginType
+from synapse.api.constants import APP_SERVICE_REGISTRATION_TYPE, LoginType
from synapse.api.errors import (
Codes,
InteractiveAuthIncompleteError,
@@ -32,7 +30,7 @@ from synapse.api.errors import (
)
from synapse.config import ConfigError
from synapse.config.captcha import CaptchaConfig
-from synapse.config.consent_config import ConsentConfig
+from synapse.config.consent import ConsentConfig
from synapse.config.emailconfig import ThreepidBehaviour
from synapse.config.ratelimiting import FederationRateLimitConfig
from synapse.config.registration import RegistrationConfig
@@ -51,7 +49,11 @@ from synapse.push.mailer import Mailer
from synapse.util.msisdn import phone_number_to_msisdn
from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import assert_valid_client_secret, random_string
-from synapse.util.threepids import canonicalise_email, check_3pid_allowed
+from synapse.util.threepids import (
+ canonicalise_email,
+ check_3pid_allowed,
+ validate_email,
+)
from ._base import client_patterns, interactive_auth_handler
@@ -113,7 +115,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
# (See on_POST in EmailThreepidRequestTokenRestServlet
# in synapse/rest/client/v2_alpha/account.py)
try:
- email = canonicalise_email(body["email"])
+ email = validate_email(body["email"])
except ValueError as e:
raise SynapseError(400, str(e))
send_attempt = body["send_attempt"]
@@ -126,7 +128,9 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
Codes.THREEPID_DENIED,
)
- self.identity_handler.ratelimit_request_token_requests(request, "email", email)
+ await self.identity_handler.ratelimit_request_token_requests(
+ request, "email", email
+ )
existing_user_id = await self.hs.get_datastore().get_user_id_by_threepid(
"email", email
@@ -208,7 +212,7 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet):
Codes.THREEPID_DENIED,
)
- self.identity_handler.ratelimit_request_token_requests(
+ await self.identity_handler.ratelimit_request_token_requests(
request, "msisdn", msisdn
)
@@ -406,7 +410,7 @@ class RegisterRestServlet(RestServlet):
client_addr = request.getClientIP()
- self.ratelimiter.ratelimit(client_addr, update=False)
+ await self.ratelimiter.ratelimit(None, client_addr, update=False)
kind = b"user"
if b"kind" in request.args:
@@ -428,15 +432,20 @@ class RegisterRestServlet(RestServlet):
raise SynapseError(400, "Invalid username")
desired_username = body["username"]
- appservice = None
- if self.auth.has_access_token(request):
- appservice = self.auth.get_appservice_by_req(request)
-
# fork off as soon as possible for ASes which have completely
# different registration flows to normal users
# == Application Service Registration ==
- if appservice:
+ if body.get("type") == APP_SERVICE_REGISTRATION_TYPE:
+ if not self.auth.has_access_token(request):
+ raise SynapseError(
+ 400,
+ "Appservice token must be provided when using a type of m.login.application_service",
+ )
+
+ # Verify the AS
+ self.auth.get_appservice_by_req(request)
+
# Set the desired user according to the AS API (which uses the
# 'user' key not 'username'). Since this is a new addition, we'll
# fallback to 'username' if they gave one.
@@ -457,6 +466,11 @@ class RegisterRestServlet(RestServlet):
)
return 200, result
+ elif self.auth.has_access_token(request):
+ raise SynapseError(
+ 400,
+ "An access token should not be provided on requests to /register (except if type is m.login.application_service)",
+ )
# == Normal User Registration == (everyone else)
if not self._registration_enabled:
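
Appservice registration is now explicit: the body must declare type m.login.application_service and carry the AS token, while other /register requests must not send an access token at all. A hypothetical AS-side call matching the checks above (host, token, and username are placeholders; per the comment above, the AS API's 'user' key is also accepted):

    import requests

    resp = requests.post(
        "https://homeserver.example/_matrix/client/r0/register",  # placeholder
        headers={"Authorization": "Bearer AS_TOKEN_PLACEHOLDER"},
        json={"type": "m.login.application_service", "username": "bridged_user"},
    )
    print(resp.status_code, resp.json())
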
diff --git a/synapse/rest/client/v2_alpha/relations.py b/synapse/rest/client/v2_alpha/relations.py
index fe765da2..c7da6759 100644
--- a/synapse/rest/client/v2_alpha/relations.py
+++ b/synapse/rest/client/v2_alpha/relations.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/report_event.py b/synapse/rest/client/v2_alpha/report_event.py
index 215d619c..2c169abb 100644
--- a/synapse/rest/client/v2_alpha/report_event.py
+++ b/synapse/rest/client/v2_alpha/report_event.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/room_keys.py b/synapse/rest/client/v2_alpha/room_keys.py
index 53de9792..263596be 100644
--- a/synapse/rest/client/v2_alpha/room_keys.py
+++ b/synapse/rest/client/v2_alpha/room_keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017, 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py
index 14792076..6d1b083a 100644
--- a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py
+++ b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/sendtodevice.py b/synapse/rest/client/v2_alpha/sendtodevice.py
index 79c1b526..f8dcee60 100644
--- a/synapse/rest/client/v2_alpha/sendtodevice.py
+++ b/synapse/rest/client/v2_alpha/sendtodevice.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/shared_rooms.py b/synapse/rest/client/v2_alpha/shared_rooms.py
index c866d515..d2e7f04b 100644
--- a/synapse/rest/client/v2_alpha/shared_rooms.py
+++ b/synapse/rest/client/v2_alpha/shared_rooms.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Half-Shot
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py
index 3481770c..95ee3f1b 100644
--- a/synapse/rest/client/v2_alpha/sync.py
+++ b/synapse/rest/client/v2_alpha/sync.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/tags.py b/synapse/rest/client/v2_alpha/tags.py
index a97cd66c..c14f83be 100644
--- a/synapse/rest/client/v2_alpha/tags.py
+++ b/synapse/rest/client/v2_alpha/tags.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/thirdparty.py b/synapse/rest/client/v2_alpha/thirdparty.py
index 0c127a1b..b5c67c9b 100644
--- a/synapse/rest/client/v2_alpha/thirdparty.py
+++ b/synapse/rest/client/v2_alpha/thirdparty.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/tokenrefresh.py b/synapse/rest/client/v2_alpha/tokenrefresh.py
index 79317c74..b2f85854 100644
--- a/synapse/rest/client/v2_alpha/tokenrefresh.py
+++ b/synapse/rest/client/v2_alpha/tokenrefresh.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/v2_alpha/user_directory.py b/synapse/rest/client/v2_alpha/user_directory.py
index ad598cef..7e8912f0 100644
--- a/synapse/rest/client/v2_alpha/user_directory.py
+++ b/synapse/rest/client/v2_alpha/user_directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py
index 3e3d8839..4582c274 100644
--- a/synapse/rest/client/versions.py
+++ b/synapse/rest/client/versions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py
index 8b9ef26c..b19cd8af 100644
--- a/synapse/rest/consent/consent_resource.py
+++ b/synapse/rest/consent/consent_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -33,14 +32,6 @@ TEMPLATE_LANGUAGE = "en"
logger = logging.getLogger(__name__)
-# use hmac.compare_digest if we have it (python 2.7.7), else just use equality
-if hasattr(hmac, "compare_digest"):
- compare_digest = hmac.compare_digest
-else:
-
- def compare_digest(a, b):
- return a == b
-
class ConsentResource(DirectServeHtmlResource):
"""A twisted Resource to display a privacy policy and gather consent to it
@@ -210,5 +201,5 @@ class ConsentResource(DirectServeHtmlResource):
.encode("ascii")
)
- if not compare_digest(want_mac, userhmac):
+ if not hmac.compare_digest(want_mac, userhmac):
raise SynapseError(HTTPStatus.FORBIDDEN, "HMAC incorrect")
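
With Python 2 support long gone, hmac.compare_digest is always available, so the fallback shim can go; the stdlib call compares in constant time:

    import hmac

    assert hmac.compare_digest(b"abc", b"abc")      # constant-time equality
    assert not hmac.compare_digest(b"abc", b"abd")
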
diff --git a/synapse/rest/health.py b/synapse/rest/health.py
index 0170950b..4487b54a 100644
--- a/synapse/rest/health.py
+++ b/synapse/rest/health.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/key/__init__.py b/synapse/rest/key/__init__.py
index fe0ac3f8..629e2df7 100644
--- a/synapse/rest/key/__init__.py
+++ b/synapse/rest/key/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/key/v2/__init__.py b/synapse/rest/key/v2/__init__.py
index cb5abcf8..c6c63073 100644
--- a/synapse/rest/key/v2/__init__.py
+++ b/synapse/rest/key/v2/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py
index d8e8e48c..e8dbe240 100644
--- a/synapse/rest/key/v2/local_key_resource.py
+++ b/synapse/rest/key/v2/local_key_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index c57ac22e..f648678b 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -144,7 +144,7 @@ class RemoteKey(DirectServeJsonResource):
# Note that the value is unused.
cache_misses = {} # type: Dict[str, Dict[str, int]]
- for (server_name, key_id, from_server), results in cached.items():
+ for (server_name, key_id, _), results in cached.items():
results = [(result["ts_added_ms"], result) for result in results]
if not results and key_id is not None:
@@ -206,7 +206,7 @@ class RemoteKey(DirectServeJsonResource):
# Cast to bytes since postgresql returns a memoryview.
json_results.add(bytes(most_recent_result["key_json"]))
else:
- for ts_added, result in results:
+ for _, result in results:
# Cast to bytes since postgresql returns a memoryview.
json_results.add(bytes(result["key_json"]))
diff --git a/synapse/rest/media/v1/__init__.py b/synapse/rest/media/v1/__init__.py
index 3b8c96e2..d20186bb 100644
--- a/synapse/rest/media/v1/__init__.py
+++ b/synapse/rest/media/v1/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py
index 63669470..0fb4cd81 100644
--- a/synapse/rest/media/v1/_base.py
+++ b/synapse/rest/media/v1/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019-2021 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py
index c41a7ab4..a1d36e5c 100644
--- a/synapse/rest/media/v1/config_resource.py
+++ b/synapse/rest/media/v1/config_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 Will Hunt <will@half-shot.uk>
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
@@ -31,7 +30,7 @@ class MediaConfigResource(DirectServeJsonResource):
def __init__(self, hs: "HomeServer"):
super().__init__()
- config = hs.get_config()
+ config = hs.config
self.clock = hs.get_clock()
self.auth = hs.get_auth()
self.limits_dict = {"m.upload.size": config.max_upload_size}
diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py
index 5dadaeaf..cd2468f9 100644
--- a/synapse/rest/media/v1/download_resource.py
+++ b/synapse/rest/media/v1/download_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/rest/media/v1/filepath.py b/synapse/rest/media/v1/filepath.py
index 7792f26e..09531ebf 100644
--- a/synapse/rest/media/v1/filepath.py
+++ b/synapse/rest/media/v1/filepath.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
@@ -22,7 +21,7 @@ from typing import Callable, List
NEW_FORMAT_ID_RE = re.compile(r"^\d\d\d\d-\d\d-\d\d")
-def _wrap_in_base_path(func: "Callable[..., str]") -> "Callable[..., str]":
+def _wrap_in_base_path(func: Callable[..., str]) -> Callable[..., str]:
"""Takes a function that returns a relative path and turns it into an
absolute path based on the location of the primary media store
"""
diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py
index 0c041b54..e8a875b9 100644
--- a/synapse/rest/media/v1/media_repository.py
+++ b/synapse/rest/media/v1/media_repository.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
@@ -468,6 +467,9 @@ class MediaRepository:
return media_info
def _get_thumbnail_requirements(self, media_type):
+ scpos = media_type.find(";")
+ if scpos > 0:
+ media_type = media_type[:scpos]
return self.thumbnail_requirements.get(media_type, ())
def _generate_thumbnail(
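
The added lines make thumbnail-requirement lookups tolerate Content-Type parameters by truncating at the first semicolon. A standalone illustration of the same truncation:

    def strip_params(media_type: str) -> str:
        # Mirrors the truncation added in _get_thumbnail_requirements above.
        scpos = media_type.find(";")
        return media_type[:scpos] if scpos > 0 else media_type

    assert strip_params("image/jpeg; charset=utf-8") == "image/jpeg"
    assert strip_params("image/png") == "image/png"
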
diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py
index b1b1c9e6..c7fd97c4 100644
--- a/synapse/rest/media/v1/media_storage.py
+++ b/synapse/rest/media/v1/media_storage.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py
index c4ed9dfd..0adfb1a7 100644
--- a/synapse/rest/media/v1/preview_url_resource.py
+++ b/synapse/rest/media/v1/preview_url_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
@@ -175,7 +174,7 @@ class PreviewUrlResource(DirectServeJsonResource):
clock=self.clock,
# don't spider URLs more often than once an hour
expiry_ms=ONE_HOUR,
- )
+ ) # type: ExpiringCache[str, ObservableDeferred]
if self._worker_run_media_background_jobs:
self._cleaner_loop = self.clock.looping_call(
diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py
index 03194755..0ff6ad3c 100644
--- a/synapse/rest/media/v1/storage_provider.py
+++ b/synapse/rest/media/v1/storage_provider.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py
index af802bc0..a029d426 100644
--- a/synapse/rest/media/v1/thumbnail_resource.py
+++ b/synapse/rest/media/v1/thumbnail_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py
index 988f52c7..37fe5823 100644
--- a/synapse/rest/media/v1/thumbnailer.py
+++ b/synapse/rest/media/v1/thumbnailer.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py
index 0138b2e2..024a105b 100644
--- a/synapse/rest/media/v1/upload_resource.py
+++ b/synapse/rest/media/v1/upload_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
@@ -52,8 +51,6 @@ class UploadResource(DirectServeJsonResource):
async def _async_render_POST(self, request: SynapseRequest) -> None:
requester = await self.auth.get_user_by_req(request)
- # TODO: The checks here are a bit late. The content will have
- # already been uploaded to a tmp file at this point
content_length = request.getHeader("Content-Length")
if content_length is None:
raise SynapseError(msg="Request must specify a Content-Length", code=400)
diff --git a/synapse/rest/synapse/__init__.py b/synapse/rest/synapse/__init__.py
index c0b73348..6ef4fbe8 100644
--- a/synapse/rest/synapse/__init__.py
+++ b/synapse/rest/synapse/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py
index 9eeb9705..47a2f72b 100644
--- a/synapse/rest/synapse/client/__init__.py
+++ b/synapse/rest/synapse/client/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/synapse/client/new_user_consent.py b/synapse/rest/synapse/client/new_user_consent.py
index 78ee0b5e..488b97b3 100644
--- a/synapse/rest/synapse/client/new_user_consent.py
+++ b/synapse/rest/synapse/client/new_user_consent.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -62,6 +61,15 @@ class NewUserConsentResource(DirectServeHtmlResource):
self._sso_handler.render_error(request, "bad_session", e.msg, code=e.code)
return
+ # It should be impossible to get here without having first been through
+ # the pick-a-username step, which ensures chosen_localpart gets set.
+ if not session.chosen_localpart:
+ logger.warning("Session has no user name selected")
+ self._sso_handler.render_error(
+ request, "no_user", "No user name has been selected.", code=400
+ )
+ return
+
user_id = UserID(session.chosen_localpart, self._server_name)
user_profile = {
"display_name": session.display_name,
diff --git a/synapse/rest/synapse/client/oidc/__init__.py b/synapse/rest/synapse/client/oidc/__init__.py
index 64c0deb7..36ba4016 100644
--- a/synapse/rest/synapse/client/oidc/__init__.py
+++ b/synapse/rest/synapse/client/oidc/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Quentin Gliech
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/synapse/client/oidc/callback_resource.py b/synapse/rest/synapse/client/oidc/callback_resource.py
index 1af33f0a..7785f17e 100644
--- a/synapse/rest/synapse/client/oidc/callback_resource.py
+++ b/synapse/rest/synapse/client/oidc/callback_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Quentin Gliech
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/synapse/client/password_reset.py b/synapse/rest/synapse/client/password_reset.py
index d26ce46e..f2800bf2 100644
--- a/synapse/rest/synapse/client/password_reset.py
+++ b/synapse/rest/synapse/client/password_reset.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/synapse/client/pick_idp.py b/synapse/rest/synapse/client/pick_idp.py
index 9550b829..d3a94a93 100644
--- a/synapse/rest/synapse/client/pick_idp.py
+++ b/synapse/rest/synapse/client/pick_idp.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/synapse/client/pick_username.py b/synapse/rest/synapse/client/pick_username.py
index d9ffe844..9b002cc1 100644
--- a/synapse/rest/synapse/client/pick_username.py
+++ b/synapse/rest/synapse/client/pick_username.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/synapse/client/saml2/__init__.py b/synapse/rest/synapse/client/saml2/__init__.py
index 3e8235ee..781ccb23 100644
--- a/synapse/rest/synapse/client/saml2/__init__.py
+++ b/synapse/rest/synapse/client/saml2/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/synapse/client/saml2/metadata_resource.py b/synapse/rest/synapse/client/saml2/metadata_resource.py
index 1e8526e2..b37c7083 100644
--- a/synapse/rest/synapse/client/saml2/metadata_resource.py
+++ b/synapse/rest/synapse/client/saml2/metadata_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/synapse/client/saml2/response_resource.py b/synapse/rest/synapse/client/saml2/response_resource.py
index 4dfadf1b..774ccd87 100644
--- a/synapse/rest/synapse/client/saml2/response_resource.py
+++ b/synapse/rest/synapse/client/saml2/response_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
#
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/rest/synapse/client/sso_register.py b/synapse/rest/synapse/client/sso_register.py
index f2acce24..70cd148a 100644
--- a/synapse/rest/synapse/client/sso_register.py
+++ b/synapse/rest/synapse/client/sso_register.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py
index f591cc6c..19ac3af3 100644
--- a/synapse/rest/well_known.py
+++ b/synapse/rest/well_known.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/secrets.py b/synapse/secrets.py
deleted file mode 100644
index 7939db75..00000000
--- a/synapse/secrets.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2018 New Vector Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Injectable secrets module for Synapse.
-
-See https://docs.python.org/3/library/secrets.html#module-secrets for the API
-used in Python 3.6, and the API emulated in Python 2.7.
-"""
-import sys
-
-# secrets is available since python 3.6
-if sys.version_info[0:2] >= (3, 6):
- import secrets
-
- class Secrets:
- def token_bytes(self, nbytes: int = 32) -> bytes:
- return secrets.token_bytes(nbytes)
-
- def token_hex(self, nbytes: int = 32) -> str:
- return secrets.token_hex(nbytes)
-
-
-else:
- import binascii
- import os
-
- class Secrets:
- def token_bytes(self, nbytes: int = 32) -> bytes:
- return os.urandom(nbytes)
-
- def token_hex(self, nbytes: int = 32) -> str:
- return binascii.hexlify(self.token_bytes(nbytes)).decode("ascii")
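
With Python 2 support long gone, this shim had become a thin wrapper around the standard library: secrets has shipped with Python since 3.6, so call sites can use it directly instead of going through the injectable Secrets class (the corresponding get_secrets() accessor is removed from synapse/server.py below). The equivalent stdlib calls:

    import secrets

    key_material = secrets.token_bytes(32)  # 32 cryptographically random bytes
    token = secrets.token_hex(32)           # 64-character random hex string
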
diff --git a/synapse/server.py b/synapse/server.py
index e85b9391..2337d2d9 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -51,6 +50,7 @@ from synapse.crypto import context_factory
from synapse.crypto.context_factory import RegularPolicyForHTTPS
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
+from synapse.events.presence_router import PresenceRouter
from synapse.events.spamcheck import SpamChecker
from synapse.events.third_party_rules import ThirdPartyEventRules
from synapse.events.utils import EventClientSerializer
@@ -70,13 +70,14 @@ from synapse.handlers.acme import AcmeHandler
from synapse.handlers.admin import AdminHandler
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGenerator
-from synapse.handlers.cas_handler import CasHandler
+from synapse.handlers.cas import CasHandler
from synapse.handlers.deactivate_account import DeactivateAccountHandler
from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.directory import DirectoryHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
+from synapse.handlers.event_auth import EventAuthHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
@@ -85,7 +86,11 @@ from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.message import EventCreationHandler, MessageHandler
from synapse.handlers.pagination import PaginationHandler
from synapse.handlers.password_policy import PasswordPolicyHandler
-from synapse.handlers.presence import PresenceHandler
+from synapse.handlers.presence import (
+ BasePresenceHandler,
+ PresenceHandler,
+ WorkerPresenceHandler,
+)
from synapse.handlers.profile import ProfileHandler
from synapse.handlers.read_marker import ReadMarkerHandler
from synapse.handlers.receipts import ReceiptsHandler
@@ -121,7 +126,6 @@ from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
-from synapse.secrets import Secrets
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (
@@ -141,8 +145,8 @@ logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from txredisapi import RedisProtocol
- from synapse.handlers.oidc_handler import OidcHandler
- from synapse.handlers.saml_handler import SamlHandler
+ from synapse.handlers.oidc import OidcHandler
+ from synapse.handlers.saml import SamlHandler
T = TypeVar("T", bound=Callable[..., Any])
@@ -282,6 +286,14 @@ class HomeServer(metaclass=abc.ABCMeta):
if self.config.run_background_tasks:
self.setup_background_tasks()
+ def start_listening(self) -> None:
+ """Start the HTTP, manhole, metrics, etc listeners
+
+ Does nothing in this base class; overridden in derived classes to start the
+ appropriate listeners.
+ """
+ pass
+
def setup_background_tasks(self) -> None:
"""
Some handlers have side effects on instantiation (like registering
@@ -319,9 +331,6 @@ class HomeServer(metaclass=abc.ABCMeta):
return self.datastores
- def get_config(self) -> HomeServerConfig:
- return self.config
-
@cache_in_self
def get_distributor(self) -> Distributor:
return Distributor()
@@ -329,6 +338,7 @@ class HomeServer(metaclass=abc.ABCMeta):
@cache_in_self
def get_registration_ratelimiter(self) -> Ratelimiter:
return Ratelimiter(
+ store=self.get_datastore(),
clock=self.get_clock(),
rate_hz=self.config.rc_registration.per_second,
burst_count=self.config.rc_registration.burst_count,
@@ -414,8 +424,11 @@ class HomeServer(metaclass=abc.ABCMeta):
return StateResolutionHandler(self)
@cache_in_self
- def get_presence_handler(self) -> PresenceHandler:
- return PresenceHandler(self)
+ def get_presence_handler(self) -> BasePresenceHandler:
+ if self.get_instance_name() in self.config.worker.writers.presence:
+ return PresenceHandler(self)
+ else:
+ return WorkerPresenceHandler(self)
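
The return type widens to BasePresenceHandler because the concrete class now depends on the instance's role: designated presence writers own the state, every other worker follows it. A runnable toy of the dispatch (wiring hypothetical):

    class Config:
        presence_writers = ["main"]

    class Server:
        def __init__(self, name: str, config: Config):
            self.name, self.config = name, config

        def presence_handler_kind(self) -> str:
            # Writers own presence state; other workers get a follower that
            # forwards writes over replication.
            if self.name in self.config.presence_writers:
                return "PresenceHandler"
            return "WorkerPresenceHandler"

    assert Server("main", Config()).presence_handler_kind() == "PresenceHandler"
    assert Server("sync1", Config()).presence_handler_kind() == "WorkerPresenceHandler"
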
@cache_in_self
def get_typing_writer_handler(self) -> TypingWriterHandler:
@@ -425,6 +438,10 @@ class HomeServer(metaclass=abc.ABCMeta):
raise Exception("Workers cannot write typing")
@cache_in_self
+ def get_presence_router(self) -> PresenceRouter:
+ return PresenceRouter(self)
+
+ @cache_in_self
def get_typing_handler(self) -> FollowerTypingHandler:
if self.config.worker.writers.typing == self.get_instance_name():
# Use get_typing_writer_handler to ensure that we use the same
@@ -624,10 +641,6 @@ class HomeServer(metaclass=abc.ABCMeta):
return GroupAttestionRenewer(self)
@cache_in_self
- def get_secrets(self) -> Secrets:
- return Secrets()
-
- @cache_in_self
def get_stats_handler(self) -> StatsHandler:
return StatsHandler(self)
@@ -687,13 +700,13 @@ class HomeServer(metaclass=abc.ABCMeta):
@cache_in_self
def get_saml_handler(self) -> "SamlHandler":
- from synapse.handlers.saml_handler import SamlHandler
+ from synapse.handlers.saml import SamlHandler
return SamlHandler(self)
@cache_in_self
def get_oidc_handler(self) -> "OidcHandler":
- from synapse.handlers.oidc_handler import OidcHandler
+ from synapse.handlers.oidc import OidcHandler
return OidcHandler(self)
@@ -738,6 +751,10 @@ class HomeServer(metaclass=abc.ABCMeta):
return SpaceSummaryHandler(self)
@cache_in_self
+ def get_event_auth_handler(self) -> EventAuthHandler:
+ return EventAuthHandler(self)
+
+ @cache_in_self
def get_external_cache(self) -> ExternalCache:
return ExternalCache(self)
diff --git a/synapse/server_notices/consent_server_notices.py b/synapse/server_notices/consent_server_notices.py
index a9349bf9..e65f6f88 100644
--- a/synapse/server_notices/consent_server_notices.py
+++ b/synapse/server_notices/consent_server_notices.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py
index a18a2e76..e4b0bc5c 100644
--- a/synapse/server_notices/resource_limits_server_notices.py
+++ b/synapse/server_notices/resource_limits_server_notices.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py
index 144e1da7..f19075b7 100644
--- a/synapse/server_notices/server_notices_manager.py
+++ b/synapse/server_notices/server_notices_manager.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/server_notices/server_notices_sender.py b/synapse/server_notices/server_notices_sender.py
index 965c6458..c875b15b 100644
--- a/synapse/server_notices/server_notices_sender.py
+++ b/synapse/server_notices/server_notices_sender.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/server_notices/worker_server_notices_sender.py b/synapse/server_notices/worker_server_notices_sender.py
index c76bd574..cc533184 100644
--- a/synapse/server_notices/worker_server_notices_sender.py
+++ b/synapse/server_notices/worker_server_notices_sender.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/spam_checker_api/__init__.py b/synapse/spam_checker_api/__init__.py
index 3ce25bb0..73018f2d 100644
--- a/synapse/spam_checker_api/__init__.py
+++ b/synapse/spam_checker_api/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index c3d6e80c..b3bd92d3 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
@@ -20,8 +19,10 @@ from typing import (
Any,
Awaitable,
Callable,
+ Collection,
DefaultDict,
Dict,
+ FrozenSet,
Iterable,
List,
Optional,
@@ -46,7 +47,7 @@ from synapse.logging.utils import log_function
from synapse.state import v1, v2
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.roommember import ProfileInfo
-from synapse.types import Collection, StateMap
+from synapse.types import StateMap
from synapse.util.async_helpers import Linearizer
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.metrics import Measure, measure_func
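
This import shuffle recurs throughout the diff: Collection now comes straight from typing rather than from the backport in synapse.types, which existed only because typing.Collection is unavailable before Python 3.6. The stdlib type expresses the same contract (sized, iterable, supports "in"):

    from typing import Collection

    def count_true(flags: Collection[bool]) -> int:
        # Collection = Sized + Iterable + Container, so lists, sets and
        # tuples all qualify and callers need not build a list.
        return sum(1 for f in flags if f)

    count_true([True, False]), count_true({True}), count_true((False,))
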
@@ -515,7 +516,7 @@ class StateResolutionHandler:
expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000,
iterable=True,
reset_expiry_on_get=True,
- )
+ ) # type: ExpiringCache[FrozenSet[int], _StateCacheEntry]
#
# stuff for tracking time spent on state-res by room
@@ -536,7 +537,7 @@ class StateResolutionHandler:
state_groups_ids: Dict[int, StateMap[str]],
event_map: Optional[Dict[str, EventBase]],
state_res_store: "StateResolutionStore",
- ):
+ ) -> _StateCacheEntry:
"""Resolves conflicts between a set of state groups
Always generates a new state group (unless we hit the cache), so should
diff --git a/synapse/state/v1.py b/synapse/state/v1.py
index ce255da6..318e9988 100644
--- a/synapse/state/v1.py
+++ b/synapse/state/v1.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index e73a548e..008644cd 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,6 +18,7 @@ import logging
from typing import (
Any,
Callable,
+ Collection,
Dict,
Generator,
Iterable,
@@ -38,7 +38,7 @@ from synapse.api.constants import EventTypes
from synapse.api.errors import AuthError
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events import EventBase
-from synapse.types import Collection, MutableStateMap, StateMap
+from synapse.types import MutableStateMap, StateMap
from synapse.util import Clock
logger = logging.getLogger(__name__)
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index 0b9007e5..105e4e1f 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018,2019 New Vector Ltd
#
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 24090532..6b68d872 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -17,13 +16,13 @@
import logging
import random
from abc import ABCMeta
-from typing import TYPE_CHECKING, Any, Iterable, Optional, Union
+from typing import TYPE_CHECKING, Any, Collection, Iterable, Optional, Union
from synapse.storage.database import LoggingTransaction # noqa: F401
from synapse.storage.database import make_in_list_sql_clause # noqa: F401
from synapse.storage.database import DatabasePool
from synapse.storage.types import Connection
-from synapse.types import Collection, StreamToken, get_domain_from_id
+from synapse.types import StreamToken, get_domain_from_id
from synapse.util import json_decoder
if TYPE_CHECKING:
@@ -115,7 +114,7 @@ def db_to_json(db_content: Union[memoryview, bytes, bytearray, str]) -> Any:
db_content = db_content.tobytes()
# Decode it to a Unicode string before feeding it to the JSON decoder, since
- # Python 3.5 does not support deserializing bytes.
+ # it only supports handling strings
if isinstance(db_content, (bytes, bytearray)):
db_content = db_content.decode("utf8")
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
index ccb06aab..142787fd 100644
--- a/synapse/storage/background_updates.py
+++ b/synapse/storage/background_updates.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 94590e7b..bd39c095 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -21,6 +20,7 @@ from time import monotonic as monotonic_time
from typing import (
Any,
Callable,
+ Collection,
Dict,
Iterable,
Iterator,
@@ -49,7 +49,6 @@ from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.background_updates import BackgroundUpdater
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
from synapse.storage.types import Connection, Cursor
-from synapse.types import Collection
# python 3 does not have a maximum int value
MAX_TXN_ID = 2 ** 63 - 1
@@ -172,10 +171,7 @@ class LoggingDatabaseConnection:
# The type of entry which goes on our after_callbacks and exception_callbacks lists.
-#
-# Python 3.5.2 doesn't support Callable with an ellipsis, so we wrap it in quotes so
-# that mypy sees the type but the runtime python doesn't.
-_CallbackListEntry = Tuple["Callable[..., None]", Iterable[Any], Dict[str, Any]]
+_CallbackListEntry = Tuple[Callable[..., None], Iterable[Any], Dict[str, Any]]
R = TypeVar("R")
@@ -222,7 +218,7 @@ class LoggingTransaction:
self.after_callbacks = after_callbacks
self.exception_callbacks = exception_callbacks
- def call_after(self, callback: "Callable[..., None]", *args: Any, **kwargs: Any):
+ def call_after(self, callback: Callable[..., None], *args: Any, **kwargs: Any):
"""Call the given callback on the main twisted thread after the
transaction has finished. Used to invalidate the caches on the
correct thread.
@@ -234,7 +230,7 @@ class LoggingTransaction:
self.after_callbacks.append((callback, args, kwargs))
def call_on_exception(
- self, callback: "Callable[..., None]", *args: Any, **kwargs: Any
+ self, callback: Callable[..., None], *args: Any, **kwargs: Any
):
# if self.exception_callbacks is None, that means that whatever constructed the
# LoggingTransaction isn't expecting there to be any callbacks; assert that
@@ -486,9 +482,9 @@ class DatabasePool:
desc: str,
after_callbacks: List[_CallbackListEntry],
exception_callbacks: List[_CallbackListEntry],
- func: "Callable[..., R]",
+ func: Callable[..., R],
*args: Any,
- **kwargs: Any
+ **kwargs: Any,
) -> R:
"""Start a new database transaction with the given connection.
@@ -619,10 +615,10 @@ class DatabasePool:
async def runInteraction(
self,
desc: str,
- func: "Callable[..., R]",
+ func: Callable[..., R],
*args: Any,
db_autocommit: bool = False,
- **kwargs: Any
+ **kwargs: Any,
) -> R:
"""Starts a transaction on the database and runs a given function
@@ -679,10 +675,10 @@ class DatabasePool:
async def runWithConnection(
self,
- func: "Callable[..., R]",
+ func: Callable[..., R],
*args: Any,
db_autocommit: bool = False,
- **kwargs: Any
+ **kwargs: Any,
) -> R:
"""Wraps the .runWithConnection() method on the underlying db_pool.
@@ -775,7 +771,7 @@ class DatabasePool:
desc: str,
decoder: Optional[Callable[[Cursor], R]],
query: str,
- *args: Any
+ *args: Any,
) -> R:
"""Runs a single query for a result set.
@@ -900,7 +896,7 @@ class DatabasePool:
table: str,
keyvalues: Dict[str, Any],
values: Dict[str, Any],
- insertion_values: Dict[str, Any] = {},
+ insertion_values: Optional[Dict[str, Any]] = None,
desc: str = "simple_upsert",
lock: bool = True,
) -> Optional[bool]:
@@ -927,6 +923,8 @@ class DatabasePool:
Native upserts always return None. Emulated upserts return True if a
new entry was created, False if an existing one was updated.
"""
+ insertion_values = insertion_values or {}
+
attempts = 0
while True:
try:
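
The repeated "Dict[str, Any] = {}" to "Optional[Dict[str, Any]] = None" rewrites in this file all guard against the same Python footgun: a mutable default is evaluated once, at definition time, and shared across every call. A self-contained illustration:

    def broken(key, acc={}):   # one dict shared by every call!
        acc[key] = True
        return acc

    def fixed(key, acc=None):  # a fresh dict per call
        acc = acc or {}
        acc[key] = True
        return acc

    broken("a")
    print(broken("b"))  # {'a': True, 'b': True}: state leaked across calls
    fixed("a")
    print(fixed("b"))   # {'b': True}
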
@@ -964,7 +962,7 @@ class DatabasePool:
table: str,
keyvalues: Dict[str, Any],
values: Dict[str, Any],
- insertion_values: Dict[str, Any] = {},
+ insertion_values: Optional[Dict[str, Any]] = None,
lock: bool = True,
) -> Optional[bool]:
"""
@@ -982,6 +980,8 @@ class DatabasePool:
Native upserts always return None. Emulated upserts return True if a
new entry was created, False if an existing one was updated.
"""
+ insertion_values = insertion_values or {}
+
if self.engine.can_native_upsert and table not in self._unsafe_to_upsert_tables:
self.simple_upsert_txn_native_upsert(
txn, table, keyvalues, values, insertion_values=insertion_values
@@ -1003,7 +1003,7 @@ class DatabasePool:
table: str,
keyvalues: Dict[str, Any],
values: Dict[str, Any],
- insertion_values: Dict[str, Any] = {},
+ insertion_values: Optional[Dict[str, Any]] = None,
lock: bool = True,
) -> bool:
"""
@@ -1017,6 +1017,8 @@ class DatabasePool:
Returns True if a new entry was created, False if an existing
one was updated.
"""
+ insertion_values = insertion_values or {}
+
# We need to lock the table :(, unless we're *really* careful
if lock:
self.engine.lock_table(txn, table)
@@ -1077,7 +1079,7 @@ class DatabasePool:
table: str,
keyvalues: Dict[str, Any],
values: Dict[str, Any],
- insertion_values: Dict[str, Any] = {},
+ insertion_values: Optional[Dict[str, Any]] = None,
) -> None:
"""
Use the native UPSERT functionality in recent PostgreSQL versions.
@@ -1090,7 +1092,7 @@ class DatabasePool:
"""
allvalues = {} # type: Dict[str, Any]
allvalues.update(keyvalues)
- allvalues.update(insertion_values)
+ allvalues.update(insertion_values or {})
if not values:
latter = "NOTHING"
@@ -1513,7 +1515,7 @@ class DatabasePool:
column: str,
iterable: Iterable[Any],
retcols: Iterable[str],
- keyvalues: Dict[str, Any] = {},
+ keyvalues: Optional[Dict[str, Any]] = None,
desc: str = "simple_select_many_batch",
batch_size: int = 100,
) -> List[Any]:
@@ -1531,6 +1533,8 @@ class DatabasePool:
desc: description of the transaction, for logging and metrics
batch_size: the number of rows for each select query
"""
+ keyvalues = keyvalues or {}
+
results = [] # type: List[Dict[str, Any]]
if not iterable:
@@ -2059,69 +2063,18 @@ def make_in_list_sql_clause(
KV = TypeVar("KV")
-def make_tuple_comparison_clause(
- database_engine: BaseDatabaseEngine, keys: List[Tuple[str, KV]]
-) -> Tuple[str, List[KV]]:
+def make_tuple_comparison_clause(keys: List[Tuple[str, KV]]) -> Tuple[str, List[KV]]:
"""Returns a tuple comparison SQL clause
- Depending what the SQL engine supports, builds a SQL clause that looks like either
- "(a, b) > (?, ?)", or "(a > ?) OR (a == ? AND b > ?)".
+ Builds a SQL clause that looks like "(a, b) > (?, ?)"
Args:
- database_engine
keys: A set of (column, value) pairs to be compared.
Returns:
A tuple of SQL query and the args
"""
- if database_engine.supports_tuple_comparison:
- return (
- "(%s) > (%s)" % (",".join(k[0] for k in keys), ",".join("?" for _ in keys)),
- [k[1] for k in keys],
- )
-
- # we want to build a clause
- # (a > ?) OR
- # (a == ? AND b > ?) OR
- # (a == ? AND b == ? AND c > ?)
- # ...
- # (a == ? AND b == ? AND ... AND z > ?)
- #
- # or, equivalently:
- #
- # (a > ? OR (a == ? AND
- # (b > ? OR (b == ? AND
- # ...
- # (y > ? OR (y == ? AND
- # z > ?
- # ))
- # ...
- # ))
- # ))
- #
- # which itself is equivalent to (and apparently easier for the query optimiser):
- #
- # (a >= ? AND (a > ? OR
- # (b >= ? AND (b > ? OR
- # ...
- # (y >= ? AND (y > ? OR
- # z > ?
- # ))
- # ...
- # ))
- # ))
- #
- #
-
- clause = ""
- args = [] # type: List[KV]
- for k, v in keys[:-1]:
- clause = clause + "(%s >= ? AND (%s > ? OR " % (k, k)
- args.extend([v, v])
-
- (k, v) = keys[-1]
- clause += "%s > ?" % (k,)
- args.append(v)
-
- clause += "))" * (len(keys) - 1)
- return clause, args
+ return (
+ "(%s) > (%s)" % (",".join(k[0] for k in keys), ",".join("?" for _ in keys)),
+ [k[1] for k in keys],
+ )
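
With every supported database now understanding row values (SQLite has had them since 3.15; PostgreSQL has long supported them), the emulated "(a > ?) OR (a == ? AND b > ?)" expansion is dropped and the helper always emits the native form, so callers no longer pass the database engine. For example:

    clause, args = make_tuple_comparison_clause(
        [("topological_ordering", 10), ("stream_ordering", 353)]
    )
    # clause == "(topological_ordering,stream_ordering) > (?,?)"
    # args   == [10, 353]
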
diff --git a/synapse/storage/databases/__init__.py b/synapse/storage/databases/__init__.py
index 379c78bb..20b75505 100644
--- a/synapse/storage/databases/__init__.py
+++ b/synapse/storage/databases/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py
index 1d44c3aa..49c7606d 100644
--- a/synapse/storage/databases/main/__init__.py
+++ b/synapse/storage/databases/main/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2019-2021 The Matrix.org Foundation C.I.C.
@@ -18,9 +17,9 @@
import logging
from typing import List, Optional, Tuple
-from synapse.api.constants import PresenceState
from synapse.config.homeserver import HomeServerConfig
from synapse.storage.database import DatabasePool
+from synapse.storage.databases.main.stats import UserSortOrder
from synapse.storage.engines import PostgresEngine
from synapse.storage.util.id_generators import (
IdGenerator,
@@ -51,7 +50,7 @@ from .media_repository import MediaRepositoryStore
from .metrics import ServerMetricsStore
from .monthly_active_users import MonthlyActiveUsersStore
from .openid import OpenIdStore
-from .presence import PresenceStore, UserPresenceState
+from .presence import PresenceStore
from .profile import ProfileStore
from .purge_events import PurgeEventsStore
from .push_rule import PushRuleStore
@@ -126,9 +125,6 @@ class DataStore(
self._clock = hs.get_clock()
self.database_engine = database.engine
- self._presence_id_gen = StreamIdGenerator(
- db_conn, "presence_stream", "stream_id"
- )
self._public_room_id_gen = StreamIdGenerator(
db_conn, "public_room_list_stream", "stream_id"
)
@@ -177,21 +173,6 @@ class DataStore(
super().__init__(database, db_conn, hs)
- self._presence_on_startup = self._get_active_presence(db_conn)
-
- presence_cache_prefill, min_presence_val = self.db_pool.get_cache_dict(
- db_conn,
- "presence_stream",
- entity_column="user_id",
- stream_column="stream_id",
- max_value=self._presence_id_gen.get_current_token(),
- )
- self.presence_stream_cache = StreamChangeCache(
- "PresenceStreamChangeCache",
- min_presence_val,
- prefilled_cache=presence_cache_prefill,
- )
-
device_list_max = self._device_list_id_gen.get_current_token()
self._device_list_stream_cache = StreamChangeCache(
"DeviceListStreamChangeCache", device_list_max
@@ -238,32 +219,6 @@ class DataStore(
def get_device_stream_token(self) -> int:
return self._device_list_id_gen.get_current_token()
- def take_presence_startup_info(self):
- active_on_startup = self._presence_on_startup
- self._presence_on_startup = None
- return active_on_startup
-
- def _get_active_presence(self, db_conn):
- """Fetch non-offline presence from the database so that we can register
- the appropriate time outs.
- """
-
- sql = (
- "SELECT user_id, state, last_active_ts, last_federation_update_ts,"
- " last_user_sync_ts, status_msg, currently_active FROM presence_stream"
- " WHERE state != ?"
- )
-
- txn = db_conn.cursor()
- txn.execute(sql, (PresenceState.OFFLINE,))
- rows = self.db_pool.cursor_to_dict(txn)
- txn.close()
-
- for row in rows:
- row["currently_active"] = bool(row["currently_active"])
-
- return [UserPresenceState(**row) for row in rows]
-
async def get_users(self) -> List[JsonDict]:
"""Function to retrieve a list of users in users table.
@@ -292,6 +247,8 @@ class DataStore(
name: Optional[str] = None,
guests: bool = True,
deactivated: bool = False,
+ order_by: UserSortOrder = UserSortOrder.USER_ID.value,
+ direction: str = "f",
) -> Tuple[List[JsonDict], int]:
"""Function to retrieve a paginated list of users from
users list. This will return a json list of users and the
@@ -304,6 +261,8 @@ class DataStore(
name: search for local part of user_id or display name
            guests: whether to include guest users
deactivated: whether to include deactivated users
+ order_by: the sort order of the returned list
+ direction: sort ascending or descending
Returns:
A tuple of a list of mappings from user to information and a count of total users.
"""
@@ -312,6 +271,14 @@ class DataStore(
filters = []
args = [self.hs.config.server_name]
+ # Set ordering
+ order_by_column = UserSortOrder(order_by).value
+
+ if direction == "b":
+ order = "DESC"
+ else:
+ order = "ASC"
+
# `name` is in database already in lower case
if name:
filters.append("(name LIKE ? OR LOWER(displayname) LIKE ?)")
@@ -339,10 +306,15 @@ class DataStore(
txn.execute(sql, args)
count = txn.fetchone()[0]
- sql = (
- "SELECT name, user_type, is_guest, admin, deactivated, shadow_banned, displayname, avatar_url "
- + sql_base
- + " ORDER BY u.name LIMIT ? OFFSET ?"
+ sql = """
+ SELECT name, user_type, is_guest, admin, deactivated, shadow_banned, displayname, avatar_url
+ {sql_base}
+ ORDER BY {order_by_column} {order}, u.name ASC
+ LIMIT ? OFFSET ?
+ """.format(
+ sql_base=sql_base,
+ order_by_column=order_by_column,
+ order=order,
)
args += [limit, start]
txn.execute(sql, args)
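
The str.format interpolation above is safe from injection only because both values come from a closed set: UserSortOrder(order_by) raises ValueError for any column outside the enum, and order can only be ASC or DESC. A minimal model of that validation (enum values hypothetical):

    from enum import Enum

    class UserSortOrder(Enum):
        USER_ID = "name"
        DISPLAYNAME = "displayname"

    def order_clause(order_by: str, direction: str) -> str:
        column = UserSortOrder(order_by).value  # ValueError if not a known column
        order = "DESC" if direction == "b" else "ASC"
        return f"ORDER BY {column} {order}, name ASC"

    print(order_clause("name", "b"))           # ORDER BY name DESC, name ASC
    # order_clause("; DROP TABLE users", "f")  # would raise ValueError
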
diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py
index a277a1ef..1d02795f 100644
--- a/synapse/storage/databases/main/account_data.py
+++ b/synapse/storage/databases/main/account_data.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py
index 85bb853d..9f182c2a 100644
--- a/synapse/storage/databases/main/appservice.py
+++ b/synapse/storage/databases/main/appservice.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py
index 1e7637a6..ecc1f935 100644
--- a/synapse/storage/databases/main/cache.py
+++ b/synapse/storage/databases/main/cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/censor_events.py b/synapse/storage/databases/main/censor_events.py
index 3e26d5ba..f22c1f24 100644
--- a/synapse/storage/databases/main/censor_events.py
+++ b/synapse/storage/databases/main/censor_events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py
index 6d18e692..d60010e9 100644
--- a/synapse/storage/databases/main/client_ips.py
+++ b/synapse/storage/databases/main/client_ips.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -298,7 +297,6 @@ class ClientIpBackgroundUpdateStore(SQLBaseStore):
# times, which is fine.
where_clause, where_args = make_tuple_comparison_clause(
- self.database_engine,
[("user_id", last_user_id), ("device_id", last_device_id)],
)
diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py
index 691080ce..7c9d1f74 100644
--- a/synapse/storage/databases/main/deviceinbox.py
+++ b/synapse/storage/databases/main/deviceinbox.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index d327e9aa..c9346de3 100644
--- a/synapse/storage/databases/main/devices.py
+++ b/synapse/storage/databases/main/devices.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
# Copyright 2019,2020 The Matrix.org Foundation C.I.C.
@@ -16,7 +15,7 @@
# limitations under the License.
import abc
import logging
-from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
+from typing import Any, Collection, Dict, Iterable, List, Optional, Set, Tuple
from synapse.api.errors import Codes, StoreError
from synapse.logging.opentracing import (
@@ -32,7 +31,7 @@ from synapse.storage.database import (
LoggingTransaction,
make_tuple_comparison_clause,
)
-from synapse.types import Collection, JsonDict, get_verify_key_from_cross_signing_key
+from synapse.types import JsonDict, get_verify_key_from_cross_signing_key
from synapse.util import json_decoder, json_encoder
from synapse.util.caches.descriptors import cached, cachedList
from synapse.util.caches.lrucache import LruCache
@@ -718,7 +717,15 @@ class DeviceWorkerStore(SQLBaseStore):
keyvalues={"user_id": user_id},
values={},
insertion_values={"added_ts": self._clock.time_msec()},
- desc="make_remote_user_device_cache_as_stale",
+ desc="mark_remote_user_device_cache_as_stale",
+ )
+
+ async def mark_remote_user_device_cache_as_valid(self, user_id: str) -> None:
+ # Remove the database entry that says we need to resync devices, after a resync
+ await self.db_pool.simple_delete(
+ table="device_lists_remote_resync",
+ keyvalues={"user_id": user_id},
+ desc="mark_remote_user_device_cache_as_valid",
)
async def mark_remote_user_device_list_as_unsubscribed(self, user_id: str) -> None:
@@ -985,7 +992,7 @@ class DeviceBackgroundUpdateStore(SQLBaseStore):
def _txn(txn):
clause, args = make_tuple_comparison_clause(
- self.db_pool.engine, [(x, last_row[x]) for x in KEY_COLS]
+ [(x, last_row[x]) for x in KEY_COLS]
)
sql = """
SELECT stream_id, destination, user_id, device_id, MAX(ts) AS ts
@@ -1290,15 +1297,6 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
lock=False,
)
- # If we're replacing the remote user's device list cache presumably
- # we've done a full resync, so we remove the entry that says we need
- # to resync
- self.db_pool.simple_delete_txn(
- txn,
- table="device_lists_remote_resync",
- keyvalues={"user_id": user_id},
- )
-
async def add_device_change_to_streams(
self, user_id: str, device_ids: Collection[str], hosts: List[str]
):
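
Taken together, the devices changes make cache validity an explicit contract: mark_remote_user_device_cache_as_stale (its desc typo also fixed above) flags a remote user for resync, and the new mark_remote_user_device_cache_as_valid clears that flag in its own query rather than as a side effect buried inside the cache-replacement transaction. A sketch of the resulting caller flow (store and federation names hypothetical):

    async def resync_remote_devices(store, federation, user_id: str) -> None:
        devices = await federation.query_user_devices(user_id)
        await store.replace_remote_device_list_cache(user_id, devices)
        # Only clear the "needs resync" marker once the resync succeeded,
        # as an explicit, separate step.
        await store.mark_remote_user_device_cache_as_valid(user_id)
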
diff --git a/synapse/storage/databases/main/directory.py b/synapse/storage/databases/main/directory.py
index 267b9483..86075bc5 100644
--- a/synapse/storage/databases/main/directory.py
+++ b/synapse/storage/databases/main/directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/e2e_room_keys.py b/synapse/storage/databases/main/e2e_room_keys.py
index 12ceccee..b15fb71e 100644
--- a/synapse/storage/databases/main/e2e_room_keys.py
+++ b/synapse/storage/databases/main/e2e_room_keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
# Copyright 2019 Matrix.org Foundation C.I.C.
#
diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py
index f1e7859d..88afe97c 100644
--- a/synapse/storage/databases/main/end_to_end_keys.py
+++ b/synapse/storage/databases/main/end_to_end_keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
# Copyright 2019,2020 The Matrix.org Foundation C.I.C.
diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py
index a956be49..ff81d5cd 100644
--- a/synapse/storage/databases/main/event_federation.py
+++ b/synapse/storage/databases/main/event_federation.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +14,7 @@
import itertools
import logging
from queue import Empty, PriorityQueue
-from typing import Dict, Iterable, List, Set, Tuple
+from typing import Collection, Dict, Iterable, List, Set, Tuple
from synapse.api.errors import StoreError
from synapse.events import EventBase
@@ -26,7 +25,6 @@ from synapse.storage.databases.main.events_worker import EventsWorkerStore
from synapse.storage.databases.main.signatures import SignatureWorkerStore
from synapse.storage.engines import PostgresEngine
from synapse.storage.types import Cursor
-from synapse.types import Collection
from synapse.util.caches.descriptors import cached
from synapse.util.caches.lrucache import LruCache
from synapse.util.iterutils import batch_iter
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py
index 78245ad5..58453221 100644
--- a/synapse/storage/databases/main/event_push_actions.py
+++ b/synapse/storage/databases/main/event_push_actions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index 98dac19a..fd25c811 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -171,7 +170,7 @@ class PersistEventsStore:
)
async with stream_ordering_manager as stream_orderings:
- for (event, context), stream in zip(events_and_contexts, stream_orderings):
+ for (event, _), stream in zip(events_and_contexts, stream_orderings):
event.internal_metadata.stream_ordering = stream
await self.db_pool.runInteraction(
@@ -298,7 +297,7 @@ class PersistEventsStore:
txn.execute(sql + clause, args)
to_recursively_check = []
- for event_id, prev_event_id, metadata, rejected in txn:
+ for _, prev_event_id, metadata, rejected in txn:
if prev_event_id in existing_prevs:
continue
@@ -320,8 +319,8 @@ class PersistEventsStore:
txn: LoggingTransaction,
events_and_contexts: List[Tuple[EventBase, EventContext]],
backfilled: bool,
- state_delta_for_room: Dict[str, DeltaState] = {},
- new_forward_extremeties: Dict[str, List[str]] = {},
+ state_delta_for_room: Optional[Dict[str, DeltaState]] = None,
+ new_forward_extremeties: Optional[Dict[str, List[str]]] = None,
):
"""Insert some number of room events into the necessary database tables.
@@ -342,6 +341,9 @@ class PersistEventsStore:
extremities.
"""
+ state_delta_for_room = state_delta_for_room or {}
+ new_forward_extremeties = new_forward_extremeties or {}
+
all_events_and_contexts = events_and_contexts
min_stream_order = events_and_contexts[0][0].internal_metadata.stream_ordering
@@ -1125,7 +1127,7 @@ class PersistEventsStore:
def _update_forward_extremities_txn(
self, txn, new_forward_extremities, max_stream_order
):
- for room_id, new_extrem in new_forward_extremities.items():
+ for room_id in new_forward_extremities.keys():
self.db_pool.simple_delete_txn(
txn, table="event_forward_extremities", keyvalues={"room_id": room_id}
)
@@ -1376,24 +1378,28 @@ class PersistEventsStore:
],
)
- for event, _ in events_and_contexts:
- if not event.internal_metadata.is_redacted():
- # If we're persisting an unredacted event we go and ensure
- # that we mark any redactions that reference this event as
- # requiring censoring.
- self.db_pool.simple_update_txn(
- txn,
- table="redactions",
- keyvalues={"redacts": event.event_id},
- updatevalues={"have_censored": False},
+ # If we're persisting an unredacted event we go and ensure
+ # that we mark any redactions that reference this event as
+ # requiring censoring.
+ sql = "UPDATE redactions SET have_censored = ? WHERE redacts = ?"
+ txn.execute_batch(
+ sql,
+ (
+ (
+ False,
+ event.event_id,
)
+ for event, _ in events_and_contexts
+ if not event.internal_metadata.is_redacted()
+ ),
+ )
state_events_and_contexts = [
ec for ec in events_and_contexts if ec[0].is_state()
]
state_values = []
- for event, context in state_events_and_contexts:
+ for event, _ in state_events_and_contexts:
vals = {
"event_id": event.event_id,
"room_id": event.room_id,
@@ -1462,7 +1468,7 @@ class PersistEventsStore:
# nothing to do here
return
- for event, context in events_and_contexts:
+ for event, _ in events_and_contexts:
if event.type == EventTypes.Redaction and event.redacts is not None:
# Remove the entries in the event_push_actions table for the
# redacted event.
@@ -1879,20 +1885,28 @@ class PersistEventsStore:
),
)
- for event, _ in events_and_contexts:
- user_ids = self.db_pool.simple_select_onecol_txn(
- txn,
- table="event_push_actions_staging",
- keyvalues={"event_id": event.event_id},
- retcol="user_id",
- )
+ room_to_event_ids = {} # type: Dict[str, List[str]]
+ for e, _ in events_and_contexts:
+ room_to_event_ids.setdefault(e.room_id, []).append(e.event_id)
- for uid in user_ids:
- txn.call_after(
- self.store.get_unread_event_push_actions_by_room_for_user.invalidate_many,
- (event.room_id, uid),
+ for room_id, event_ids in room_to_event_ids.items():
+ rows = self.db_pool.simple_select_many_txn(
+ txn,
+ table="event_push_actions_staging",
+ column="event_id",
+ iterable=event_ids,
+ keyvalues={},
+ retcols=("user_id",),
)
+ user_ids = {row["user_id"] for row in rows}
+
+ for user_id in user_ids:
+ txn.call_after(
+ self.store.get_unread_event_push_actions_by_room_for_user.invalidate_many,
+ (room_id, user_id),
+ )
+
# Now we delete the staging area for *all* events that were being
# persisted.
txn.execute_batch(
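
Both rewrites in this file trade per-event queries for batched ones: redaction censoring feeds a generator of parameter tuples to a single execute_batch, and push-action cache invalidation now issues one simple_select_many_txn per room instead of one select per event. execute_batch follows the DB-API executemany shape, which the stdlib sqlite3 module can illustrate:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE redactions (redacts TEXT, have_censored BOOL)")
    conn.executemany(
        "INSERT INTO redactions VALUES (?, ?)", [("$ev1", True), ("$ev2", True)]
    )
    # One parameterised statement, many parameter sets: the same idea as
    # txn.execute_batch, minus Synapse's logging and metrics wrapper.
    conn.executemany(
        "UPDATE redactions SET have_censored = ? WHERE redacts = ?",
        [(False, "$ev1"), (False, "$ev2")],
    )
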
diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py
index 78367ea5..cbe4be14 100644
--- a/synapse/storage/databases/main/events_bg_updates.py
+++ b/synapse/storage/databases/main/events_bg_updates.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -838,7 +837,6 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
# We want to do a `(topological_ordering, stream_ordering) > (?,?)`
# comparison, but that is not supported on older SQLite versions
tuple_clause, tuple_args = make_tuple_comparison_clause(
- self.database_engine,
[
("events.room_id", last_room_id),
("topological_ordering", last_depth),
diff --git a/synapse/storage/databases/main/events_forward_extremities.py b/synapse/storage/databases/main/events_forward_extremities.py
index b3703ae1..6d2688d7 100644
--- a/synapse/storage/databases/main/events_forward_extremities.py
+++ b/synapse/storage/databases/main/events_forward_extremities.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index 952d4969..2c823e09 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,7 +15,16 @@
import logging
import threading
from collections import namedtuple
-from typing import Dict, Iterable, List, Optional, Tuple, overload
+from typing import (
+ Collection,
+ Container,
+ Dict,
+ Iterable,
+ List,
+ Optional,
+ Tuple,
+ overload,
+)
from constantly import NamedConstant, Names
from typing_extensions import Literal
@@ -46,7 +54,7 @@ from synapse.storage.database import DatabasePool
from synapse.storage.engines import PostgresEngine
from synapse.storage.util.id_generators import MultiWriterIdGenerator, StreamIdGenerator
from synapse.storage.util.sequence import build_sequence_generator
-from synapse.types import Collection, JsonDict, get_domain_from_id
+from synapse.types import JsonDict, get_domain_from_id
from synapse.util.caches.descriptors import cached
from synapse.util.caches.lrucache import LruCache
from synapse.util.iterutils import batch_iter
@@ -544,7 +552,7 @@ class EventsWorkerStore(SQLBaseStore):
async def get_stripped_room_state_from_event_context(
self,
context: EventContext,
- state_types_to_include: List[EventTypes],
+ state_types_to_include: Container[str],
membership_user_id: Optional[str] = None,
) -> List[JsonDict]:
"""
diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py
index d2f5b9a5..bb244a03 100644
--- a/synapse/storage/databases/main/filtering.py
+++ b/synapse/storage/databases/main/filtering.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/group_server.py b/synapse/storage/databases/main/group_server.py
index ac07e019..66ad363b 100644
--- a/synapse/storage/databases/main/group_server.py
+++ b/synapse/storage/databases/main/group_server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2018 New Vector Ltd
#
@@ -1027,8 +1026,8 @@ class GroupServerStore(GroupServerWorkerStore):
user_id: str,
is_admin: bool = False,
is_public: bool = True,
- local_attestation: dict = None,
- remote_attestation: dict = None,
+ local_attestation: Optional[dict] = None,
+ remote_attestation: Optional[dict] = None,
) -> None:
"""Add a user to the group server.
@@ -1171,7 +1170,7 @@ class GroupServerStore(GroupServerWorkerStore):
user_id: str,
membership: str,
is_admin: bool = False,
- content: JsonDict = {},
+ content: Optional[JsonDict] = None,
local_attestation: Optional[dict] = None,
remote_attestation: Optional[dict] = None,
is_publicised: bool = False,
@@ -1192,6 +1191,8 @@ class GroupServerStore(GroupServerWorkerStore):
is_publicised: Whether this should be publicised.
"""
+ content = content or {}
+
def _register_user_group_membership_txn(txn, next_id):
# TODO: Upsert?
self.db_pool.simple_delete_txn(
diff --git a/synapse/storage/databases/main/keys.py b/synapse/storage/databases/main/keys.py
index d504323b..0e868078 100644
--- a/synapse/storage/databases/main/keys.py
+++ b/synapse/storage/databases/main/keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd.
#
diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py
index 4f3d1925..c5848681 100644
--- a/synapse/storage/databases/main/media_repository.py
+++ b/synapse/storage/databases/main/media_repository.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
@@ -22,6 +21,9 @@ from synapse.storage.database import DatabasePool
BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD = (
"media_repository_drop_index_wo_method"
)
+BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2 = (
+ "media_repository_drop_index_wo_method_2"
+)
class MediaSortOrder(Enum):
@@ -85,23 +87,35 @@ class MediaRepositoryBackgroundUpdateStore(SQLBaseStore):
unique=True,
)
+ # the original impl of _drop_media_index_without_method was broken (see
+ # https://github.com/matrix-org/synapse/issues/8649), so we replace the original
+ # impl with a no-op and run the fixed migration as
+ # media_repository_drop_index_wo_method_2.
+ self.db_pool.updates.register_noop_background_update(
+ BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD
+ )
self.db_pool.updates.register_background_update_handler(
- BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD,
+ BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2,
self._drop_media_index_without_method,
)
async def _drop_media_index_without_method(self, progress, batch_size):
+ """background update handler which removes the old constraints.
+
+ Note that this is only run on postgres.
+ """
+
def f(txn):
txn.execute(
"ALTER TABLE local_media_repository_thumbnails DROP CONSTRAINT IF EXISTS local_media_repository_thumbn_media_id_thumbnail_width_thum_key"
)
txn.execute(
- "ALTER TABLE remote_media_cache_thumbnails DROP CONSTRAINT IF EXISTS remote_media_repository_thumbn_media_id_thumbnail_width_thum_key"
+ "ALTER TABLE remote_media_cache_thumbnails DROP CONSTRAINT IF EXISTS remote_media_cache_thumbnails_media_origin_media_id_thumbna_key"
)
await self.db_pool.runInteraction("drop_media_indices_without_method", f)
await self.db_pool.updates._end_background_update(
- BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD
+ BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2
)
return 1
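
This is the standard recipe for shipping a corrected migration: the broken update keeps its name but becomes a no-op, so servers that already "completed" it are untouched, while the fixed logic runs under a fresh name that no server has recorded as done. A toy model of why the rename matters:

    # Background updates run only if their name is not yet marked complete.
    completed = {"media_repository_drop_index_wo_method"}  # ran the buggy one

    def should_run(update_name: str) -> bool:
        return update_name not in completed

    assert not should_run("media_repository_drop_index_wo_method")  # skipped
    assert should_run("media_repository_drop_index_wo_method_2")    # runs everywhere
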
diff --git a/synapse/storage/databases/main/metrics.py b/synapse/storage/databases/main/metrics.py
index 614a418a..c3f551d3 100644
--- a/synapse/storage/databases/main/metrics.py
+++ b/synapse/storage/databases/main/metrics.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py
index 757da3d5..fe256382 100644
--- a/synapse/storage/databases/main/monthly_active_users.py
+++ b/synapse/storage/databases/main/monthly_active_users.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py
index 0ff693a3..db22fab2 100644
--- a/synapse/storage/databases/main/presence.py
+++ b/synapse/storage/databases/main/presence.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,16 +12,69 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Dict, List, Tuple
+from typing import TYPE_CHECKING, Dict, List, Tuple
-from synapse.api.presence import UserPresenceState
+from synapse.api.presence import PresenceState, UserPresenceState
+from synapse.replication.tcp.streams import PresenceStream
from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
+from synapse.storage.database import DatabasePool
+from synapse.storage.engines import PostgresEngine
+from synapse.storage.types import Connection
+from synapse.storage.util.id_generators import MultiWriterIdGenerator, StreamIdGenerator
from synapse.util.caches.descriptors import cached, cachedList
+from synapse.util.caches.stream_change_cache import StreamChangeCache
from synapse.util.iterutils import batch_iter
+if TYPE_CHECKING:
+ from synapse.server import HomeServer
+
class PresenceStore(SQLBaseStore):
+ def __init__(
+ self,
+ database: DatabasePool,
+ db_conn: Connection,
+ hs: "HomeServer",
+ ):
+ super().__init__(database, db_conn, hs)
+
+ self._can_persist_presence = (
+ hs.get_instance_name() in hs.config.worker.writers.presence
+ )
+
+ if isinstance(database.engine, PostgresEngine):
+ self._presence_id_gen = MultiWriterIdGenerator(
+ db_conn=db_conn,
+ db=database,
+ stream_name="presence_stream",
+ instance_name=self._instance_name,
+ tables=[("presence_stream", "instance_name", "stream_id")],
+ sequence_name="presence_stream_sequence",
+                writers=hs.config.worker.writers.presence,
+ )
+ else:
+ self._presence_id_gen = StreamIdGenerator(
+ db_conn, "presence_stream", "stream_id"
+ )
+
+ self._presence_on_startup = self._get_active_presence(db_conn)
+
+ presence_cache_prefill, min_presence_val = self.db_pool.get_cache_dict(
+ db_conn,
+ "presence_stream",
+ entity_column="user_id",
+ stream_column="stream_id",
+ max_value=self._presence_id_gen.get_current_token(),
+ )
+ self.presence_stream_cache = StreamChangeCache(
+ "PresenceStreamChangeCache",
+ min_presence_val,
+ prefilled_cache=presence_cache_prefill,
+ )
+
async def update_presence(self, presence_states):
+ assert self._can_persist_presence
+
stream_ordering_manager = self._presence_id_gen.get_next_mult(
len(presence_states)
)
@@ -58,6 +110,7 @@ class PresenceStore(SQLBaseStore):
"last_user_sync_ts": state.last_user_sync_ts,
"status_msg": state.status_msg,
"currently_active": state.currently_active,
+ "instance_name": self._instance_name,
}
for stream_id, state in zip(stream_orderings, presence_states)
],
@@ -217,3 +270,37 @@ class PresenceStore(SQLBaseStore):
def get_current_presence_token(self):
return self._presence_id_gen.get_current_token()
+
+ def _get_active_presence(self, db_conn: Connection):
+ """Fetch non-offline presence from the database so that we can register
+        the appropriate timeouts.
+ """
+
+ sql = (
+ "SELECT user_id, state, last_active_ts, last_federation_update_ts,"
+ " last_user_sync_ts, status_msg, currently_active FROM presence_stream"
+ " WHERE state != ?"
+ )
+
+ txn = db_conn.cursor()
+ txn.execute(sql, (PresenceState.OFFLINE,))
+ rows = self.db_pool.cursor_to_dict(txn)
+ txn.close()
+
+ for row in rows:
+ row["currently_active"] = bool(row["currently_active"])
+
+ return [UserPresenceState(**row) for row in rows]
+
+ def take_presence_startup_info(self):
+ active_on_startup = self._presence_on_startup
+ self._presence_on_startup = None
+ return active_on_startup
+
+ def process_replication_rows(self, stream_name, instance_name, token, rows):
+ if stream_name == PresenceStream.NAME:
+ self._presence_id_gen.advance(instance_name, token)
+ for row in rows:
+ self.presence_stream_cache.entity_has_changed(row.user_id, token)
+ self._get_presence_for_user.invalidate((row.user_id,))
+ return super().process_replication_rows(stream_name, instance_name, token, rows)
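
PresenceStore now owns machinery previously held by DataStore: on PostgreSQL a MultiWriterIdGenerator lets multiple presence writers allocate stream IDs concurrently, while SQLite keeps the single-writer StreamIdGenerator, and process_replication_rows keeps follower workers coherent by advancing the token and invalidating per-user cache entries. A toy model of that follower-side bookkeeping (row shape hypothetical):

    from collections import namedtuple

    PresenceRow = namedtuple("PresenceRow", ["user_id"])

    class PresenceFollower:
        def __init__(self):
            self.current_token = 0
            self.cached = {}  # user_id -> presence state

        def process_rows(self, token: int, rows) -> None:
            self.current_token = max(self.current_token, token)
            for row in rows:
                # Drop the stale entry; the next read refetches from the DB.
                self.cached.pop(row.user_id, None)

    f = PresenceFollower()
    f.cached["@alice:hs"] = "online"
    f.process_rows(42, [PresenceRow("@alice:hs")])
    assert f.current_token == 42 and "@alice:hs" not in f.cached
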
diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py
index ba01d310..9b4e95e1 100644
--- a/synapse/storage/databases/main/profile.py
+++ b/synapse/storage/databases/main/profile.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py
index 41f4fe7f..8f83748b 100644
--- a/synapse/storage/databases/main/purge_events.py
+++ b/synapse/storage/databases/main/purge_events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py
index 9e58dc0e..db521763 100644
--- a/synapse/storage/databases/main/push_rule.py
+++ b/synapse/storage/databases/main/push_rule.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py
index c65558c2..b48fe086 100644
--- a/synapse/storage/databases/main/pusher.py
+++ b/synapse/storage/databases/main/pusher.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index 43c852c9..3647276a 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index 90a8f664..6e5ee557 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019,2020 The Matrix.org Foundation C.I.C.
@@ -92,13 +91,25 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
id_column=None,
)
- self._account_validity = hs.config.account_validity
- if hs.config.run_background_tasks and self._account_validity.enabled:
- self._clock.call_later(
- 0.0,
- self._set_expiration_date_when_missing,
+ self._account_validity_enabled = (
+ hs.config.account_validity.account_validity_enabled
+ )
+ self._account_validity_period = None
+ self._account_validity_startup_job_max_delta = None
+ if self._account_validity_enabled:
+ self._account_validity_period = (
+ hs.config.account_validity.account_validity_period
+ )
+ self._account_validity_startup_job_max_delta = (
+ hs.config.account_validity.account_validity_startup_job_max_delta
)
+ if hs.config.run_background_tasks:
+ self._clock.call_later(
+ 0.0,
+ self._set_expiration_date_when_missing,
+ )
+
# Create a background job for culling expired 3PID validity tokens
if hs.config.run_background_tasks:
self._clock.looping_call(
@@ -195,6 +206,7 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
expiration_ts: int,
email_sent: bool,
renewal_token: Optional[str] = None,
+ token_used_ts: Optional[int] = None,
) -> None:
"""Updates the account validity properties of the given account, with the
given values.
@@ -208,6 +220,8 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
period.
renewal_token: Renewal token the user can use to extend the validity
of their account. Defaults to no token.
+ token_used_ts: A timestamp of when the current token was used to renew
+ the account.
"""
def set_account_validity_for_user_txn(txn):
@@ -219,6 +233,7 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
"expiration_ts_ms": expiration_ts,
"email_sent": email_sent,
"renewal_token": renewal_token,
+ "token_used_ts_ms": token_used_ts,
},
)
self._invalidate_cache_and_stream(
@@ -232,7 +247,7 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
async def set_renewal_token_for_user(
self, user_id: str, renewal_token: str
) -> None:
- """Defines a renewal token for a given user.
+ """Defines a renewal token for a given user, and clears the token_used timestamp.
Args:
user_id: ID of the user to set the renewal token for.
@@ -245,26 +260,40 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
await self.db_pool.simple_update_one(
table="account_validity",
keyvalues={"user_id": user_id},
- updatevalues={"renewal_token": renewal_token},
+ updatevalues={"renewal_token": renewal_token, "token_used_ts_ms": None},
desc="set_renewal_token_for_user",
)
- async def get_user_from_renewal_token(self, renewal_token: str) -> str:
- """Get a user ID from a renewal token.
+ async def get_user_from_renewal_token(
+ self, renewal_token: str
+ ) -> Tuple[str, int, Optional[int]]:
+ """Get a user ID and renewal status from a renewal token.
Args:
renewal_token: The renewal token to perform the lookup with.
Returns:
- The ID of the user to which the token belongs.
+            A tuple containing the following values:
+            * The ID of the user to which the token belongs.
+            * An int representing the user's expiry timestamp as milliseconds since the
+                epoch, or 0 if the token was invalid.
+            * An optional int representing the timestamp of when the user last renewed
+                their account, as milliseconds since the epoch. None if the account
+                has not been renewed using the current token yet.
"""
- return await self.db_pool.simple_select_one_onecol(
+ ret_dict = await self.db_pool.simple_select_one(
table="account_validity",
keyvalues={"renewal_token": renewal_token},
- retcol="user_id",
+ retcols=["user_id", "expiration_ts_ms", "token_used_ts_ms"],
desc="get_user_from_renewal_token",
)
+ return (
+ ret_dict["user_id"],
+ ret_dict["expiration_ts_ms"],
+ ret_dict["token_used_ts_ms"],
+ )
+
async def get_renewal_token_for_user(self, user_id: str) -> str:
"""Get the renewal token associated with a given user ID.
@@ -303,7 +332,7 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
"get_users_expiring_soon",
select_users_txn,
self._clock.time_msec(),
- self.config.account_validity.renew_at,
+            self.config.account_validity.account_validity_renew_at,
)
async def set_renewal_mail_status(self, user_id: str, email_sent: bool) -> None:
@@ -965,11 +994,11 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
delta equal to 10% of the validity period.
"""
now_ms = self._clock.time_msec()
- expiration_ts = now_ms + self._account_validity.period
+ expiration_ts = now_ms + self._account_validity_period
if use_delta:
expiration_ts = self.rand.randrange(
- expiration_ts - self._account_validity.startup_job_max_delta,
+ expiration_ts - self._account_validity_startup_job_max_delta,
expiration_ts,
)
@@ -1413,7 +1442,7 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
except self.database_engine.module.IntegrityError:
raise StoreError(400, "User ID already taken.", errcode=Codes.USER_IN_USE)
- if self._account_validity.enabled:
+ if self._account_validity_enabled:
self.set_expiration_date_for_user_txn(txn, user_id)
if create_profile_with_displayname:
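The three-tuple now returned by get_user_from_renewal_token lets callers make renewals idempotent: a token that has already been used can be detected via token_used_ts and answered with the expiry the earlier renewal produced. A minimal sketch of such a caller, assuming only the store methods shown in the hunks above (this is not Synapse's actual renewal handler):

    from typing import Tuple

    async def renew_account(
        store, renewal_token: str, validity_period_ms: int, now_ms: int
    ) -> Tuple[bool, str, int]:
        user_id, expiration_ts, token_used_ts = await store.get_user_from_renewal_token(
            renewal_token
        )
        if token_used_ts is not None:
            # The token was already used: don't extend again, just report
            # the expiry that the earlier renewal produced.
            return False, user_id, expiration_ts
        new_expiration_ts = now_ms + validity_period_ms
        await store.set_account_validity_for_user(
            user_id=user_id,
            expiration_ts=new_expiration_ts,
            email_sent=False,
            renewal_token=renewal_token,
            token_used_ts=now_ms,
        )
        return True, user_id, new_expiration_ts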
diff --git a/synapse/storage/databases/main/rejections.py b/synapse/storage/databases/main/rejections.py
index 1e361aaa..167318b3 100644
--- a/synapse/storage/databases/main/rejections.py
+++ b/synapse/storage/databases/main/rejections.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py
index 5cd61547..2bbf6d6a 100644
--- a/synapse/storage/databases/main/relations.py
+++ b/synapse/storage/databases/main/relations.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
index 9cbcd530..5f38634f 100644
--- a/synapse/storage/databases/main/room.py
+++ b/synapse/storage/databases/main/room.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
@@ -521,13 +520,11 @@ class RoomWorkerStore(SQLBaseStore):
)
@cached(max_entries=10000)
- async def get_ratelimit_for_user(self, user_id):
- """Check if there are any overrides for ratelimiting for the given
- user
+ async def get_ratelimit_for_user(self, user_id: str) -> Optional[RatelimitOverride]:
+ """Check if there are any overrides for ratelimiting for the given user
Args:
- user_id (str)
-
+ user_id: user ID of the user
Returns:
RatelimitOverride if there is an override, else None. If the contents
             of RatelimitOverride are None or 0 then ratelimiting has been
@@ -549,6 +546,62 @@ class RoomWorkerStore(SQLBaseStore):
else:
return None
+ async def set_ratelimit_for_user(
+ self, user_id: str, messages_per_second: int, burst_count: int
+ ) -> None:
+        """Sets an overridden ratelimit for the given user.
+ Args:
+ user_id: user ID of the user
+ messages_per_second: The number of actions that can be performed in a second.
+            burst_count: How many actions can be performed before being limited.
+ """
+
+ def set_ratelimit_txn(txn):
+ self.db_pool.simple_upsert_txn(
+ txn,
+ table="ratelimit_override",
+ keyvalues={"user_id": user_id},
+ values={
+ "messages_per_second": messages_per_second,
+ "burst_count": burst_count,
+ },
+ )
+
+ self._invalidate_cache_and_stream(
+ txn, self.get_ratelimit_for_user, (user_id,)
+ )
+
+ await self.db_pool.runInteraction("set_ratelimit", set_ratelimit_txn)
+
+ async def delete_ratelimit_for_user(self, user_id: str) -> None:
+ """Delete an overridden ratelimit for a user.
+ Args:
+ user_id: user ID of the user
+ """
+
+ def delete_ratelimit_txn(txn):
+ row = self.db_pool.simple_select_one_txn(
+ txn,
+ table="ratelimit_override",
+ keyvalues={"user_id": user_id},
+ retcols=["user_id"],
+ allow_none=True,
+ )
+
+ if not row:
+ return
+
+ # They are there, delete them.
+ self.db_pool.simple_delete_one_txn(
+ txn, "ratelimit_override", keyvalues={"user_id": user_id}
+ )
+
+ self._invalidate_cache_and_stream(
+ txn, self.get_ratelimit_for_user, (user_id,)
+ )
+
+ await self.db_pool.runInteraction("delete_ratelimit", delete_ratelimit_txn)
+
@cached()
async def get_retention_policy_for_room(self, room_id):
"""Get the retention policy for a given room.
diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py
index a9216ca9..2a8532f8 100644
--- a/synapse/storage/databases/main/roommember.py
+++ b/synapse/storage/databases/main/roommember.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
@@ -14,7 +13,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import TYPE_CHECKING, Dict, FrozenSet, Iterable, List, Optional, Set, Tuple
+from typing import (
+ TYPE_CHECKING,
+ Collection,
+ Dict,
+ FrozenSet,
+ Iterable,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Union,
+)
+
+import attr
from synapse.api.constants import EventTypes, Membership
from synapse.events import EventBase
@@ -34,7 +46,7 @@ from synapse.storage.roommember import (
ProfileInfo,
RoomsForUser,
)
-from synapse.types import Collection, PersistedEventPosition, get_domain_from_id
+from synapse.types import PersistedEventPosition, StateMap, get_domain_from_id
from synapse.util.async_helpers import Linearizer
from synapse.util.caches import intern_string
from synapse.util.caches.descriptors import _CacheContext, cached, cachedList
@@ -54,6 +66,10 @@ class RoomMemberWorkerStore(EventsWorkerStore):
def __init__(self, database: DatabasePool, db_conn, hs):
super().__init__(database, db_conn, hs)
+ # Used by `_get_joined_hosts` to ensure only one thing mutates the cache
+ # at a time. Keyed by room_id.
+ self._joined_host_linearizer = Linearizer("_JoinedHostsCache")
+
# Is the current_state_events.membership up to date? Or is the
# background update still running?
self._current_state_events_membership_up_to_date = False
@@ -174,6 +190,33 @@ class RoomMemberWorkerStore(EventsWorkerStore):
txn.execute(sql, (room_id, Membership.JOIN))
return [r[0] for r in txn]
+ @cached(max_entries=100000, iterable=True)
+ async def get_users_in_room_with_profiles(
+ self, room_id: str
+ ) -> Dict[str, ProfileInfo]:
+ """Get a mapping from user ID to profile information for all users in a given room.
+
+ Args:
+ room_id: The ID of the room to retrieve the users of.
+
+ Returns:
+ A mapping from user ID to ProfileInfo.
+ """
+
+ def _get_users_in_room_with_profiles(txn) -> Dict[str, ProfileInfo]:
+ sql = """
+ SELECT user_id, display_name, avatar_url FROM room_memberships
+ WHERE room_id = ? AND membership = ?
+ """
+ txn.execute(sql, (room_id, Membership.JOIN))
+
+ return {r[0]: ProfileInfo(display_name=r[1], avatar_url=r[2]) for r in txn}
+
+ return await self.db_pool.runInteraction(
+ "get_users_in_room_with_profiles",
+ _get_users_in_room_with_profiles,
+ )
+
@cached(max_entries=100000)
async def get_room_summary(self, room_id: str) -> Dict[str, MemberSummary]:
"""Get the details of a room roughly suitable for use by the room
@@ -704,19 +747,82 @@ class RoomMemberWorkerStore(EventsWorkerStore):
@cached(num_args=2, max_entries=10000, iterable=True)
async def _get_joined_hosts(
- self, room_id, state_group, current_state_ids, state_entry
- ):
- # We don't use `state_group`, its there so that we can cache based
- # on it. However, its important that its never None, since two current_state's
- # with a state_group of None are likely to be different.
+ self,
+ room_id: str,
+ state_group: int,
+ current_state_ids: StateMap[str],
+ state_entry: "_StateCacheEntry",
+ ) -> FrozenSet[str]:
+        # We don't use `state_group`, it's there so that we can cache based on
+        # it. However, it's important that it's never None, since two
+        # current_states with a state_group of None are likely to be different.
+ #
+ # The `state_group` must match the `state_entry.state_group` (if not None).
assert state_group is not None
-
+ assert state_entry.state_group is None or state_entry.state_group == state_group
+
+ # We use a secondary cache of previous work to allow us to build up the
+ # joined hosts for the given state group based on previous state groups.
+ #
+ # We cache one object per room containing the results of the last state
+ # group we got joined hosts for. The idea is that generally
+ # `get_joined_hosts` is called with the "current" state group for the
+ # room, and so consecutive calls will be for consecutive state groups
+ # which point to the previous state group.
cache = await self._get_joined_hosts_cache(room_id)
- return await cache.get_destinations(state_entry)
+
+ # If the state group in the cache matches, we already have the data we need.
+ if state_entry.state_group == cache.state_group:
+ return frozenset(cache.hosts_to_joined_users)
+
+ # Since we'll mutate the cache we need to lock.
+ with (await self._joined_host_linearizer.queue(room_id)):
+ if state_entry.state_group == cache.state_group:
+ # Same state group, so nothing to do. We've already checked for
+ # this above, but the cache may have changed while waiting on
+ # the lock.
+ pass
+ elif state_entry.prev_group == cache.state_group:
+ # The cached work is for the previous state group, so we work out
+ # the delta.
+ for (typ, state_key), event_id in state_entry.delta_ids.items():
+ if typ != EventTypes.Member:
+ continue
+
+ host = intern_string(get_domain_from_id(state_key))
+ user_id = state_key
+ known_joins = cache.hosts_to_joined_users.setdefault(host, set())
+
+ event = await self.get_event(event_id)
+ if event.membership == Membership.JOIN:
+ known_joins.add(user_id)
+ else:
+ known_joins.discard(user_id)
+
+ if not known_joins:
+ cache.hosts_to_joined_users.pop(host, None)
+ else:
+ # The cache doesn't match the state group or prev state group,
+ # so we calculate the result from first principles.
+ joined_users = await self.get_joined_users_from_state(
+ room_id, state_entry
+ )
+
+ cache.hosts_to_joined_users = {}
+ for user_id in joined_users:
+ host = intern_string(get_domain_from_id(user_id))
+ cache.hosts_to_joined_users.setdefault(host, set()).add(user_id)
+
+ if state_entry.state_group:
+ cache.state_group = state_entry.state_group
+ else:
+ cache.state_group = object()
+
+ return frozenset(cache.hosts_to_joined_users)
@cached(max_entries=10000)
def _get_joined_hosts_cache(self, room_id: str) -> "_JoinedHostsCache":
- return _JoinedHostsCache(self, room_id)
+ return _JoinedHostsCache()
@cached(num_args=2)
async def did_forget(self, user_id: str, room_id: str) -> bool:
@@ -1026,71 +1132,18 @@ class RoomMemberStore(RoomMemberWorkerStore, RoomMemberBackgroundUpdateStore):
await self.db_pool.runInteraction("forget_membership", f)
+@attr.s(slots=True)
class _JoinedHostsCache:
- """Cache for joined hosts in a room that is optimised to handle updates
- via state deltas.
- """
-
- def __init__(self, store, room_id):
- self.store = store
- self.room_id = room_id
-
- self.hosts_to_joined_users = {}
-
- self.state_group = object()
+    """The cached data used by `_get_joined_hosts_cache`."""
- self.linearizer = Linearizer("_JoinedHostsCache")
+ # Dict of host to the set of their users in the room at the state group.
+ hosts_to_joined_users = attr.ib(type=Dict[str, Set[str]], factory=dict)
- self._len = 0
-
- async def get_destinations(self, state_entry: "_StateCacheEntry") -> Set[str]:
- """Get set of destinations for a state entry
-
- Args:
- state_entry
-
- Returns:
- The destinations as a set.
- """
- if state_entry.state_group == self.state_group:
- return frozenset(self.hosts_to_joined_users)
-
- with (await self.linearizer.queue(())):
- if state_entry.state_group == self.state_group:
- pass
- elif state_entry.prev_group == self.state_group:
- for (typ, state_key), event_id in state_entry.delta_ids.items():
- if typ != EventTypes.Member:
- continue
-
- host = intern_string(get_domain_from_id(state_key))
- user_id = state_key
- known_joins = self.hosts_to_joined_users.setdefault(host, set())
-
- event = await self.store.get_event(event_id)
- if event.membership == Membership.JOIN:
- known_joins.add(user_id)
- else:
- known_joins.discard(user_id)
-
- if not known_joins:
- self.hosts_to_joined_users.pop(host, None)
- else:
- joined_users = await self.store.get_joined_users_from_state(
- self.room_id, state_entry
- )
-
- self.hosts_to_joined_users = {}
- for user_id in joined_users:
- host = intern_string(get_domain_from_id(user_id))
- self.hosts_to_joined_users.setdefault(host, set()).add(user_id)
-
- if state_entry.state_group:
- self.state_group = state_entry.state_group
- else:
- self.state_group = object()
- self._len = sum(len(v) for v in self.hosts_to_joined_users.values())
- return frozenset(self.hosts_to_joined_users)
+ # The state group `hosts_to_joined_users` is derived from. Will be an object
+ # if the instance is newly created or if the state is not based on a state
+    # group. (An object is used as a sentinel value to ensure that it is never
+    # equal to anything else.)
+ state_group = attr.ib(type=Union[object, int], factory=object)
def __len__(self):
- return self._len
+ return sum(len(v) for v in self.hosts_to_joined_users.values())
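The rewritten _get_joined_hosts keeps one cache object per room holding the hosts-to-users mapping for the last state group it answered; when the next call's prev_group points at that state group, only the membership deltas need applying. A self-contained toy model of that delta-update pattern, with plain dicts and sets standing in for Synapse's state entries and events:

    from typing import Dict, Set

    class ToyJoinedHostsCache:
        """Stand-in for _JoinedHostsCache: the answer for one state group."""
        def __init__(self) -> None:
            self.hosts_to_joined_users: Dict[str, Set[str]] = {}
            self.state_group: object = object()  # sentinel equal to nothing

    def apply_membership_delta(
        cache: ToyJoinedHostsCache,
        state_group: int,
        prev_group: object,
        joins: Set[str],
        leaves: Set[str],
    ) -> None:
        # Only valid when the cache holds the immediately preceding group.
        assert cache.state_group == prev_group
        for user_id in joins:
            host = user_id.split(":", 1)[1]
            cache.hosts_to_joined_users.setdefault(host, set()).add(user_id)
        for user_id in leaves:
            host = user_id.split(":", 1)[1]
            members = cache.hosts_to_joined_users.get(host, set())
            members.discard(user_id)
            if not members:
                cache.hosts_to_joined_users.pop(host, None)
        cache.state_group = state_group

    cache = ToyJoinedHostsCache()
    cache.state_group = 1
    apply_membership_delta(cache, 2, 1, joins={"@alice:a.example"}, leaves=set())
    assert cache.hosts_to_joined_users == {"a.example": {"@alice:a.example"}}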
diff --git a/synapse/storage/databases/main/schema/delta/50/make_event_content_nullable.py b/synapse/storage/databases/main/schema/delta/50/make_event_content_nullable.py
index b1684a84..acd6ad1e 100644
--- a/synapse/storage/databases/main/schema/delta/50/make_event_content_nullable.py
+++ b/synapse/storage/databases/main/schema/delta/50/make_event_content_nullable.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/schema/delta/57/local_current_membership.py b/synapse/storage/databases/main/schema/delta/57/local_current_membership.py
index 44917f0a..66989222 100644
--- a/synapse/storage/databases/main/schema/delta/57/local_current_membership.py
+++ b/synapse/storage/databases/main/schema/delta/57/local_current_membership.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/schema/delta/59/11drop_thumbnail_constraint.sql.postgres b/synapse/storage/databases/main/schema/delta/59/11drop_thumbnail_constraint.sql.postgres
new file mode 100644
index 00000000..54c1bca3
--- /dev/null
+++ b/synapse/storage/databases/main/schema/delta/59/11drop_thumbnail_constraint.sql.postgres
@@ -0,0 +1,22 @@
+/* Copyright 2021 The Matrix.org Foundation C.I.C
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- drop old constraints on remote_media_cache_thumbnails
+--
+-- This was originally part of 57.07, but it was done wrong, per
+-- https://github.com/matrix-org/synapse/issues/8649, so we do it again.
+INSERT INTO background_updates (ordering, update_name, progress_json, depends_on) VALUES
+ (5911, 'media_repository_drop_index_wo_method_2', '{}', 'remote_media_repository_thumbnails_method_idx');
+
diff --git a/synapse/storage/databases/main/schema/delta/59/12account_validity_token_used_ts_ms.sql b/synapse/storage/databases/main/schema/delta/59/12account_validity_token_used_ts_ms.sql
new file mode 100644
index 00000000..4836dac1
--- /dev/null
+++ b/synapse/storage/databases/main/schema/delta/59/12account_validity_token_used_ts_ms.sql
@@ -0,0 +1,18 @@
+/* Copyright 2020 The Matrix.org Foundation C.I.C.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- Track when users renew their account using the value of the 'renewal_token' column.
+-- This field should be set to NULL after a fresh token is generated.
+ALTER TABLE account_validity ADD token_used_ts_ms BIGINT;
diff --git a/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance.sql b/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance.sql
new file mode 100644
index 00000000..b6ba0bda
--- /dev/null
+++ b/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance.sql
@@ -0,0 +1,18 @@
+/* Copyright 2021 The Matrix.org Foundation C.I.C
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- Add a column to specify which instance wrote the row. Historic rows have
+-- `NULL`, which indicates that the master instance wrote them.
+ALTER TABLE presence_stream ADD COLUMN instance_name TEXT;
diff --git a/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance_seq.sql.postgres b/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance_seq.sql.postgres
new file mode 100644
index 00000000..02b182ad
--- /dev/null
+++ b/synapse/storage/databases/main/schema/delta/59/12presence_stream_instance_seq.sql.postgres
@@ -0,0 +1,20 @@
+/* Copyright 2021 The Matrix.org Foundation C.I.C
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+CREATE SEQUENCE IF NOT EXISTS presence_stream_sequence;
+
+SELECT setval('presence_stream_sequence', (
+ SELECT COALESCE(MAX(stream_id), 1) FROM presence_stream
+));
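On PostgreSQL this sequence is what lets presence writers other than the master allocate stream IDs safely: every nextval call returns a distinct, increasing value regardless of which worker asks. A minimal illustration, assuming a psycopg2 connection to the Synapse database (the DSN is hypothetical):

    import psycopg2

    conn = psycopg2.connect("dbname=synapse")  # hypothetical DSN
    with conn, conn.cursor() as cur:
        cur.execute("SELECT nextval('presence_stream_sequence')")
        (stream_id,) = cur.fetchone()
        print("next presence stream id:", stream_id)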
diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py
index f5e7d9ef..6480d5a9 100644
--- a/synapse/storage/databases/main/search.py
+++ b/synapse/storage/databases/main/search.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,7 +15,7 @@
import logging
import re
from collections import namedtuple
-from typing import List, Optional, Set
+from typing import Collection, List, Optional, Set
from synapse.api.errors import SynapseError
from synapse.events import EventBase
@@ -24,7 +23,6 @@ from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_cla
from synapse.storage.database import DatabasePool
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
-from synapse.types import Collection
logger = logging.getLogger(__name__)
diff --git a/synapse/storage/databases/main/signatures.py b/synapse/storage/databases/main/signatures.py
index c8c67953..ab2159c2 100644
--- a/synapse/storage/databases/main/signatures.py
+++ b/synapse/storage/databases/main/signatures.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py
index a7f37173..1757064a 100644
--- a/synapse/storage/databases/main/state.py
+++ b/synapse/storage/databases/main/state.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
@@ -190,7 +189,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
# FIXME: how should this be cached?
async def get_filtered_current_state_ids(
- self, room_id: str, state_filter: StateFilter = StateFilter.all()
+ self, room_id: str, state_filter: Optional[StateFilter] = None
) -> StateMap[str]:
"""Get the current state event of a given type for a room based on the
current_state_events table. This may not be as up-to-date as the result
@@ -205,7 +204,9 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
Map from type/state_key to event ID.
"""
- where_clause, where_args = state_filter.make_sql_filter_clause()
+ where_clause, where_args = (
+ state_filter or StateFilter.all()
+ ).make_sql_filter_clause()
if not where_clause:
# We delegate to the cached version
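The Optional[StateFilter] = None signatures here (and in the storage changes below) replace defaults of StateFilter.all(), which Python evaluated once at function definition time, leaving every call sharing one object -- risky if that object is ever mutated. The `state_filter or StateFilter.all()` idiom builds the filter per call instead. A toy demonstration of the underlying pitfall with a mutable default:

    def broken(acc=[]):  # the default list is created once and shared
        acc.append(1)
        return len(acc)

    def fixed(acc=None):  # a fresh list is built on every call
        acc = acc if acc is not None else []
        acc.append(1)
        return len(acc)

    assert broken() == 1 and broken() == 2  # state leaks across calls
    assert fixed() == 1 and fixed() == 1    # no leak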
diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py
index 0dbb501f..bff7d040 100644
--- a/synapse/storage/databases/main/state_deltas.py
+++ b/synapse/storage/databases/main/state_deltas.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/stats.py b/synapse/storage/databases/main/stats.py
index 1c99393c..ae9f8809 100644
--- a/synapse/storage/databases/main/stats.py
+++ b/synapse/storage/databases/main/stats.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018, 2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
@@ -66,18 +65,37 @@ TYPE_TO_ORIGIN_TABLE = {"room": ("rooms", "room_id"), "user": ("users", "name")}
class UserSortOrder(Enum):
"""
Enum to define the sorting method used when returning users
- with get_users_media_usage_paginate
+ with get_users_paginate in __init__.py
+ and get_users_media_usage_paginate in stats.py
- MEDIA_LENGTH = ordered by size of uploaded media. Smallest to largest.
- MEDIA_COUNT = ordered by number of uploaded media. Smallest to largest.
+    Note that this class cannot simply be moved to __init__.py: doing so raises
+    `builtins.ImportError`, most likely due to a circular import.
+
+ MEDIA_LENGTH = ordered by size of uploaded media.
+ MEDIA_COUNT = ordered by number of uploaded media.
USER_ID = ordered alphabetically by `user_id`.
+    NAME = ordered alphabetically by `user_id`. This is for compatibility reasons,
+    as the user_id is returned in the `name` field of the list users admin API response.
DISPLAYNAME = ordered alphabetically by `displayname`
+ GUEST = ordered by `is_guest`
+ ADMIN = ordered by `admin`
+ DEACTIVATED = ordered by `deactivated`
+ USER_TYPE = ordered alphabetically by `user_type`
+ AVATAR_URL = ordered alphabetically by `avatar_url`
+ SHADOW_BANNED = ordered by `shadow_banned`
"""
MEDIA_LENGTH = "media_length"
MEDIA_COUNT = "media_count"
USER_ID = "user_id"
+ NAME = "name"
DISPLAYNAME = "displayname"
+ GUEST = "is_guest"
+ ADMIN = "admin"
+ DEACTIVATED = "deactivated"
+ USER_TYPE = "user_type"
+ AVATAR_URL = "avatar_url"
+ SHADOW_BANNED = "shadow_banned"
class StatsStore(StateDeltasStore):
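Because each enum member's value is a column name, the paginated user queries can interpolate it directly into their ORDER BY clause. A hypothetical sketch of that shape (the real queries live in __init__.py and stats.py):

    from enum import Enum

    class SortExample(Enum):  # stand-in for UserSortOrder above
        NAME = "name"
        SHADOW_BANNED = "shadow_banned"

    def order_by_clause(sort_order: Enum, descending: bool = False) -> str:
        # Enum values are trusted column-name constants, not user input,
        # so interpolating them into SQL here is safe.
        return "ORDER BY %s %s" % (sort_order.value, "DESC" if descending else "ASC")

    assert order_by_clause(SortExample.NAME) == "ORDER BY name ASC"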
diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py
index 91f8abb6..7581c7d3 100644
--- a/synapse/storage/databases/main/stream.py
+++ b/synapse/storage/databases/main/stream.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
@@ -38,7 +37,7 @@ what sort order was used:
import abc
import logging
from collections import namedtuple
-from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Collection, Dict, List, Optional, Set, Tuple
from twisted.internet import defer
@@ -54,7 +53,7 @@ from synapse.storage.database import (
from synapse.storage.databases.main.events_worker import EventsWorkerStore
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
from synapse.storage.util.id_generators import MultiWriterIdGenerator
-from synapse.types import Collection, PersistedEventPosition, RoomStreamToken
+from synapse.types import PersistedEventPosition, RoomStreamToken
from synapse.util.caches.descriptors import cached
from synapse.util.caches.stream_change_cache import StreamChangeCache
diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py
index 50067eab..1d62c614 100644
--- a/synapse/storage/databases/main/tags.py
+++ b/synapse/storage/databases/main/tags.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py
index b7072f1f..82335e7a 100644
--- a/synapse/storage/databases/main/transactions.py
+++ b/synapse/storage/databases/main/transactions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/ui_auth.py b/synapse/storage/databases/main/ui_auth.py
index 5473ec14..22c05cdd 100644
--- a/synapse/storage/databases/main/ui_auth.py
+++ b/synapse/storage/databases/main/ui_auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py
index 1026f321..7a082fdd 100644
--- a/synapse/storage/databases/main/user_directory.py
+++ b/synapse/storage/databases/main/user_directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/main/user_erasure_store.py b/synapse/storage/databases/main/user_erasure_store.py
index f9575b1f..acf6b2fb 100644
--- a/synapse/storage/databases/main/user_erasure_store.py
+++ b/synapse/storage/databases/main/user_erasure_store.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/state/__init__.py b/synapse/storage/databases/state/__init__.py
index c90d0228..e5100d61 100644
--- a/synapse/storage/databases/state/__init__.py
+++ b/synapse/storage/databases/state/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/databases/state/bg_updates.py b/synapse/storage/databases/state/bg_updates.py
index 1fd333b7..c2891cb0 100644
--- a/synapse/storage/databases/state/bg_updates.py
+++ b/synapse/storage/databases/state/bg_updates.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,6 +13,7 @@
# limitations under the License.
import logging
+from typing import Optional
from synapse.storage._base import SQLBaseStore
from synapse.storage.database import DatabasePool
@@ -73,8 +73,10 @@ class StateGroupBackgroundUpdateStore(SQLBaseStore):
return count
def _get_state_groups_from_groups_txn(
- self, txn, groups, state_filter=StateFilter.all()
+ self, txn, groups, state_filter: Optional[StateFilter] = None
):
+ state_filter = state_filter or StateFilter.all()
+
results = {group: {} for group in groups}
where_clause, where_args = state_filter.make_sql_filter_clause()
diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py
index 97ec65f7..e38461ad 100644
--- a/synapse/storage/databases/state/store.py
+++ b/synapse/storage/databases/state/store.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +14,7 @@
import logging
from collections import namedtuple
-from typing import Dict, Iterable, List, Set, Tuple
+from typing import Dict, Iterable, List, Optional, Set, Tuple
from synapse.api.constants import EventTypes
from synapse.storage._base import SQLBaseStore
@@ -210,7 +209,7 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
return state_filter.filter_state(state_dict_ids), not missing_types
async def _get_state_for_groups(
- self, groups: Iterable[int], state_filter: StateFilter = StateFilter.all()
+ self, groups: Iterable[int], state_filter: Optional[StateFilter] = None
) -> Dict[int, MutableStateMap[str]]:
"""Gets the state at each of a list of state groups, optionally
filtering by type/state_key
@@ -223,6 +222,7 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
Returns:
Dict of state group to state map.
"""
+ state_filter = state_filter or StateFilter.all()
member_filter, non_member_filter = state_filter.get_member_split()
diff --git a/synapse/storage/engines/__init__.py b/synapse/storage/engines/__init__.py
index d15ccfac..9abc0204 100644
--- a/synapse/storage/engines/__init__.py
+++ b/synapse/storage/engines/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py
index cca839c7..1882bfd9 100644
--- a/synapse/storage/engines/_base.py
+++ b/synapse/storage/engines/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -44,14 +43,6 @@ class BaseDatabaseEngine(Generic[ConnectionType], metaclass=abc.ABCMeta):
@property
@abc.abstractmethod
- def supports_tuple_comparison(self) -> bool:
- """
- Do we support comparing tuples, i.e. `(a, b) > (c, d)`?
- """
- ...
-
- @property
- @abc.abstractmethod
def supports_using_any_list(self) -> bool:
"""
Do we support using `a = ANY(?)` and passing a list
diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py
index 80a3558a..21411c5f 100644
--- a/synapse/storage/engines/postgres.py
+++ b/synapse/storage/engines/postgres.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -47,8 +46,8 @@ class PostgresEngine(BaseDatabaseEngine):
self._version = db_conn.server_version
# Are we on a supported PostgreSQL version?
- if not allow_outdated_version and self._version < 90500:
- raise RuntimeError("Synapse requires PostgreSQL 9.5+ or above.")
+ if not allow_outdated_version and self._version < 90600:
+ raise RuntimeError("Synapse requires PostgreSQL 9.6 or above.")
with db_conn.cursor() as txn:
txn.execute("SHOW SERVER_ENCODING")
@@ -130,13 +129,6 @@ class PostgresEngine(BaseDatabaseEngine):
return True
@property
- def supports_tuple_comparison(self):
- """
- Do we support comparing tuples, i.e. `(a, b) > (c, d)`?
- """
- return True
-
- @property
def supports_using_any_list(self):
"""Do we support using `a = ANY(?)` and passing a list"""
return True
diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py
index b87e7798..5fe1b205 100644
--- a/synapse/storage/engines/sqlite.py
+++ b/synapse/storage/engines/sqlite.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -57,14 +56,6 @@ class Sqlite3Engine(BaseDatabaseEngine["sqlite3.Connection"]):
return self.module.sqlite_version_info >= (3, 24, 0)
@property
- def supports_tuple_comparison(self):
- """
- Do we support comparing tuples, i.e. `(a, b) > (c, d)`? This requires
- SQLite 3.15+.
- """
- return self.module.sqlite_version_info >= (3, 15, 0)
-
- @property
def supports_using_any_list(self):
"""Do we support using `a = ANY(?)` and passing a list"""
return False
@@ -72,8 +63,11 @@ class Sqlite3Engine(BaseDatabaseEngine["sqlite3.Connection"]):
def check_database(self, db_conn, allow_outdated_version: bool = False):
if not allow_outdated_version:
version = self.module.sqlite_version_info
- if version < (3, 11, 0):
- raise RuntimeError("Synapse requires sqlite 3.11 or above.")
+ # Synapse is untested against older SQLite versions, and we don't want
+ # to let users upgrade to a version of Synapse with broken support for their
+ # sqlite version, because it risks leaving them with a half-upgraded db.
+ if version < (3, 22, 0):
+ raise RuntimeError("Synapse requires sqlite 3.22 or above.")
def check_new_database(self, txn):
"""Gets called when setting up a brand new database. This allows us to
diff --git a/synapse/storage/keys.py b/synapse/storage/keys.py
index c03871f3..540adb87 100644
--- a/synapse/storage/keys.py
+++ b/synapse/storage/keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd.
#
diff --git a/synapse/storage/persist_events.py b/synapse/storage/persist_events.py
index 3a0d6fb3..33dc752d 100644
--- a/synapse/storage/persist_events.py
+++ b/synapse/storage/persist_events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -18,7 +17,7 @@
import itertools
import logging
from collections import deque, namedtuple
-from typing import Dict, Iterable, List, Optional, Set, Tuple
+from typing import Collection, Dict, Iterable, List, Optional, Set, Tuple
from prometheus_client import Counter, Histogram
@@ -33,7 +32,6 @@ from synapse.storage.databases import Databases
from synapse.storage.databases.main.events import DeltaState
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import (
- Collection,
PersistedEventPosition,
RoomStreamToken,
StateMap,
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index 6c3c2da5..7a2cbee4 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
@@ -13,12 +12,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import imp
+import importlib.util
import logging
import os
import re
from collections import Counter
-from typing import Generator, Iterable, List, Optional, TextIO, Tuple
+from typing import Collection, Generator, Iterable, List, Optional, TextIO, Tuple
import attr
from typing_extensions import Counter as CounterType
@@ -28,7 +27,6 @@ from synapse.storage.database import LoggingDatabaseConnection
from synapse.storage.engines import BaseDatabaseEngine
from synapse.storage.engines.postgres import PostgresEngine
from synapse.storage.types import Cursor
-from synapse.types import Collection
logger = logging.getLogger(__name__)
@@ -454,8 +452,13 @@ def _upgrade_existing_database(
)
module_name = "synapse.storage.v%d_%s" % (v, root_name)
- with open(absolute_path) as python_file:
- module = imp.load_source(module_name, absolute_path, python_file) # type: ignore
+
+ spec = importlib.util.spec_from_file_location(
+ module_name, absolute_path
+ )
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module) # type: ignore
+
logger.info("Running script %s", relative_path)
module.run_create(cur, database_engine) # type: ignore
if not is_empty:
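imp.load_source relies on the imp module, deprecated since Python 3.4 and eventually removed in 3.12; the importlib.util triple used above is the supported equivalent. The same pattern, standalone:

    import importlib.util

    def load_module_from_path(module_name: str, path: str):
        """importlib.util equivalent of the removed imp.load_source call."""
        spec = importlib.util.spec_from_file_location(module_name, path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)  # runs the file's top-level code
        return module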
diff --git a/synapse/storage/purge_events.py b/synapse/storage/purge_events.py
index ad954990..30669beb 100644
--- a/synapse/storage/purge_events.py
+++ b/synapse/storage/purge_events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py
index f47cec0d..2d5c21ef 100644
--- a/synapse/storage/push_rule.py
+++ b/synapse/storage/push_rule.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/storage/relations.py b/synapse/storage/relations.py
index 2564f34b..c552dbf0 100644
--- a/synapse/storage/relations.py
+++ b/synapse/storage/relations.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py
index d2ff4da6..c34fbf21 100644
--- a/synapse/storage/roommember.py
+++ b/synapse/storage/roommember.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index 2e277a21..cfafba22 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -449,7 +448,7 @@ class StateGroupStorage:
return self.stores.state._get_state_groups_from_groups(groups, state_filter)
async def get_state_for_events(
- self, event_ids: Iterable[str], state_filter: StateFilter = StateFilter.all()
+ self, event_ids: Iterable[str], state_filter: Optional[StateFilter] = None
) -> Dict[str, StateMap[EventBase]]:
"""Given a list of event_ids and type tuples, return a list of state
dicts for each event.
@@ -465,7 +464,7 @@ class StateGroupStorage:
groups = set(event_to_groups.values())
group_to_state = await self.stores.state._get_state_for_groups(
- groups, state_filter
+ groups, state_filter or StateFilter.all()
)
state_event_map = await self.stores.main.get_events(
@@ -485,7 +484,7 @@ class StateGroupStorage:
return {event: event_to_state[event] for event in event_ids}
async def get_state_ids_for_events(
- self, event_ids: Iterable[str], state_filter: StateFilter = StateFilter.all()
+ self, event_ids: Iterable[str], state_filter: Optional[StateFilter] = None
) -> Dict[str, StateMap[str]]:
"""
Get the state dicts corresponding to a list of events, containing the event_ids
@@ -502,7 +501,7 @@ class StateGroupStorage:
groups = set(event_to_groups.values())
group_to_state = await self.stores.state._get_state_for_groups(
- groups, state_filter
+ groups, state_filter or StateFilter.all()
)
event_to_state = {
@@ -513,7 +512,7 @@ class StateGroupStorage:
return {event: event_to_state[event] for event in event_ids}
async def get_state_for_event(
- self, event_id: str, state_filter: StateFilter = StateFilter.all()
+ self, event_id: str, state_filter: Optional[StateFilter] = None
) -> StateMap[EventBase]:
"""
Get the state dict corresponding to a particular event
@@ -525,11 +524,13 @@ class StateGroupStorage:
Returns:
A dict from (type, state_key) -> state_event
"""
- state_map = await self.get_state_for_events([event_id], state_filter)
+ state_map = await self.get_state_for_events(
+ [event_id], state_filter or StateFilter.all()
+ )
return state_map[event_id]
async def get_state_ids_for_event(
- self, event_id: str, state_filter: StateFilter = StateFilter.all()
+ self, event_id: str, state_filter: Optional[StateFilter] = None
) -> StateMap[str]:
"""
Get the state dict corresponding to a particular event
@@ -541,11 +542,13 @@ class StateGroupStorage:
Returns:
A dict from (type, state_key) -> state_event
"""
- state_map = await self.get_state_ids_for_events([event_id], state_filter)
+ state_map = await self.get_state_ids_for_events(
+ [event_id], state_filter or StateFilter.all()
+ )
return state_map[event_id]
def _get_state_for_groups(
- self, groups: Iterable[int], state_filter: StateFilter = StateFilter.all()
+ self, groups: Iterable[int], state_filter: Optional[StateFilter] = None
) -> Awaitable[Dict[int, MutableStateMap[str]]]:
"""Gets the state at each of a list of state groups, optionally
filtering by type/state_key
@@ -558,7 +561,9 @@ class StateGroupStorage:
Returns:
Dict of state group to state map.
"""
- return self.stores.state._get_state_for_groups(groups, state_filter)
+ return self.stores.state._get_state_for_groups(
+ groups, state_filter or StateFilter.all()
+ )
async def store_state_group(
self,
diff --git a/synapse/storage/types.py b/synapse/storage/types.py
index 17291c9d..57f4883b 100644
--- a/synapse/storage/types.py
+++ b/synapse/storage/types.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/util/__init__.py b/synapse/storage/util/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/synapse/storage/util/__init__.py
+++ b/synapse/storage/util/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py
index d4643c4f..b1bd3a52 100644
--- a/synapse/storage/util/id_generators.py
+++ b/synapse/storage/util/id_generators.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,7 +16,7 @@ import logging
import threading
from collections import OrderedDict
from contextlib import contextmanager
-from typing import Dict, List, Optional, Set, Tuple, Union
+from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
import attr
@@ -91,7 +90,14 @@ class StreamIdGenerator:
# ... persist event ...
"""
- def __init__(self, db_conn, table, column, extra_tables=[], step=1):
+ def __init__(
+ self,
+ db_conn,
+ table,
+ column,
+ extra_tables: Iterable[Tuple[str, str]] = (),
+ step=1,
+ ):
assert step != 0
self._lock = threading.Lock()
self._step = step
diff --git a/synapse/storage/util/sequence.py b/synapse/storage/util/sequence.py
index 36a67e70..30b6b8e0 100644
--- a/synapse/storage/util/sequence.py
+++ b/synapse/storage/util/sequence.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/streams/__init__.py b/synapse/streams/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/synapse/streams/__init__.py
+++ b/synapse/streams/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/streams/config.py b/synapse/streams/config.py
index fdda21d1..13d30058 100644
--- a/synapse/streams/config.py
+++ b/synapse/streams/config.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/streams/events.py b/synapse/streams/events.py
index 92fd5d48..20fceaa9 100644
--- a/synapse/streams/events.py
+++ b/synapse/streams/events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/types.py b/synapse/types.py
index b08ce901..e52cd7ff 100644
--- a/synapse/types.py
+++ b/synapse/types.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
@@ -16,13 +15,11 @@
import abc
import re
import string
-import sys
from collections import namedtuple
from typing import (
TYPE_CHECKING,
Any,
Dict,
- Iterable,
Mapping,
MutableMapping,
Optional,
@@ -51,18 +48,6 @@ if TYPE_CHECKING:
from synapse.appservice.api import ApplicationService
from synapse.storage.databases.main import DataStore
-# define a version of typing.Collection that works on python 3.5
-if sys.version_info[:3] >= (3, 6, 0):
- from typing import Collection
-else:
- from typing import Container, Sized
-
- T_co = TypeVar("T_co", covariant=True)
-
- class Collection(Iterable[T_co], Container[T_co], Sized): # type: ignore
- __slots__ = ()
-
-
# Define a state map type from type/state_key to T (usually an event ID or
# event)
T = TypeVar("T")
@@ -214,9 +199,8 @@ def get_localpart_from_id(string):
DS = TypeVar("DS", bound="DomainSpecificString")
-class DomainSpecificString(
- namedtuple("DomainSpecificString", ("localpart", "domain")), metaclass=abc.ABCMeta
-):
+@attr.s(slots=True, frozen=True, repr=False)
+class DomainSpecificString(metaclass=abc.ABCMeta):
"""Common base class among ID/name strings that have a local part and a
domain name, prefixed with a sigil.
@@ -228,11 +212,8 @@ class DomainSpecificString(
SIGIL = abc.abstractproperty() # type: str # type: ignore
- # Deny iteration because it will bite you if you try to create a singleton
- # set by:
- # users = set(user)
- def __iter__(self):
- raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
+ localpart = attr.ib(type=str)
+ domain = attr.ib(type=str)
# Because this class is a namedtuple of strings and booleans, it is deeply
# immutable.
@@ -287,30 +268,35 @@ class DomainSpecificString(
__repr__ = to_string
+@attr.s(slots=True, frozen=True, repr=False)
class UserID(DomainSpecificString):
"""Structure representing a user ID."""
SIGIL = "@"
+@attr.s(slots=True, frozen=True, repr=False)
class RoomAlias(DomainSpecificString):
"""Structure representing a room name."""
SIGIL = "#"
+@attr.s(slots=True, frozen=True, repr=False)
class RoomID(DomainSpecificString):
"""Structure representing a room id. """
SIGIL = "!"
+@attr.s(slots=True, frozen=True, repr=False)
class EventID(DomainSpecificString):
"""Structure representing an event id. """
SIGIL = "$"
+@attr.s(slots=True, frozen=True, repr=False)
class GroupID(DomainSpecificString):
"""Structure representing a group ID."""
diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py
index 517686f0..b69f562c 100644
--- a/synapse/util/__init__.py
+++ b/synapse/util/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
import json
import logging
import re
+from typing import Pattern
import attr
from frozendict import frozendict
@@ -27,6 +27,9 @@ from synapse.logging import context
logger = logging.getLogger(__name__)
+_WILDCARD_RUN = re.compile(r"([\?\*]+)")
+
+
def _reject_invalid_json(val):
"""Do not allow Infinity, -Infinity, or NaN values in JSON."""
raise ValueError("Invalid JSON value: '%s'" % val)
@@ -159,25 +162,54 @@ def log_failure(failure, msg, consumeErrors=True):
return failure
-def glob_to_regex(glob):
+def glob_to_regex(glob: str, word_boundary: bool = False) -> Pattern:
"""Converts a glob to a compiled regex object.
- The regex is anchored at the beginning and end of the string.
-
Args:
- glob (str)
+ glob: pattern to match
+ word_boundary: If True, the pattern will be allowed to match at word boundaries
+ anywhere in the string. Otherwise, the pattern is anchored at the start and
+ end of the string.
Returns:
- re.RegexObject
+ compiled regex pattern
"""
- res = ""
- for c in glob:
- if c == "*":
- res = res + ".*"
- elif c == "?":
- res = res + "."
+
+ # Patterns with wildcards must be simplified to avoid performance cliffs
+ # - The glob `?**?**?` is equivalent to the glob `???*`
+ # - The glob `???*` is equivalent to the regex `.{3,}`
+ chunks = []
+ for chunk in _WILDCARD_RUN.split(glob):
+ # No wildcards? re.escape()
+ if not _WILDCARD_RUN.match(chunk):
+ chunks.append(re.escape(chunk))
+ continue
+
+ # Wildcards? Simplify.
+ qmarks = chunk.count("?")
+ if "*" in chunk:
+ chunks.append(".{%d,}" % qmarks)
else:
- res = res + re.escape(c)
+ chunks.append(".{%d}" % qmarks)
+
+ res = "".join(chunks)
- # \A anchors at start of string, \Z at end of string
- return re.compile(r"\A" + res + r"\Z", re.IGNORECASE)
+ if word_boundary:
+ res = re_word_boundary(res)
+ else:
+ # \A anchors at start of string, \Z at end of string
+ res = r"\A" + res + r"\Z"
+
+ return re.compile(res, re.IGNORECASE)
+
+
+def re_word_boundary(r: str) -> str:
+ """
+ Adds word boundary characters to the start and end of an
+ expression to require that the match occur as a whole word,
+ but do so respecting the fact that strings starting or ending
+ with non-word characters will change word boundaries.
+ """
+ # we can't use \b as it chokes on unicode. however \W seems to be okay
+ # as shorthand for [^0-9A-Za-z_].
+ return r"(^|\W)%s(\W|$)" % (r,)
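A few illustrative cases of the simplification, using the glob_to_regex defined above: runs of ? and * collapse to a single bounded quantifier, so pathological globs such as ?**?**? compile to the compact .{3,} rather than a backtracking-prone chain of wildcards:

    from synapse.util import glob_to_regex

    assert glob_to_regex("ab?").fullmatch("abc")      # "?" becomes ".{1}"
    assert glob_to_regex("a*").fullmatch("a")         # "*" becomes ".{0,}"
    assert glob_to_regex("?**?**?").pattern == r"\A.{3,}\Z"
    assert glob_to_regex("cat", word_boundary=True).search("a cat!")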
diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py
index c3b2d981..5c55bb01 100644
--- a/synapse/util/async_helpers.py
+++ b/synapse/util/async_helpers.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py
index 48f64eeb..46af7fa4 100644
--- a/synapse/util/caches/__init__.py
+++ b/synapse/util/caches/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2019, 2020 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/util/caches/cached_call.py b/synapse/util/caches/cached_call.py
index 3ee0f231..a301c9e8 100644
--- a/synapse/util/caches/cached_call.py
+++ b/synapse/util/caches/cached_call.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py
index 1adc92eb..484097a4 100644
--- a/synapse/util/caches/deferred_cache.py
+++ b/synapse/util/caches/deferred_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
@@ -283,7 +282,9 @@ class DeferredCache(Generic[KT, VT]):
# we return a new Deferred which will be called before any subsequent observers.
return observable.observe()
- def prefill(self, key: KT, value: VT, callback: Callable[[], None] = None):
+ def prefill(
+ self, key: KT, value: VT, callback: Optional[Callable[[], None]] = None
+ ):
callbacks = [callback] if callback else []
self.cache.set(key, value, callbacks=callbacks)
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index 4e843799..ac4a078b 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py
index b3b413b0..56d94d96 100644
--- a/synapse/util/caches/dictionary_cache.py
+++ b/synapse/util/caches/dictionary_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py
index e15f7ee6..ac47a31c 100644
--- a/synapse/util/caches/expiringcache.py
+++ b/synapse/util/caches/expiringcache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,40 +14,50 @@
import logging
from collections import OrderedDict
+from typing import Any, Generic, Optional, TypeVar, Union, overload
+
+import attr
+from typing_extensions import Literal
from synapse.config import cache as cache_config
from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.util import Clock
from synapse.util.caches import register_cache
logger = logging.getLogger(__name__)
-SENTINEL = object()
+SENTINEL = object() # type: Any
+
+T = TypeVar("T")
+KT = TypeVar("KT")
+VT = TypeVar("VT")
-class ExpiringCache:
+
+class ExpiringCache(Generic[KT, VT]):
def __init__(
self,
- cache_name,
- clock,
- max_len=0,
- expiry_ms=0,
- reset_expiry_on_get=False,
- iterable=False,
+ cache_name: str,
+ clock: Clock,
+ max_len: int = 0,
+ expiry_ms: int = 0,
+ reset_expiry_on_get: bool = False,
+ iterable: bool = False,
):
"""
Args:
- cache_name (str): Name of this cache, used for logging.
- clock (Clock)
- max_len (int): Max size of dict. If the dict grows larger than this
+ cache_name: Name of this cache, used for logging.
+ clock
+ max_len: Max size of dict. If the dict grows larger than this
then the oldest items get automatically evicted. Default is 0,
which indicates there is no max limit.
- expiry_ms (int): How long before an item is evicted from the cache
+ expiry_ms: How long before an item is evicted from the cache
in milliseconds. Default is 0, indicating items never get
evicted based on time.
- reset_expiry_on_get (bool): If true, will reset the expiry time for
+ reset_expiry_on_get: If true, will reset the expiry time for
an item on access. Defaults to False.
- iterable (bool): If true, the size is calculated by summing the
+ iterable: If true, the size is calculated by summing the
sizes of all entries, rather than the number of entries.
"""
self._cache_name = cache_name
@@ -62,7 +71,7 @@ class ExpiringCache:
self._expiry_ms = expiry_ms
self._reset_expiry_on_get = reset_expiry_on_get
- self._cache = OrderedDict()
+ self._cache = OrderedDict() # type: OrderedDict[KT, _CacheEntry]
self.iterable = iterable
@@ -79,12 +88,12 @@ class ExpiringCache:
self._clock.looping_call(f, self._expiry_ms / 2)
- def __setitem__(self, key, value):
+ def __setitem__(self, key: KT, value: VT) -> None:
now = self._clock.time_msec()
self._cache[key] = _CacheEntry(now, value)
self.evict()
- def evict(self):
+ def evict(self) -> None:
# Evict if there are now too many items
while self._max_size and len(self) > self._max_size:
_key, value = self._cache.popitem(last=False)
@@ -93,7 +102,7 @@ class ExpiringCache:
else:
self.metrics.inc_evictions()
- def __getitem__(self, key):
+ def __getitem__(self, key: KT) -> VT:
try:
entry = self._cache[key]
self.metrics.inc_hits()
@@ -106,7 +115,7 @@ class ExpiringCache:
return entry.value
- def pop(self, key, default=SENTINEL):
+ def pop(self, key: KT, default: T = SENTINEL) -> Union[VT, T]:
"""Removes and returns the value with the given key from the cache.
If the key isn't in the cache then `default` will be returned if
@@ -115,29 +124,40 @@ class ExpiringCache:
Identical functionality to `dict.pop(..)`.
"""
- value = self._cache.pop(key, default)
+ value = self._cache.pop(key, SENTINEL)
+ # The key was not found.
if value is SENTINEL:
- raise KeyError(key)
+ if default is SENTINEL:
+ raise KeyError(key)
+ return default
- return value
+ return value.value
- def __contains__(self, key):
+ def __contains__(self, key: KT) -> bool:
return key in self._cache
- def get(self, key, default=None):
+ @overload
+ def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]:
+ ...
+
+ @overload
+ def get(self, key: KT, default: T) -> Union[VT, T]:
+ ...
+
+ def get(self, key: KT, default: Optional[T] = None) -> Union[VT, Optional[T]]:
try:
return self[key]
except KeyError:
return default
- def setdefault(self, key, value):
+ def setdefault(self, key: KT, value: VT) -> VT:
try:
return self[key]
except KeyError:
self[key] = value
return value
- def _prune_cache(self):
+ def _prune_cache(self) -> None:
if not self._expiry_ms:
# zero expiry time means don't expire. This should never get called
# since we have this check in start too.
@@ -166,7 +186,7 @@ class ExpiringCache:
len(self),
)
- def __len__(self):
+ def __len__(self) -> int:
if self.iterable:
return sum(len(entry.value) for entry in self._cache.values())
else:
@@ -190,9 +210,7 @@ class ExpiringCache:
return False
+@attr.s(slots=True)
class _CacheEntry:
- __slots__ = ["time", "value"]
-
- def __init__(self, time, value):
- self.time = time
- self.value = value
+ time = attr.ib(type=int)
+ value = attr.ib()
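Beyond the type annotations, the hunks above also correct `pop`: it previously raised `KeyError` even when a `default` was supplied, and returned the internal `_CacheEntry` wrapper rather than the stored value. A minimal usage sketch of the now-generic cache (the `hs.get_clock()` wiring is an assumption):

    from synapse.util.caches.expiringcache import ExpiringCache

    cache = ExpiringCache(
        cache_name="example",        # hypothetical cache name
        clock=hs.get_clock(),        # assumed homeserver Clock
        expiry_ms=30 * 60 * 1000,    # evict entries after 30 minutes
    )  # type: ExpiringCache[str, int]

    cache["k"] = 1
    cache.get("k", 0)        # -> 1 (typed as int, thanks to the overloads)
    cache.pop("missing", 0)  # -> 0; like dict.pop, no KeyError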
diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py
index 60bb6ff6..a21d34fc 100644
--- a/synapse/util/caches/lrucache.py
+++ b/synapse/util/caches/lrucache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -57,12 +56,14 @@ def enumerate_leaves(node, depth):
class _Node:
__slots__ = ["prev_node", "next_node", "key", "value", "callbacks"]
- def __init__(self, prev_node, next_node, key, value, callbacks=set()):
+ def __init__(
+ self, prev_node, next_node, key, value, callbacks: Optional[set] = None
+ ):
self.prev_node = prev_node
self.next_node = next_node
self.key = key
self.value = value
- self.callbacks = callbacks
+ self.callbacks = callbacks or set()
class LruCache(Generic[KT, VT]):
@@ -176,10 +177,10 @@ class LruCache(Generic[KT, VT]):
self.len = synchronized(cache_len)
- def add_node(key, value, callbacks=set()):
+ def add_node(key, value, callbacks: Optional[set] = None):
prev_node = list_root
next_node = prev_node.next_node
- node = _Node(prev_node, next_node, key, value, callbacks)
+ node = _Node(prev_node, next_node, key, value, callbacks or set())
prev_node.next_node = node
next_node.prev_node = node
cache[key] = node
@@ -237,7 +238,7 @@ class LruCache(Generic[KT, VT]):
def cache_get(
key: KT,
default: Optional[T] = None,
- callbacks: Iterable[Callable[[], None]] = [],
+ callbacks: Iterable[Callable[[], None]] = (),
update_metrics: bool = True,
):
node = cache.get(key, None)
@@ -253,7 +254,7 @@ class LruCache(Generic[KT, VT]):
return default
@synchronized
- def cache_set(key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = []):
+ def cache_set(key: KT, value: VT, callbacks: Iterable[Callable[[], None]] = ()):
node = cache.get(key, None)
if node is not None:
# We sometimes store large objects, e.g. dicts, which cause
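The `callbacks=set()` changes above fix a classic Python pitfall: a mutable default is evaluated once at definition time, so every caller that omitted the argument shared (and mutated) the same set. A self-contained demonstration of the hazard:

    def register(cb, callbacks=set()):  # anti-pattern the patch removes
        callbacks.add(cb)
        return callbacks

    first = register("a")
    second = register("b")
    assert first is second       # one shared set across all calls
    assert second == {"a", "b"}  # state has leaked between callers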
diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py
index 46ea8e09..25ea1bcc 100644
--- a/synapse/util/caches/response_cache.py
+++ b/synapse/util/caches/response_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -111,7 +110,7 @@ class ResponseCache(Generic[T]):
return result.observe()
def wrap(
- self, key: T, callback: "Callable[..., Any]", *args: Any, **kwargs: Any
+ self, key: T, callback: Callable[..., Any], *args: Any, **kwargs: Any
) -> defer.Deferred:
"""Wrap together a *get* and *set* call, taking care of logcontexts
diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py
index 644e9e77..e81e4688 100644
--- a/synapse/util/caches/stream_change_cache.py
+++ b/synapse/util/caches/stream_change_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,11 +14,10 @@
import logging
import math
-from typing import Dict, FrozenSet, List, Mapping, Optional, Set, Union
+from typing import Collection, Dict, FrozenSet, List, Mapping, Optional, Set, Union
from sortedcontainers import SortedDict
-from synapse.types import Collection
from synapse.util import caches
logger = logging.getLogger(__name__)
diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py
index 96a82749..c276107d 100644
--- a/synapse/util/caches/ttlcache.py
+++ b/synapse/util/caches/ttlcache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/daemonize.py b/synapse/util/daemonize.py
index 23393cf4..31b24dd1 100644
--- a/synapse/util/daemonize.py
+++ b/synapse/util/daemonize.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright (c) 2012, 2013, 2014 Ilya Otyutskiy <ilya.otyutskiy@icloud.com>
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py
index 3c47285d..1f803aef 100644
--- a/synapse/util/distributor.py
+++ b/synapse/util/distributor.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/file_consumer.py b/synapse/util/file_consumer.py
index 68dc6324..e946189f 100644
--- a/synapse/util/file_consumer.py
+++ b/synapse/util/file_consumer.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py
index 5ca2e71e..2ac7c291 100644
--- a/synapse/util/frozenutils.py
+++ b/synapse/util/frozenutils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/hash.py b/synapse/util/hash.py
index 35916870..ba676e17 100644
--- a/synapse/util/hash.py
+++ b/synapse/util/hash.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py
index 98707c11..abfdc298 100644
--- a/synapse/util/iterutils.py
+++ b/synapse/util/iterutils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
@@ -16,6 +15,7 @@
import heapq
from itertools import islice
from typing import (
+ Collection,
Dict,
Generator,
Iterable,
@@ -27,8 +27,6 @@ from typing import (
TypeVar,
)
-from synapse.types import Collection
-
T = TypeVar("T")
diff --git a/synapse/util/jsonobject.py b/synapse/util/jsonobject.py
index e3a8ed5b..abc12f08 100644
--- a/synapse/util/jsonobject.py
+++ b/synapse/util/jsonobject.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/macaroons.py b/synapse/util/macaroons.py
index 12cdd533..f6ebfd7e 100644
--- a/synapse/util/macaroons.py
+++ b/synapse/util/macaroons.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Quentin Gliech
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py
index 1023c856..6d14351b 100644
--- a/synapse/util/metrics.py
+++ b/synapse/util/metrics.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -105,7 +104,13 @@ class Measure:
"start",
]
- def __init__(self, clock, name):
+ def __init__(self, clock, name: str):
+ """
+ Args:
+ clock: An object with a "time()" method, which returns the current
+ time in seconds.
+ name: The name of the metric to report.
+ """
self.clock = clock
self.name = name
curr_context = current_context()
@@ -118,10 +123,8 @@ class Measure:
else:
assert isinstance(curr_context, LoggingContext)
parent_context = curr_context
- self._logging_context = LoggingContext(
- "Measure[%s]" % (self.name,), parent_context
- )
- self.start = None
+ self._logging_context = LoggingContext(str(curr_context), parent_context)
+ self.start = None # type: Optional[int]
def __enter__(self) -> "Measure":
if self.start is not None:
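`Measure` is a context manager that times the enclosed block and reports it under the given metric name. A typical call site, assuming a homeserver clock is available:

    from synapse.util.metrics import Measure

    with Measure(hs.get_clock(), name="load_room_state"):
        load_room_state()  # hypothetical expensive call being timed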
diff --git a/synapse/util/module_loader.py b/synapse/util/module_loader.py
index d184e2a9..8acbe276 100644
--- a/synapse/util/module_loader.py
+++ b/synapse/util/module_loader.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/msisdn.py b/synapse/util/msisdn.py
index c8bcbe29..bbbdebf2 100644
--- a/synapse/util/msisdn.py
+++ b/synapse/util/msisdn.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/patch_inline_callbacks.py b/synapse/util/patch_inline_callbacks.py
index d9f9ae99..eed0291c 100644
--- a/synapse/util/patch_inline_callbacks.py
+++ b/synapse/util/patch_inline_callbacks.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
index 70d11e1e..a654c696 100644
--- a/synapse/util/ratelimitutils.py
+++ b/synapse/util/ratelimitutils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py
index 4ab379e4..f9c370a8 100644
--- a/synapse/util/retryutils.py
+++ b/synapse/util/retryutils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/rlimit.py b/synapse/util/rlimit.py
index 207cd17c..bf812ab5 100644
--- a/synapse/util/rlimit.py
+++ b/synapse/util/rlimit.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py
index 9ce7873a..cd82777f 100644
--- a/synapse/util/stringutils.py
+++ b/synapse/util/stringutils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
@@ -133,6 +132,38 @@ def parse_and_validate_server_name(server_name: str) -> Tuple[str, Optional[int]
return host, port
+def valid_id_server_location(id_server: str) -> bool:
+ """Check whether an identity server location, such as the one passed as the
+ `id_server` parameter to `/_matrix/client/r0/account/3pid/bind`, is valid.
+
+ A valid identity server location consists of a valid hostname and optional
+ port number, optionally followed by any number of `/` delimited path
+ components, without any fragment or query string parts.
+
+ Args:
+ id_server: identity server location string to validate
+
+ Returns:
+ True if valid, False otherwise.
+ """
+
+ components = id_server.split("/", 1)
+
+ host = components[0]
+
+ try:
+ parse_and_validate_server_name(host)
+ except ValueError:
+ return False
+
+ if len(components) < 2:
+ # no path
+ return True
+
+ path = components[1]
+ return "#" not in path and "?" not in path
+
+
def parse_and_validate_mxc_uri(mxc: str) -> Tuple[str, Optional[int], str]:
"""Parse the given string as an MXC URI
diff --git a/synapse/util/templates.py b/synapse/util/templates.py
index 392dae4a..38543dd1 100644
--- a/synapse/util/templates.py
+++ b/synapse/util/templates.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/threepids.py b/synapse/util/threepids.py
index 43c2e0ac..a1cf1960 100644
--- a/synapse/util/threepids.py
+++ b/synapse/util/threepids.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,6 +18,16 @@ import re
logger = logging.getLogger(__name__)
+# It's unclear what the maximum length of an email address is. RFC 3696 (as corrected
+# by errata) says:
+# the upper limit on address lengths should normally be considered to be 254.
+#
+# In practice, mail servers appear to be more tolerant and allow 400 characters
+# or so. Let's allow 500, which should be plenty for everyone.
+#
+MAX_EMAIL_ADDRESS_LENGTH = 500
+
+
def check_3pid_allowed(hs, medium, address):
"""Checks whether a given format of 3PID is allowed to be used on this HS
@@ -71,3 +80,23 @@ def canonicalise_email(address: str) -> str:
raise ValueError("Unable to parse email address")
return parts[0].casefold() + "@" + parts[1].lower()
+
+
+def validate_email(address: str) -> str:
+ """Does some basic validation on an email address.
+
+ Returns the canonicalised email, as returned by `canonicalise_email`.
+
+ Raises a ValueError if the email is invalid.
+ """
+ # First we try canonicalising in case that fails
+ address = canonicalise_email(address)
+
+ # Email addresses have to be at least 3 characters.
+ if len(address) < 3:
+ raise ValueError("Unable to parse email address")
+
+ if len(address) > MAX_EMAIL_ADDRESS_LENGTH:
+ raise ValueError("Unable to parse email address")
+
+ return address
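`validate_email` composes the existing canonicalisation with the new length bounds; every failure mode raises the same generic `ValueError`. For instance:

    from synapse.util.threepids import validate_email

    validate_email("BOB@Example.Com")           # -> "bob@example.com"
    validate_email("a@")                        # ValueError: under 3 characters
    validate_email("a" * 600 + "@example.com")  # ValueError: over 500 characters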
diff --git a/synapse/util/versionstring.py b/synapse/util/versionstring.py
index ab7d03af..dfa30a62 100644
--- a/synapse/util/versionstring.py
+++ b/synapse/util/versionstring.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/util/wheel_timer.py b/synapse/util/wheel_timer.py
index be3b2246..61814aff 100644
--- a/synapse/util/wheel_timer.py
+++ b/synapse/util/wheel_timer.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synapse/visibility.py b/synapse/visibility.py
index ff53a49b..490fb26e 100644
--- a/synapse/visibility.py
+++ b/synapse/visibility.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synctl b/synctl
index 56c0e394..ccf404ac 100755
--- a/synctl
+++ b/synctl
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/synmark/__init__.py b/synmark/__init__.py
index 3d4ec3e1..2cc00b0f 100644
--- a/synmark/__init__.py
+++ b/synmark/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synmark/__main__.py b/synmark/__main__.py
index f55968a5..35a59e34 100644
--- a/synmark/__main__.py
+++ b/synmark/__main__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synmark/suites/logging.py b/synmark/suites/logging.py
index c306891b..9419892e 100644
--- a/synmark/suites/logging.py
+++ b/synmark/suites/logging.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,8 +15,7 @@
import logging
import warnings
from io import StringIO
-
-from mock import Mock
+from unittest.mock import Mock
from pyperf import perf_counter
diff --git a/synmark/suites/lrucache.py b/synmark/suites/lrucache.py
index 69ab042c..9b4a4241 100644
--- a/synmark/suites/lrucache.py
+++ b/synmark/suites/lrucache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/synmark/suites/lrucache_evict.py b/synmark/suites/lrucache_evict.py
index 532b1cc7..0ee202ed 100644
--- a/synmark/suites/lrucache_evict.py
+++ b/synmark/suites/lrucache_evict.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/__init__.py b/tests/__init__.py
index ed805db1..5fced5cc 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py
index 34f72ae7..c0ed64f7 100644
--- a/tests/api/test_auth.py
+++ b/tests/api/test_auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
import pymacaroons
diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py
index ab7d2907..f44c91a3 100644
--- a/tests/api/test_filtering.py
+++ b/tests/api/test_filtering.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py
index 48341819..fa96ba07 100644
--- a/tests/api/test_ratelimiting.py
+++ b/tests/api/test_ratelimiting.py
@@ -5,38 +5,25 @@ from synapse.types import create_requester
from tests import unittest
-class TestRatelimiter(unittest.TestCase):
+class TestRatelimiter(unittest.HomeserverTestCase):
def test_allowed_via_can_do_action(self):
- limiter = Ratelimiter(clock=None, rate_hz=0.1, burst_count=1)
- allowed, time_allowed = limiter.can_do_action(key="test_id", _time_now_s=0)
- self.assertTrue(allowed)
- self.assertEquals(10.0, time_allowed)
-
- allowed, time_allowed = limiter.can_do_action(key="test_id", _time_now_s=5)
- self.assertFalse(allowed)
- self.assertEquals(10.0, time_allowed)
-
- allowed, time_allowed = limiter.can_do_action(key="test_id", _time_now_s=10)
- self.assertTrue(allowed)
- self.assertEquals(20.0, time_allowed)
-
- def test_allowed_user_via_can_requester_do_action(self):
- user_requester = create_requester("@user:example.com")
- limiter = Ratelimiter(clock=None, rate_hz=0.1, burst_count=1)
- allowed, time_allowed = limiter.can_requester_do_action(
- user_requester, _time_now_s=0
+ limiter = Ratelimiter(
+ store=self.hs.get_datastore(), clock=None, rate_hz=0.1, burst_count=1
+ )
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(None, key="test_id", _time_now_s=0)
)
self.assertTrue(allowed)
self.assertEquals(10.0, time_allowed)
- allowed, time_allowed = limiter.can_requester_do_action(
- user_requester, _time_now_s=5
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(None, key="test_id", _time_now_s=5)
)
self.assertFalse(allowed)
self.assertEquals(10.0, time_allowed)
- allowed, time_allowed = limiter.can_requester_do_action(
- user_requester, _time_now_s=10
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(None, key="test_id", _time_now_s=10)
)
self.assertTrue(allowed)
self.assertEquals(20.0, time_allowed)
@@ -51,21 +38,23 @@ class TestRatelimiter(unittest.TestCase):
)
as_requester = create_requester("@user:example.com", app_service=appservice)
- limiter = Ratelimiter(clock=None, rate_hz=0.1, burst_count=1)
- allowed, time_allowed = limiter.can_requester_do_action(
- as_requester, _time_now_s=0
+ limiter = Ratelimiter(
+ store=self.hs.get_datastore(), clock=None, rate_hz=0.1, burst_count=1
+ )
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(as_requester, _time_now_s=0)
)
self.assertTrue(allowed)
self.assertEquals(10.0, time_allowed)
- allowed, time_allowed = limiter.can_requester_do_action(
- as_requester, _time_now_s=5
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(as_requester, _time_now_s=5)
)
self.assertFalse(allowed)
self.assertEquals(10.0, time_allowed)
- allowed, time_allowed = limiter.can_requester_do_action(
- as_requester, _time_now_s=10
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(as_requester, _time_now_s=10)
)
self.assertTrue(allowed)
self.assertEquals(20.0, time_allowed)
@@ -80,73 +69,89 @@ class TestRatelimiter(unittest.TestCase):
)
as_requester = create_requester("@user:example.com", app_service=appservice)
- limiter = Ratelimiter(clock=None, rate_hz=0.1, burst_count=1)
- allowed, time_allowed = limiter.can_requester_do_action(
- as_requester, _time_now_s=0
+ limiter = Ratelimiter(
+ store=self.hs.get_datastore(), clock=None, rate_hz=0.1, burst_count=1
+ )
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(as_requester, _time_now_s=0)
)
self.assertTrue(allowed)
self.assertEquals(-1, time_allowed)
- allowed, time_allowed = limiter.can_requester_do_action(
- as_requester, _time_now_s=5
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(as_requester, _time_now_s=5)
)
self.assertTrue(allowed)
self.assertEquals(-1, time_allowed)
- allowed, time_allowed = limiter.can_requester_do_action(
- as_requester, _time_now_s=10
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(as_requester, _time_now_s=10)
)
self.assertTrue(allowed)
self.assertEquals(-1, time_allowed)
def test_allowed_via_ratelimit(self):
- limiter = Ratelimiter(clock=None, rate_hz=0.1, burst_count=1)
+ limiter = Ratelimiter(
+ store=self.hs.get_datastore(), clock=None, rate_hz=0.1, burst_count=1
+ )
# Shouldn't raise
- limiter.ratelimit(key="test_id", _time_now_s=0)
+ self.get_success_or_raise(limiter.ratelimit(None, key="test_id", _time_now_s=0))
# Should raise
with self.assertRaises(LimitExceededError) as context:
- limiter.ratelimit(key="test_id", _time_now_s=5)
+ self.get_success_or_raise(
+ limiter.ratelimit(None, key="test_id", _time_now_s=5)
+ )
self.assertEqual(context.exception.retry_after_ms, 5000)
# Shouldn't raise
- limiter.ratelimit(key="test_id", _time_now_s=10)
+ self.get_success_or_raise(
+ limiter.ratelimit(None, key="test_id", _time_now_s=10)
+ )
def test_allowed_via_can_do_action_and_overriding_parameters(self):
"""Test that we can override options of can_do_action that would otherwise fail
an action
"""
# Create a Ratelimiter with a very low allowed rate_hz and burst_count
- limiter = Ratelimiter(clock=None, rate_hz=0.1, burst_count=1)
+ limiter = Ratelimiter(
+ store=self.hs.get_datastore(), clock=None, rate_hz=0.1, burst_count=1
+ )
# First attempt should be allowed
- allowed, time_allowed = limiter.can_do_action(
- ("test_id",),
- _time_now_s=0,
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(
+ None,
+ ("test_id",),
+ _time_now_s=0,
+ )
)
self.assertTrue(allowed)
self.assertEqual(10.0, time_allowed)
# Second attempt, 1s later, will fail
- allowed, time_allowed = limiter.can_do_action(
- ("test_id",),
- _time_now_s=1,
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(
+ None,
+ ("test_id",),
+ _time_now_s=1,
+ )
)
self.assertFalse(allowed)
self.assertEqual(10.0, time_allowed)
# But, if we allow 10 actions/sec for this request, we should be allowed
# to continue.
- allowed, time_allowed = limiter.can_do_action(
- ("test_id",), _time_now_s=1, rate_hz=10.0
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(None, ("test_id",), _time_now_s=1, rate_hz=10.0)
)
self.assertTrue(allowed)
self.assertEqual(1.1, time_allowed)
# Similarly if we allow a burst of 10 actions
- allowed, time_allowed = limiter.can_do_action(
- ("test_id",), _time_now_s=1, burst_count=10
+ allowed, time_allowed = self.get_success_or_raise(
+ limiter.can_do_action(None, ("test_id",), _time_now_s=1, burst_count=10)
)
self.assertTrue(allowed)
self.assertEqual(1.0, time_allowed)
@@ -156,29 +161,72 @@ class TestRatelimiter(unittest.TestCase):
fail an action
"""
# Create a Ratelimiter with a very low allowed rate_hz and burst_count
- limiter = Ratelimiter(clock=None, rate_hz=0.1, burst_count=1)
+ limiter = Ratelimiter(
+ store=self.hs.get_datastore(), clock=None, rate_hz=0.1, burst_count=1
+ )
# First attempt should be allowed
- limiter.ratelimit(key=("test_id",), _time_now_s=0)
+ self.get_success_or_raise(
+ limiter.ratelimit(None, key=("test_id",), _time_now_s=0)
+ )
# Second attempt, 1s later, will fail
with self.assertRaises(LimitExceededError) as context:
- limiter.ratelimit(key=("test_id",), _time_now_s=1)
+ self.get_success_or_raise(
+ limiter.ratelimit(None, key=("test_id",), _time_now_s=1)
+ )
self.assertEqual(context.exception.retry_after_ms, 9000)
# But, if we allow 10 actions/sec for this request, we should be allowed
# to continue.
- limiter.ratelimit(key=("test_id",), _time_now_s=1, rate_hz=10.0)
+ self.get_success_or_raise(
+ limiter.ratelimit(None, key=("test_id",), _time_now_s=1, rate_hz=10.0)
+ )
# Similarly if we allow a burst of 10 actions
- limiter.ratelimit(key=("test_id",), _time_now_s=1, burst_count=10)
+ self.get_success_or_raise(
+ limiter.ratelimit(None, key=("test_id",), _time_now_s=1, burst_count=10)
+ )
def test_pruning(self):
- limiter = Ratelimiter(clock=None, rate_hz=0.1, burst_count=1)
- limiter.can_do_action(key="test_id_1", _time_now_s=0)
+ limiter = Ratelimiter(
+ store=self.hs.get_datastore(), clock=None, rate_hz=0.1, burst_count=1
+ )
+ self.get_success_or_raise(
+ limiter.can_do_action(None, key="test_id_1", _time_now_s=0)
+ )
self.assertIn("test_id_1", limiter.actions)
- limiter.can_do_action(key="test_id_2", _time_now_s=10)
+ self.get_success_or_raise(
+ limiter.can_do_action(None, key="test_id_2", _time_now_s=10)
+ )
self.assertNotIn("test_id_1", limiter.actions)
+
+ def test_db_user_override(self):
+ """Test that users that have ratelimiting disabled in the DB aren't
+ ratelimited.
+ """
+ store = self.hs.get_datastore()
+
+ user_id = "@user:test"
+ requester = create_requester(user_id)
+
+ self.get_success(
+ store.db_pool.simple_insert(
+ table="ratelimit_override",
+ values={
+ "user_id": user_id,
+ "messages_per_second": None,
+ "burst_count": None,
+ },
+ desc="test_db_user_override",
+ )
+ )
+
+ limiter = Ratelimiter(store=store, clock=None, rate_hz=0.1, burst_count=1)
+
+ # Shouldn't raise
+ for _ in range(20):
+ self.get_success_or_raise(limiter.ratelimit(requester, _time_now_s=0))
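The rewritten tests reflect the Ratelimiter's new awaitable API: the limiter now takes a datastore (so per-user overrides such as the `ratelimit_override` row above can apply) and its checks must be awaited. A minimal sketch of the two calling styles, with the `hs` wiring assumed:

    limiter = Ratelimiter(
        store=hs.get_datastore(), clock=hs.get_clock(), rate_hz=0.1, burst_count=1
    )

    async def on_request(requester):
        # Non-raising form: returns an (allowed, time_allowed) pair.
        allowed, time_allowed = await limiter.can_do_action(requester)
        # Raising form: throws LimitExceededError once the burst is spent,
        # unless the user has ratelimiting disabled in the database.
        await limiter.ratelimit(requester)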
diff --git a/tests/app/test_frontend_proxy.py b/tests/app/test_frontend_proxy.py
deleted file mode 100644
index e0ca2888..00000000
--- a/tests/app/test_frontend_proxy.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2018 New Vector Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from synapse.app.generic_worker import GenericWorkerServer
-
-from tests.server import make_request
-from tests.unittest import HomeserverTestCase
-
-
-class FrontendProxyTests(HomeserverTestCase):
- def make_homeserver(self, reactor, clock):
-
- hs = self.setup_test_homeserver(
- federation_http_client=None, homeserver_to_use=GenericWorkerServer
- )
-
- return hs
-
- def default_config(self):
- c = super().default_config()
- c["worker_app"] = "synapse.app.frontend_proxy"
-
- c["worker_listeners"] = [
- {
- "type": "http",
- "port": 8080,
- "bind_addresses": ["0.0.0.0"],
- "resources": [{"names": ["client"]}],
- }
- ]
-
- return c
-
- def test_listen_http_with_presence_enabled(self):
- """
- When presence is on, the stub servlet will not register.
- """
- # Presence is on
- self.hs.config.use_presence = True
-
- # Listen with the config
- self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
-
- # Grab the resource from the site that was told to listen
- self.assertEqual(len(self.reactor.tcpServers), 1)
- site = self.reactor.tcpServers[0][1]
-
- channel = make_request(self.reactor, site, "PUT", "presence/a/status")
-
- # 400 + unrecognised, because nothing is registered
- self.assertEqual(channel.code, 400)
- self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED")
-
- def test_listen_http_with_presence_disabled(self):
- """
- When presence is off, the stub servlet will register.
- """
- # Presence is off
- self.hs.config.use_presence = False
-
- # Listen with the config
- self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
-
- # Grab the resource from the site that was told to listen
- self.assertEqual(len(self.reactor.tcpServers), 1)
- site = self.reactor.tcpServers[0][1]
-
- channel = make_request(self.reactor, site, "PUT", "presence/a/status")
-
- # 401, because the stub servlet still checks authentication
- self.assertEqual(channel.code, 401)
- self.assertEqual(channel.json_body["errcode"], "M_MISSING_TOKEN")
diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py
index 467033e2..264e1010 100644
--- a/tests/app/test_openid_listener.py
+++ b/tests/app/test_openid_listener.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock, patch
+from unittest.mock import Mock, patch
from parameterized import parameterized
@@ -110,7 +109,7 @@ class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase):
}
# Listen with the config
- self.hs._listener_http(self.hs.get_config(), parse_listener_def(config))
+ self.hs._listener_http(self.hs.config, parse_listener_def(config))
# Grab the resource from the site that was told to listen
site = self.reactor.tcpServers[0][1]
diff --git a/tests/appservice/__init__.py b/tests/appservice/__init__.py
index fe0ac3f8..629e2df7 100644
--- a/tests/appservice/__init__.py
+++ b/tests/appservice/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py
index 0bffeb11..f386b5e1 100644
--- a/tests/appservice/test_appservice.py
+++ b/tests/appservice/test_appservice.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,8 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import re
-
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py
index 97f8cad0..a2b5ed20 100644
--- a/tests/appservice/test_scheduler.py
+++ b/tests/appservice/test_scheduler.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
diff --git a/tests/config/__init__.py b/tests/config/__init__.py
index b7df13c9..f43a360a 100644
--- a/tests/config/__init__.py
+++ b/tests/config/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/config/test_base.py b/tests/config/test_base.py
index 42ee5f56..84ae3b88 100644
--- a/tests/config/test_base.py
+++ b/tests/config/test_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/config/test_cache.py b/tests/config/test_cache.py
index 2b7f09c1..857d9cd0 100644
--- a/tests/config/test_cache.py
+++ b/tests/config/test_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/config/test_database.py b/tests/config/test_database.py
index f675bde6..9eca10bb 100644
--- a/tests/config/test_database.py
+++ b/tests/config/test_database.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py
index 463855ec..fdfbb0e3 100644
--- a/tests/config/test_generate.py
+++ b/tests/config/test_generate.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/config/test_load.py b/tests/config/test_load.py
index 734a9983..ebe2c051 100644
--- a/tests/config/test_load.py
+++ b/tests/config/test_load.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,6 +19,7 @@ from io import StringIO
import yaml
+from synapse.config import ConfigError
from synapse.config.homeserver import HomeServerConfig
from tests import unittest
@@ -35,9 +35,9 @@ class ConfigLoadingTestCase(unittest.TestCase):
def test_load_fails_if_server_name_missing(self):
self.generate_config_and_remove_lines_containing("server_name")
- with self.assertRaises(Exception):
+ with self.assertRaises(ConfigError):
HomeServerConfig.load_config("", ["-c", self.file])
- with self.assertRaises(Exception):
+ with self.assertRaises(ConfigError):
HomeServerConfig.load_or_generate_config("", ["-c", self.file])
def test_generates_and_loads_macaroon_secret_key(self):
diff --git a/tests/config/test_ratelimiting.py b/tests/config/test_ratelimiting.py
index 13ab2823..3c7bb32e 100644
--- a/tests/config/test_ratelimiting.py
+++ b/tests/config/test_ratelimiting.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py
index 0ec10019..db745815 100644
--- a/tests/config/test_room_directory.py
+++ b/tests/config/test_room_directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/config/test_server.py b/tests/config/test_server.py
index 98af7aa6..6f2b9e99 100644
--- a/tests/config/test_server.py
+++ b/tests/config/test_server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py
index ec32d4b1..183034f7 100644
--- a/tests/config/test_tls.py
+++ b/tests/config/test_tls.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
# Copyright 2019 Matrix.org Foundation C.I.C.
#
diff --git a/tests/config/test_util.py b/tests/config/test_util.py
index 10363e37..3d4929da 100644
--- a/tests/config/test_util.py
+++ b/tests/config/test_util.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/crypto/__init__.py b/tests/crypto/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/tests/crypto/__init__.py
+++ b/tests/crypto/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py
index 62f639a1..1c920157 100644
--- a/tests/crypto/test_event_signing.py
+++ b/tests/crypto/test_event_signing.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py
index 30fcc4c1..2775dfd8 100644
--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import time
+from unittest.mock import Mock
-from mock import Mock
-
+import attr
import canonicaljson
import signedjson.key
import signedjson.sign
@@ -68,6 +67,11 @@ class MockPerspectiveServer:
signedjson.sign.sign_json(res, self.server_name, self.key)
+@attr.s(slots=True)
+class FakeRequest:
+ id = attr.ib()
+
+
@logcontext_clean
class KeyringTestCase(unittest.HomeserverTestCase):
def check_context(self, val, expected):
@@ -89,7 +93,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
first_lookup_deferred = Deferred()
async def first_lookup_fetch(keys_to_fetch):
- self.assertEquals(current_context().request, "context_11")
+ self.assertEquals(current_context().request.id, "context_11")
self.assertEqual(keys_to_fetch, {"server10": {get_key_id(key1): 0}})
await make_deferred_yieldable(first_lookup_deferred)
@@ -102,9 +106,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
mock_fetcher.get_keys.side_effect = first_lookup_fetch
async def first_lookup():
- with LoggingContext("context_11") as context_11:
- context_11.request = "context_11"
-
+ with LoggingContext("context_11", request=FakeRequest("context_11")):
res_deferreds = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test10"), ("server11", {}, 0, "test11")]
)
@@ -130,7 +132,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
# should block rather than start a second call
async def second_lookup_fetch(keys_to_fetch):
- self.assertEquals(current_context().request, "context_12")
+ self.assertEquals(current_context().request.id, "context_12")
return {
"server10": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
@@ -142,9 +144,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
second_lookup_state = [0]
async def second_lookup():
- with LoggingContext("context_12") as context_12:
- context_12.request = "context_12"
-
+ with LoggingContext("context_12", request=FakeRequest("context_12")):
res_deferreds_2 = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test")]
)
@@ -589,10 +589,7 @@ def get_key_id(key):
@defer.inlineCallbacks
def run_in_context(f, *args, **kwargs):
- with LoggingContext("testctx") as ctx:
- # we set the "request" prop to make it easier to follow what's going on in the
- # logs.
- ctx.request = "testctx"
+ with LoggingContext("testctx"):
rv = yield f(*args, **kwargs)
return rv
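These test changes track a change in the logging machinery: a context's `request` attribute is now a structured object exposing an `id`, rather than a bare string, so the tests wrap the identifier in a small attrs class. The pattern in isolation:

    with LoggingContext("context_42", request=FakeRequest("context_42")):
        # Downstream code reads the id off the request object.
        assert current_context().request.id == "context_42"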
diff --git a/tests/events/test_presence_router.py b/tests/events/test_presence_router.py
new file mode 100644
index 00000000..01d25730
--- /dev/null
+++ b/tests/events/test_presence_router.py
@@ -0,0 +1,385 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
+from unittest.mock import Mock
+
+import attr
+
+from synapse.api.constants import EduTypes
+from synapse.events.presence_router import PresenceRouter
+from synapse.federation.units import Transaction
+from synapse.handlers.presence import UserPresenceState
+from synapse.module_api import ModuleApi
+from synapse.rest import admin
+from synapse.rest.client.v1 import login, presence, room
+from synapse.types import JsonDict, StreamToken, create_requester
+
+from tests.handlers.test_sync import generate_sync_config
+from tests.unittest import FederatingHomeserverTestCase, TestCase, override_config
+
+
+@attr.s
+class PresenceRouterTestConfig:
+ # Use attr.Factory so instances don't share one mutable default list.
+ users_who_should_receive_all_presence = attr.ib(
+ type=List[str], default=attr.Factory(list)
+ )
+
+
+class PresenceRouterTestModule:
+ def __init__(self, config: PresenceRouterTestConfig, module_api: ModuleApi):
+ self._config = config
+ self._module_api = module_api
+
+ async def get_users_for_states(
+ self, state_updates: Iterable[UserPresenceState]
+ ) -> Dict[str, Set[UserPresenceState]]:
+ users_to_state = {
+ user_id: set(state_updates)
+ for user_id in self._config.users_who_should_receive_all_presence
+ }
+ return users_to_state
+
+ async def get_interested_users(
+ self, user_id: str
+ ) -> Union[Set[str], PresenceRouter.ALL_USERS]:
+ if user_id in self._config.users_who_should_receive_all_presence:
+ return PresenceRouter.ALL_USERS
+
+ return set()
+
+ @staticmethod
+ def parse_config(config_dict: dict) -> PresenceRouterTestConfig:
+ """Parse a configuration dictionary from the homeserver config, do
+ some validation and return a typed PresenceRouterConfig.
+
+ Args:
+ config_dict: The configuration dictionary.
+
+ Returns:
+ A validated config object.
+ """
+ # Initialise a typed config object
+ config = PresenceRouterTestConfig()
+
+ config.users_who_should_receive_all_presence = config_dict.get(
+ "users_who_should_receive_all_presence"
+ )
+
+ return config
+
+
+class PresenceRouterTestCase(FederatingHomeserverTestCase):
+ servlets = [
+ admin.register_servlets,
+ login.register_servlets,
+ room.register_servlets,
+ presence.register_servlets,
+ ]
+
+ def make_homeserver(self, reactor, clock):
+ return self.setup_test_homeserver(
+ federation_transport_client=Mock(spec=["send_transaction"]),
+ )
+
+ def prepare(self, reactor, clock, homeserver):
+ self.sync_handler = self.hs.get_sync_handler()
+ self.module_api = homeserver.get_module_api()
+
+ @override_config(
+ {
+ "presence": {
+ "presence_router": {
+ "module": __name__ + ".PresenceRouterTestModule",
+ "config": {
+ "users_who_should_receive_all_presence": [
+ "@presence_gobbler:test",
+ ]
+ },
+ }
+ },
+ "send_federation": True,
+ }
+ )
+ def test_receiving_all_presence(self):
+ """Test that a user that does not share a room with another other can receive
+ presence for them, due to presence routing.
+ """
+ # Create a user who should receive all presence of others
+ self.presence_receiving_user_id = self.register_user(
+ "presence_gobbler", "monkey"
+ )
+ self.presence_receiving_user_tok = self.login("presence_gobbler", "monkey")
+
+ # And two users who should not have any special routing
+ self.other_user_one_id = self.register_user("other_user_one", "monkey")
+ self.other_user_one_tok = self.login("other_user_one", "monkey")
+ self.other_user_two_id = self.register_user("other_user_two", "monkey")
+ self.other_user_two_tok = self.login("other_user_two", "monkey")
+
+ # Put the other two users in a room with each other
+ room_id = self.helper.create_room_as(
+ self.other_user_one_id, tok=self.other_user_one_tok
+ )
+
+ self.helper.invite(
+ room_id,
+ self.other_user_one_id,
+ self.other_user_two_id,
+ tok=self.other_user_one_tok,
+ )
+ self.helper.join(room_id, self.other_user_two_id, tok=self.other_user_two_tok)
+ # User one sends some presence
+ send_presence_update(
+ self,
+ self.other_user_one_id,
+ self.other_user_one_tok,
+ "online",
+ "boop",
+ )
+
+ # Check that the presence receiving user gets user one's presence when syncing
+ presence_updates, sync_token = sync_presence(
+ self, self.presence_receiving_user_id
+ )
+ self.assertEqual(len(presence_updates), 1)
+
+ presence_update = presence_updates[0] # type: UserPresenceState
+ self.assertEqual(presence_update.user_id, self.other_user_one_id)
+ self.assertEqual(presence_update.state, "online")
+ self.assertEqual(presence_update.status_msg, "boop")
+
+ # Have all three users send presence
+ send_presence_update(
+ self,
+ self.other_user_one_id,
+ self.other_user_one_tok,
+ "online",
+ "user_one",
+ )
+ send_presence_update(
+ self,
+ self.other_user_two_id,
+ self.other_user_two_tok,
+ "online",
+ "user_two",
+ )
+ send_presence_update(
+ self,
+ self.presence_receiving_user_id,
+ self.presence_receiving_user_tok,
+ "online",
+ "presence_gobbler",
+ )
+
+ # Check that the presence receiving user gets everyone's presence
+ presence_updates, _ = sync_presence(
+ self, self.presence_receiving_user_id, sync_token
+ )
+ self.assertEqual(len(presence_updates), 3)
+
+ # But User One should only get their own and User Two's presence
+ presence_updates, _ = sync_presence(self, self.other_user_one_id)
+ self.assertEqual(len(presence_updates), 2)
+
+ found = False
+ for update in presence_updates:
+ if update.user_id == self.other_user_two_id:
+ self.assertEqual(update.state, "online")
+ self.assertEqual(update.status_msg, "user_two")
+ found = True
+
+ self.assertTrue(found)
+
+ @override_config(
+ {
+ "presence": {
+ "presence_router": {
+ "module": __name__ + ".PresenceRouterTestModule",
+ "config": {
+ "users_who_should_receive_all_presence": [
+ "@presence_gobbler1:test",
+ "@presence_gobbler2:test",
+ "@far_away_person:island",
+ ]
+ },
+ }
+ },
+ "send_federation": True,
+ }
+ )
+ def test_send_local_online_presence_to_with_module(self):
+ """Tests that send_local_presence_to_users sends local online presence to a set
+ of specified local and remote users, with a custom PresenceRouter module enabled.
+ """
+ # Create a user who will send presence updates
+ self.other_user_id = self.register_user("other_user", "monkey")
+ self.other_user_tok = self.login("other_user", "monkey")
+
+ # And another two users that will also send out presence updates, as well as receive
+ # theirs and everyone else's
+ self.presence_receiving_user_one_id = self.register_user(
+ "presence_gobbler1", "monkey"
+ )
+ self.presence_receiving_user_one_tok = self.login("presence_gobbler1", "monkey")
+ self.presence_receiving_user_two_id = self.register_user(
+ "presence_gobbler2", "monkey"
+ )
+ self.presence_receiving_user_two_tok = self.login("presence_gobbler2", "monkey")
+
+ # Have all three users send some presence updates
+ send_presence_update(
+ self,
+ self.other_user_id,
+ self.other_user_tok,
+ "online",
+ "I'm online!",
+ )
+ send_presence_update(
+ self,
+ self.presence_receiving_user_one_id,
+ self.presence_receiving_user_one_tok,
+ "online",
+ "I'm also online!",
+ )
+ send_presence_update(
+ self,
+ self.presence_receiving_user_two_id,
+ self.presence_receiving_user_two_tok,
+ "unavailable",
+ "I'm in a meeting!",
+ )
+
+ # Mark each presence-receiving user for receiving all user presence
+ self.get_success(
+ self.module_api.send_local_online_presence_to(
+ [
+ self.presence_receiving_user_one_id,
+ self.presence_receiving_user_two_id,
+ ]
+ )
+ )
+
+ # Perform a sync for each user
+
+ # The other user should only receive their own presence
+ presence_updates, _ = sync_presence(self, self.other_user_id)
+ self.assertEqual(len(presence_updates), 1)
+
+ presence_update = presence_updates[0] # type: UserPresenceState
+ self.assertEqual(presence_update.user_id, self.other_user_id)
+ self.assertEqual(presence_update.state, "online")
+ self.assertEqual(presence_update.status_msg, "I'm online!")
+
+ # Whereas both presence receiving users should receive everyone's presence updates
+ presence_updates, _ = sync_presence(self, self.presence_receiving_user_one_id)
+ self.assertEqual(len(presence_updates), 3)
+ presence_updates, _ = sync_presence(self, self.presence_receiving_user_two_id)
+ self.assertEqual(len(presence_updates), 3)
+
+ # Test that sending to a remote user works
+ remote_user_id = "@far_away_person:island"
+
+ # Note that due to the remote user being in our module's
+ # users_who_should_receive_all_presence config, they would have
+ # received user presence updates already.
+ #
+ # Thus we reset the mock, and try sending all online local user
+ # presence again
+ self.hs.get_federation_transport_client().send_transaction.reset_mock()
+
+ # Broadcast local user online presence
+ self.get_success(
+ self.module_api.send_local_online_presence_to([remote_user_id])
+ )
+
+ # Check that the expected presence updates were sent
+ expected_users = [
+ self.other_user_id,
+ self.presence_receiving_user_one_id,
+ self.presence_receiving_user_two_id,
+ ]
+
+ calls = (
+ self.hs.get_federation_transport_client().send_transaction.call_args_list
+ )
+ for call in calls:
+ call_args = call[0]
+ federation_transaction = call_args[0] # type: Transaction
+
+ # Get the sent EDUs in this transaction
+ edus = federation_transaction.get_dict()["edus"]
+
+ for edu in edus:
+ # Make sure we're only checking presence-type EDUs
+ if edu["edu_type"] != EduTypes.Presence:
+ continue
+
+ # EDUs can contain multiple presence updates
+ for presence_update in edu["content"]["push"]:
+ # Check for presence updates that contain the user IDs we're after
+ expected_users.remove(presence_update["user_id"])
+
+ # Ensure that no offline states are being sent out
+ self.assertNotEqual(presence_update["presence"], "offline")
+
+ self.assertEqual(len(expected_users), 0)
+
+
+def send_presence_update(
+ testcase: TestCase,
+ user_id: str,
+ access_token: str,
+ presence_state: str,
+ status_message: Optional[str] = None,
+) -> JsonDict:
+ # Build the presence body
+ body = {"presence": presence_state}
+ if status_message:
+ body["status_msg"] = status_message
+
+ # Update the user's presence state
+ channel = testcase.make_request(
+ "PUT", "/presence/%s/status" % (user_id,), body, access_token=access_token
+ )
+ testcase.assertEqual(channel.code, 200)
+
+ return channel.json_body
+
+
+def sync_presence(
+ testcase: TestCase,
+ user_id: str,
+ since_token: Optional[StreamToken] = None,
+) -> Tuple[List[UserPresenceState], StreamToken]:
+ """Perform a sync request for the given user and return the user presence updates
+ they've received, as well as the next_batch token.
+
+ This method assumes testcase.sync_handler points to the homeserver's sync handler.
+
+ Args:
+ testcase: The testcase that is currently being run.
+ user_id: The ID of the user to generate a sync response for.
+ since_token: An optional token indicating the point to sync from.
+
+ Returns:
+ A tuple containing a list of presence updates, and the sync response's
+ next_batch token.
+ """
+ requester = create_requester(user_id)
+ sync_config = generate_sync_config(requester.user.to_string())
+ sync_result = testcase.get_success(
+ testcase.sync_handler.wait_for_sync_for_user(
+ requester, sync_config, since_token
+ )
+ )
+
+ return sync_result.presence, sync_result.next_batch
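Outside of tests, the module API call exercised above can be invoked from any module holding a `ModuleApi` reference; remote user IDs trigger federation sends, as checked against the transaction mock. User IDs here are illustrative:

    await module_api.send_local_online_presence_to(
        ["@alice:example.org", "@faraway:remote.example"]
    )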
diff --git a/tests/events/test_snapshot.py b/tests/events/test_snapshot.py
index ec85324c..48e98aac 100644
--- a/tests/events/test_snapshot.py
+++ b/tests/events/test_snapshot.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py
index 8ba36c60..9274ce4c 100644
--- a/tests/events/test_utils.py
+++ b/tests/events/test_utils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the 'License');
diff --git a/tests/federation/test_complexity.py b/tests/federation/test_complexity.py
index 8186b8ca..1a809b2a 100644
--- a/tests/federation/test_complexity.py
+++ b/tests/federation/test_complexity.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 Matrix.org Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from synapse.api.errors import Codes, SynapseError
from synapse.rest import admin
diff --git a/tests/federation/test_federation_catch_up.py b/tests/federation/test_federation_catch_up.py
index 95eac6a5..802c5ad2 100644
--- a/tests/federation/test_federation_catch_up.py
+++ b/tests/federation/test_federation_catch_up.py
@@ -1,6 +1,5 @@
from typing import List, Tuple
-
-from mock import Mock
+from unittest.mock import Mock
from synapse.api.constants import EventTypes
from synapse.events import EventBase
diff --git a/tests/federation/test_federation_sender.py b/tests/federation/test_federation_sender.py
index ecc3faa5..b00dd143 100644
--- a/tests/federation/test_federation_sender.py
+++ b/tests/federation/test_federation_sender.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,8 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
-
-from mock import Mock
+from unittest.mock import Mock
from signedjson import key, sign
from signedjson.types import BaseKey, SigningKey
diff --git a/tests/federation/test_federation_server.py b/tests/federation/test_federation_server.py
index cfeccc05..17378915 100644
--- a/tests/federation/test_federation_server.py
+++ b/tests/federation/test_federation_server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
# Copyright 2019 Matrix.org Federation C.I.C
#
@@ -75,6 +74,25 @@ class ServerACLsTestCase(unittest.TestCase):
self.assertFalse(server_matches_acl_event("[1:2::]", e))
self.assertTrue(server_matches_acl_event("1:2:3:4", e))
+ def test_wildcard_matching(self):
+ e = _create_acl_event({"allow": ["good*.com"]})
+ self.assertTrue(
+ server_matches_acl_event("good.com", e),
+ "* matches 0 characters",
+ )
+ self.assertTrue(
+ server_matches_acl_event("GOOD.COM", e),
+ "pattern is case-insensitive",
+ )
+ self.assertTrue(
+ server_matches_acl_event("good.aa.com", e),
+ "* matches several characters, including '.'",
+ )
+ self.assertFalse(
+ server_matches_acl_event("ishgood.com", e),
+ "pattern does not allow prefixes",
+ )
+
class StateQueryTests(unittest.FederatingHomeserverTestCase):
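The new test pins down the wildcard semantics of `m.room.server_acl` patterns: `*` matches any run of characters (including `.`), matching is case-insensitive, and there is no implicit prefix or suffix matching. In short:

    e = _create_acl_event({"allow": ["good*.com"]})
    server_matches_acl_event("good.aa.com", e)  # True: "*" spans dots
    server_matches_acl_event("GOOD.COM", e)     # True: case-insensitive
    server_matches_acl_event("ishgood.com", e)  # False: no prefix matching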
diff --git a/tests/federation/transport/test_server.py b/tests/federation/transport/test_server.py
index 85500e16..84fa72b9 100644
--- a/tests/federation/transport/test_server.py
+++ b/tests/federation/transport/test_server.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/handlers/test_admin.py b/tests/handlers/test_admin.py
index a01fdd08..18a734da 100644
--- a/tests/handlers/test_admin.py
+++ b/tests/handlers/test_admin.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,8 +13,7 @@
# limitations under the License.
from collections import Counter
-
-from mock import Mock
+from unittest.mock import Mock
import synapse.api.errors
import synapse.handlers.admin
diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py
index d5d3fdd9..b037b12a 100644
--- a/tests/handlers/test_appservice.py
+++ b/tests/handlers/test_appservice.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py
index c9f889b5..fe7e9484 100644
--- a/tests/handlers/test_auth.py
+++ b/tests/handlers/test_auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
import pymacaroons
diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py
index 7975af24..b625995d 100644
--- a/tests/handlers/test_cas.py
+++ b/tests/handlers/test_cas.py
@@ -11,9 +11,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
-from synapse.handlers.cas_handler import CasResponse
+from synapse.handlers.cas import CasResponse
from tests.test_utils import simple_async_mock
from tests.unittest import HomeserverTestCase, override_config
diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py
index 821629bc..84c38b29 100644
--- a/tests/handlers/test_device.py
+++ b/tests/handlers/test_device.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py
index 863d8737..1908d3c2 100644
--- a/tests/handlers/test_directory.py
+++ b/tests/handlers/test_directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,7 @@
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
import synapse
import synapse.api.errors
diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py
index 5e86c5e5..61a00130 100644
--- a/tests/handlers/test_e2e_keys.py
+++ b/tests/handlers/test_e2e_keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -14,7 +13,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+from unittest import mock
from signedjson import key as key, sign as sign
diff --git a/tests/handlers/test_e2e_room_keys.py b/tests/handlers/test_e2e_room_keys.py
index d7498aa5..9b7e7a8e 100644
--- a/tests/handlers/test_e2e_room_keys.py
+++ b/tests/handlers/test_e2e_room_keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2017 New Vector Ltd
# Copyright 2019 Matrix.org Foundation C.I.C.
@@ -16,8 +15,7 @@
# limitations under the License.
import copy
-
-import mock
+from unittest import mock
from synapse.api.errors import SynapseError
diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py
index 3af36119..8796af45 100644
--- a/tests/handlers/test_federation.py
+++ b/tests/handlers/test_federation.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -223,7 +222,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
room_version,
)
- for i in range(3):
+ for _ in range(3):
event = create_invite()
self.get_success(
self.handler.on_invite_request(
diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py
index a0d1ebdb..a8a9fc5b 100644
--- a/tests/handlers/test_message.py
+++ b/tests/handlers/test_message.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py
index c7796fb8..a25c89bd 100644
--- a/tests/handlers/test_oidc.py
+++ b/tests/handlers/test_oidc.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Quentin Gliech
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,10 +13,9 @@
# limitations under the License.
import json
import os
+from unittest.mock import ANY, Mock, patch
from urllib.parse import parse_qs, urlparse
-from mock import ANY, Mock, patch
-
import pymacaroons
from synapse.handlers.sso import MappingException
@@ -501,7 +499,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
self.assertRenderedError("fetch_error")
# Handle code exchange failure
- from synapse.handlers.oidc_handler import OidcError
+ from synapse.handlers.oidc import OidcError
self.provider._exchange_code = simple_async_mock(
raises=OidcError("invalid_request")
@@ -585,7 +583,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
body=b'{"error": "foo", "error_description": "bar"}',
)
)
- from synapse.handlers.oidc_handler import OidcError
+ from synapse.handlers.oidc import OidcError
exc = self.get_failure(self.provider._exchange_code(code), OidcError)
self.assertEqual(exc.value.error, "foo")
@@ -1128,7 +1126,7 @@ class OidcHandlerTestCase(HomeserverTestCase):
client_redirect_url: str,
ui_auth_session_id: str = "",
) -> str:
- from synapse.handlers.oidc_handler import OidcSessionData
+ from synapse.handlers.oidc import OidcSessionData
return self.handler._token_generator.generate_oidc_session_token(
state=state,
@@ -1154,7 +1152,7 @@ async def _make_callback_with_userinfo(
userinfo: the OIDC userinfo dict
client_redirect_url: the URL to redirect to on success.
"""
- from synapse.handlers.oidc_handler import OidcSessionData
+ from synapse.handlers.oidc import OidcSessionData
handler = hs.get_oidc_handler()
provider = handler._providers["oidc"]
diff --git a/tests/handlers/test_password_providers.py b/tests/handlers/test_password_providers.py
index a98a65ae..32651db0 100644
--- a/tests/handlers/test_password_providers.py
+++ b/tests/handlers/test_password_providers.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,8 +15,7 @@
"""Tests for the password_auth_provider interface"""
from typing import Any, Type, Union
-
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 77330f59..ce330e79 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,7 @@
# limitations under the License.
-from mock import Mock, call
+from unittest.mock import Mock, call
from signedjson.key import generate_signing_key
@@ -22,6 +21,7 @@ from synapse.api.constants import EventTypes, Membership, PresenceState
from synapse.api.presence import UserPresenceState
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events.builder import EventBuilder
+from synapse.federation.sender import FederationSender
from synapse.handlers.presence import (
EXTERNAL_PROCESS_EXPIRY,
FEDERATION_PING_INTERVAL,
@@ -472,6 +472,190 @@ class PresenceHandlerTestCase(unittest.HomeserverTestCase):
self.assertEqual(state.state, PresenceState.OFFLINE)
+class PresenceFederationQueueTestCase(unittest.HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
+ self.presence_handler = hs.get_presence_handler()
+ self.clock = hs.get_clock()
+ self.instance_name = hs.get_instance_name()
+
+ self.queue = self.presence_handler.get_federation_queue()
+
+ def test_send_and_get(self):
+ state1 = UserPresenceState.default("@user1:test")
+ state2 = UserPresenceState.default("@user2:test")
+ state3 = UserPresenceState.default("@user3:test")
+
+ prev_token = self.queue.get_current_token(self.instance_name)
+
+ self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2"))
+ self.queue.send_presence_to_destinations((state3,), ("dest3",))
+
+ now_token = self.queue.get_current_token(self.instance_name)
+
+ rows, upto_token, limited = self.get_success(
+ self.queue.get_replication_rows("master", prev_token, now_token, 10)
+ )
+
+ self.assertEqual(upto_token, now_token)
+ self.assertFalse(limited)
+
+ expected_rows = [
+ (1, ("dest1", "@user1:test")),
+ (1, ("dest2", "@user1:test")),
+ (1, ("dest1", "@user2:test")),
+ (1, ("dest2", "@user2:test")),
+ (2, ("dest3", "@user3:test")),
+ ]
+
+ self.assertCountEqual(rows, expected_rows)
+
+ now_token = self.queue.get_current_token(self.instance_name)
+ rows, upto_token, limited = self.get_success(
+ self.queue.get_replication_rows("master", upto_token, now_token, 10)
+ )
+ self.assertEqual(upto_token, now_token)
+ self.assertFalse(limited)
+ self.assertCountEqual(rows, [])
+
+ def test_send_and_get_split(self):
+ state1 = UserPresenceState.default("@user1:test")
+ state2 = UserPresenceState.default("@user2:test")
+ state3 = UserPresenceState.default("@user3:test")
+
+ prev_token = self.queue.get_current_token(self.instance_name)
+
+ self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2"))
+
+ now_token = self.queue.get_current_token(self.instance_name)
+
+ self.queue.send_presence_to_destinations((state3,), ("dest3",))
+
+ rows, upto_token, limited = self.get_success(
+ self.queue.get_replication_rows("master", prev_token, now_token, 10)
+ )
+
+ self.assertEqual(upto_token, now_token)
+ self.assertFalse(limited)
+
+ expected_rows = [
+ (1, ("dest1", "@user1:test")),
+ (1, ("dest2", "@user1:test")),
+ (1, ("dest1", "@user2:test")),
+ (1, ("dest2", "@user2:test")),
+ ]
+
+ self.assertCountEqual(rows, expected_rows)
+
+ now_token = self.queue.get_current_token(self.instance_name)
+ rows, upto_token, limited = self.get_success(
+ self.queue.get_replication_rows("master", upto_token, now_token, 10)
+ )
+
+ self.assertEqual(upto_token, now_token)
+ self.assertFalse(limited)
+
+ expected_rows = [
+ (2, ("dest3", "@user3:test")),
+ ]
+
+ self.assertCountEqual(rows, expected_rows)
+
+ def test_clear_queue_all(self):
+ state1 = UserPresenceState.default("@user1:test")
+ state2 = UserPresenceState.default("@user2:test")
+ state3 = UserPresenceState.default("@user3:test")
+
+ prev_token = self.queue.get_current_token(self.instance_name)
+
+ self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2"))
+ self.queue.send_presence_to_destinations((state3,), ("dest3",))
+
+ self.reactor.advance(10 * 60 * 1000)
+
+ now_token = self.queue.get_current_token(self.instance_name)
+
+ rows, upto_token, limited = self.get_success(
+ self.queue.get_replication_rows("master", prev_token, now_token, 10)
+ )
+ self.assertEqual(upto_token, now_token)
+ self.assertFalse(limited)
+ self.assertCountEqual(rows, [])
+
+ prev_token = self.queue.get_current_token(self.instance_name)
+
+ self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2"))
+ self.queue.send_presence_to_destinations((state3,), ("dest3",))
+
+ now_token = self.queue.get_current_token(self.instance_name)
+
+ rows, upto_token, limited = self.get_success(
+ self.queue.get_replication_rows("master", prev_token, now_token, 10)
+ )
+ self.assertEqual(upto_token, now_token)
+ self.assertFalse(limited)
+
+ expected_rows = [
+ (3, ("dest1", "@user1:test")),
+ (3, ("dest2", "@user1:test")),
+ (3, ("dest1", "@user2:test")),
+ (3, ("dest2", "@user2:test")),
+ (4, ("dest3", "@user3:test")),
+ ]
+
+ self.assertCountEqual(rows, expected_rows)
+
+ def test_partially_clear_queue(self):
+ state1 = UserPresenceState.default("@user1:test")
+ state2 = UserPresenceState.default("@user2:test")
+ state3 = UserPresenceState.default("@user3:test")
+
+ prev_token = self.queue.get_current_token(self.instance_name)
+
+ self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2"))
+
+ self.reactor.advance(2 * 60 * 1000)
+
+ self.queue.send_presence_to_destinations((state3,), ("dest3",))
+
+ self.reactor.advance(4 * 60 * 1000)
+
+ now_token = self.queue.get_current_token(self.instance_name)
+
+ rows, upto_token, limited = self.get_success(
+ self.queue.get_replication_rows("master", prev_token, now_token, 10)
+ )
+ self.assertEqual(upto_token, now_token)
+ self.assertFalse(limited)
+
+        # prev_token predates the position the queue has been cleared up to,
+        # so no rows can be returned for it.
+        self.assertCountEqual(rows, [])
+
+ prev_token = self.queue.get_current_token(self.instance_name)
+
+ self.queue.send_presence_to_destinations((state1, state2), ("dest1", "dest2"))
+ self.queue.send_presence_to_destinations((state3,), ("dest3",))
+
+ now_token = self.queue.get_current_token(self.instance_name)
+
+ rows, upto_token, limited = self.get_success(
+ self.queue.get_replication_rows("master", prev_token, now_token, 10)
+ )
+ self.assertEqual(upto_token, now_token)
+ self.assertFalse(limited)
+
+ expected_rows = [
+ (3, ("dest1", "@user1:test")),
+ (3, ("dest2", "@user1:test")),
+ (3, ("dest1", "@user2:test")),
+ (3, ("dest2", "@user2:test")),
+ (4, ("dest3", "@user3:test")),
+ ]
+
+ self.assertCountEqual(rows, expected_rows)
+
+
class PresenceJoinTestCase(unittest.HomeserverTestCase):
"""Tests remote servers get told about presence of users in the room when
they join and when new local users join.
@@ -483,10 +667,17 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
- "server", federation_http_client=None, federation_sender=Mock()
+ "server",
+ federation_http_client=None,
+ federation_sender=Mock(spec=FederationSender),
)
return hs
+ def default_config(self):
+ config = super().default_config()
+ config["send_federation"] = True
+ return config
+
def prepare(self, reactor, clock, hs):
self.federation_sender = hs.get_federation_sender()
self.event_builder_factory = hs.get_event_builder_factory()
@@ -530,9 +721,6 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase):
# Add a new remote server to the room
self._add_new_user(room_id, "@alice:server2")
- # We shouldn't have sent out any local presence *updates*
- self.federation_sender.send_presence.assert_not_called()
-
# When new server is joined we send it the local users presence states.
# We expect to only see user @test2:server, as @test:server is offline
# and has a zero last_active_ts
@@ -551,7 +739,6 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase):
self.federation_sender.reset_mock()
self._add_new_user(room_id, "@bob:server3")
- self.federation_sender.send_presence.assert_not_called()
self.federation_sender.send_presence_to_destinations.assert_called_once_with(
destinations=["server3"], states={expected_state}
)
@@ -596,9 +783,6 @@ class PresenceJoinTestCase(unittest.HomeserverTestCase):
self.reactor.pump([0]) # Wait for presence updates to be handled
- # We shouldn't have sent out any local presence *updates*
- self.federation_sender.send_presence.assert_not_called()
-
# We expect to only send test2 presence to server2 and server3
expected_state = self.get_success(
self.presence_handler.current_state_for_user("@test2:server")
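
A note on the row format asserted throughout PresenceFederationQueueTestCase:
each call to send_presence_to_destinations is allotted one monotonically
increasing position, and get_replication_rows flattens that into one
(position, (destination, user_id)) row per destination/user pair, returning
only positions after the caller's previous token. A toy model of that
bookkeeping (the class below is an illustrative assumption, not the real
PresenceFederationQueue, and it omits clearing and limiting):

    from typing import Iterable, List, Tuple

    class ToyPresenceQueue:
        def __init__(self) -> None:
            self._next_pos = 1
            self._rows: List[Tuple[int, Tuple[str, str]]] = []

        def get_current_token(self) -> int:
            return self._next_pos - 1

        def send(self, user_ids: Iterable[str], destinations: Iterable[str]) -> None:
            # One position per send, shared by every (destination, user) pair.
            pos = self._next_pos
            self._next_pos += 1
            for dest in destinations:
                for user_id in user_ids:
                    self._rows.append((pos, (dest, user_id)))

        def get_replication_rows(self, from_token: int, upto_token: int):
            # Rows strictly after from_token, up to and including upto_token.
            return [r for r in self._rows if from_token < r[0] <= upto_token]

    q = ToyPresenceQueue()
    q.send(["@user1:test", "@user2:test"], ["dest1", "dest2"])
    q.send(["@user3:test"], ["dest3"])
    rows = q.get_replication_rows(0, q.get_current_token())
    assert (2, ("dest3", "@user3:test")) in rows and len(rows) == 5
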
diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py
index 75c6a4e2..5330a9b3 100644
--- a/tests/handlers/test_profile.py
+++ b/tests/handlers/test_profile.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
import synapse.types
from synapse.api.errors import AuthError, SynapseError
diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
index 94b69035..608f8f3d 100644
--- a/tests/handlers/test_register.py
+++ b/tests/handlers/test_register.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from synapse.api.auth import Auth
from synapse.api.constants import UserTypes
diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py
index 30efd43b..8cfc184f 100644
--- a/tests/handlers/test_saml.py
+++ b/tests/handlers/test_saml.py
@@ -13,8 +13,7 @@
# limitations under the License.
from typing import Optional
-
-from mock import Mock
+from unittest.mock import Mock
import attr
diff --git a/tests/handlers/test_stats.py b/tests/handlers/test_stats.py
index 312c0a0d..c9d4fd93 100644
--- a/tests/handlers/test_stats.py
+++ b/tests/handlers/test_stats.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py
index e6258614..c8b43305 100644
--- a/tests/handlers/test_sync.py
+++ b/tests/handlers/test_sync.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -37,7 +36,7 @@ class SyncTestCase(tests.unittest.HomeserverTestCase):
def test_wait_for_sync_for_user_auth_blocking(self):
user_id1 = "@user1:test"
user_id2 = "@user2:test"
- sync_config = self._generate_sync_config(user_id1)
+ sync_config = generate_sync_config(user_id1)
requester = create_requester(user_id1)
self.reactor.advance(100) # So we get not 0 time
@@ -60,7 +59,7 @@ class SyncTestCase(tests.unittest.HomeserverTestCase):
self.auth_blocking._hs_disabled = False
- sync_config = self._generate_sync_config(user_id2)
+ sync_config = generate_sync_config(user_id2)
requester = create_requester(user_id2)
e = self.get_failure(
@@ -69,11 +68,12 @@ class SyncTestCase(tests.unittest.HomeserverTestCase):
)
self.assertEquals(e.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)
- def _generate_sync_config(self, user_id):
- return SyncConfig(
- user=UserID(user_id.split(":")[0][1:], user_id.split(":")[1]),
- filter_collection=DEFAULT_FILTER_COLLECTION,
- is_guest=False,
- request_key="request_key",
- device_id="device_id",
- )
+
+def generate_sync_config(user_id: str) -> SyncConfig:
+ return SyncConfig(
+ user=UserID(user_id.split(":")[0][1:], user_id.split(":")[1]),
+ filter_collection=DEFAULT_FILTER_COLLECTION,
+ is_guest=False,
+ request_key="request_key",
+ device_id="device_id",
+ )
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py
index 24e71381..0c89487e 100644
--- a/tests/handlers/test_typing.py
+++ b/tests/handlers/test_typing.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,8 +15,7 @@
import json
from typing import Dict
-
-from mock import ANY, Mock, call
+from unittest.mock import ANY, Mock, call
from twisted.internet import defer
from twisted.web.resource import Resource
diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py
index 98b2f5b3..daac37ab 100644
--- a/tests/handlers/test_user_directory.py
+++ b/tests/handlers/test_user_directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
diff --git a/tests/http/__init__.py b/tests/http/__init__.py
index 3e5a8565..e74f7f5b 100644
--- a/tests/http/__init__.py
+++ b/tests/http/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/http/federation/__init__.py b/tests/http/federation/__init__.py
index 1453d045..743fb990 100644
--- a/tests/http/federation/__init__.py
+++ b/tests/http/federation/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py
index 4c56253d..e4598031 100644
--- a/tests/http/federation/test_matrix_federation_agent.py
+++ b/tests/http/federation/test_matrix_federation_agent.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-
-from mock import Mock
+from typing import Optional
+from unittest.mock import Mock
import treq
from netaddr import IPSet
@@ -180,7 +179,11 @@ class MatrixFederationAgentTests(unittest.TestCase):
_check_logcontext(context)
def _handle_well_known_connection(
- self, client_factory, expected_sni, content, response_headers={}
+ self,
+ client_factory,
+ expected_sni,
+ content,
+ response_headers: Optional[dict] = None,
):
"""Handle an outgoing HTTPs connection: wire it up to a server, check that the
request is for a .well-known, and send the response.
@@ -202,10 +205,12 @@ class MatrixFederationAgentTests(unittest.TestCase):
self.assertEqual(
request.requestHeaders.getRawHeaders(b"user-agent"), [b"test-agent"]
)
- self._send_well_known_response(request, content, headers=response_headers)
+ self._send_well_known_response(request, content, headers=response_headers or {})
return well_known_server
- def _send_well_known_response(self, request, content, headers={}):
+ def _send_well_known_response(
+ self, request, content, headers: Optional[dict] = None
+ ):
"""Check that an incoming request looks like a valid .well-known request, and
send back the response.
"""
@@ -213,7 +218,7 @@ class MatrixFederationAgentTests(unittest.TestCase):
self.assertEqual(request.path, b"/.well-known/matrix/server")
self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"testserv"])
# send back a response
- for k, v in headers.items():
+ for k, v in (headers or {}).items():
request.setHeader(k, v)
request.write(content)
request.finish()
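
The signature changes above are all instances of the same fix: a mutable
default such as headers={} is created once, at function definition time, and
then shared by every call that omits the argument. The diff swaps each one
for Optional[dict] = None plus an "or {}" at the point of use. A standalone
sketch of the pitfall and the idiom (nothing Synapse-specific):

    from typing import Optional

    def bad_append(item: str, bucket: list = []) -> list:
        bucket.append(item)    # mutates the single shared default list
        return bucket

    def good_append(item: str, bucket: Optional[list] = None) -> list:
        bucket = bucket or []  # fresh list on every call, as in the diff
        bucket.append(item)
        return bucket

    assert bad_append("a") == ["a"]
    assert bad_append("b") == ["a", "b"]  # state leaked across calls
    assert good_append("a") == ["a"]
    assert good_append("b") == ["b"]      # independent calls stay independent
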
diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py
index fee2985d..c49be33b 100644
--- a/tests/http/federation/test_srv_resolver.py
+++ b/tests/http/federation/test_srv_resolver.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
#
@@ -14,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
from twisted.internet.defer import Deferred
diff --git a/tests/http/test_additional_resource.py b/tests/http/test_additional_resource.py
index 453391a5..768c2ba4 100644
--- a/tests/http/test_additional_resource.py
+++ b/tests/http/test_additional_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/http/test_client.py b/tests/http/test_client.py
index 0ce181a5..7e2f2a01 100644
--- a/tests/http/test_client.py
+++ b/tests/http/test_client.py
@@ -13,8 +13,7 @@
# limitations under the License.
from io import BytesIO
-
-from mock import Mock
+from unittest.mock import Mock
from netaddr import IPSet
diff --git a/tests/http/test_endpoint.py b/tests/http/test_endpoint.py
index d06ea518..1f9a2f9b 100644
--- a/tests/http/test_endpoint.py
+++ b/tests/http/test_endpoint.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py
index 9c52c8fd..ed9a884d 100644
--- a/tests/http/test_fedclient.py
+++ b/tests/http/test_fedclient.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from netaddr import IPSet
from parameterized import parameterized
@@ -27,6 +26,7 @@ from twisted.web.http import HTTPChannel
from synapse.api.errors import RequestSendFailed
from synapse.http.matrixfederationclient import (
+ MAX_RESPONSE_SIZE,
MatrixFederationHttpClient,
MatrixFederationRequest,
)
@@ -561,3 +561,61 @@ class FederationClientTests(HomeserverTestCase):
f = self.failureResultOf(test_d)
self.assertIsInstance(f.value, RequestSendFailed)
+
+ def test_too_big(self):
+ """
+ Test what happens if a huge response is returned from the remote endpoint.
+ """
+
+ test_d = defer.ensureDeferred(self.cl.get_json("testserv:8008", "foo/bar"))
+
+ self.pump()
+
+ # Nothing happened yet
+ self.assertNoResult(test_d)
+
+ # Make sure treq is trying to connect
+ clients = self.reactor.tcpClients
+ self.assertEqual(len(clients), 1)
+ (host, port, factory, _timeout, _bindAddress) = clients[0]
+ self.assertEqual(host, "1.2.3.4")
+ self.assertEqual(port, 8008)
+
+ # complete the connection and wire it up to a fake transport
+ protocol = factory.buildProtocol(None)
+ transport = StringTransport()
+ protocol.makeConnection(transport)
+
+ # that should have made it send the request to the transport
+ self.assertRegex(transport.value(), b"^GET /foo/bar")
+ self.assertRegex(transport.value(), b"Host: testserv:8008")
+
+ # Deferred is still without a result
+ self.assertNoResult(test_d)
+
+ # Send it a huge HTTP response
+ protocol.dataReceived(
+ b"HTTP/1.1 200 OK\r\n"
+ b"Server: Fake\r\n"
+ b"Content-Type: application/json\r\n"
+ b"\r\n"
+ )
+
+ self.pump()
+
+ # should still be waiting
+ self.assertNoResult(test_d)
+
+ sent = 0
+ chunk_size = 1024 * 512
+ while not test_d.called:
+ protocol.dataReceived(b"a" * chunk_size)
+ sent += chunk_size
+ self.assertLessEqual(sent, MAX_RESPONSE_SIZE)
+
+ self.assertEqual(sent, MAX_RESPONSE_SIZE)
+
+ f = self.failureResultOf(test_d)
+ self.assertIsInstance(f.value, RequestSendFailed)
+
+ self.assertTrue(transport.disconnecting)
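
test_too_big feeds the body in 512 KiB chunks, so its closing
assertEqual(sent, MAX_RESPONSE_SIZE) only holds because the limit is a
multiple of the chunk size and the client gives up as soon as the byte budget
is used up. A sketch of that style of guard (an illustration of the idea, not
the actual MatrixFederationHttpClient body reader):

    class ResponseTooLarge(Exception):
        pass

    class BoundedAccumulator:
        def __init__(self, max_size: int) -> None:
            self.max_size = max_size
            self.received = 0

        def data_received(self, data: bytes) -> None:
            self.received += len(data)
            if self.received >= self.max_size:
                # Abort rather than buffer without bound; mirrors the test,
                # where the deferred fails once sent == MAX_RESPONSE_SIZE.
                raise ResponseTooLarge(f"got {self.received} bytes")

    acc = BoundedAccumulator(max_size=1024 * 1024)
    acc.data_received(b"a" * (512 * 1024))      # half the budget: fine
    try:
        acc.data_received(b"a" * (512 * 1024))  # budget used up: rejected
    except ResponseTooLarge:
        pass
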
diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py
index 3ea8b5be..fefc8099 100644
--- a/tests/http/test_proxyagent.py
+++ b/tests/http/test_proxyagent.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/http/test_servlet.py b/tests/http/test_servlet.py
index 45089158..a80bfb9f 100644
--- a/tests/http/test_servlet.py
+++ b/tests/http/test_servlet.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,8 +13,7 @@
# limitations under the License.
import json
from io import BytesIO
-
-from mock import Mock
+from unittest.mock import Mock
from synapse.api.errors import SynapseError
from synapse.http.servlet import (
diff --git a/tests/http/test_simple_client.py b/tests/http/test_simple_client.py
index a1cf0862..c85a3665 100644
--- a/tests/http/test_simple_client.py
+++ b/tests/http/test_simple_client.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from netaddr import IPSet
diff --git a/tests/http/test_site.py b/tests/http/test_site.py
new file mode 100644
index 00000000..8c13b4f6
--- /dev/null
+++ b/tests/http/test_site.py
@@ -0,0 +1,83 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from twisted.internet.address import IPv6Address
+from twisted.test.proto_helpers import StringTransport
+
+from synapse.app.homeserver import SynapseHomeServer
+
+from tests.unittest import HomeserverTestCase
+
+
+class SynapseRequestTestCase(HomeserverTestCase):
+ def make_homeserver(self, reactor, clock):
+ return self.setup_test_homeserver(homeserver_to_use=SynapseHomeServer)
+
+ def test_large_request(self):
+ """overlarge HTTP requests should be rejected"""
+ self.hs.start_listening()
+
+ # find the HTTP server which is configured to listen on port 0
+ (port, factory, _backlog, interface) = self.reactor.tcpServers[0]
+ self.assertEqual(interface, "::")
+ self.assertEqual(port, 0)
+
+ # as a control case, first send a regular request.
+
+ # complete the connection and wire it up to a fake transport
+        client_address = IPv6Address("TCP", "::1", 2345)
+ protocol = factory.buildProtocol(client_address)
+ transport = StringTransport()
+ protocol.makeConnection(transport)
+
+ protocol.dataReceived(
+ b"POST / HTTP/1.1\r\n"
+ b"Connection: close\r\n"
+ b"Transfer-Encoding: chunked\r\n"
+ b"\r\n"
+ b"0\r\n"
+ b"\r\n"
+ )
+
+ while not transport.disconnecting:
+ self.reactor.advance(1)
+
+ # we should get a 404
+ self.assertRegex(transport.value().decode(), r"^HTTP/1\.1 404 ")
+
+ # now send an oversized request
+ protocol = factory.buildProtocol(client_address)
+ transport = StringTransport()
+ protocol.makeConnection(transport)
+
+ protocol.dataReceived(
+ b"POST / HTTP/1.1\r\n"
+ b"Connection: close\r\n"
+ b"Transfer-Encoding: chunked\r\n"
+ b"\r\n"
+ )
+
+ # we deliberately send all the data in one big chunk, to ensure that
+ # twisted isn't buffering the data in the chunked transfer decoder.
+ # we start with the chunk size, in hex. (We won't actually send this much)
+ protocol.dataReceived(b"10000000\r\n")
+ sent = 0
+ while not transport.disconnected:
+ self.assertLess(sent, 0x10000000, "connection did not drop")
+ protocol.dataReceived(b"\0" * 1024)
+ sent += 1024
+
+ # default max upload size is 50M, so it should drop on the next buffer after
+ # that.
+ self.assertEqual(sent, 50 * 1024 * 1024 + 1024)
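
Because the oversized request is sent with chunked transfer encoding, no
Content-Length is available up front: the server has to count the body as it
streams in and drop the connection on the first write past the limit. Taking
the 50 MiB default mentioned in the comment and the test's 1 KiB writes as
given, the expected byte count in the final assertion can be reproduced:

    limit = 50 * 1024 * 1024  # assumed default max upload size
    write = 1024
    sent = 0
    while sent <= limit:      # the drop only happens once the limit is crossed
        sent += write
    assert sent == 50 * 1024 * 1024 + 1024
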
diff --git a/tests/logging/__init__.py b/tests/logging/__init__.py
index a58d5144..1acf5666 100644
--- a/tests/logging/__init__.py
+++ b/tests/logging/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/logging/test_remote_handler.py b/tests/logging/test_remote_handler.py
index 4bc27a1d..b0d046fe 100644
--- a/tests/logging/test_remote_handler.py
+++ b/tests/logging/test_remote_handler.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/logging/test_terse_json.py b/tests/logging/test_terse_json.py
index 48a74e2e..11607169 100644
--- a/tests/logging/test_terse_json.py
+++ b/tests/logging/test_terse_json.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,15 +11,19 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
import json
import logging
-from io import StringIO
+from io import BytesIO, StringIO
+from unittest.mock import Mock, patch
+
+from twisted.web.server import Request
+from synapse.http.site import SynapseRequest
from synapse.logging._terse_json import JsonFormatter, TerseJsonFormatter
from synapse.logging.context import LoggingContext, LoggingContextFilter
from tests.logging import LoggerCleanupMixin
+from tests.server import FakeChannel
from tests.unittest import TestCase
@@ -120,7 +123,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
handler.addFilter(LoggingContextFilter())
logger = self.get_logger(handler)
- with LoggingContext(request="test"):
+ with LoggingContext("name"):
logger.info("Hello there, %s!", "wally")
log = self.get_log_line()
@@ -134,4 +137,63 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
]
self.assertCountEqual(log.keys(), expected_log_keys)
self.assertEqual(log["log"], "Hello there, wally!")
- self.assertEqual(log["request"], "test")
+ self.assertEqual(log["request"], "name")
+
+ def test_with_request_context(self):
+ """
+ Information from the logging context request should be added to the JSON response.
+ """
+ handler = logging.StreamHandler(self.output)
+ handler.setFormatter(JsonFormatter())
+ handler.addFilter(LoggingContextFilter())
+ logger = self.get_logger(handler)
+
+ # A full request isn't needed here.
+ site = Mock(spec=["site_tag", "server_version_string", "getResourceFor"])
+ site.site_tag = "test-site"
+ site.server_version_string = "Server v1"
+ request = SynapseRequest(FakeChannel(site, None))
+ # Call requestReceived to finish instantiating the object.
+ request.content = BytesIO()
+ # Partially skip some of the internal processing of SynapseRequest.
+ request._started_processing = Mock()
+ request.request_metrics = Mock(spec=["name"])
+ with patch.object(Request, "render"):
+ request.requestReceived(b"POST", b"/_matrix/client/versions", b"1.1")
+
+ # Also set the requester to ensure the processing works.
+ request.requester = "@foo:test"
+
+ with LoggingContext(
+ request.get_request_id(), parent_context=request.logcontext
+ ):
+ logger.info("Hello there, %s!", "wally")
+
+ log = self.get_log_line()
+
+ # The terse logger includes additional request information, if possible.
+ expected_log_keys = [
+ "log",
+ "level",
+ "namespace",
+ "request",
+ "ip_address",
+ "site_tag",
+ "requester",
+ "authenticated_entity",
+ "method",
+ "url",
+ "protocol",
+ "user_agent",
+ ]
+ self.assertCountEqual(log.keys(), expected_log_keys)
+ self.assertEqual(log["log"], "Hello there, wally!")
+ self.assertTrue(log["request"].startswith("POST-"))
+ self.assertEqual(log["ip_address"], "127.0.0.1")
+ self.assertEqual(log["site_tag"], "test-site")
+ self.assertEqual(log["requester"], "@foo:test")
+ self.assertEqual(log["authenticated_entity"], "@foo:test")
+ self.assertEqual(log["method"], "POST")
+ self.assertEqual(log["url"], "/_matrix/client/versions")
+ self.assertEqual(log["protocol"], "1.1")
+ self.assertEqual(log["user_agent"], "")
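
For context on the assertions above: LoggingContextFilter copies fields from
the active LoggingContext onto each log record, which is how the request
metadata ends up in the JSON output. A minimal illustration of the same
filter-plus-JSON-formatter pattern using only the standard library (this is a
simplified stand-in, not Synapse's JsonFormatter or LoggingContextFilter):

    import json
    import logging
    from io import StringIO

    class ContextFilter(logging.Filter):
        # Attaches a fixed "request" field to every record; the real filter
        # reads the field from the current logging context instead.
        def __init__(self, request: str) -> None:
            super().__init__()
            self.request = request

        def filter(self, record: logging.LogRecord) -> bool:
            record.request = self.request
            return True

    class JsonLineFormatter(logging.Formatter):
        def format(self, record: logging.LogRecord) -> str:
            return json.dumps(
                {
                    "log": record.getMessage(),
                    "level": record.levelname,
                    "request": getattr(record, "request", ""),
                }
            )

    output = StringIO()
    handler = logging.StreamHandler(output)
    handler.setFormatter(JsonLineFormatter())
    handler.addFilter(ContextFilter("POST-1"))
    logger = logging.getLogger("example")
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    logger.info("Hello there, %s!", "wally")
    assert json.loads(output.getvalue())["request"] == "POST-1"
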
diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py
index edacd1b5..742ad14b 100644
--- a/tests/module_api/test_api.py
+++ b/tests/module_api/test_api.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,27 +11,39 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
+from synapse.api.constants import EduTypes
from synapse.events import EventBase
+from synapse.federation.units import Transaction
+from synapse.handlers.presence import UserPresenceState
from synapse.rest import admin
-from synapse.rest.client.v1 import login, room
+from synapse.rest.client.v1 import login, presence, room
from synapse.types import create_requester
-from tests.unittest import HomeserverTestCase
+from tests.events.test_presence_router import send_presence_update, sync_presence
+from tests.test_utils.event_injection import inject_member_event
+from tests.unittest import FederatingHomeserverTestCase, override_config
-class ModuleApiTestCase(HomeserverTestCase):
+class ModuleApiTestCase(FederatingHomeserverTestCase):
servlets = [
admin.register_servlets,
login.register_servlets,
room.register_servlets,
+ presence.register_servlets,
]
def prepare(self, reactor, clock, homeserver):
self.store = homeserver.get_datastore()
self.module_api = homeserver.get_module_api()
self.event_creation_handler = homeserver.get_event_creation_handler()
+ self.sync_handler = homeserver.get_sync_handler()
+
+ def make_homeserver(self, reactor, clock):
+ return self.setup_test_homeserver(
+ federation_transport_client=Mock(spec=["send_transaction"]),
+ )
def test_can_register_user(self):
"""Tests that an external module can register a user"""
@@ -205,3 +216,161 @@ class ModuleApiTestCase(HomeserverTestCase):
)
)
self.assertFalse(is_in_public_rooms)
+
+ # The ability to send federation is required by send_local_online_presence_to.
+ @override_config({"send_federation": True})
+ def test_send_local_online_presence_to(self):
+ """Tests that send_local_presence_to_users sends local online presence to local users."""
+ # Create a user who will send presence updates
+ self.presence_receiver_id = self.register_user("presence_receiver", "monkey")
+ self.presence_receiver_tok = self.login("presence_receiver", "monkey")
+
+ # And another user that will send presence updates out
+ self.presence_sender_id = self.register_user("presence_sender", "monkey")
+ self.presence_sender_tok = self.login("presence_sender", "monkey")
+
+ # Put them in a room together so they will receive each other's presence updates
+ room_id = self.helper.create_room_as(
+ self.presence_receiver_id,
+ tok=self.presence_receiver_tok,
+ )
+ self.helper.join(room_id, self.presence_sender_id, tok=self.presence_sender_tok)
+
+ # Presence sender comes online
+ send_presence_update(
+ self,
+ self.presence_sender_id,
+ self.presence_sender_tok,
+ "online",
+ "I'm online!",
+ )
+
+ # Presence receiver should have received it
+ presence_updates, sync_token = sync_presence(self, self.presence_receiver_id)
+ self.assertEqual(len(presence_updates), 1)
+
+ presence_update = presence_updates[0] # type: UserPresenceState
+ self.assertEqual(presence_update.user_id, self.presence_sender_id)
+ self.assertEqual(presence_update.state, "online")
+
+ # Syncing again should result in no presence updates
+ presence_updates, sync_token = sync_presence(
+ self, self.presence_receiver_id, sync_token
+ )
+ self.assertEqual(len(presence_updates), 0)
+
+ # Trigger sending local online presence
+ self.get_success(
+ self.module_api.send_local_online_presence_to(
+ [
+ self.presence_receiver_id,
+ ]
+ )
+ )
+
+ # Presence receiver should have received online presence again
+ presence_updates, sync_token = sync_presence(
+ self, self.presence_receiver_id, sync_token
+ )
+ self.assertEqual(len(presence_updates), 1)
+
+ presence_update = presence_updates[0] # type: UserPresenceState
+ self.assertEqual(presence_update.user_id, self.presence_sender_id)
+ self.assertEqual(presence_update.state, "online")
+
+ # Presence sender goes offline
+ send_presence_update(
+ self,
+ self.presence_sender_id,
+ self.presence_sender_tok,
+ "offline",
+ "I slink back into the darkness.",
+ )
+
+ # Trigger sending local online presence
+ self.get_success(
+ self.module_api.send_local_online_presence_to(
+ [
+ self.presence_receiver_id,
+ ]
+ )
+ )
+
+ # Presence receiver should *not* have received offline state
+ presence_updates, sync_token = sync_presence(
+ self, self.presence_receiver_id, sync_token
+ )
+ self.assertEqual(len(presence_updates), 0)
+
+ @override_config({"send_federation": True})
+ def test_send_local_online_presence_to_federation(self):
+ """Tests that send_local_presence_to_users sends local online presence to remote users."""
+ # Create a user who will send presence updates
+ self.presence_sender_id = self.register_user("presence_sender", "monkey")
+ self.presence_sender_tok = self.login("presence_sender", "monkey")
+
+ # And a room they're a part of
+ room_id = self.helper.create_room_as(
+ self.presence_sender_id,
+ tok=self.presence_sender_tok,
+ )
+
+ # Mark them as online
+ send_presence_update(
+ self,
+ self.presence_sender_id,
+ self.presence_sender_tok,
+ "online",
+ "I'm online!",
+ )
+
+ # Make up a remote user to send presence to
+ remote_user_id = "@far_away_person:island"
+
+ # Create a join membership event for the remote user into the room.
+ # This allows presence information to flow from one user to the other.
+ self.get_success(
+ inject_member_event(
+ self.hs,
+ room_id,
+ sender=remote_user_id,
+ target=remote_user_id,
+ membership="join",
+ )
+ )
+
+ # The remote user would have received the existing room members' presence
+ # when they joined the room.
+ #
+ # Thus we reset the mock, and try sending online local user
+ # presence again
+ self.hs.get_federation_transport_client().send_transaction.reset_mock()
+
+ # Broadcast local user online presence
+ self.get_success(
+ self.module_api.send_local_online_presence_to([remote_user_id])
+ )
+
+ # Check that a presence update was sent as part of a federation transaction
+ found_update = False
+ calls = (
+ self.hs.get_federation_transport_client().send_transaction.call_args_list
+ )
+ for call in calls:
+ call_args = call[0]
+ federation_transaction = call_args[0] # type: Transaction
+
+ # Get the sent EDUs in this transaction
+ edus = federation_transaction.get_dict()["edus"]
+
+ for edu in edus:
+ # Make sure we're only checking presence-type EDUs
+ if edu["edu_type"] != EduTypes.Presence:
+ continue
+
+ # EDUs can contain multiple presence updates
+ for presence_update in edu["content"]["push"]:
+ if presence_update["user_id"] == self.presence_sender_id:
+ found_update = True
+
+ self.assertTrue(found_update)
diff --git a/tests/push/test_email.py b/tests/push/test_email.py
index 941cf424..e04bc5c9 100644
--- a/tests/push/test_email.py
+++ b/tests/push/test_email.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/push/test_http.py b/tests/push/test_http.py
index 60f0820c..ffd75b14 100644
--- a/tests/push/test_http.py
+++ b/tests/push/test_http.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet.defer import Deferred
diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py
index 4a841f5b..a52e89e4 100644
--- a/tests/push/test_push_rule_evaluator.py
+++ b/tests/push/test_push_rule_evaluator.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import Any, Dict
+
from synapse.api.room_versions import RoomVersions
from synapse.events import FrozenEvent
from synapse.push import push_rule_evaluator
@@ -67,6 +68,170 @@ class PushRuleEvaluatorTestCase(unittest.TestCase):
# A display name with spaces should work fine.
self.assertTrue(evaluator.matches(condition, "@user:test", "foo bar"))
+ def _assert_matches(
+ self, condition: Dict[str, Any], content: Dict[str, Any], msg=None
+ ) -> None:
+ evaluator = self._get_evaluator(content)
+ self.assertTrue(evaluator.matches(condition, "@user:test", "display_name"), msg)
+
+ def _assert_not_matches(
+ self, condition: Dict[str, Any], content: Dict[str, Any], msg=None
+ ) -> None:
+ evaluator = self._get_evaluator(content)
+ self.assertFalse(
+ evaluator.matches(condition, "@user:test", "display_name"), msg
+ )
+
+ def test_event_match_body(self):
+ """Check that event_match conditions on content.body work as expected"""
+
+ # if the key is `content.body`, the pattern matches substrings.
+
+ # non-wildcards should match
+ condition = {
+ "kind": "event_match",
+ "key": "content.body",
+ "pattern": "foobaz",
+ }
+ self._assert_matches(
+ condition,
+ {"body": "aaa FoobaZ zzz"},
+ "patterns should match and be case-insensitive",
+ )
+ self._assert_not_matches(
+ condition,
+ {"body": "aa xFoobaZ yy"},
+ "pattern should only match at word boundaries",
+ )
+ self._assert_not_matches(
+ condition,
+ {"body": "aa foobazx yy"},
+ "pattern should only match at word boundaries",
+ )
+
+ # wildcards should match
+ condition = {
+ "kind": "event_match",
+ "key": "content.body",
+ "pattern": "f?o*baz",
+ }
+
+ self._assert_matches(
+ condition,
+ {"body": "aaa FoobarbaZ zzz"},
+ "* should match string and pattern should be case-insensitive",
+ )
+ self._assert_matches(
+ condition, {"body": "aa foobaz yy"}, "* should match 0 characters"
+ )
+ self._assert_not_matches(
+ condition, {"body": "aa fobbaz yy"}, "? should not match 0 characters"
+ )
+ self._assert_not_matches(
+ condition, {"body": "aa fiiobaz yy"}, "? should not match 2 characters"
+ )
+ self._assert_not_matches(
+ condition,
+ {"body": "aa xfooxbaz yy"},
+ "pattern should only match at word boundaries",
+ )
+ self._assert_not_matches(
+ condition,
+ {"body": "aa fooxbazx yy"},
+ "pattern should only match at word boundaries",
+ )
+
+ # test backslashes
+ condition = {
+ "kind": "event_match",
+ "key": "content.body",
+ "pattern": r"f\oobaz",
+ }
+ self._assert_matches(
+ condition,
+ {"body": r"F\oobaz"},
+ "backslash should match itself",
+ )
+ condition = {
+ "kind": "event_match",
+ "key": "content.body",
+ "pattern": r"f\?obaz",
+ }
+ self._assert_matches(
+ condition,
+ {"body": r"F\oobaz"},
+ r"? after \ should match any character",
+ )
+
+ def test_event_match_non_body(self):
+ """Check that event_match conditions on other keys work as expected"""
+
+ # if the key is anything other than 'content.body', the pattern must match the
+ # whole value.
+
+ # non-wildcards should match
+ condition = {
+ "kind": "event_match",
+ "key": "content.value",
+ "pattern": "foobaz",
+ }
+ self._assert_matches(
+ condition,
+ {"value": "FoobaZ"},
+ "patterns should match and be case-insensitive",
+ )
+ self._assert_not_matches(
+ condition,
+ {"value": "xFoobaZ"},
+ "pattern should only match at the start/end of the value",
+ )
+ self._assert_not_matches(
+ condition,
+ {"value": "FoobaZz"},
+ "pattern should only match at the start/end of the value",
+ )
+
+ # wildcards should match
+ condition = {
+ "kind": "event_match",
+ "key": "content.value",
+ "pattern": "f?o*baz",
+ }
+ self._assert_matches(
+ condition,
+ {"value": "FoobarbaZ"},
+ "* should match string and pattern should be case-insensitive",
+ )
+ self._assert_matches(
+ condition, {"value": "foobaz"}, "* should match 0 characters"
+ )
+ self._assert_not_matches(
+ condition, {"value": "fobbaz"}, "? should not match 0 characters"
+ )
+ self._assert_not_matches(
+ condition, {"value": "fiiobaz"}, "? should not match 2 characters"
+ )
+ self._assert_not_matches(
+ condition,
+ {"value": "xfooxbaz"},
+ "pattern should only match at the start/end of the value",
+ )
+ self._assert_not_matches(
+ condition,
+ {"value": "fooxbazx"},
+ "pattern should only match at the start/end of the value",
+ )
+ self._assert_not_matches(
+ condition,
+ {"value": "x\nfooxbaz"},
+ "pattern should not match after a newline",
+ )
+ self._assert_not_matches(
+ condition,
+ {"value": "fooxbaz\nx"},
+ "pattern should not match before a newline",
+ )
+
def test_no_body(self):
"""Not having a body shouldn't break the evaluator."""
evaluator = self._get_evaluator({})
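
The split between the two new test methods captures the key rule: for
content.body the glob is applied as a substring match bounded by word
boundaries, while for every other key it must match the whole value, anchored
in a way that a newline anywhere defeats. A sketch of that translation,
assuming ordinary glob-to-regex rules rather than Synapse's exact helper:

    import re

    def glob_to_regex_fragment(glob: str) -> str:
        # Escape regex metacharacters, then re-introduce the glob wildcards.
        return re.escape(glob).replace(r"\*", ".*").replace(r"\?", ".")

    def matches(glob: str, value: str, word_boundary: bool) -> bool:
        frag = glob_to_regex_fragment(glob)
        if word_boundary:
            # Substring match, but only at word edges ("content.body").
            return re.search(r"\b" + frag + r"\b", value, re.IGNORECASE) is not None
        # \A and \Z anchor to the true start and end of the string; "$" would
        # also match just before a trailing newline, which the tests rule out.
        return re.search(r"\A" + frag + r"\Z", value, re.IGNORECASE) is not None

    assert matches("foobaz", "aaa FoobaZ zzz", word_boundary=True)
    assert not matches("foobaz", "aa xFoobaZ yy", word_boundary=True)
    assert matches("f?o*baz", "FoobarbaZ", word_boundary=False)
    assert not matches("foobaz", "foobaz\nx", word_boundary=False)
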
diff --git a/tests/replication/__init__.py b/tests/replication/__init__.py
index b7df13c9..f43a360a 100644
--- a/tests/replication/__init__.py
+++ b/tests/replication/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/_base.py b/tests/replication/_base.py
index 1d4a5928..624bd1b9 100644
--- a/tests/replication/_base.py
+++ b/tests/replication/_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,22 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-from typing import Any, Callable, Dict, List, Optional, Tuple, Type
+from typing import Any, Callable, Dict, List, Optional, Tuple
-from twisted.internet.interfaces import IConsumer, IPullProducer, IReactorTime
from twisted.internet.protocol import Protocol
-from twisted.internet.task import LoopingCall
-from twisted.web.http import HTTPChannel
from twisted.web.resource import Resource
-from twisted.web.server import Request, Site
-from synapse.app.generic_worker import (
- GenericWorkerReplicationHandler,
- GenericWorkerServer,
-)
+from synapse.app.generic_worker import GenericWorkerServer
from synapse.http.server import JsonResource
from synapse.http.site import SynapseRequest, SynapseSite
from synapse.replication.http import ReplicationRestResource
+from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol
from synapse.replication.tcp.resource import (
@@ -36,7 +29,6 @@ from synapse.replication.tcp.resource import (
ServerReplicationStreamProtocol,
)
from synapse.server import HomeServer
-from synapse.util import Clock
from tests import unittest
from tests.server import FakeTransport
@@ -157,7 +149,19 @@ class BaseStreamTestCase(unittest.HomeserverTestCase):
client_protocol = client_factory.buildProtocol(None)
# Set up the server side protocol
- channel = _PushHTTPChannel(self.reactor, SynapseRequest, self.site)
+ channel = self.site.buildProtocol(None)
+
+ # hook into the channel's request factory so that we can keep a record
+ # of the requests
+ requests: List[SynapseRequest] = []
+ real_request_factory = channel.requestFactory
+
+ def request_factory(*args, **kwargs):
+ request = real_request_factory(*args, **kwargs)
+ requests.append(request)
+ return request
+
+ channel.requestFactory = request_factory
# Connect client to server and vice versa.
client_to_server_transport = FakeTransport(
@@ -179,7 +183,10 @@ class BaseStreamTestCase(unittest.HomeserverTestCase):
server_to_client_transport.loseConnection()
client_to_server_transport.loseConnection()
- return channel.request
+ # there should have been exactly one request
+ self.assertEqual(len(requests), 1)
+
+ return requests[0]
def assert_request_is_get_repl_stream_updates(
self, request: SynapseRequest, stream_name: str
@@ -266,7 +273,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
return resource
def make_worker_hs(
- self, worker_app: str, extra_config: dict = {}, **kwargs
+ self, worker_app: str, extra_config: Optional[dict] = None, **kwargs
) -> HomeServer:
"""Make a new worker HS instance, correctly connecting replcation
stream to the master HS.
@@ -283,7 +290,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
config = self._get_worker_hs_config()
config["worker_app"] = worker_app
- config.update(extra_config)
+ config.update(extra_config or {})
worker_hs = self.setup_test_homeserver(
homeserver_to_use=GenericWorkerServer,
@@ -352,6 +359,8 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
config=worker_hs.config.server.listeners[0],
resource=resource,
server_version_string="1",
+ max_request_body_size=4096,
+ reactor=self.reactor,
)
if worker_hs.config.redis.redis_enabled:
@@ -389,7 +398,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
client_protocol = client_factory.buildProtocol(None)
# Set up the server side protocol
- channel = _PushHTTPChannel(self.reactor, SynapseRequest, self._hs_to_site[hs])
+ channel = self._hs_to_site[hs].buildProtocol(None)
# Connect client to server and vice versa.
client_to_server_transport = FakeTransport(
@@ -432,7 +441,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
server_protocol.makeConnection(server_to_client_transport)
-class TestReplicationDataHandler(GenericWorkerReplicationHandler):
+class TestReplicationDataHandler(ReplicationDataHandler):
"""Drop-in for ReplicationDataHandler which just collects RDATA rows"""
def __init__(self, hs: HomeServer):
@@ -447,112 +456,6 @@ class TestReplicationDataHandler(GenericWorkerReplicationHandler):
self.received_rdata_rows.append((stream_name, token, r))
-class _PushHTTPChannel(HTTPChannel):
- """A HTTPChannel that wraps pull producers to push producers.
-
- This is a hack to get around the fact that HTTPChannel transparently wraps a
- pull producer (which is what Synapse uses to reply to requests) with
- `_PullToPush` to convert it to a push producer. Unfortunately `_PullToPush`
- uses the standard reactor rather than letting us use our test reactor, which
- makes it very hard to test.
- """
-
- def __init__(
- self, reactor: IReactorTime, request_factory: Type[Request], site: Site
- ):
- super().__init__()
- self.reactor = reactor
- self.requestFactory = request_factory
- self.site = site
-
- self._pull_to_push_producer = None # type: Optional[_PullToPushProducer]
-
- def registerProducer(self, producer, streaming):
- # Convert pull producers to push producer.
- if not streaming:
- self._pull_to_push_producer = _PullToPushProducer(
- self.reactor, producer, self
- )
- producer = self._pull_to_push_producer
-
- super().registerProducer(producer, True)
-
- def unregisterProducer(self):
- if self._pull_to_push_producer:
- # We need to manually stop the _PullToPushProducer.
- self._pull_to_push_producer.stop()
-
- def checkPersistence(self, request, version):
- """Check whether the connection can be re-used"""
- # We hijack this to always say no for ease of wiring stuff up in
- # `handle_http_replication_attempt`.
- request.responseHeaders.setRawHeaders(b"connection", [b"close"])
- return False
-
- def requestDone(self, request):
- # Store the request for inspection.
- self.request = request
- super().requestDone(request)
-
-
-class _PullToPushProducer:
- """A push producer that wraps a pull producer."""
-
- def __init__(
- self, reactor: IReactorTime, producer: IPullProducer, consumer: IConsumer
- ):
- self._clock = Clock(reactor)
- self._producer = producer
- self._consumer = consumer
-
- # While running we use a looping call with a zero delay to call
- # resumeProducing on given producer.
- self._looping_call = None # type: Optional[LoopingCall]
-
- # We start writing next reactor tick.
- self._start_loop()
-
- def _start_loop(self):
- """Start the looping call to"""
-
- if not self._looping_call:
- # Start a looping call which runs every tick.
- self._looping_call = self._clock.looping_call(self._run_once, 0)
-
- def stop(self):
- """Stops calling resumeProducing."""
- if self._looping_call:
- self._looping_call.stop()
- self._looping_call = None
-
- def pauseProducing(self):
- """Implements IPushProducer"""
- self.stop()
-
- def resumeProducing(self):
- """Implements IPushProducer"""
- self._start_loop()
-
- def stopProducing(self):
- """Implements IPushProducer"""
- self.stop()
- self._producer.stopProducing()
-
- def _run_once(self):
- """Calls resumeProducing on producer once."""
-
- try:
- self._producer.resumeProducing()
- except Exception:
- logger.exception("Failed to call resumeProducing")
- try:
- self._consumer.unregisterProducer()
- except Exception:
- pass
-
- self.stopProducing()
-
-
class FakeRedisPubSubServer:
"""A fake Redis server for pub/sub."""
diff --git a/tests/replication/slave/__init__.py b/tests/replication/slave/__init__.py
index b7df13c9..f43a360a 100644
--- a/tests/replication/slave/__init__.py
+++ b/tests/replication/slave/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/slave/storage/__init__.py b/tests/replication/slave/storage/__init__.py
index b7df13c9..f43a360a 100644
--- a/tests/replication/slave/storage/__init__.py
+++ b/tests/replication/slave/storage/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/slave/storage/_base.py b/tests/replication/slave/storage/_base.py
index 56497b84..83e89383 100644
--- a/tests/replication/slave/storage/_base.py
+++ b/tests/replication/slave/storage/_base.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from tests.replication._base import BaseStreamTestCase
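
This `from mock import ...` → `from unittest.mock import ...` swap recurs throughout the rest of this diff: `mock` has shipped in the standard library as `unittest.mock` since Python 3.3, so the third-party package is no longer needed. The two are drop-in equivalent for these tests; a minimal illustration:

```python
from unittest.mock import Mock  # stdlib; no third-party `mock` dependency

store = Mock()
store.get_current_state_deltas.return_value = (0, [])
assert store.get_current_state_deltas() == (0, [])
```
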
diff --git a/tests/replication/slave/storage/test_events.py b/tests/replication/slave/storage/test_events.py
index 0ceb0f93..db80a0bd 100644
--- a/tests/replication/slave/storage/test_events.py
+++ b/tests/replication/slave/storage/test_events.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
+from typing import Iterable, Optional
from canonicaljson import encode_canonical_json
@@ -332,15 +333,18 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
room_id=ROOM_ID,
type="m.room.message",
key=None,
- internal={},
+ internal: Optional[dict] = None,
depth=None,
- prev_events=[],
- auth_events=[],
- prev_state=[],
+ prev_events: Optional[list] = None,
+ auth_events: Optional[list] = None,
+ prev_state: Optional[list] = None,
redacts=None,
- push_actions=[],
- **content
+ push_actions: Iterable = frozenset(),
+ **content,
):
+ prev_events = prev_events or []
+ auth_events = auth_events or []
+ prev_state = prev_state or []
if depth is None:
depth = self.event_id
@@ -369,7 +373,7 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase):
if redacts is not None:
event_dict["redacts"] = redacts
- event = make_event_from_dict(event_dict, internal_metadata_dict=internal)
+ event = make_event_from_dict(event_dict, internal_metadata_dict=internal or {})
self.event_id += 1
state_handler = self.hs.get_state_handler()
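
The pattern above — replacing mutable default arguments (`{}`, `[]`) with `None` or `frozenset()` and normalising inside the function body — also recurs in later files of this diff. It avoids a classic Python pitfall: default values are evaluated once, at function definition time, so a mutated default is shared across calls. A self-contained sketch of the bug and the fix:

```python
# The bug class this change avoids: the default list is created once and
# shared between every call that relies on it.
def broken(item, acc=[]):
    acc.append(item)
    return acc

print(broken(1))  # [1]
print(broken(2))  # [1, 2]  -- same list object, surprisingly

def fixed(item, acc=None):
    acc = acc or []  # fresh list per call
    acc.append(item)
    return acc

print(fixed(1))  # [1]
print(fixed(2))  # [2]
```
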
diff --git a/tests/replication/tcp/__init__.py b/tests/replication/tcp/__init__.py
index 1453d045..743fb990 100644
--- a/tests/replication/tcp/__init__.py
+++ b/tests/replication/tcp/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/tcp/streams/__init__.py b/tests/replication/tcp/streams/__init__.py
index 1453d045..743fb990 100644
--- a/tests/replication/tcp/streams/__init__.py
+++ b/tests/replication/tcp/streams/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/tcp/streams/test_account_data.py b/tests/replication/tcp/streams/test_account_data.py
index 153634d4..cdd05200 100644
--- a/tests/replication/tcp/streams/test_account_data.py
+++ b/tests/replication/tcp/streams/test_account_data.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/tcp/streams/test_events.py b/tests/replication/tcp/streams/test_events.py
index 77856fc3..f51fa0a7 100644
--- a/tests/replication/tcp/streams/test_events.py
+++ b/tests/replication/tcp/streams/test_events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -240,7 +239,7 @@ class EventsStreamTestCase(BaseStreamTestCase):
# the state rows are unsorted
state_rows = [] # type: List[EventsStreamCurrentStateRow]
- for stream_name, token, row in received_rows:
+ for stream_name, _, row in received_rows:
self.assertEqual("events", stream_name)
self.assertIsInstance(row, EventsStreamRow)
self.assertEqual(row.type, "state")
@@ -357,7 +356,7 @@ class EventsStreamTestCase(BaseStreamTestCase):
# the state rows are unsorted
state_rows = [] # type: List[EventsStreamCurrentStateRow]
- for j in range(STATES_PER_USER + 1):
+ for _ in range(STATES_PER_USER + 1):
stream_name, token, row = received_rows.pop(0)
self.assertEqual("events", stream_name)
self.assertIsInstance(row, EventsStreamRow)
diff --git a/tests/replication/tcp/streams/test_federation.py b/tests/replication/tcp/streams/test_federation.py
index aa4bf1c7..ffec06a0 100644
--- a/tests/replication/tcp/streams/test_federation.py
+++ b/tests/replication/tcp/streams/test_federation.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/tcp/streams/test_receipts.py b/tests/replication/tcp/streams/test_receipts.py
index 56b062ec..7f5d932f 100644
--- a/tests/replication/tcp/streams/test_receipts.py
+++ b/tests/replication/tcp/streams/test_receipts.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +14,7 @@
# type: ignore
-from mock import Mock
+from unittest.mock import Mock
from synapse.replication.tcp.streams._base import ReceiptsStream
diff --git a/tests/replication/tcp/streams/test_typing.py b/tests/replication/tcp/streams/test_typing.py
index ca49d4dd..ecd360c2 100644
--- a/tests/replication/tcp/streams/test_typing.py
+++ b/tests/replication/tcp/streams/test_typing.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from synapse.handlers.typing import RoomMember
from synapse.replication.tcp.streams import TypingStream
diff --git a/tests/replication/tcp/test_commands.py b/tests/replication/tcp/test_commands.py
index 60c10a44..cca7ebb7 100644
--- a/tests/replication/tcp/test_commands.py
+++ b/tests/replication/tcp/test_commands.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/tcp/test_remote_server_up.py b/tests/replication/tcp/test_remote_server_up.py
index 1fe9d5b4..262c35ce 100644
--- a/tests/replication/tcp/test_remote_server_up.py
+++ b/tests/replication/tcp/test_remote_server_up.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/test_auth.py b/tests/replication/test_auth.py
index f8fd8a84..1346e0e1 100644
--- a/tests/replication/test_auth.py
+++ b/tests/replication/test_auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/test_client_reader_shard.py b/tests/replication/test_client_reader_shard.py
index 5da1d5dc..b9751efd 100644
--- a/tests/replication/test_client_reader_shard.py
+++ b/tests/replication/test_client_reader_shard.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/test_federation_ack.py b/tests/replication/test_federation_ack.py
index 0d9e3bb1..04a869e2 100644
--- a/tests/replication/test_federation_ack.py
+++ b/tests/replication/test_federation_ack.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+from unittest import mock
from synapse.app.generic_worker import GenericWorkerServer
from synapse.replication.tcp.commands import FederationAckCommand
diff --git a/tests/replication/test_federation_sender_shard.py b/tests/replication/test_federation_sender_shard.py
index 2f2d1178..48ab3aa4 100644
--- a/tests/replication/test_federation_sender_shard.py
+++ b/tests/replication/test_federation_sender_shard.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,8 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-
-from mock import Mock
+from unittest.mock import Mock
from synapse.api.constants import EventTypes, Membership
from synapse.events.builder import EventBuilderFactory
diff --git a/tests/replication/test_multi_media_repo.py b/tests/replication/test_multi_media_repo.py
index b0800f98..76e66443 100644
--- a/tests/replication/test_multi_media_repo.py
+++ b/tests/replication/test_multi_media_repo.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/replication/test_pusher_shard.py b/tests/replication/test_pusher_shard.py
index ab2988a6..1e4e3821 100644
--- a/tests/replication/test_pusher_shard.py
+++ b/tests/replication/test_pusher_shard.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,8 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
diff --git a/tests/replication/test_sharded_event_persister.py b/tests/replication/test_sharded_event_persister.py
index c9b773fb..d739eb6b 100644
--- a/tests/replication/test_sharded_event_persister.py
+++ b/tests/replication/test_sharded_event_persister.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,8 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-
-from mock import patch
+from unittest.mock import patch
from synapse.api.room_versions import RoomVersion
from synapse.rest import admin
diff --git a/tests/rest/__init__.py b/tests/rest/__init__.py
index fe0ac3f8..629e2df7 100644
--- a/tests/rest/__init__.py
+++ b/tests/rest/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/admin/__init__.py b/tests/rest/admin/__init__.py
index 1453d045..743fb990 100644
--- a/tests/rest/admin/__init__.py
+++ b/tests/rest/admin/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py
index 057e2737..2f7090e5 100644
--- a/tests/rest/admin/test_admin.py
+++ b/tests/rest/admin/test_admin.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,8 +16,7 @@ import json
import os
import urllib.parse
from binascii import unhexlify
-
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet.defer import Deferred
diff --git a/tests/rest/admin/test_device.py b/tests/rest/admin/test_device.py
index 2a1bcf17..120730b7 100644
--- a/tests/rest/admin/test_device.py
+++ b/tests/rest/admin/test_device.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -431,7 +430,7 @@ class DevicesRestTestCase(unittest.HomeserverTestCase):
"""
# Create devices
number_devices = 5
- for n in range(number_devices):
+ for _ in range(number_devices):
self.login("user", "pass")
# Get devices
@@ -548,7 +547,7 @@ class DeleteDevicesRestTestCase(unittest.HomeserverTestCase):
# Create devices
number_devices = 5
- for n in range(number_devices):
+ for _ in range(number_devices):
self.login("user", "pass")
# Get devices
diff --git a/tests/rest/admin/test_event_reports.py b/tests/rest/admin/test_event_reports.py
index e30ffe4f..29341bc6 100644
--- a/tests/rest/admin/test_event_reports.py
+++ b/tests/rest/admin/test_event_reports.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -49,22 +48,22 @@ class EventReportsTestCase(unittest.HomeserverTestCase):
self.helper.join(self.room_id2, user=self.admin_user, tok=self.admin_user_tok)
# Two rooms and two users. Every user sends and reports every room event
- for i in range(5):
+ for _ in range(5):
self._create_event_and_report(
room_id=self.room_id1,
user_tok=self.other_user_tok,
)
- for i in range(5):
+ for _ in range(5):
self._create_event_and_report(
room_id=self.room_id2,
user_tok=self.other_user_tok,
)
- for i in range(5):
+ for _ in range(5):
self._create_event_and_report(
room_id=self.room_id1,
user_tok=self.admin_user_tok,
)
- for i in range(5):
+ for _ in range(5):
self._create_event_and_report(
room_id=self.room_id2,
user_tok=self.admin_user_tok,
diff --git a/tests/rest/admin/test_media.py b/tests/rest/admin/test_media.py
index 31db472c..ac7b2197 100644
--- a/tests/rest/admin/test_media.py
+++ b/tests/rest/admin/test_media.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py
index b55160b7..6b841881 100644
--- a/tests/rest/admin/test_room.py
+++ b/tests/rest/admin/test_room.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,8 +15,7 @@
import json
import urllib.parse
from typing import List, Optional
-
-from mock import Mock
+from unittest.mock import Mock
import synapse.rest.admin
from synapse.api.constants import EventTypes, Membership
@@ -617,7 +615,7 @@ class RoomTestCase(unittest.HomeserverTestCase):
# Create 3 test rooms
total_rooms = 3
room_ids = []
- for x in range(total_rooms):
+ for _ in range(total_rooms):
room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
@@ -681,7 +679,7 @@ class RoomTestCase(unittest.HomeserverTestCase):
# Create 5 test rooms
total_rooms = 5
room_ids = []
- for x in range(total_rooms):
+ for _ in range(total_rooms):
room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
@@ -1579,7 +1577,7 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
channel.json_body["event"]["event_id"], events[midway]["event_id"]
)
- for i, found_event in enumerate(channel.json_body["events_before"]):
+ for found_event in channel.json_body["events_before"]:
for j, posted_event in enumerate(events):
if found_event["event_id"] == posted_event["event_id"]:
self.assertTrue(j < midway)
@@ -1587,7 +1585,7 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
else:
self.fail("Event %s from events_before not found" % j)
- for i, found_event in enumerate(channel.json_body["events_after"]):
+ for found_event in channel.json_body["events_after"]:
for j, posted_event in enumerate(events):
if found_event["event_id"] == posted_event["event_id"]:
self.assertTrue(j > midway)
diff --git a/tests/rest/admin/test_statistics.py b/tests/rest/admin/test_statistics.py
index 1f1d11f5..79cac426 100644
--- a/tests/rest/admin/test_statistics.py
+++ b/tests/rest/admin/test_statistics.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -468,7 +467,7 @@ class UserMediaStatisticsTestCase(unittest.HomeserverTestCase):
number_media: Number of media to be created for the user
"""
upload_resource = self.media_repo.children[b"upload"]
- for i in range(number_media):
+ for _ in range(number_media):
# file size is 67 Byte
image_data = unhexlify(
b"89504e470d0a1a0a0000000d4948445200000001000000010806"
diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
index cf61f284..d599a4c9 100644
--- a/tests/rest/admin/test_user.py
+++ b/tests/rest/admin/test_user.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,8 +18,7 @@ import json
import urllib.parse
from binascii import unhexlify
from typing import List, Optional
-
-from mock import Mock
+from unittest.mock import Mock, patch
import synapse.rest.admin
from synapse.api.constants import UserTypes
@@ -28,7 +26,7 @@ from synapse.api.errors import Codes, HttpResponseException, ResourceLimitError
from synapse.api.room_versions import RoomVersions
from synapse.rest.client.v1 import login, logout, profile, room
from synapse.rest.client.v2_alpha import devices, sync
-from synapse.types import JsonDict
+from synapse.types import JsonDict, UserID
from tests import unittest
from tests.server import FakeSite, make_request
@@ -56,8 +54,6 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
self.datastore = Mock(return_value=Mock())
self.datastore.get_current_state_deltas = Mock(return_value=(0, []))
- self.secrets = Mock()
-
self.hs = self.setup_test_homeserver()
self.hs.config.registration_shared_secret = "shared"
@@ -86,14 +82,13 @@ class UserRegisterTestCase(unittest.HomeserverTestCase):
Calling GET on the endpoint will return a randomised nonce, using the
homeserver's secrets provider.
"""
- secrets = Mock()
- secrets.token_hex = Mock(return_value="abcd")
-
- self.hs.get_secrets = Mock(return_value=secrets)
+ with patch("secrets.token_hex") as token_hex:
+ # Patch secrets.token_hex for the duration of this context
+ token_hex.return_value = "abcd"
- channel = self.make_request("GET", self.url)
+ channel = self.make_request("GET", self.url)
- self.assertEqual(channel.json_body, {"nonce": "abcd"})
+            self.assertEqual(channel.json_body, {"nonce": "abcd"})
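
Rather than installing a mock "secrets provider" on the homeserver, the test now patches the stdlib function itself for the duration of the `with` block. A standalone sketch of the same pattern; note that `patch` replaces the name where it is looked up, so this works for code that calls `secrets.token_hex(...)` through the module:

```python
import secrets
from unittest.mock import patch

with patch("secrets.token_hex") as token_hex:
    token_hex.return_value = "abcd"
    assert secrets.token_hex(32) == "abcd"  # patched inside the block

assert secrets.token_hex(32) != "abcd"  # real implementation restored
```
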
def test_expired_nonce(self):
"""
@@ -467,6 +462,8 @@ class UsersListTestCase(unittest.HomeserverTestCase):
url = "/_synapse/admin/v2/users"
def prepare(self, reactor, clock, hs):
+ self.store = hs.get_datastore()
+
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
@@ -634,6 +631,26 @@ class UsersListTestCase(unittest.HomeserverTestCase):
self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
self.assertEqual(Codes.UNKNOWN, channel.json_body["errcode"])
+        # unknown order_by
+ channel = self.make_request(
+ "GET",
+ self.url + "?order_by=bar",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.UNKNOWN, channel.json_body["errcode"])
+
+ # invalid search order
+ channel = self.make_request(
+ "GET",
+ self.url + "?dir=bar",
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.UNKNOWN, channel.json_body["errcode"])
+
def test_limit(self):
"""
Testing list of users with limit
@@ -759,6 +776,103 @@ class UsersListTestCase(unittest.HomeserverTestCase):
self.assertEqual(len(channel.json_body["users"]), 1)
self.assertNotIn("next_token", channel.json_body)
+ def test_order_by(self):
+ """
+        Test ordering the list of users with the `order_by` parameter
+ """
+
+ user1 = self.register_user("user1", "pass1", admin=False, displayname="Name Z")
+ user2 = self.register_user("user2", "pass2", admin=False, displayname="Name Y")
+
+        # Modify user1: deactivate and shadow-ban them
+ self.get_success(self.store.set_user_deactivated_status(user1, True))
+ self.get_success(self.store.set_shadow_banned(UserID.from_string(user1), True))
+
+        # Set an avatar URL for every user, so that none has a NULL value, to
+        # avoid a different sort order between SQLite and PostgreSQL
+ self.get_success(self.store.set_profile_avatar_url("user1", "mxc://url3"))
+ self.get_success(self.store.set_profile_avatar_url("user2", "mxc://url2"))
+ self.get_success(self.store.set_profile_avatar_url("admin", "mxc://url1"))
+
+ # order by default (name)
+ self._order_test([self.admin_user, user1, user2], None)
+ self._order_test([self.admin_user, user1, user2], None, "f")
+ self._order_test([user2, user1, self.admin_user], None, "b")
+
+ # order by name
+ self._order_test([self.admin_user, user1, user2], "name")
+ self._order_test([self.admin_user, user1, user2], "name", "f")
+ self._order_test([user2, user1, self.admin_user], "name", "b")
+
+ # order by displayname
+ self._order_test([user2, user1, self.admin_user], "displayname")
+ self._order_test([user2, user1, self.admin_user], "displayname", "f")
+ self._order_test([self.admin_user, user1, user2], "displayname", "b")
+
+ # order by is_guest
+        # equivalent to sorting by ascending name, as there are no guest users here
+ self._order_test([self.admin_user, user1, user2], "is_guest")
+ self._order_test([self.admin_user, user1, user2], "is_guest", "f")
+ self._order_test([self.admin_user, user1, user2], "is_guest", "b")
+
+ # order by admin
+ self._order_test([user1, user2, self.admin_user], "admin")
+ self._order_test([user1, user2, self.admin_user], "admin", "f")
+ self._order_test([self.admin_user, user1, user2], "admin", "b")
+
+ # order by deactivated
+ self._order_test([self.admin_user, user2, user1], "deactivated")
+ self._order_test([self.admin_user, user2, user1], "deactivated", "f")
+ self._order_test([user1, self.admin_user, user2], "deactivated", "b")
+
+ # order by user_type
+        # equivalent to sorting by ascending name, as there are no special user types here
+ self._order_test([self.admin_user, user1, user2], "user_type")
+ self._order_test([self.admin_user, user1, user2], "user_type", "f")
+        self._order_test([self.admin_user, user1, user2], "user_type", "b")
+
+ # order by shadow_banned
+ self._order_test([self.admin_user, user2, user1], "shadow_banned")
+ self._order_test([self.admin_user, user2, user1], "shadow_banned", "f")
+ self._order_test([user1, self.admin_user, user2], "shadow_banned", "b")
+
+ # order by avatar_url
+ self._order_test([self.admin_user, user2, user1], "avatar_url")
+ self._order_test([self.admin_user, user2, user1], "avatar_url", "f")
+ self._order_test([user1, user2, self.admin_user], "avatar_url", "b")
+
+ def _order_test(
+ self,
+ expected_user_list: List[str],
+ order_by: Optional[str],
+ dir: Optional[str] = None,
+ ):
+        """Request the list of users in a certain order. Assert that the order is
+        what we expect.
+ Args:
+ expected_user_list: The list of user_id in the order we expect to get
+ back from the server
+ order_by: The type of ordering to give the server
+ dir: The direction of ordering to give the server
+ """
+
+ url = self.url + "?deactivated=true&"
+ if order_by is not None:
+ url += "order_by=%s&" % (order_by,)
+ if dir is not None and dir in ("b", "f"):
+ url += "dir=%s" % (dir,)
+ channel = self.make_request(
+ "GET",
+ url.encode("ascii"),
+ access_token=self.admin_user_tok,
+ )
+ self.assertEqual(200, channel.code, msg=channel.json_body)
+ self.assertEqual(channel.json_body["total"], len(expected_user_list))
+
+ returned_order = [row["name"] for row in channel.json_body["users"]]
+ self.assertEqual(expected_user_list, returned_order)
+ self._check_fields(channel.json_body["users"])
+
def _check_fields(self, content: JsonDict):
"""Checks that the expected user attributes are present in content
Args:
@@ -1820,7 +1934,7 @@ class UserMembershipRestTestCase(unittest.HomeserverTestCase):
# Create rooms and join
other_user_tok = self.login("user", "pass")
number_rooms = 5
- for n in range(number_rooms):
+ for _ in range(number_rooms):
self.helper.create_room_as(self.other_user, tok=other_user_tok)
# Get rooms
@@ -2400,7 +2514,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
user_token: Access token of the user
number_media: Number of media to be created for the user
"""
- for i in range(number_media):
+ for _ in range(number_media):
# file size is 67 Byte
image_data = unhexlify(
b"89504e470d0a1a0a0000000d4948445200000001000000010806"
@@ -2893,3 +3007,287 @@ class ShadowBanRestTestCase(unittest.HomeserverTestCase):
# Ensure the user is shadow-banned (and the cache was cleared).
result = self.get_success(self.store.get_user_by_access_token(other_user_token))
self.assertTrue(result.shadow_banned)
+
+
+class RateLimitTestCase(unittest.HomeserverTestCase):
+
+ servlets = [
+ synapse.rest.admin.register_servlets,
+ login.register_servlets,
+ ]
+
+ def prepare(self, reactor, clock, hs):
+ self.store = hs.get_datastore()
+
+ self.admin_user = self.register_user("admin", "pass", admin=True)
+ self.admin_user_tok = self.login("admin", "pass")
+
+ self.other_user = self.register_user("user", "pass")
+ self.url = (
+ "/_synapse/admin/v1/users/%s/override_ratelimit"
+ % urllib.parse.quote(self.other_user)
+ )
+
+ def test_no_auth(self):
+ """
+        Try to get, set or delete a user's ratelimit override without authentication.
+ """
+ channel = self.make_request("GET", self.url, b"{}")
+
+ self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])
+
+ channel = self.make_request("POST", self.url, b"{}")
+
+ self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])
+
+ channel = self.make_request("DELETE", self.url, b"{}")
+
+ self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])
+
+ def test_requester_is_no_admin(self):
+ """
+ If the user is not a server admin, an error is returned.
+ """
+ other_user_token = self.login("user", "pass")
+
+ channel = self.make_request(
+ "GET",
+ self.url,
+ access_token=other_user_token,
+ )
+
+ self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
+
+ channel = self.make_request(
+ "POST",
+ self.url,
+ access_token=other_user_token,
+ )
+
+ self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
+
+ channel = self.make_request(
+ "DELETE",
+ self.url,
+ access_token=other_user_token,
+ )
+
+ self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
+
+ def test_user_does_not_exist(self):
+ """
+ Tests that a lookup for a user that does not exist returns a 404
+ """
+ url = "/_synapse/admin/v1/users/@unknown_person:test/override_ratelimit"
+
+ channel = self.make_request(
+ "GET",
+ url,
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(404, channel.code, msg=channel.json_body)
+ self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])
+
+ channel = self.make_request(
+ "POST",
+ url,
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(404, channel.code, msg=channel.json_body)
+ self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])
+
+ channel = self.make_request(
+ "DELETE",
+ url,
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(404, channel.code, msg=channel.json_body)
+ self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])
+
+ def test_user_is_not_local(self):
+ """
+        Tests that a lookup for a user that is not local returns a 400
+ """
+ url = (
+ "/_synapse/admin/v1/users/@unknown_person:unknown_domain/override_ratelimit"
+ )
+
+ channel = self.make_request(
+ "GET",
+ url,
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, channel.code, msg=channel.json_body)
+ self.assertEqual("Can only lookup local users", channel.json_body["error"])
+
+ channel = self.make_request(
+ "POST",
+ url,
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, channel.code, msg=channel.json_body)
+ self.assertEqual(
+ "Only local users can be ratelimited", channel.json_body["error"]
+ )
+
+ channel = self.make_request(
+ "DELETE",
+ url,
+ access_token=self.admin_user_tok,
+ )
+
+ self.assertEqual(400, channel.code, msg=channel.json_body)
+ self.assertEqual(
+ "Only local users can be ratelimited", channel.json_body["error"]
+ )
+
+ def test_invalid_parameter(self):
+ """
+ If parameters are invalid, an error is returned.
+ """
+ # messages_per_second is a string
+ channel = self.make_request(
+ "POST",
+ self.url,
+ access_token=self.admin_user_tok,
+ content={"messages_per_second": "string"},
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
+
+ # messages_per_second is negative
+ channel = self.make_request(
+ "POST",
+ self.url,
+ access_token=self.admin_user_tok,
+ content={"messages_per_second": -1},
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
+
+ # burst_count is a string
+ channel = self.make_request(
+ "POST",
+ self.url,
+ access_token=self.admin_user_tok,
+ content={"burst_count": "string"},
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
+
+ # burst_count is negative
+ channel = self.make_request(
+ "POST",
+ self.url,
+ access_token=self.admin_user_tok,
+ content={"burst_count": -1},
+ )
+
+ self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
+
+ def test_return_zero_when_null(self):
+ """
+        If values in the database are `null`, the API should return an int `0`
+ """
+
+ self.get_success(
+ self.store.db_pool.simple_upsert(
+ table="ratelimit_override",
+ keyvalues={"user_id": self.other_user},
+ values={
+ "messages_per_second": None,
+ "burst_count": None,
+ },
+ )
+ )
+
+ # request status
+ channel = self.make_request(
+ "GET",
+ self.url,
+ access_token=self.admin_user_tok,
+ )
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(0, channel.json_body["messages_per_second"])
+ self.assertEqual(0, channel.json_body["burst_count"])
+
+ def test_success(self):
+ """
+ Rate-limiting (set/update/delete) should succeed for an admin.
+ """
+ # request status
+ channel = self.make_request(
+ "GET",
+ self.url,
+ access_token=self.admin_user_tok,
+ )
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertNotIn("messages_per_second", channel.json_body)
+ self.assertNotIn("burst_count", channel.json_body)
+
+ # set ratelimit
+ channel = self.make_request(
+ "POST",
+ self.url,
+ access_token=self.admin_user_tok,
+ content={"messages_per_second": 10, "burst_count": 11},
+ )
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(10, channel.json_body["messages_per_second"])
+ self.assertEqual(11, channel.json_body["burst_count"])
+
+ # update ratelimit
+ channel = self.make_request(
+ "POST",
+ self.url,
+ access_token=self.admin_user_tok,
+ content={"messages_per_second": 20, "burst_count": 21},
+ )
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(20, channel.json_body["messages_per_second"])
+ self.assertEqual(21, channel.json_body["burst_count"])
+
+ # request status
+ channel = self.make_request(
+ "GET",
+ self.url,
+ access_token=self.admin_user_tok,
+ )
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertEqual(20, channel.json_body["messages_per_second"])
+ self.assertEqual(21, channel.json_body["burst_count"])
+
+ # delete ratelimit
+ channel = self.make_request(
+ "DELETE",
+ self.url,
+ access_token=self.admin_user_tok,
+ )
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertNotIn("messages_per_second", channel.json_body)
+ self.assertNotIn("burst_count", channel.json_body)
+
+ # request status
+ channel = self.make_request(
+ "GET",
+ self.url,
+ access_token=self.admin_user_tok,
+ )
+ self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"])
+ self.assertNotIn("messages_per_second", channel.json_body)
+ self.assertNotIn("burst_count", channel.json_body)
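
The new `RateLimitTestCase` covers the admin ratelimit-override endpoint end to end: GET reads the current override, POST sets or updates it, DELETE removes it, and a GET with no override present omits both fields. A hedged sketch of driving the same endpoint from an external client — the host and token below are placeholders, not values from this diff:

```python
import requests

base = "https://homeserver.example/_synapse/admin/v1/users"
headers = {"Authorization": "Bearer <admin_access_token>"}
url = f"{base}/@user:example.com/override_ratelimit"

# Set an override: 10 messages/second with a burst of 11.
r = requests.post(
    url, headers=headers, json={"messages_per_second": 10, "burst_count": 11}
)
assert r.status_code == 200

# Read it back...
print(requests.get(url, headers=headers).json())

# ...and remove it; per the tests above, subsequent GETs then omit both fields.
requests.delete(url, headers=headers)
```
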
diff --git a/tests/rest/client/__init__.py b/tests/rest/client/__init__.py
index fe0ac3f8..629e2df7 100644
--- a/tests/rest/client/__init__.py
+++ b/tests/rest/client/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/test_consent.py b/tests/rest/client/test_consent.py
index c74693e9..5cc62a91 100644
--- a/tests/rest/client/test_consent.py
+++ b/tests/rest/client/test_consent.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/test_ephemeral_message.py b/tests/rest/client/test_ephemeral_message.py
index 56937dcd..eec0fc01 100644
--- a/tests/rest/client/test_ephemeral_message.py
+++ b/tests/rest/client/test_ephemeral_message.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/test_identity.py b/tests/rest/client/test_identity.py
index c0a9fc69..478296ba 100644
--- a/tests/rest/client/test_identity.py
+++ b/tests/rest/client/test_identity.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/test_power_levels.py b/tests/rest/client/test_power_levels.py
index 5256c11f..ba5ad47d 100644
--- a/tests/rest/client/test_power_levels.py
+++ b/tests/rest/client/test_power_levels.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py
index e0c74591..dfd85221 100644
--- a/tests/rest/client/test_redactions.py
+++ b/tests/rest/client/test_redactions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/test_retention.py b/tests/rest/client/test_retention.py
index aee99bb6..e1a6e73e 100644
--- a/tests/rest/client/test_retention.py
+++ b/tests/rest/client/test_retention.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from synapse.api.constants import EventTypes
from synapse.rest import admin
diff --git a/tests/rest/client/test_shadow_banned.py b/tests/rest/client/test_shadow_banned.py
index d2cce440..288ee128 100644
--- a/tests/rest/client/test_shadow_banned.py
+++ b/tests/rest/client/test_shadow_banned.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock, patch
+from unittest.mock import Mock, patch
import synapse.rest.admin
from synapse.api.constants import EventTypes
diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py
index bf390142..e1fe72fc 100644
--- a/tests/rest/client/test_third_party_rules.py
+++ b/tests/rest/client/test_third_party_rules.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
@@ -14,8 +13,7 @@
# limitations under the License.
import threading
from typing import Dict
-
-from mock import Mock
+from unittest.mock import Mock
from synapse.events import EventBase
from synapse.module_api import ModuleApi
diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py
index 171632e1..3b5747cb 100644
--- a/tests/rest/client/test_transactions.py
+++ b/tests/rest/client/test_transactions.py
@@ -1,4 +1,4 @@
-from mock import Mock, call
+from unittest.mock import Mock, call
from twisted.internet import defer, reactor
diff --git a/tests/rest/client/v1/__init__.py b/tests/rest/client/v1/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/tests/rest/client/v1/__init__.py
+++ b/tests/rest/client/v1/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/v1/test_directory.py b/tests/rest/client/v1/test_directory.py
index edd1d184..8ed47049 100644
--- a/tests/rest/client/v1/test_directory.py
+++ b/tests/rest/client/v1/test_directory.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py
index 2ae896db..852bda40 100644
--- a/tests/rest/client/v1/test_events.py
+++ b/tests/rest/client/v1/test_events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +14,7 @@
""" Tests REST events for /events paths."""
-from mock import Mock
+from unittest.mock import Mock
import synapse.rest.admin
from synapse.rest.client.v1 import events, login, room
diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/v1/test_login.py
index 988821b1..605b9523 100644
--- a/tests/rest/client/v1/test_login.py
+++ b/tests/rest/client/v1/test_login.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,10 +15,9 @@
import time
import urllib.parse
from typing import Any, Dict, List, Optional, Union
+from unittest.mock import Mock
from urllib.parse import urlencode
-from mock import Mock
-
import pymacaroons
from twisted.web.resource import Resource
diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/v1/test_presence.py
index 94a51548..409f3949 100644
--- a/tests/rest/client/v1/test_presence.py
+++ b/tests/rest/client/v1/test_presence.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,10 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
+from synapse.handlers.presence import PresenceHandler
from synapse.rest.client.v1 import presence
from synapse.types import UserID
@@ -33,7 +33,7 @@ class PresenceTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
- presence_handler = Mock()
+ presence_handler = Mock(spec=PresenceHandler)
presence_handler.set_state.return_value = defer.succeed(None)
hs = self.setup_test_homeserver(
@@ -60,12 +60,12 @@ class PresenceTestCase(unittest.HomeserverTestCase):
self.assertEqual(channel.code, 200)
self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 1)
+ @unittest.override_config({"use_presence": False})
def test_put_presence_disabled(self):
"""
PUT to the status endpoint with use_presence disabled will NOT call
set_state on the presence handler.
"""
- self.hs.config.use_presence = False
body = {"presence": "here", "status_msg": "beep boop"}
channel = self.make_request(
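
Two changes here make the presence tests more robust: the config override moves from mutating `self.hs.config` mid-test to the `@unittest.override_config` decorator, which applies before the homeserver is constructed, and the handler mock is constrained with `spec=PresenceHandler`, so calls to nonexistent methods fail instead of silently succeeding. A self-contained sketch of the `spec=` behaviour:

```python
from unittest.mock import Mock

class Handler:
    def set_state(self):
        ...

m = Mock(spec=Handler)
m.set_state()       # fine: the method exists on the spec
try:
    m.set_status()  # not on the spec
except AttributeError:
    print("caught a typo that a bare Mock() would have accepted")
```
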
diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py
index f3448c94..165ad33f 100644
--- a/tests/rest/client/v1/test_profile.py
+++ b/tests/rest/client/v1/test_profile.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/v1/test_push_rule_attrs.py b/tests/rest/client/v1/test_push_rule_attrs.py
index 2bc512d7..d0776160 100644
--- a/tests/rest/client/v1/test_push_rule_attrs.py
+++ b/tests/rest/client/v1/test_push_rule_attrs.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py
index ed65f645..a3694f3d 100644
--- a/tests/rest/client/v1/test_rooms.py
+++ b/tests/rest/client/v1/test_rooms.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
@@ -19,10 +18,10 @@
"""Tests REST events for /rooms paths."""
import json
+from typing import Iterable
+from unittest.mock import Mock
from urllib import parse as urlparse
-from mock import Mock
-
import synapse.rest.admin
from synapse.api.constants import EventContentFields, EventTypes, Membership
from synapse.handlers.pagination import PurgeStatus
@@ -207,7 +206,9 @@ class RoomPermissionsTestCase(RoomBase):
)
self.assertEquals(403, channel.code, msg=channel.result["body"])
- def _test_get_membership(self, room=None, members=[], expect_code=None):
+ def _test_get_membership(
+ self, room=None, members: Iterable = frozenset(), expect_code=None
+ ):
for member in members:
path = "/rooms/%s/state/m.room.member/%s" % (room, member)
channel = self.make_request("GET", path)
@@ -645,7 +646,7 @@ class RoomInviteRatelimitTestCase(RoomBase):
def test_invites_by_users_ratelimit(self):
"""Tests that invites to a specific user are actually rate-limited."""
- for i in range(3):
+ for _ in range(3):
room_id = self.helper.create_room_as(self.user_id)
self.helper.invite(room_id, self.user_id, "@other-users:red")
@@ -667,7 +668,7 @@ class RoomJoinRatelimitTestCase(RoomBase):
)
def test_join_local_ratelimit(self):
"""Tests that local joins are actually rate-limited."""
- for i in range(3):
+ for _ in range(3):
self.helper.create_room_as(self.user_id)
self.helper.create_room_as(self.user_id, expect_code=429)
@@ -732,7 +733,7 @@ class RoomJoinRatelimitTestCase(RoomBase):
for path in paths_to_test:
# Make sure we send more requests than the rate-limiting config would allow
# if all of these requests ended up joining the user to a room.
- for i in range(4):
+ for _ in range(4):
channel = self.make_request("POST", path % room_id, {})
self.assertEquals(channel.code, 200)
diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/v1/test_typing.py
index 329dbd06..0aad48a1 100644
--- a/tests/rest/client/v1/test_typing.py
+++ b/tests/rest/client/v1/test_typing.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector
#
@@ -16,7 +15,7 @@
"""Tests REST events for /rooms paths."""
-from mock import Mock
+from unittest.mock import Mock
from synapse.rest.client.v1 import room
from synapse.types import UserID
diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py
index 946740aa..ed55a640 100644
--- a/tests/rest/client/v1/utils.py
+++ b/tests/rest/client/v1/utils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
@@ -21,8 +20,7 @@ import re
import time
import urllib.parse
from typing import Any, Dict, Mapping, MutableMapping, Optional
-
-from mock import patch
+from unittest.mock import patch
import attr
@@ -132,7 +130,7 @@ class RestHelper:
src: str,
targ: str,
membership: str,
- extra_data: dict = {},
+ extra_data: Optional[dict] = None,
tok: Optional[str] = None,
expect_code: int = 200,
) -> None:
@@ -156,7 +154,7 @@ class RestHelper:
path = path + "?access_token=%s" % tok
data = {"membership": membership}
- data.update(extra_data)
+ data.update(extra_data or {})
channel = make_request(
self.hs.get_reactor(),
@@ -187,7 +185,13 @@ class RestHelper:
)
def send_event(
- self, room_id, type, content={}, txn_id=None, tok=None, expect_code=200
+ self,
+ room_id,
+ type,
+ content: Optional[dict] = None,
+ txn_id=None,
+ tok=None,
+ expect_code=200,
):
if txn_id is None:
txn_id = "m%s" % (str(time.time()))
@@ -201,7 +205,7 @@ class RestHelper:
self.site,
"PUT",
path,
- json.dumps(content).encode("utf8"),
+ json.dumps(content or {}).encode("utf8"),
)
assert (
diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/v2_alpha/test_account.py
index e72b6196..4ef19145 100644
--- a/tests/rest/client/v2_alpha/test_account.py
+++ b/tests/rest/client/v2_alpha/test_account.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
diff --git a/tests/rest/client/v2_alpha/test_auth.py b/tests/rest/client/v2_alpha/test_auth.py
index 9734a215..485e3650 100644
--- a/tests/rest/client/v2_alpha/test_auth.py
+++ b/tests/rest/client/v2_alpha/test_auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
# Copyright 2020-2021 The Matrix.org Foundation C.I.C
#
@@ -13,7 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from typing import Union
+from typing import Optional, Union
from twisted.internet.defer import succeed
@@ -74,7 +73,10 @@ class FallbackAuthTests(unittest.HomeserverTestCase):
return channel
def recaptcha(
- self, session: str, expected_post_response: int, post_session: str = None
+ self,
+ session: str,
+ expected_post_response: int,
+ post_session: Optional[str] = None,
) -> None:
"""Get and respond to a fallback recaptcha. Returns the second request."""
if post_session is None:
diff --git a/tests/rest/client/v2_alpha/test_capabilities.py b/tests/rest/client/v2_alpha/test_capabilities.py
index 287a1a48..874052c6 100644
--- a/tests/rest/client/v2_alpha/test_capabilities.py
+++ b/tests/rest/client/v2_alpha/test_capabilities.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py
index f761c449..c7e47725 100644
--- a/tests/rest/client/v2_alpha/test_filter.py
+++ b/tests/rest/client/v2_alpha/test_filter.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/v2_alpha/test_password_policy.py b/tests/rest/client/v2_alpha/test_password_policy.py
index 5ebc5707..6f07ff6c 100644
--- a/tests/rest/client/v2_alpha/test_password_policy.py
+++ b/tests/rest/client/v2_alpha/test_password_policy.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py
index 27db4f55..1cad5f00 100644
--- a/tests/rest/client/v2_alpha/test_register.py
+++ b/tests/rest/client/v2_alpha/test_register.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
@@ -14,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
import datetime
import json
import os
@@ -22,7 +20,7 @@ import os
import pkg_resources
import synapse.rest.admin
-from synapse.api.constants import LoginType
+from synapse.api.constants import APP_SERVICE_REGISTRATION_TYPE, LoginType
from synapse.api.errors import Codes
from synapse.appservice import ApplicationService
from synapse.rest.client.v1 import login, logout
@@ -59,7 +57,9 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):
)
self.hs.get_datastore().services_cache.append(appservice)
- request_data = json.dumps({"username": "as_user_kermit"})
+ request_data = json.dumps(
+ {"username": "as_user_kermit", "type": APP_SERVICE_REGISTRATION_TYPE}
+ )
channel = self.make_request(
b"POST", self.url + b"?access_token=i_am_an_app_service", request_data
@@ -69,9 +69,31 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):
det_data = {"user_id": user_id, "home_server": self.hs.hostname}
self.assertDictContainsSubset(det_data, channel.json_body)
+ def test_POST_appservice_registration_no_type(self):
+ as_token = "i_am_an_app_service"
+
+ appservice = ApplicationService(
+ as_token,
+ self.hs.config.server_name,
+ id="1234",
+ namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]},
+ sender="@as:test",
+ )
+
+ self.hs.get_datastore().services_cache.append(appservice)
+ request_data = json.dumps({"username": "as_user_kermit"})
+
+ channel = self.make_request(
+ b"POST", self.url + b"?access_token=i_am_an_app_service", request_data
+ )
+
+ self.assertEquals(channel.result["code"], b"400", channel.result)
+
def test_POST_appservice_registration_invalid(self):
self.appservice = None # no application service exists
- request_data = json.dumps({"username": "kermit"})
+ request_data = json.dumps(
+ {"username": "kermit", "type": APP_SERVICE_REGISTRATION_TYPE}
+ )
channel = self.make_request(
b"POST", self.url + b"?access_token=i_am_an_app_service", request_data
)
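
Appservice registrations now have to carry an explicit `type` field, and the new `test_POST_appservice_registration_no_type` asserts a 400 when it is missing. A sketch of the request body the updated tests send; `APP_SERVICE_REGISTRATION_TYPE` corresponds to the spec-defined `m.login.application_service` login type:

```python
import json

request_data = json.dumps(
    {
        "username": "as_user_kermit",
        "type": "m.login.application_service",  # omit it and the server 400s
    }
)
# POSTed to .../register?access_token=<appservice token>
```
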
@@ -288,6 +310,57 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):
self.assertIsNotNone(channel.json_body.get("sid"))
+ @unittest.override_config(
+ {
+ "public_baseurl": "https://test_server",
+ "email": {
+ "smtp_host": "mail_server",
+ "smtp_port": 2525,
+ "notif_from": "sender@host",
+ },
+ }
+ )
+ def test_reject_invalid_email(self):
+ """Check that bad emails are rejected"""
+
+ # Test for email with multiple @
+ channel = self.make_request(
+ "POST",
+ b"register/email/requestToken",
+ {"client_secret": "foobar", "email": "email@@email", "send_attempt": 1},
+ )
+ self.assertEquals(400, channel.code, channel.result)
+ # Check error to ensure that we're not erroring due to a bug in the test.
+ self.assertEquals(
+ channel.json_body,
+ {"errcode": "M_UNKNOWN", "error": "Unable to parse email address"},
+ )
+
+ # Test for email with no @
+ channel = self.make_request(
+ "POST",
+ b"register/email/requestToken",
+ {"client_secret": "foobar", "email": "email", "send_attempt": 1},
+ )
+ self.assertEquals(400, channel.code, channel.result)
+ self.assertEquals(
+ channel.json_body,
+ {"errcode": "M_UNKNOWN", "error": "Unable to parse email address"},
+ )
+
+ # Test for super long email
+ email = "a@" + "a" * 1000
+ channel = self.make_request(
+ "POST",
+ b"register/email/requestToken",
+ {"client_secret": "foobar", "email": email, "send_attempt": 1},
+ )
+ self.assertEquals(400, channel.code, channel.result)
+ self.assertEquals(
+ channel.json_body,
+ {"errcode": "M_UNKNOWN", "error": "Unable to parse email address"},
+ )
+
class AccountValidityTestCase(unittest.HomeserverTestCase):
@@ -470,8 +543,8 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase):
(user_id, tok) = self.create_user()
- # Move 6 days forward. This should trigger a renewal email to be sent.
- self.reactor.advance(datetime.timedelta(days=6).total_seconds())
+ # Move 5 days forward. This should trigger a renewal email to be sent.
+ self.reactor.advance(datetime.timedelta(days=5).total_seconds())
self.assertEqual(len(self.email_attempts), 1)
# Retrieving the URL from the email is too much pain for now, so we
@@ -482,14 +555,32 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase):
self.assertEquals(channel.result["code"], b"200", channel.result)
# Check that we're getting HTML back.
- content_type = None
- for header in channel.result.get("headers", []):
- if header[0] == b"Content-Type":
- content_type = header[1]
- self.assertEqual(content_type, b"text/html; charset=utf-8", channel.result)
+ content_type = channel.headers.getRawHeaders(b"Content-Type")
+ self.assertEqual(content_type, [b"text/html; charset=utf-8"], channel.result)
# Check that the HTML we're getting is the one we expect on a successful renewal.
- expected_html = self.hs.config.account_validity.account_renewed_html_content
+ expiration_ts = self.get_success(self.store.get_expiration_ts_for_user(user_id))
+ expected_html = self.hs.config.account_validity.account_validity_account_renewed_template.render(
+ expiration_ts=expiration_ts
+ )
+ self.assertEqual(
+ channel.result["body"], expected_html.encode("utf8"), channel.result
+ )
+
+ # Move 1 day forward. Try to renew with the same token again.
+ url = "/_matrix/client/unstable/account_validity/renew?token=%s" % renewal_token
+ channel = self.make_request(b"GET", url)
+ self.assertEquals(channel.result["code"], b"200", channel.result)
+
+ # Check that we're getting HTML back.
+ content_type = channel.headers.getRawHeaders(b"Content-Type")
+ self.assertEqual(content_type, [b"text/html; charset=utf-8"], channel.result)
+
+ # Check that the HTML we're getting is the one we expect when reusing a
+ # token. The account expiration date should not have changed.
+ expected_html = self.hs.config.account_validity.account_validity_account_previously_renewed_template.render(
+ expiration_ts=expiration_ts
+ )
self.assertEqual(
channel.result["body"], expected_html.encode("utf8"), channel.result
)
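
The renewal pages are no longer pre-rendered strings on the config (`account_renewed_html_content`) but compiled templates rendered per request with the user's `expiration_ts`, which is what lets the server show a different page when a token is reused. A minimal sketch of the render step, assuming a Jinja2 template (Synapse's templating engine); the template body here is hypothetical, only the `render(expiration_ts=...)` call shape is taken from the diff:

```python
from jinja2 import Template

template = Template("Account renewed; it now expires at {{ expiration_ts }}.")
html = template.render(expiration_ts=1_620_000_000_000)
```
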
@@ -509,15 +600,14 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase):
self.assertEquals(channel.result["code"], b"404", channel.result)
# Check that we're getting HTML back.
- content_type = None
- for header in channel.result.get("headers", []):
- if header[0] == b"Content-Type":
- content_type = header[1]
- self.assertEqual(content_type, b"text/html; charset=utf-8", channel.result)
+ content_type = channel.headers.getRawHeaders(b"Content-Type")
+ self.assertEqual(content_type, [b"text/html; charset=utf-8"], channel.result)
# Check that the HTML we're getting is the one we expect when using an
# invalid/unknown token.
- expected_html = self.hs.config.account_validity.invalid_token_html_content
+ expected_html = (
+ self.hs.config.account_validity.account_validity_invalid_token_template.render()
+ )
self.assertEqual(
channel.result["body"], expected_html.encode("utf8"), channel.result
)
@@ -625,7 +715,12 @@ class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase):
config["account_validity"] = {"enabled": False}
self.hs = self.setup_test_homeserver(config=config)
- self.hs.config.account_validity.period = self.validity_period
+
+ # We need to set these directly, instead of in the homeserver config dict above.
+ # This is due to account validity-related config options not being read by
+ # Synapse when account_validity.enabled is False.
+ self.hs.get_datastore()._account_validity_period = self.validity_period
+ self.hs.get_datastore()._account_validity_startup_job_max_delta = self.max_delta
self.store = self.hs.get_datastore()
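A note on the header assertions above: channel.headers is a
twisted.web.http_headers.Headers instance, and getRawHeaders(name) returns the
list of raw values recorded for that header (or None when it is absent), which
is why the expected value changes from a bare bytestring to a one-element
list. A minimal standalone sketch of that API, outside any homeserver fixture:

    from twisted.web.http_headers import Headers

    headers = Headers()
    headers.addRawHeader(b"Content-Type", b"text/html; charset=utf-8")

    # All values set for a header come back as a list; a missing header
    # yields the default (None unless one is supplied).
    assert headers.getRawHeaders(b"Content-Type") == [b"text/html; charset=utf-8"]
    assert headers.getRawHeaders(b"X-Missing") is None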
diff --git a/tests/rest/client/v2_alpha/test_relations.py b/tests/rest/client/v2_alpha/test_relations.py
index e7bb5583..856aa868 100644
--- a/tests/rest/client/v2_alpha/test_relations.py
+++ b/tests/rest/client/v2_alpha/test_relations.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +15,7 @@
import itertools
import json
import urllib
+from typing import Optional
from synapse.api.constants import EventTypes, RelationTypes
from synapse.rest import admin
@@ -681,7 +681,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
relation_type,
event_type,
key=None,
- content={},
+ content: Optional[dict] = None,
access_token=None,
parent_id=None,
):
@@ -713,7 +713,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
"POST",
"/_matrix/client/unstable/rooms/%s/send_relation/%s/%s/%s%s"
% (self.room, original_id, relation_type, event_type, query),
- json.dumps(content).encode("utf-8"),
+ json.dumps(content or {}).encode("utf-8"),
access_token=access_token,
)
return channel
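The content={} to Optional[dict] = None change in the relation-sending test
helper above sidesteps Python's shared-mutable-default pitfall: a default dict
is created once, at function definition time, and reused across every call. A
standalone illustration of the hazard and the fix (hypothetical names, not
from this patch):

    from typing import Optional

    def bad(item, bucket=[]):  # one list, shared by every call
        bucket.append(item)
        return bucket

    def good(item, bucket: Optional[list] = None):
        bucket = [] if bucket is None else bucket
        bucket.append(item)
        return bucket

    assert bad(1) == [1]
    assert bad(2) == [1, 2]  # state leaked from the first call
    assert good(1) == [1]
    assert good(2) == [2]    # each call gets a fresh list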
diff --git a/tests/rest/client/v2_alpha/test_shared_rooms.py b/tests/rest/client/v2_alpha/test_shared_rooms.py
index dd83a1f8..cedb9614 100644
--- a/tests/rest/client/v2_alpha/test_shared_rooms.py
+++ b/tests/rest/client/v2_alpha/test_shared_rooms.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Half-Shot
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py
index 2dbf4239..dbcbdf15 100644
--- a/tests/rest/client/v2_alpha/test_sync.py
+++ b/tests/rest/client/v2_alpha/test_sync.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
diff --git a/tests/rest/client/v2_alpha/test_upgrade_room.py b/tests/rest/client/v2_alpha/test_upgrade_room.py
index d890d118..5f3f15fc 100644
--- a/tests/rest/client/v2_alpha/test_upgrade_room.py
+++ b/tests/rest/client/v2_alpha/test_upgrade_room.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/key/v2/test_remote_key_resource.py b/tests/rest/key/v2/test_remote_key_resource.py
index 9d0d0ef4..3b275bc2 100644
--- a/tests/rest/key/v2/test_remote_key_resource.py
+++ b/tests/rest/key/v2/test_remote_key_resource.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,8 +13,7 @@
# limitations under the License.
import urllib.parse
from io import BytesIO, StringIO
-
-from mock import Mock
+from unittest.mock import Mock
import signedjson.key
from canonicaljson import encode_canonical_json
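The recurring "-from mock import Mock" / "+from unittest.mock import Mock"
hunks throughout this patch drop the third-party mock backport in favour of
the standard library, which has shipped the same API as unittest.mock since
Python 3.3, so no separate dependency is needed. The change is mechanical:

    # Before: required `pip install mock` (a rolling backport of the stdlib module).
    # from mock import Mock, patch

    # After: available out of the box on Python 3.3+.
    from unittest.mock import Mock

    m = Mock()
    m.frobnicate.return_value = 42  # attribute access auto-creates child mocks
    assert m.frobnicate("arg") == 42
    m.frobnicate.assert_called_once_with("arg")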
diff --git a/tests/rest/media/__init__.py b/tests/rest/media/__init__.py
index a354d38c..b1ee10cf 100644
--- a/tests/rest/media/__init__.py
+++ b/tests/rest/media/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/media/v1/__init__.py b/tests/rest/media/v1/__init__.py
index a354d38c..b1ee10cf 100644
--- a/tests/rest/media/v1/__init__.py
+++ b/tests/rest/media/v1/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/media/v1/test_base.py b/tests/rest/media/v1/test_base.py
index ebd78692..f761e23f 100644
--- a/tests/rest/media/v1/test_base.py
+++ b/tests/rest/media/v1/test_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py
index 9f77125f..4a213d13 100644
--- a/tests/rest/media/v1/test_media_storage.py
+++ b/tests/rest/media/v1/test_media_storage.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,10 +17,9 @@ import tempfile
from binascii import unhexlify
from io import BytesIO
from typing import Optional
+from unittest.mock import Mock
from urllib import parse
-from mock import Mock
-
import attr
from parameterized import parameterized_class
from PIL import Image as Image
diff --git a/tests/rest/media/v1/test_url_preview.py b/tests/rest/media/v1/test_url_preview.py
index 69685024..d3ef7bb4 100644
--- a/tests/rest/media/v1/test_url_preview.py
+++ b/tests/rest/media/v1/test_url_preview.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,8 +14,7 @@
import json
import os
import re
-
-from mock import patch
+from unittest.mock import patch
from twisted.internet._resolver import HostResolution
from twisted.internet.address import IPv4Address, IPv6Address
diff --git a/tests/rest/test_health.py b/tests/rest/test_health.py
index 32acd93d..01d48c38 100644
--- a/tests/rest/test_health.py
+++ b/tests/rest/test_health.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py
index 14de0921..ac0e4277 100644
--- a/tests/rest/test_well_known.py
+++ b/tests/rest/test_well_known.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/scripts/test_new_matrix_user.py b/tests/scripts/test_new_matrix_user.py
index 6f56893f..6f3c365c 100644
--- a/tests/scripts/test_new_matrix_user.py
+++ b/tests/scripts/test_new_matrix_user.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from synapse._scripts.register_new_matrix_user import request_registration
diff --git a/tests/server.py b/tests/server.py
index b535a5d8..9df8cda2 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -603,12 +603,6 @@ class FakeTransport:
if self.disconnected:
return
- if not hasattr(self.other, "transport"):
- # the other has no transport yet; reschedule
- if self.autoflush:
- self._reactor.callLater(0.0, self.flush)
- return
-
if maxbytes is not None:
to_write = self.buffer[:maxbytes]
else:
diff --git a/tests/server_notices/test_consent.py b/tests/server_notices/test_consent.py
index 4dd5a361..ac98259b 100644
--- a/tests/server_notices/test_consent.py
+++ b/tests/server_notices/test_consent.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py
index d40d65b0..d46521cc 100644
--- a/tests/server_notices/test_resource_limits_server_notices.py
+++ b/tests/server_notices/test_resource_limits_server_notices.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018, 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py
index 66e3cafe..43fc79ca 100644
--- a/tests/state/test_v2.py
+++ b/tests/state/test_v2.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py
index 1ac4ebc6..200b9198 100644
--- a/tests/storage/test__base.py
+++ b/tests/storage/test__base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
#
@@ -14,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import secrets
from tests import unittest
@@ -22,7 +22,7 @@ class UpsertManyTests(unittest.HomeserverTestCase):
def prepare(self, reactor, clock, hs):
self.storage = hs.get_datastore()
- self.table_name = "table_" + hs.get_secrets().token_hex(6)
+ self.table_name = "table_" + secrets.token_hex(6)
self.get_success(
self.storage.db_pool.runInteraction(
"create",
diff --git a/tests/storage/test_account_data.py b/tests/storage/test_account_data.py
index 38444e48..01af49a1 100644
--- a/tests/storage/test_account_data.py
+++ b/tests/storage/test_account_data.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py
index 1ce29af5..666bffe2 100644
--- a/tests/storage/test_appservice.py
+++ b/tests/storage/test_appservice.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,8 +14,7 @@
import json
import os
import tempfile
-
-from mock import Mock
+from unittest.mock import Mock
import yaml
diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py
index 1b4fae0b..069db0ed 100644
--- a/tests/storage/test_background_update.py
+++ b/tests/storage/test_background_update.py
@@ -1,4 +1,4 @@
-from mock import Mock
+from unittest.mock import Mock
from synapse.storage.background_updates import BackgroundUpdater
diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py
index eac7e4dc..3b45a7ef 100644
--- a/tests/storage/test_base.py
+++ b/tests/storage/test_base.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,8 +14,7 @@
from collections import OrderedDict
-
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py
index 77911386..aa20588b 100644
--- a/tests/storage/test_cleanup_extrems.py
+++ b/tests/storage/test_cleanup_extrems.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,9 +13,7 @@
# limitations under the License.
import os.path
-from unittest.mock import patch
-
-from mock import Mock
+from unittest.mock import Mock, patch
import synapse.rest.admin
from synapse.api.constants import EventTypes
diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py
index 34e65260..e57fce96 100644
--- a/tests/storage/test_client_ips.py
+++ b/tests/storage/test_client_ips.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
@@ -14,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
import synapse.rest.admin
from synapse.http.site import XForwardedForRequest
@@ -390,7 +389,7 @@ class ClientIpStoreTestCase(unittest.HomeserverTestCase):
class ClientIpAuthTestCase(unittest.HomeserverTestCase):
servlets = [
- synapse.rest.admin.register_servlets_for_client_rest_resource,
+ synapse.rest.admin.register_servlets,
login.register_servlets,
]
@@ -434,7 +433,7 @@ class ClientIpAuthTestCase(unittest.HomeserverTestCase):
self.reactor,
self.site,
"GET",
- "/_synapse/admin/v1/users/" + self.user_id,
+ "/_synapse/admin/v2/users/" + self.user_id,
access_token=access_token,
custom_headers=headers1.items(),
**make_request_args,
diff --git a/tests/storage/test_database.py b/tests/storage/test_database.py
index 5a77c849..6fbac0ab 100644
--- a/tests/storage/test_database.py
+++ b/tests/storage/test_database.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -36,17 +35,6 @@ def _stub_db_engine(**kwargs) -> BaseDatabaseEngine:
class TupleComparisonClauseTestCase(unittest.TestCase):
def test_native_tuple_comparison(self):
- db_engine = _stub_db_engine(supports_tuple_comparison=True)
- clause, args = make_tuple_comparison_clause(db_engine, [("a", 1), ("b", 2)])
+ clause, args = make_tuple_comparison_clause([("a", 1), ("b", 2)])
self.assertEqual(clause, "(a,b) > (?,?)")
self.assertEqual(args, [1, 2])
-
- def test_emulated_tuple_comparison(self):
- db_engine = _stub_db_engine(supports_tuple_comparison=False)
- clause, args = make_tuple_comparison_clause(
- db_engine, [("a", 1), ("b", 2), ("c", 3)]
- )
- self.assertEqual(
- clause, "(a >= ? AND (a > ? OR (b >= ? AND (b > ? OR c > ?))))"
- )
- self.assertEqual(args, [1, 1, 2, 2, 3])
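With the emulated branch deleted, make_tuple_comparison_clause no longer takes
a database engine and always emits a native row-value comparison; presumably
every database Synapse now supports handles (a,b) > (?,?) directly, making the
fallback SQL dead code. The surviving behaviour, per the remaining test
(assuming the helper still lives in synapse.storage.database):

    from synapse.storage.database import make_tuple_comparison_clause

    clause, args = make_tuple_comparison_clause([("a", 1), ("b", 2)])
    assert clause == "(a,b) > (?,?)"
    assert args == [1, 2]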
diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py
index dabc1c5f..6790aa52 100644
--- a/tests/storage/test_devices.py
+++ b/tests/storage/test_devices.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2016 OpenMarket Ltd
+# Copyright 2016-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,32 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from twisted.internet import defer
-
import synapse.api.errors
-import tests.unittest
-import tests.utils
-
-
-class DeviceStoreTestCase(tests.unittest.TestCase):
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self.store = None # type: synapse.storage.DataStore
+from tests.unittest import HomeserverTestCase
- @defer.inlineCallbacks
- def setUp(self):
- hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
+class DeviceStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
- @defer.inlineCallbacks
def test_store_new_device(self):
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_device("user_id", "device_id", "display_name")
)
- res = yield defer.ensureDeferred(self.store.get_device("user_id", "device_id"))
+ res = self.get_success(self.store.get_device("user_id", "device_id"))
self.assertDictContainsSubset(
{
"user_id": "user_id",
@@ -48,19 +36,18 @@ class DeviceStoreTestCase(tests.unittest.TestCase):
res,
)
- @defer.inlineCallbacks
def test_get_devices_by_user(self):
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_device("user_id", "device1", "display_name 1")
)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_device("user_id", "device2", "display_name 2")
)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_device("user_id2", "device3", "display_name 3")
)
- res = yield defer.ensureDeferred(self.store.get_devices_by_user("user_id"))
+ res = self.get_success(self.store.get_devices_by_user("user_id"))
self.assertEqual(2, len(res.keys()))
self.assertDictContainsSubset(
{
@@ -79,43 +66,41 @@ class DeviceStoreTestCase(tests.unittest.TestCase):
res["device2"],
)
- @defer.inlineCallbacks
def test_count_devices_by_users(self):
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_device("user_id", "device1", "display_name 1")
)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_device("user_id", "device2", "display_name 2")
)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_device("user_id2", "device3", "display_name 3")
)
- res = yield defer.ensureDeferred(self.store.count_devices_by_users())
+ res = self.get_success(self.store.count_devices_by_users())
self.assertEqual(0, res)
- res = yield defer.ensureDeferred(self.store.count_devices_by_users(["unknown"]))
+ res = self.get_success(self.store.count_devices_by_users(["unknown"]))
self.assertEqual(0, res)
- res = yield defer.ensureDeferred(self.store.count_devices_by_users(["user_id"]))
+ res = self.get_success(self.store.count_devices_by_users(["user_id"]))
self.assertEqual(2, res)
- res = yield defer.ensureDeferred(
+ res = self.get_success(
self.store.count_devices_by_users(["user_id", "user_id2"])
)
self.assertEqual(3, res)
- @defer.inlineCallbacks
def test_get_device_updates_by_remote(self):
device_ids = ["device_id1", "device_id2"]
# Add two device updates with a single stream_id
- yield defer.ensureDeferred(
+ self.get_success(
self.store.add_device_change_to_streams("user_id", device_ids, ["somehost"])
)
# Get all device updates ever meant for this remote
- now_stream_id, device_updates = yield defer.ensureDeferred(
+ now_stream_id, device_updates = self.get_success(
self.store.get_device_updates_by_remote("somehost", -1, limit=100)
)
@@ -131,37 +116,35 @@ class DeviceStoreTestCase(tests.unittest.TestCase):
}
self.assertEqual(received_device_ids, set(expected_device_ids))
- @defer.inlineCallbacks
def test_update_device(self):
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_device("user_id", "device_id", "display_name 1")
)
- res = yield defer.ensureDeferred(self.store.get_device("user_id", "device_id"))
+ res = self.get_success(self.store.get_device("user_id", "device_id"))
self.assertEqual("display_name 1", res["display_name"])
# do a no-op first
- yield defer.ensureDeferred(self.store.update_device("user_id", "device_id"))
- res = yield defer.ensureDeferred(self.store.get_device("user_id", "device_id"))
+ self.get_success(self.store.update_device("user_id", "device_id"))
+ res = self.get_success(self.store.get_device("user_id", "device_id"))
self.assertEqual("display_name 1", res["display_name"])
# do the update
- yield defer.ensureDeferred(
+ self.get_success(
self.store.update_device(
"user_id", "device_id", new_display_name="display_name 2"
)
)
# check it worked
- res = yield defer.ensureDeferred(self.store.get_device("user_id", "device_id"))
+ res = self.get_success(self.store.get_device("user_id", "device_id"))
self.assertEqual("display_name 2", res["display_name"])
- @defer.inlineCallbacks
def test_update_unknown_device(self):
- with self.assertRaises(synapse.api.errors.StoreError) as cm:
- yield defer.ensureDeferred(
- self.store.update_device(
- "user_id", "unknown_device_id", new_display_name="display_name 2"
- )
- )
- self.assertEqual(404, cm.exception.code)
+ exc = self.get_failure(
+ self.store.update_device(
+ "user_id", "unknown_device_id", new_display_name="display_name 2"
+ ),
+ synapse.api.errors.StoreError,
+ )
+ self.assertEqual(404, exc.value.code)
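The test_devices.py rewrite is the template for most of the storage-test
changes that follow: the @defer.inlineCallbacks setUp and yield plumbing give
way to HomeserverTestCase's prepare() hook plus its get_success/get_failure
helpers, which drive the fake reactor until the awaitable completes and hand
back the result or the expected Failure. Roughly, the converted shape is:

    from tests.unittest import HomeserverTestCase

    class ExampleStoreTestCase(HomeserverTestCase):
        def prepare(self, reactor, clock, hs):
            # Runs once the homeserver is built; no generator tricks needed.
            self.store = hs.get_datastore()

        def test_store_and_fetch(self):
            # Previously: res = yield defer.ensureDeferred(...) under
            # @defer.inlineCallbacks; get_success fails the test on error.
            self.get_success(self.store.store_device("user_id", "device_id", "name"))
            res = self.get_success(self.store.get_device("user_id", "device_id"))
            self.assertEqual(res["display_name"], "name")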
diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py
index da93ca39..41bef62c 100644
--- a/tests/storage/test_directory.py
+++ b/tests/storage/test_directory.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,28 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
-from twisted.internet import defer
-
from synapse.types import RoomAlias, RoomID
-from tests import unittest
-from tests.utils import setup_test_homeserver
+from tests.unittest import HomeserverTestCase
-class DirectoryStoreTestCase(unittest.TestCase):
- @defer.inlineCallbacks
- def setUp(self):
- hs = yield setup_test_homeserver(self.addCleanup)
-
+class DirectoryStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.room = RoomID.from_string("!abcde:test")
self.alias = RoomAlias.from_string("#my-room:test")
- @defer.inlineCallbacks
def test_room_to_alias(self):
- yield defer.ensureDeferred(
+ self.get_success(
self.store.create_room_alias_association(
room_alias=self.alias, room_id=self.room.to_string(), servers=["test"]
)
@@ -42,16 +33,11 @@ class DirectoryStoreTestCase(unittest.TestCase):
self.assertEquals(
["#my-room:test"],
- (
- yield defer.ensureDeferred(
- self.store.get_aliases_for_room(self.room.to_string())
- )
- ),
+ (self.get_success(self.store.get_aliases_for_room(self.room.to_string()))),
)
- @defer.inlineCallbacks
def test_alias_to_room(self):
- yield defer.ensureDeferred(
+ self.get_success(
self.store.create_room_alias_association(
room_alias=self.alias, room_id=self.room.to_string(), servers=["test"]
)
@@ -59,28 +45,19 @@ class DirectoryStoreTestCase(unittest.TestCase):
self.assertObjectHasAttributes(
{"room_id": self.room.to_string(), "servers": ["test"]},
- (
- yield defer.ensureDeferred(
- self.store.get_association_from_room_alias(self.alias)
- )
- ),
+ (self.get_success(self.store.get_association_from_room_alias(self.alias))),
)
- @defer.inlineCallbacks
def test_delete_alias(self):
- yield defer.ensureDeferred(
+ self.get_success(
self.store.create_room_alias_association(
room_alias=self.alias, room_id=self.room.to_string(), servers=["test"]
)
)
- room_id = yield defer.ensureDeferred(self.store.delete_room_alias(self.alias))
+ room_id = self.get_success(self.store.delete_room_alias(self.alias))
self.assertEqual(self.room.to_string(), room_id)
self.assertIsNone(
- (
- yield defer.ensureDeferred(
- self.store.get_association_from_room_alias(self.alias)
- )
- )
+ (self.get_success(self.store.get_association_from_room_alias(self.alias)))
)
diff --git a/tests/storage/test_e2e_room_keys.py b/tests/storage/test_e2e_room_keys.py
index 3d7760d5..9b6b4254 100644
--- a/tests/storage/test_e2e_room_keys.py
+++ b/tests/storage/test_e2e_room_keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test_end_to_end_keys.py b/tests/storage/test_end_to_end_keys.py
index 3fc4bb13..3bf6e337 100644
--- a/tests/storage/test_end_to_end_keys.py
+++ b/tests/storage/test_end_to_end_keys.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2016 OpenMarket Ltd
+# Copyright 2016-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,30 +12,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from twisted.internet import defer
+from tests.unittest import HomeserverTestCase
-import tests.unittest
-import tests.utils
-
-class EndToEndKeyStoreTestCase(tests.unittest.TestCase):
- @defer.inlineCallbacks
- def setUp(self):
- hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
+class EndToEndKeyStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
- @defer.inlineCallbacks
def test_key_without_device_name(self):
now = 1470174257070
json = {"key": "value"}
- yield defer.ensureDeferred(self.store.store_device("user", "device", None))
+ self.get_success(self.store.store_device("user", "device", None))
- yield defer.ensureDeferred(
- self.store.set_e2e_device_keys("user", "device", now, json)
- )
+ self.get_success(self.store.set_e2e_device_keys("user", "device", now, json))
- res = yield defer.ensureDeferred(
+ res = self.get_success(
self.store.get_e2e_device_keys_for_cs_api((("user", "device"),))
)
self.assertIn("user", res)
@@ -44,38 +35,32 @@ class EndToEndKeyStoreTestCase(tests.unittest.TestCase):
dev = res["user"]["device"]
self.assertDictContainsSubset(json, dev)
- @defer.inlineCallbacks
def test_reupload_key(self):
now = 1470174257070
json = {"key": "value"}
- yield defer.ensureDeferred(self.store.store_device("user", "device", None))
+ self.get_success(self.store.store_device("user", "device", None))
- changed = yield defer.ensureDeferred(
+ changed = self.get_success(
self.store.set_e2e_device_keys("user", "device", now, json)
)
self.assertTrue(changed)
# If we try to upload the same key then we should be told nothing
# changed
- changed = yield defer.ensureDeferred(
+ changed = self.get_success(
self.store.set_e2e_device_keys("user", "device", now, json)
)
self.assertFalse(changed)
- @defer.inlineCallbacks
def test_get_key_with_device_name(self):
now = 1470174257070
json = {"key": "value"}
- yield defer.ensureDeferred(
- self.store.set_e2e_device_keys("user", "device", now, json)
- )
- yield defer.ensureDeferred(
- self.store.store_device("user", "device", "display_name")
- )
+ self.get_success(self.store.set_e2e_device_keys("user", "device", now, json))
+ self.get_success(self.store.store_device("user", "device", "display_name"))
- res = yield defer.ensureDeferred(
+ res = self.get_success(
self.store.get_e2e_device_keys_for_cs_api((("user", "device"),))
)
self.assertIn("user", res)
@@ -85,29 +70,28 @@ class EndToEndKeyStoreTestCase(tests.unittest.TestCase):
{"key": "value", "unsigned": {"device_display_name": "display_name"}}, dev
)
- @defer.inlineCallbacks
def test_multiple_devices(self):
now = 1470174257070
- yield defer.ensureDeferred(self.store.store_device("user1", "device1", None))
- yield defer.ensureDeferred(self.store.store_device("user1", "device2", None))
- yield defer.ensureDeferred(self.store.store_device("user2", "device1", None))
- yield defer.ensureDeferred(self.store.store_device("user2", "device2", None))
+ self.get_success(self.store.store_device("user1", "device1", None))
+ self.get_success(self.store.store_device("user1", "device2", None))
+ self.get_success(self.store.store_device("user2", "device1", None))
+ self.get_success(self.store.store_device("user2", "device2", None))
- yield defer.ensureDeferred(
+ self.get_success(
self.store.set_e2e_device_keys("user1", "device1", now, {"key": "json11"})
)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.set_e2e_device_keys("user1", "device2", now, {"key": "json12"})
)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.set_e2e_device_keys("user2", "device1", now, {"key": "json21"})
)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.set_e2e_device_keys("user2", "device2", now, {"key": "json22"})
)
- res = yield defer.ensureDeferred(
+ res = self.get_success(
self.store.get_e2e_device_keys_for_cs_api(
(("user1", "device1"), ("user2", "device2"))
)
diff --git a/tests/storage/test_event_chain.py b/tests/storage/test_event_chain.py
index 16daa66c..d87f124c 100644
--- a/tests/storage/test_event_chain.py
+++ b/tests/storage/test_event_chain.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py
index d597d712..a0e22594 100644
--- a/tests/storage/test_event_federation.py
+++ b/tests/storage/test_event_federation.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the 'License');
diff --git a/tests/storage/test_event_metrics.py b/tests/storage/test_event_metrics.py
index 7691f2d7..088fbb24 100644
--- a/tests/storage/test_event_metrics.py
+++ b/tests/storage/test_event_metrics.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
@@ -39,12 +38,12 @@ class ExtremStatisticsTestCase(HomeserverTestCase):
last_event = None
# Make a real event chain
- for i in range(event_count):
+ for _ in range(event_count):
ev = self.create_and_send_event(room_id, user, False, last_event)
last_event = [ev]
# Sprinkle in some extremities
- for i in range(extrems):
+ for _ in range(extrems):
ev = self.create_and_send_event(room_id, user, False, last_event)
# Let it run for a while, then pull out the statistics from the
diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py
index 485f1ee0..1930b37e 100644
--- a/tests/storage/test_event_push_actions.py
+++ b/tests/storage/test_event_push_actions.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2016 OpenMarket Ltd
+# Copyright 2016-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,12 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
-from twisted.internet import defer
-
-import tests.unittest
-import tests.utils
+from tests.unittest import HomeserverTestCase
USER_ID = "@user:example.com"
@@ -30,37 +26,31 @@ HIGHLIGHT = [
]
-class EventPushActionsStoreTestCase(tests.unittest.TestCase):
- @defer.inlineCallbacks
- def setUp(self):
- hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
+class EventPushActionsStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.persist_events_store = hs.get_datastores().persist_events
- @defer.inlineCallbacks
def test_get_unread_push_actions_for_user_in_range_for_http(self):
- yield defer.ensureDeferred(
+ self.get_success(
self.store.get_unread_push_actions_for_user_in_range_for_http(
USER_ID, 0, 1000, 20
)
)
- @defer.inlineCallbacks
def test_get_unread_push_actions_for_user_in_range_for_email(self):
- yield defer.ensureDeferred(
+ self.get_success(
self.store.get_unread_push_actions_for_user_in_range_for_email(
USER_ID, 0, 1000, 20
)
)
- @defer.inlineCallbacks
def test_count_aggregation(self):
room_id = "!foo:example.com"
user_id = "@user1235:example.com"
- @defer.inlineCallbacks
def _assert_counts(noitf_count, highlight_count):
- counts = yield defer.ensureDeferred(
+ counts = self.get_success(
self.store.db_pool.runInteraction(
"", self.store._get_unread_counts_by_pos_txn, room_id, user_id, 0
)
@@ -74,7 +64,6 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
},
)
- @defer.inlineCallbacks
def _inject_actions(stream, action):
event = Mock()
event.room_id = room_id
@@ -82,14 +71,14 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
event.internal_metadata.stream_ordering = stream
event.depth = stream
- yield defer.ensureDeferred(
+ self.get_success(
self.store.add_push_actions_to_staging(
event.event_id,
{user_id: action},
False,
)
)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.db_pool.runInteraction(
"",
self.persist_events_store._set_push_actions_for_event_and_users_txn,
@@ -99,14 +88,14 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
)
def _rotate(stream):
- return defer.ensureDeferred(
+ self.get_success(
self.store.db_pool.runInteraction(
"", self.store._rotate_notifs_before_txn, stream
)
)
def _mark_read(stream, depth):
- return defer.ensureDeferred(
+ self.get_success(
self.store.db_pool.runInteraction(
"",
self.store._remove_old_push_actions_before_txn,
@@ -116,49 +105,48 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
)
)
- yield _assert_counts(0, 0)
- yield _inject_actions(1, PlAIN_NOTIF)
- yield _assert_counts(1, 0)
- yield _rotate(2)
- yield _assert_counts(1, 0)
+ _assert_counts(0, 0)
+ _inject_actions(1, PlAIN_NOTIF)
+ _assert_counts(1, 0)
+ _rotate(2)
+ _assert_counts(1, 0)
- yield _inject_actions(3, PlAIN_NOTIF)
- yield _assert_counts(2, 0)
- yield _rotate(4)
- yield _assert_counts(2, 0)
+ _inject_actions(3, PlAIN_NOTIF)
+ _assert_counts(2, 0)
+ _rotate(4)
+ _assert_counts(2, 0)
- yield _inject_actions(5, PlAIN_NOTIF)
- yield _mark_read(3, 3)
- yield _assert_counts(1, 0)
+ _inject_actions(5, PlAIN_NOTIF)
+ _mark_read(3, 3)
+ _assert_counts(1, 0)
- yield _mark_read(5, 5)
- yield _assert_counts(0, 0)
+ _mark_read(5, 5)
+ _assert_counts(0, 0)
- yield _inject_actions(6, PlAIN_NOTIF)
- yield _rotate(7)
+ _inject_actions(6, PlAIN_NOTIF)
+ _rotate(7)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.db_pool.simple_delete(
table="event_push_actions", keyvalues={"1": 1}, desc=""
)
)
- yield _assert_counts(1, 0)
+ _assert_counts(1, 0)
- yield _mark_read(7, 7)
- yield _assert_counts(0, 0)
+ _mark_read(7, 7)
+ _assert_counts(0, 0)
- yield _inject_actions(8, HIGHLIGHT)
- yield _assert_counts(1, 1)
- yield _rotate(9)
- yield _assert_counts(1, 1)
- yield _rotate(10)
- yield _assert_counts(1, 1)
+ _inject_actions(8, HIGHLIGHT)
+ _assert_counts(1, 1)
+ _rotate(9)
+ _assert_counts(1, 1)
+ _rotate(10)
+ _assert_counts(1, 1)
- @defer.inlineCallbacks
def test_find_first_stream_ordering_after_ts(self):
def add_event(so, ts):
- return defer.ensureDeferred(
+ self.get_success(
self.store.db_pool.simple_insert(
"events",
{
@@ -177,24 +165,16 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
)
# start with the base case where there are no events in the table
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(11)
- )
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(11))
self.assertEqual(r, 0)
# now with one event
- yield add_event(2, 10)
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(9)
- )
+ add_event(2, 10)
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(9))
self.assertEqual(r, 2)
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(10)
- )
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(10))
self.assertEqual(r, 2)
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(11)
- )
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(11))
self.assertEqual(r, 3)
# add a bunch of dummy events to the events table
@@ -205,39 +185,27 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase):
(10, 130),
(20, 140),
):
- yield add_event(stream_ordering, ts)
+ add_event(stream_ordering, ts)
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(110)
- )
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(110))
self.assertEqual(r, 3, "First event after 110ms should be 3, was %i" % r)
# 4 and 5 are both after 120: we want 4 rather than 5
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(120)
- )
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(120))
self.assertEqual(r, 4, "First event after 120ms should be 4, was %i" % r)
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(129)
- )
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(129))
self.assertEqual(r, 10, "First event after 129ms should be 10, was %i" % r)
# check we can get the last event
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(140)
- )
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(140))
        self.assertEqual(r, 20, "First event after 140ms should be 20, was %i" % r)
# off the end
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(160)
- )
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(160))
self.assertEqual(r, 21)
# check we can find an event at ordering zero
- yield add_event(0, 5)
- r = yield defer.ensureDeferred(
- self.store.find_first_stream_ordering_after_ts(1)
- )
+ add_event(0, 5)
+ r = self.get_success(self.store.find_first_stream_ordering_after_ts(1))
self.assertEqual(r, 0)
diff --git a/tests/storage/test_events.py b/tests/storage/test_events.py
index ed898b8d..617bc809 100644
--- a/tests/storage/test_events.py
+++ b/tests/storage/test_events.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test_id_generators.py b/tests/storage/test_id_generators.py
index aad6bc90..792b1c44 100644
--- a/tests/storage/test_id_generators.py
+++ b/tests/storage/test_id_generators.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import List, Optional
+
from synapse.storage.database import DatabasePool
from synapse.storage.engines import IncorrectDatabaseSetup
from synapse.storage.util.id_generators import MultiWriterIdGenerator
@@ -43,7 +44,7 @@ class MultiWriterIdGeneratorTestCase(HomeserverTestCase):
)
def _create_id_generator(
- self, instance_name="master", writers=["master"]
+ self, instance_name="master", writers: Optional[List[str]] = None
) -> MultiWriterIdGenerator:
def _create(conn):
return MultiWriterIdGenerator(
@@ -53,7 +54,7 @@ class MultiWriterIdGeneratorTestCase(HomeserverTestCase):
instance_name=instance_name,
tables=[("foobar", "instance_name", "stream_id")],
sequence_name="foobar_seq",
- writers=writers,
+ writers=writers or ["master"],
)
return self.get_success_or_raise(self.db_pool.runWithConnection(_create))
@@ -476,7 +477,7 @@ class BackwardsMultiWriterIdGeneratorTestCase(HomeserverTestCase):
)
def _create_id_generator(
- self, instance_name="master", writers=["master"]
+ self, instance_name="master", writers: Optional[List[str]] = None
) -> MultiWriterIdGenerator:
def _create(conn):
return MultiWriterIdGenerator(
@@ -486,7 +487,7 @@ class BackwardsMultiWriterIdGeneratorTestCase(HomeserverTestCase):
instance_name=instance_name,
tables=[("foobar", "instance_name", "stream_id")],
sequence_name="foobar_seq",
- writers=writers,
+ writers=writers or ["master"],
positive=False,
)
@@ -612,7 +613,7 @@ class MultiTableMultiWriterIdGeneratorTestCase(HomeserverTestCase):
)
def _create_id_generator(
- self, instance_name="master", writers=["master"]
+ self, instance_name="master", writers: Optional[List[str]] = None
) -> MultiWriterIdGenerator:
def _create(conn):
return MultiWriterIdGenerator(
@@ -625,7 +626,7 @@ class MultiTableMultiWriterIdGeneratorTestCase(HomeserverTestCase):
("foobar2", "instance_name", "stream_id"),
],
sequence_name="foobar_seq",
- writers=writers,
+ writers=writers or ["master"],
)
return self.get_success_or_raise(self.db_pool.runWithConnection(_create))
diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py
index 95f309fb..a94b5fd7 100644
--- a/tests/storage/test_keys.py
+++ b/tests/storage/test_keys.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test_main.py b/tests/storage/test_main.py
index e9e3bca3..d2b7b899 100644
--- a/tests/storage/test_main.py
+++ b/tests/storage/test_main.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Awesome Technologies Innovationslabor GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py
index 5858c7fc..944dbc34 100644
--- a/tests/storage/test_monthly_active_users.py
+++ b/tests/storage/test_monthly_active_users.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet import defer
diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py
index ea63bd56..8a446da8 100644
--- a/tests/storage/test_profile.py
+++ b/tests/storage/test_profile.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,59 +12,50 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
-from twisted.internet import defer
-
from synapse.types import UserID
from tests import unittest
-from tests.utils import setup_test_homeserver
-
-class ProfileStoreTestCase(unittest.TestCase):
- @defer.inlineCallbacks
- def setUp(self):
- hs = yield setup_test_homeserver(self.addCleanup)
+class ProfileStoreTestCase(unittest.HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.u_frank = UserID.from_string("@frank:test")
- @defer.inlineCallbacks
def test_displayname(self):
- yield defer.ensureDeferred(self.store.create_profile(self.u_frank.localpart))
+ self.get_success(self.store.create_profile(self.u_frank.localpart))
- yield defer.ensureDeferred(
+ self.get_success(
self.store.set_profile_displayname(self.u_frank.localpart, "Frank")
)
self.assertEquals(
"Frank",
(
- yield defer.ensureDeferred(
+ self.get_success(
self.store.get_profile_displayname(self.u_frank.localpart)
)
),
)
# test set to None
- yield defer.ensureDeferred(
+ self.get_success(
self.store.set_profile_displayname(self.u_frank.localpart, None)
)
self.assertIsNone(
(
- yield defer.ensureDeferred(
+ self.get_success(
self.store.get_profile_displayname(self.u_frank.localpart)
)
)
)
- @defer.inlineCallbacks
def test_avatar_url(self):
- yield defer.ensureDeferred(self.store.create_profile(self.u_frank.localpart))
+ self.get_success(self.store.create_profile(self.u_frank.localpart))
- yield defer.ensureDeferred(
+ self.get_success(
self.store.set_profile_avatar_url(
self.u_frank.localpart, "http://my.site/here"
)
@@ -74,20 +64,20 @@ class ProfileStoreTestCase(unittest.TestCase):
self.assertEquals(
"http://my.site/here",
(
- yield defer.ensureDeferred(
+ self.get_success(
self.store.get_profile_avatar_url(self.u_frank.localpart)
)
),
)
# test set to None
- yield defer.ensureDeferred(
+ self.get_success(
self.store.set_profile_avatar_url(self.u_frank.localpart, None)
)
self.assertIsNone(
(
- yield defer.ensureDeferred(
+ self.get_success(
self.store.get_profile_avatar_url(self.u_frank.localpart)
)
)
diff --git a/tests/storage/test_purge.py b/tests/storage/test_purge.py
index 41af8c48..54c5b470 100644
--- a/tests/storage/test_purge.py
+++ b/tests/storage/test_purge.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py
index b2a0e608..bb31ab75 100644
--- a/tests/storage/test_redaction.py
+++ b/tests/storage/test_redaction.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014-2016 OpenMarket Ltd
-# Copyright 2019 The Matrix.org Foundation C.I.C.
+# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,11 +11,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import Optional
from canonicaljson import json
-from twisted.internet import defer
-
from synapse.api.constants import EventTypes, Membership
from synapse.api.room_versions import RoomVersions
from synapse.types import RoomID, UserID
@@ -50,10 +47,15 @@ class RedactionTestCase(unittest.HomeserverTestCase):
self.depth = 1
def inject_room_member(
- self, room, user, membership, replaces_state=None, extra_content={}
+ self,
+ room,
+ user,
+ membership,
+ replaces_state=None,
+ extra_content: Optional[dict] = None,
):
content = {"membership": membership}
- content.update(extra_content)
+ content.update(extra_content or {})
builder = self.event_builder_factory.for_room_version(
RoomVersions.V1,
{
@@ -230,10 +232,9 @@ class RedactionTestCase(unittest.HomeserverTestCase):
self._base_builder = base_builder
self._event_id = event_id
- @defer.inlineCallbacks
- def build(self, prev_event_ids, auth_event_ids):
- built_event = yield defer.ensureDeferred(
- self._base_builder.build(prev_event_ids, auth_event_ids)
+ async def build(self, prev_event_ids, auth_event_ids):
+ built_event = await self._base_builder.build(
+ prev_event_ids, auth_event_ids
)
built_event._event_id = self._event_id
diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py
index 4eb41c46..97480652 100644
--- a/tests/storage/test_registration.py
+++ b/tests/storage/test_registration.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,21 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
-from twisted.internet import defer
-
from synapse.api.constants import UserTypes
from synapse.api.errors import ThreepidValidationError
-from tests import unittest
-from tests.utils import setup_test_homeserver
-
+from tests.unittest import HomeserverTestCase
-class RegistrationStoreTestCase(unittest.TestCase):
- @defer.inlineCallbacks
- def setUp(self):
- hs = yield setup_test_homeserver(self.addCleanup)
+class RegistrationStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.user_id = "@my-user:test"
@@ -35,9 +27,8 @@ class RegistrationStoreTestCase(unittest.TestCase):
self.pwhash = "{xx1}123456789"
self.device_id = "akgjhdjklgshg"
- @defer.inlineCallbacks
def test_register(self):
- yield defer.ensureDeferred(self.store.register_user(self.user_id, self.pwhash))
+ self.get_success(self.store.register_user(self.user_id, self.pwhash))
self.assertEquals(
{
@@ -49,93 +40,81 @@ class RegistrationStoreTestCase(unittest.TestCase):
"consent_version": None,
"consent_server_notice_sent": None,
"appservice_id": None,
- "creation_ts": 1000,
+ "creation_ts": 0,
"user_type": None,
"deactivated": 0,
"shadow_banned": 0,
},
- (yield defer.ensureDeferred(self.store.get_user_by_id(self.user_id))),
+ (self.get_success(self.store.get_user_by_id(self.user_id))),
)
- @defer.inlineCallbacks
def test_add_tokens(self):
- yield defer.ensureDeferred(self.store.register_user(self.user_id, self.pwhash))
- yield defer.ensureDeferred(
+ self.get_success(self.store.register_user(self.user_id, self.pwhash))
+ self.get_success(
self.store.add_access_token_to_user(
self.user_id, self.tokens[1], self.device_id, valid_until_ms=None
)
)
- result = yield defer.ensureDeferred(
- self.store.get_user_by_access_token(self.tokens[1])
- )
+ result = self.get_success(self.store.get_user_by_access_token(self.tokens[1]))
self.assertEqual(result.user_id, self.user_id)
self.assertEqual(result.device_id, self.device_id)
self.assertIsNotNone(result.token_id)
- @defer.inlineCallbacks
def test_user_delete_access_tokens(self):
# add some tokens
- yield defer.ensureDeferred(self.store.register_user(self.user_id, self.pwhash))
- yield defer.ensureDeferred(
+ self.get_success(self.store.register_user(self.user_id, self.pwhash))
+ self.get_success(
self.store.add_access_token_to_user(
self.user_id, self.tokens[0], device_id=None, valid_until_ms=None
)
)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.add_access_token_to_user(
self.user_id, self.tokens[1], self.device_id, valid_until_ms=None
)
)
# now delete some
- yield defer.ensureDeferred(
+ self.get_success(
self.store.user_delete_access_tokens(self.user_id, device_id=self.device_id)
)
# check they were deleted
- user = yield defer.ensureDeferred(
- self.store.get_user_by_access_token(self.tokens[1])
- )
+ user = self.get_success(self.store.get_user_by_access_token(self.tokens[1]))
self.assertIsNone(user, "access token was not deleted by device_id")
# check the one not associated with the device was not deleted
- user = yield defer.ensureDeferred(
- self.store.get_user_by_access_token(self.tokens[0])
- )
+ user = self.get_success(self.store.get_user_by_access_token(self.tokens[0]))
self.assertEqual(self.user_id, user.user_id)
# now delete the rest
- yield defer.ensureDeferred(self.store.user_delete_access_tokens(self.user_id))
+ self.get_success(self.store.user_delete_access_tokens(self.user_id))
- user = yield defer.ensureDeferred(
- self.store.get_user_by_access_token(self.tokens[0])
- )
+ user = self.get_success(self.store.get_user_by_access_token(self.tokens[0]))
self.assertIsNone(user, "access token was not deleted without device_id")
- @defer.inlineCallbacks
def test_is_support_user(self):
TEST_USER = "@test:test"
SUPPORT_USER = "@support:test"
- res = yield defer.ensureDeferred(self.store.is_support_user(None))
+ res = self.get_success(self.store.is_support_user(None))
self.assertFalse(res)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.register_user(user_id=TEST_USER, password_hash=None)
)
- res = yield defer.ensureDeferred(self.store.is_support_user(TEST_USER))
+ res = self.get_success(self.store.is_support_user(TEST_USER))
self.assertFalse(res)
- yield defer.ensureDeferred(
+ self.get_success(
self.store.register_user(
user_id=SUPPORT_USER, password_hash=None, user_type=UserTypes.SUPPORT
)
)
- res = yield defer.ensureDeferred(self.store.is_support_user(SUPPORT_USER))
+ res = self.get_success(self.store.is_support_user(SUPPORT_USER))
self.assertTrue(res)
- @defer.inlineCallbacks
def test_3pid_inhibit_invalid_validation_session_error(self):
"""Tests that enabling the configuration option to inhibit 3PID errors on
/requestToken also inhibits validation errors caused by an unknown session ID.
@@ -143,30 +122,28 @@ class RegistrationStoreTestCase(unittest.TestCase):
# Check that, with the config setting set to false (the default value), a
# validation error is caused by the unknown session ID.
- try:
- yield defer.ensureDeferred(
- self.store.validate_threepid_session(
- "fake_sid",
- "fake_client_secret",
- "fake_token",
- 0,
- )
- )
- except ThreepidValidationError as e:
- self.assertEquals(e.msg, "Unknown session_id", e)
+ e = self.get_failure(
+ self.store.validate_threepid_session(
+ "fake_sid",
+ "fake_client_secret",
+ "fake_token",
+ 0,
+ ),
+ ThreepidValidationError,
+ )
+ self.assertEquals(e.value.msg, "Unknown session_id", e)
# Set the config setting to true.
self.store._ignore_unknown_session_error = True
# Check that now the validation error is caused by the token not matching.
- try:
- yield defer.ensureDeferred(
- self.store.validate_threepid_session(
- "fake_sid",
- "fake_client_secret",
- "fake_token",
- 0,
- )
- )
- except ThreepidValidationError as e:
- self.assertEquals(e.msg, "Validation token not found or has expired", e)
+ e = self.get_failure(
+ self.store.validate_threepid_session(
+ "fake_sid",
+ "fake_client_secret",
+ "fake_token",
+ 0,
+ ),
+ ThreepidValidationError,
+ )
+ self.assertEquals(e.value.msg, "Validation token not found or has expired", e)
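Beyond being shorter, the get_failure form fixes a latent weakness in the
try/except blocks it replaces: if validate_threepid_session ever stopped
raising, the old tests would pass silently, because the assertions only ran
inside the except clause. get_failure(awaitable, exc_type) fails the test
unless the expected exception occurs and returns the Failure, so the message
check always runs. A sketch, with a hypothetical awaitable-returning
do_thing():

    # Old: nothing asserts that an exception happened at all.
    try:
        do_thing()
    except ThreepidValidationError as e:
        self.assertEquals(e.msg, "Unknown session_id")

    # New: the test fails unless ThreepidValidationError is raised.
    e = self.get_failure(do_thing(), ThreepidValidationError)
    self.assertEquals(e.value.msg, "Unknown session_id")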
diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py
index bc8400f2..70257bf2 100644
--- a/tests/storage/test_room.py
+++ b/tests/storage/test_room.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,22 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
-from twisted.internet import defer
-
from synapse.api.constants import EventTypes
from synapse.api.room_versions import RoomVersions
from synapse.types import RoomAlias, RoomID, UserID
-from tests import unittest
-from tests.utils import setup_test_homeserver
-
+from tests.unittest import HomeserverTestCase
-class RoomStoreTestCase(unittest.TestCase):
- @defer.inlineCallbacks
- def setUp(self):
- hs = yield setup_test_homeserver(self.addCleanup)
+class RoomStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
# We can't test RoomStore on its own without the DirectoryStore, for
# management of the 'room_aliases' table
self.store = hs.get_datastore()
@@ -37,7 +29,7 @@ class RoomStoreTestCase(unittest.TestCase):
self.alias = RoomAlias.from_string("#a-room-name:test")
self.u_creator = UserID.from_string("@creator:test")
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_room(
self.room.to_string(),
room_creator_user_id=self.u_creator.to_string(),
@@ -46,7 +38,6 @@ class RoomStoreTestCase(unittest.TestCase):
)
)
- @defer.inlineCallbacks
def test_get_room(self):
self.assertDictContainsSubset(
{
@@ -54,16 +45,12 @@ class RoomStoreTestCase(unittest.TestCase):
"creator": self.u_creator.to_string(),
"is_public": True,
},
- (yield defer.ensureDeferred(self.store.get_room(self.room.to_string()))),
+ (self.get_success(self.store.get_room(self.room.to_string()))),
)
- @defer.inlineCallbacks
def test_get_room_unknown_room(self):
- self.assertIsNone(
- (yield defer.ensureDeferred(self.store.get_room("!uknown:test")))
- )
+ self.assertIsNone((self.get_success(self.store.get_room("!uknown:test"))))
- @defer.inlineCallbacks
def test_get_room_with_stats(self):
self.assertDictContainsSubset(
{
@@ -71,29 +58,17 @@ class RoomStoreTestCase(unittest.TestCase):
"creator": self.u_creator.to_string(),
"public": True,
},
- (
- yield defer.ensureDeferred(
- self.store.get_room_with_stats(self.room.to_string())
- )
- ),
+ (self.get_success(self.store.get_room_with_stats(self.room.to_string()))),
)
- @defer.inlineCallbacks
def test_get_room_with_stats_unknown_room(self):
self.assertIsNone(
- (
- yield defer.ensureDeferred(
- self.store.get_room_with_stats("!uknown:test")
- )
- ),
+ (self.get_success(self.store.get_room_with_stats("!uknown:test"))),
)
-class RoomEventsStoreTestCase(unittest.TestCase):
- @defer.inlineCallbacks
- def setUp(self):
- hs = setup_test_homeserver(self.addCleanup)
-
+class RoomEventsStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
# Room events need the full datastore, for persist_event() and
# get_room_state()
self.store = hs.get_datastore()
@@ -102,7 +77,7 @@ class RoomEventsStoreTestCase(unittest.TestCase):
self.room = RoomID.from_string("!abcde:test")
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_room(
self.room.to_string(),
room_creator_user_id="@creator:text",
@@ -111,23 +86,21 @@ class RoomEventsStoreTestCase(unittest.TestCase):
)
)
- @defer.inlineCallbacks
def inject_room_event(self, **kwargs):
- yield defer.ensureDeferred(
+ self.get_success(
self.storage.persistence.persist_event(
self.event_factory.create_event(room_id=self.room.to_string(), **kwargs)
)
)
- @defer.inlineCallbacks
def STALE_test_room_name(self):
name = "A-Room-Name"
- yield self.inject_room_event(
+ self.inject_room_event(
etype=EventTypes.Name, name=name, content={"name": name}, depth=1
)
- state = yield defer.ensureDeferred(
+ state = self.get_success(
self.store.get_current_state(room_id=self.room.to_string())
)
@@ -137,15 +110,14 @@ class RoomEventsStoreTestCase(unittest.TestCase):
state[0],
)
- @defer.inlineCallbacks
def STALE_test_room_topic(self):
topic = "A place for things"
- yield self.inject_room_event(
+ self.inject_room_event(
etype=EventTypes.Topic, topic=topic, content={"topic": topic}, depth=1
)
- state = yield defer.ensureDeferred(
+ state = self.get_success(
self.store.get_current_state(room_id=self.room.to_string())
)
diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py
index d2aed66f..9fa968f6 100644
--- a/tests/storage/test_roommember.py
+++ b/tests/storage/test_roommember.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py
index 2471f126..86952645 100644
--- a/tests/storage/test_state.py
+++ b/tests/storage/test_state.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2018 New Vector Ltd
+# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,24 +14,18 @@
import logging
-from twisted.internet import defer
-
from synapse.api.constants import EventTypes, Membership
from synapse.api.room_versions import RoomVersions
from synapse.storage.state import StateFilter
from synapse.types import RoomID, UserID
-import tests.unittest
-import tests.utils
+from tests.unittest import HomeserverTestCase
logger = logging.getLogger(__name__)
-class StateStoreTestCase(tests.unittest.TestCase):
- @defer.inlineCallbacks
- def setUp(self):
- hs = yield tests.utils.setup_test_homeserver(self.addCleanup)
-
+class StateStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.storage = hs.get_storage()
self.state_datastore = self.storage.state.stores.state
@@ -44,7 +37,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
self.room = RoomID.from_string("!abc123:test")
- yield defer.ensureDeferred(
+ self.get_success(
self.store.store_room(
self.room.to_string(),
room_creator_user_id="@creator:text",
@@ -53,7 +46,6 @@ class StateStoreTestCase(tests.unittest.TestCase):
)
)
- @defer.inlineCallbacks
def inject_state_event(self, room, sender, typ, state_key, content):
builder = self.event_builder_factory.for_room_version(
RoomVersions.V1,
@@ -66,13 +58,11 @@ class StateStoreTestCase(tests.unittest.TestCase):
},
)
- event, context = yield defer.ensureDeferred(
+ event, context = self.get_success(
self.event_creation_handler.create_new_client_event(builder)
)
- yield defer.ensureDeferred(
- self.storage.persistence.persist_event(event, context)
- )
+ self.get_success(self.storage.persistence.persist_event(event, context))
return event
@@ -82,16 +72,13 @@ class StateStoreTestCase(tests.unittest.TestCase):
self.assertEqual(s1[t].event_id, s2[t].event_id)
self.assertEqual(len(s1), len(s2))
- @defer.inlineCallbacks
def test_get_state_groups_ids(self):
- e1 = yield self.inject_state_event(
- self.room, self.u_alice, EventTypes.Create, "", {}
- )
- e2 = yield self.inject_state_event(
+ e1 = self.inject_state_event(self.room, self.u_alice, EventTypes.Create, "", {})
+ e2 = self.inject_state_event(
self.room, self.u_alice, EventTypes.Name, "", {"name": "test room"}
)
- state_group_map = yield defer.ensureDeferred(
+ state_group_map = self.get_success(
self.storage.state.get_state_groups_ids(self.room, [e2.event_id])
)
self.assertEqual(len(state_group_map), 1)
@@ -101,16 +88,13 @@ class StateStoreTestCase(tests.unittest.TestCase):
{(EventTypes.Create, ""): e1.event_id, (EventTypes.Name, ""): e2.event_id},
)
- @defer.inlineCallbacks
def test_get_state_groups(self):
- e1 = yield self.inject_state_event(
- self.room, self.u_alice, EventTypes.Create, "", {}
- )
- e2 = yield self.inject_state_event(
+ e1 = self.inject_state_event(self.room, self.u_alice, EventTypes.Create, "", {})
+ e2 = self.inject_state_event(
self.room, self.u_alice, EventTypes.Name, "", {"name": "test room"}
)
- state_group_map = yield defer.ensureDeferred(
+ state_group_map = self.get_success(
self.storage.state.get_state_groups(self.room, [e2.event_id])
)
self.assertEqual(len(state_group_map), 1)
@@ -118,32 +102,29 @@ class StateStoreTestCase(tests.unittest.TestCase):
self.assertEqual({ev.event_id for ev in state_list}, {e1.event_id, e2.event_id})
- @defer.inlineCallbacks
def test_get_state_for_event(self):
# this defaults to a linear DAG as each new injection defaults to whatever
# forward extremities are currently in the DB for this room.
- e1 = yield self.inject_state_event(
- self.room, self.u_alice, EventTypes.Create, "", {}
- )
- e2 = yield self.inject_state_event(
+ e1 = self.inject_state_event(self.room, self.u_alice, EventTypes.Create, "", {})
+ e2 = self.inject_state_event(
self.room, self.u_alice, EventTypes.Name, "", {"name": "test room"}
)
- e3 = yield self.inject_state_event(
+ e3 = self.inject_state_event(
self.room,
self.u_alice,
EventTypes.Member,
self.u_alice.to_string(),
{"membership": Membership.JOIN},
)
- e4 = yield self.inject_state_event(
+ e4 = self.inject_state_event(
self.room,
self.u_bob,
EventTypes.Member,
self.u_bob.to_string(),
{"membership": Membership.JOIN},
)
- e5 = yield self.inject_state_event(
+ e5 = self.inject_state_event(
self.room,
self.u_bob,
EventTypes.Member,
@@ -152,9 +133,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
)
# check we get the full state as of the final event
- state = yield defer.ensureDeferred(
- self.storage.state.get_state_for_event(e5.event_id)
- )
+ state = self.get_success(self.storage.state.get_state_for_event(e5.event_id))
self.assertIsNotNone(e4)
@@ -170,7 +149,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
)
# check we can filter to the m.room.name event (with a '' state key)
- state = yield defer.ensureDeferred(
+ state = self.get_success(
self.storage.state.get_state_for_event(
e5.event_id, StateFilter.from_types([(EventTypes.Name, "")])
)
@@ -179,7 +158,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
self.assertStateMapEqual({(e2.type, e2.state_key): e2}, state)
# check we can filter to the m.room.name event (with a wildcard None state key)
- state = yield defer.ensureDeferred(
+ state = self.get_success(
self.storage.state.get_state_for_event(
e5.event_id, StateFilter.from_types([(EventTypes.Name, None)])
)
@@ -188,7 +167,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
self.assertStateMapEqual({(e2.type, e2.state_key): e2}, state)
# check we can grab the m.room.member events (with a wildcard None state key)
- state = yield defer.ensureDeferred(
+ state = self.get_success(
self.storage.state.get_state_for_event(
e5.event_id, StateFilter.from_types([(EventTypes.Member, None)])
)
@@ -200,7 +179,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
# check we can grab a specific room member without filtering out the
# other event types
- state = yield defer.ensureDeferred(
+ state = self.get_success(
self.storage.state.get_state_for_event(
e5.event_id,
state_filter=StateFilter(
@@ -220,7 +199,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
)
# check that we can grab everything except members
- state = yield defer.ensureDeferred(
+ state = self.get_success(
self.storage.state.get_state_for_event(
e5.event_id,
state_filter=StateFilter(
@@ -238,17 +217,14 @@ class StateStoreTestCase(tests.unittest.TestCase):
#######################################################
room_id = self.room.to_string()
- group_ids = yield defer.ensureDeferred(
+ group_ids = self.get_success(
self.storage.state.get_state_groups_ids(room_id, [e5.event_id])
)
group = list(group_ids.keys())[0]
# test _get_state_for_group_using_cache correctly filters out members
# with types=[]
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_cache,
group,
state_filter=StateFilter(
@@ -265,10 +241,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
state_dict,
)
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_members_cache,
group,
state_filter=StateFilter(
@@ -281,10 +254,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
# test _get_state_for_group_using_cache correctly filters in members
# with wildcard types
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_cache,
group,
state_filter=StateFilter(
@@ -301,10 +271,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
state_dict,
)
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_members_cache,
group,
state_filter=StateFilter(
@@ -324,10 +291,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
# test _get_state_for_group_using_cache correctly filters in members
# with specific types
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_cache,
group,
state_filter=StateFilter(
@@ -344,10 +308,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
state_dict,
)
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_members_cache,
group,
state_filter=StateFilter(
@@ -360,10 +321,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
# test _get_state_for_group_using_cache correctly filters in members
# with specific types
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_members_cache,
group,
state_filter=StateFilter(
@@ -413,10 +371,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
# test _get_state_for_group_using_cache correctly filters out members
# with types=[]
room_id = self.room.to_string()
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_cache,
group,
state_filter=StateFilter(
@@ -428,10 +383,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
self.assertDictEqual({(e1.type, e1.state_key): e1.event_id}, state_dict)
room_id = self.room.to_string()
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_members_cache,
group,
state_filter=StateFilter(
@@ -444,10 +396,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
# test _get_state_for_group_using_cache correctly filters in members
# wildcard types
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_cache,
group,
state_filter=StateFilter(
@@ -458,10 +407,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
self.assertEqual(is_all, False)
self.assertDictEqual({(e1.type, e1.state_key): e1.event_id}, state_dict)
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_members_cache,
group,
state_filter=StateFilter(
@@ -480,10 +426,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
# test _get_state_for_group_using_cache correctly filters in members
# with specific types
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_cache,
group,
state_filter=StateFilter(
@@ -494,10 +437,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
self.assertEqual(is_all, False)
self.assertDictEqual({(e1.type, e1.state_key): e1.event_id}, state_dict)
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_members_cache,
group,
state_filter=StateFilter(
@@ -510,10 +450,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
# test _get_state_for_group_using_cache correctly filters in members
# with specific types
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_cache,
group,
state_filter=StateFilter(
@@ -524,10 +461,7 @@ class StateStoreTestCase(tests.unittest.TestCase):
self.assertEqual(is_all, False)
self.assertDictEqual({}, state_dict)
- (
- state_dict,
- is_all,
- ) = yield self.state_datastore._get_state_for_group_using_cache(
+ (state_dict, is_all,) = self.state_datastore._get_state_for_group_using_cache(
self.state_datastore._state_group_members_cache,
group,
state_filter=StateFilter(
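
For readers following the assertions above: a `StateFilter` narrows which (event type, state key) pairs a state lookup returns. A rough sketch of the two construction styles these tests use, under the API as it appears in this diff:

    from synapse.api.constants import EventTypes
    from synapse.storage.state import StateFilter

    # shorthand: a list of (type, state_key) pairs; None is a wildcard state key
    name_only = StateFilter.from_types([(EventTypes.Name, "")])
    all_members = StateFilter.from_types([(EventTypes.Member, None)])

    # explicit form: an empty set of state keys for a type excludes that type,
    # while include_others=True admits every event type not in the map
    all_but_members = StateFilter(
        types={EventTypes.Member: set()},
        include_others=True,
    )
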
diff --git a/tests/storage/test_transactions.py b/tests/storage/test_transactions.py
index 8e817e2c..b7f7eae8 100644
--- a/tests/storage/test_transactions.py
+++ b/tests/storage/test_transactions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py
index a6f63f4a..222e5d12 100644
--- a/tests/storage/test_user_directory.py
+++ b/tests/storage/test_user_directory.py
@@ -1,5 +1,4 @@
-# -*- coding: utf-8 -*-
-# Copyright 2018 New Vector Ltd
+# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,10 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from twisted.internet import defer
-
-from tests import unittest
-from tests.utils import setup_test_homeserver
+from tests.unittest import HomeserverTestCase, override_config
ALICE = "@alice:a"
BOB = "@bob:b"
@@ -25,73 +21,52 @@ BOBBY = "@bobby:a"
BELA = "@somenickname:a"
-class UserDirectoryStoreTestCase(unittest.TestCase):
- @defer.inlineCallbacks
- def setUp(self):
- self.hs = yield setup_test_homeserver(self.addCleanup)
- self.store = self.hs.get_datastore()
+class UserDirectoryStoreTestCase(HomeserverTestCase):
+ def prepare(self, reactor, clock, hs):
+ self.store = hs.get_datastore()
# alice and bob are both in !room_id. bobby is not but shares
# a homeserver with alice.
- yield defer.ensureDeferred(
- self.store.update_profile_in_user_dir(ALICE, "alice", None)
- )
- yield defer.ensureDeferred(
- self.store.update_profile_in_user_dir(BOB, "bob", None)
- )
- yield defer.ensureDeferred(
- self.store.update_profile_in_user_dir(BOBBY, "bobby", None)
- )
- yield defer.ensureDeferred(
- self.store.update_profile_in_user_dir(BELA, "Bela", None)
- )
- yield defer.ensureDeferred(
- self.store.add_users_in_public_rooms("!room:id", (ALICE, BOB))
- )
+ self.get_success(self.store.update_profile_in_user_dir(ALICE, "alice", None))
+ self.get_success(self.store.update_profile_in_user_dir(BOB, "bob", None))
+ self.get_success(self.store.update_profile_in_user_dir(BOBBY, "bobby", None))
+ self.get_success(self.store.update_profile_in_user_dir(BELA, "Bela", None))
+ self.get_success(self.store.add_users_in_public_rooms("!room:id", (ALICE, BOB)))
- @defer.inlineCallbacks
def test_search_user_dir(self):
# normally when alice searches the directory she should just find
# bob because bobby doesn't share a room with her.
- r = yield defer.ensureDeferred(self.store.search_user_dir(ALICE, "bob", 10))
+ r = self.get_success(self.store.search_user_dir(ALICE, "bob", 10))
self.assertFalse(r["limited"])
self.assertEqual(1, len(r["results"]))
self.assertDictEqual(
r["results"][0], {"user_id": BOB, "display_name": "bob", "avatar_url": None}
)
- @defer.inlineCallbacks
+ @override_config({"user_directory": {"search_all_users": True}})
def test_search_user_dir_all_users(self):
- self.hs.config.user_directory_search_all_users = True
- try:
- r = yield defer.ensureDeferred(self.store.search_user_dir(ALICE, "bob", 10))
- self.assertFalse(r["limited"])
- self.assertEqual(2, len(r["results"]))
- self.assertDictEqual(
- r["results"][0],
- {"user_id": BOB, "display_name": "bob", "avatar_url": None},
- )
- self.assertDictEqual(
- r["results"][1],
- {"user_id": BOBBY, "display_name": "bobby", "avatar_url": None},
- )
- finally:
- self.hs.config.user_directory_search_all_users = False
+ r = self.get_success(self.store.search_user_dir(ALICE, "bob", 10))
+ self.assertFalse(r["limited"])
+ self.assertEqual(2, len(r["results"]))
+ self.assertDictEqual(
+ r["results"][0],
+ {"user_id": BOB, "display_name": "bob", "avatar_url": None},
+ )
+ self.assertDictEqual(
+ r["results"][1],
+ {"user_id": BOBBY, "display_name": "bobby", "avatar_url": None},
+ )
- @defer.inlineCallbacks
+ @override_config({"user_directory": {"search_all_users": True}})
def test_search_user_dir_stop_words(self):
"""Tests that a user can look up another user by searching for the start if its
display name even if that name happens to be a common English word that would
usually be ignored in full text searches.
"""
- self.hs.config.user_directory_search_all_users = True
- try:
- r = yield defer.ensureDeferred(self.store.search_user_dir(ALICE, "be", 10))
- self.assertFalse(r["limited"])
- self.assertEqual(1, len(r["results"]))
- self.assertDictEqual(
- r["results"][0],
- {"user_id": BELA, "display_name": "Bela", "avatar_url": None},
- )
- finally:
- self.hs.config.user_directory_search_all_users = False
+ r = self.get_success(self.store.search_user_dir(ALICE, "be", 10))
+ self.assertFalse(r["limited"])
+ self.assertEqual(1, len(r["results"]))
+ self.assertDictEqual(
+ r["results"][0],
+ {"user_id": BELA, "display_name": "Bela", "avatar_url": None},
+ )
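
The `@override_config` decorator used above replaces the old try/finally dance of mutating `self.hs.config` in place: the dict it carries is merged into the homeserver config before the server is built, and it scopes to the single decorated test. A sketch, assuming the usual `prepare()` wiring:

    from tests.unittest import HomeserverTestCase, override_config


    class UserDirectorySearchTestCase(HomeserverTestCase):
        def prepare(self, reactor, clock, hs):
            self.store = hs.get_datastore()

        @override_config({"user_directory": {"search_all_users": True}})
        def test_search_all_users(self):
            # the override applies only to this test method
            r = self.get_success(self.store.search_user_dir("@alice:a", "bob", 10))
            self.assertFalse(r["limited"])
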
diff --git a/tests/test_distributor.py b/tests/test_distributor.py
index b57f36e6..f8341041 100644
--- a/tests/test_distributor.py
+++ b/tests/test_distributor.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
@@ -14,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock, patch
+from unittest.mock import Mock, patch
from synapse.util.distributor import Distributor
diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py
index 3f2691ee..88888319 100644
--- a/tests/test_event_auth.py
+++ b/tests/test_event_auth.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -207,6 +206,226 @@ class EventAuthTestCase(unittest.TestCase):
do_sig_check=False,
)
+ def test_join_rules_public(self):
+ """
+ Test joining a public room.
+ """
+ creator = "@creator:example.com"
+ pleb = "@joiner:example.com"
+
+ auth_events = {
+ ("m.room.create", ""): _create_event(creator),
+ ("m.room.member", creator): _join_event(creator),
+ ("m.room.join_rules", ""): _join_rules_event(creator, "public"),
+ }
+
+ # Check join.
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user cannot be force-joined to a room.
+ with self.assertRaises(AuthError):
+ event_auth.check(
+ RoomVersions.V6,
+ _member_event(pleb, "join", sender=creator),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # Banned should be rejected.
+ auth_events[("m.room.member", pleb)] = _member_event(pleb, "ban")
+ with self.assertRaises(AuthError):
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user who left can re-join.
+ auth_events[("m.room.member", pleb)] = _member_event(pleb, "leave")
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user can send a join if they're in the room.
+ auth_events[("m.room.member", pleb)] = _member_event(pleb, "join")
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user can accept an invite.
+ auth_events[("m.room.member", pleb)] = _member_event(
+ pleb, "invite", sender=creator
+ )
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ def test_join_rules_invite(self):
+ """
+ Test joining an invite only room.
+ """
+ creator = "@creator:example.com"
+ pleb = "@joiner:example.com"
+
+ auth_events = {
+ ("m.room.create", ""): _create_event(creator),
+ ("m.room.member", creator): _join_event(creator),
+ ("m.room.join_rules", ""): _join_rules_event(creator, "invite"),
+ }
+
+ # A join without an invite is rejected.
+ with self.assertRaises(AuthError):
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user cannot be force-joined to a room.
+ with self.assertRaises(AuthError):
+ event_auth.check(
+ RoomVersions.V6,
+ _member_event(pleb, "join", sender=creator),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # Banned should be rejected.
+ auth_events[("m.room.member", pleb)] = _member_event(pleb, "ban")
+ with self.assertRaises(AuthError):
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user who left cannot re-join.
+ auth_events[("m.room.member", pleb)] = _member_event(pleb, "leave")
+ with self.assertRaises(AuthError):
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user can send a join if they're in the room.
+ auth_events[("m.room.member", pleb)] = _member_event(pleb, "join")
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user can accept an invite.
+ auth_events[("m.room.member", pleb)] = _member_event(
+ pleb, "invite", sender=creator
+ )
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ def test_join_rules_msc3083_restricted(self):
+ """
+ Test joining a restricted room from MSC3083.
+
+ This is pretty much the same test as public.
+ """
+ creator = "@creator:example.com"
+ pleb = "@joiner:example.com"
+
+ auth_events = {
+ ("m.room.create", ""): _create_event(creator),
+ ("m.room.member", creator): _join_event(creator),
+ ("m.room.join_rules", ""): _join_rules_event(creator, "restricted"),
+ }
+
+ # Older room versions don't understand this join rule
+ with self.assertRaises(AuthError):
+ event_auth.check(
+ RoomVersions.V6,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # Check join.
+ event_auth.check(
+ RoomVersions.MSC3083,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user cannot be force-joined to a room.
+ with self.assertRaises(AuthError):
+ event_auth.check(
+ RoomVersions.MSC3083,
+ _member_event(pleb, "join", sender=creator),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # Banned should be rejected.
+ auth_events[("m.room.member", pleb)] = _member_event(pleb, "ban")
+ with self.assertRaises(AuthError):
+ event_auth.check(
+ RoomVersions.MSC3083,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user who left can re-join.
+ auth_events[("m.room.member", pleb)] = _member_event(pleb, "leave")
+ event_auth.check(
+ RoomVersions.MSC3083,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user can send a join if they're in the room.
+ auth_events[("m.room.member", pleb)] = _member_event(pleb, "join")
+ event_auth.check(
+ RoomVersions.MSC3083,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
+ # A user can accept an invite.
+ auth_events[("m.room.member", pleb)] = _member_event(
+ pleb, "invite", sender=creator
+ )
+ event_auth.check(
+ RoomVersions.MSC3083,
+ _join_event(pleb),
+ auth_events,
+ do_sig_check=False,
+ )
+
# helpers for making events
@@ -225,19 +444,24 @@ def _create_event(user_id):
)
-def _join_event(user_id):
+def _member_event(user_id, membership, sender=None):
return make_event_from_dict(
{
"room_id": TEST_ROOM_ID,
"event_id": _get_event_id(),
"type": "m.room.member",
- "sender": user_id,
+ "sender": sender or user_id,
"state_key": user_id,
- "content": {"membership": "join"},
+ "content": {"membership": membership},
+ "prev_events": [],
}
)
+def _join_event(user_id):
+ return _member_event(user_id, "join")
+
+
def _power_levels_event(sender, content):
return make_event_from_dict(
{
@@ -277,6 +501,21 @@ def _random_state_event(sender):
)
+def _join_rules_event(sender, join_rule):
+ return make_event_from_dict(
+ {
+ "room_id": TEST_ROOM_ID,
+ "event_id": _get_event_id(),
+ "type": "m.room.join_rules",
+ "sender": sender,
+ "state_key": "",
+ "content": {
+ "join_rule": join_rule,
+ },
+ }
+ )
+
+
event_count = 0
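
Each of the three new join-rules tests above walks the same skeleton: seed an `auth_events` map with helper-built create/member/join_rules events, then check that `event_auth.check` accepts or rejects a candidate join for the given room version. Condensed to its core (this sketch leans on the `_create_event`, `_join_event`, `_member_event` and `_join_rules_event` helpers from this same file):

    from synapse import event_auth
    from synapse.api.errors import AuthError
    from synapse.api.room_versions import RoomVersions

    creator, joiner = "@creator:example.com", "@joiner:example.com"
    auth_events = {
        ("m.room.create", ""): _create_event(creator),
        ("m.room.member", creator): _join_event(creator),
        ("m.room.join_rules", ""): _join_rules_event(creator, "invite"),
    }

    # an uninvited join into an invite-only room must fail...
    try:
        event_auth.check(
            RoomVersions.V6, _join_event(joiner), auth_events, do_sig_check=False
        )
        raise AssertionError("expected AuthError")
    except AuthError:
        pass

    # ...but once invited, the same join passes
    auth_events[("m.room.member", joiner)] = _member_event(
        joiner, "invite", sender=creator
    )
    event_auth.check(
        RoomVersions.V6, _join_event(joiner), auth_events, do_sig_check=False
    )
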
diff --git a/tests/test_federation.py b/tests/test_federation.py
index fc9aab32..0ed8326f 100644
--- a/tests/test_federation.py
+++ b/tests/test_federation.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from twisted.internet.defer import succeed
@@ -76,8 +75,10 @@ class MessageAcceptTests(unittest.HomeserverTestCase):
)
self.handler = self.homeserver.get_federation_handler()
- self.handler.do_auth = lambda origin, event, context, auth_events: succeed(
- context
+ self.handler._check_event_auth = (
+ lambda origin, event, context, state, auth_events, backfilled: succeed(
+ context
+ )
)
self.client = self.homeserver.get_federation_client()
self.client._check_sigs_and_hash_and_fetch = lambda dest, pdus, **k: succeed(
@@ -134,7 +135,7 @@ class MessageAcceptTests(unittest.HomeserverTestCase):
}
)
- with LoggingContext():
+ with LoggingContext("test-context"):
failure = self.get_failure(
self.handler.on_receive_pdu(
"test.serv", lying_event, sent_to_us_directly=True
diff --git a/tests/test_mau.py b/tests/test_mau.py
index 75d28a42..fa6ef92b 100644
--- a/tests/test_mau.py
+++ b/tests/test_mau.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,9 +14,7 @@
"""Tests REST events for /rooms paths."""
-import json
-
-from synapse.api.constants import LoginType
+from synapse.api.constants import APP_SERVICE_REGISTRATION_TYPE, LoginType
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.appservice import ApplicationService
from synapse.rest.client.v2_alpha import register, sync
@@ -113,7 +110,7 @@ class TestMauLimit(unittest.HomeserverTestCase):
)
)
- self.create_user("as_kermit4", token=as_token)
+ self.create_user("as_kermit4", token=as_token, appservice=True)
def test_allowed_after_a_month_mau(self):
# Create and sync so that the MAU counts get updated
@@ -232,14 +229,15 @@ class TestMauLimit(unittest.HomeserverTestCase):
self.reactor.advance(100)
self.assertEqual(2, self.successResultOf(count))
- def create_user(self, localpart, token=None):
- request_data = json.dumps(
- {
- "username": localpart,
- "password": "monkey",
- "auth": {"type": LoginType.DUMMY},
- }
- )
+ def create_user(self, localpart, token=None, appservice=False):
+ request_data = {
+ "username": localpart,
+ "password": "monkey",
+ "auth": {"type": LoginType.DUMMY},
+ }
+
+ if appservice:
+ request_data["type"] = APP_SERVICE_REGISTRATION_TYPE
channel = self.make_request(
"POST",
diff --git a/tests/test_metrics.py b/tests/test_metrics.py
index f696fcf8..b4574b2f 100644
--- a/tests/test_metrics.py
+++ b/tests/test_metrics.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
# Copyright 2019 Matrix.org Foundation C.I.C.
#
diff --git a/tests/test_phone_home.py b/tests/test_phone_home.py
index e7aed092..09707a74 100644
--- a/tests/test_phone_home.py
+++ b/tests/test_phone_home.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,8 +13,7 @@
# limitations under the License.
import resource
-
-import mock
+from unittest import mock
from synapse.app.phone_stats_home import phone_stats_home
diff --git a/tests/test_preview.py b/tests/test_preview.py
index ea832999..cac3d81a 100644
--- a/tests/test_preview.py
+++ b/tests/test_preview.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_server.py b/tests/test_server.py
index 55cde7f6..407e172e 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -202,6 +202,8 @@ class OptionsResourceTests(unittest.TestCase):
parse_listener_def({"type": "http", "port": 0}),
self.resource,
"1.0",
+ max_request_body_size=1234,
+ reactor=self.reactor,
)
# render the request and return the channel
diff --git a/tests/test_state.py b/tests/test_state.py
index 6227a3ba..62f70958 100644
--- a/tests/test_state.py
+++ b/tests/test_state.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,8 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-from mock import Mock
+from typing import List, Optional
+from unittest.mock import Mock
from twisted.internet import defer
@@ -37,8 +36,8 @@ def create_event(
state_key=None,
depth=2,
event_id=None,
- prev_events=[],
- **kwargs
+ prev_events: Optional[List[str]] = None,
+ **kwargs,
):
global _next_event_id
@@ -58,7 +57,7 @@ def create_event(
"sender": "@user_id:example.com",
"room_id": "!room_id:example.com",
"depth": depth,
- "prev_events": prev_events,
+ "prev_events": prev_events or [],
}
if state_key is not None:
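
The `prev_events=[]` fix here is the same mutable-default repair applied in `test_visibility.py` and `test_ratelimitutils.py` below: Python evaluates a default argument once, at function definition, so a default list is shared and any mutation leaks into later calls. A self-contained illustration of the pitfall and the `Optional` idiom this diff standardises on:

    from typing import List, Optional


    def buggy(prev_events: List[str] = []) -> List[str]:
        prev_events.append("$event")
        return prev_events


    def fixed(prev_events: Optional[List[str]] = None) -> List[str]:
        prev_events = prev_events or []  # fresh list on every call
        prev_events.append("$event")
        return prev_events


    assert buggy() == ["$event"]
    assert buggy() == ["$event", "$event"]  # the default list leaked state
    assert fixed() == ["$event"]
    assert fixed() == ["$event"]            # no leakage
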
diff --git a/tests/test_terms_auth.py b/tests/test_terms_auth.py
index a743cdc3..0df480db 100644
--- a/tests/test_terms_auth.py
+++ b/tests/test_terms_auth.py
@@ -13,8 +13,7 @@
# limitations under the License.
import json
-
-from mock import Mock
+from unittest.mock import Mock
from twisted.test.proto_helpers import MemoryReactorClock
diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py
index b921ac52..f2ef1c60 100644
--- a/tests/test_test_utils.py
+++ b/tests/test_test_utils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_types.py b/tests/test_types.py
index acdeea7a..d7881021 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py
index 43898d81..be6302d1 100644
--- a/tests/test_utils/__init__.py
+++ b/tests/test_utils/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C
#
@@ -21,8 +20,7 @@ import sys
import warnings
from asyncio import Future
from typing import Any, Awaitable, Callable, TypeVar
-
-from mock import Mock
+from unittest.mock import Mock
import attr
diff --git a/tests/test_utils/event_injection.py b/tests/test_utils/event_injection.py
index c3c4a93e..e9ec9e08 100644
--- a/tests/test_utils/event_injection.py
+++ b/tests/test_utils/event_injection.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C
#
@@ -33,7 +32,7 @@ async def inject_member_event(
membership: str,
target: Optional[str] = None,
extra_content: Optional[dict] = None,
- **kwargs
+ **kwargs,
) -> EventBase:
"""Inject a membership event into a room."""
if target is None:
@@ -58,7 +57,7 @@ async def inject_event(
hs: synapse.server.HomeServer,
room_version: Optional[str] = None,
prev_event_ids: Optional[List[str]] = None,
- **kwargs
+ **kwargs,
) -> EventBase:
"""Inject a generic event into a room
@@ -83,7 +82,7 @@ async def create_event(
hs: synapse.server.HomeServer,
room_version: Optional[str] = None,
prev_event_ids: Optional[List[str]] = None,
- **kwargs
+ **kwargs,
) -> Tuple[EventBase, EventContext]:
if room_version is None:
room_version = await hs.get_datastore().get_room_version_id(kwargs["room_id"])
diff --git a/tests/test_utils/html_parsers.py b/tests/test_utils/html_parsers.py
index ad563eb3..1fbb38f4 100644
--- a/tests/test_utils/html_parsers.py
+++ b/tests/test_utils/html_parsers.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py
index 74568b34..51a197a8 100644
--- a/tests/test_utils/logging_setup.py
+++ b/tests/test_utils/logging_setup.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/test_visibility.py b/tests/test_visibility.py
index 510b6301..94b19788 100644
--- a/tests/test_visibility.py
+++ b/tests/test_visibility.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-
-from mock import Mock
+from typing import Optional
+from unittest.mock import Mock
from twisted.internet import defer
from twisted.internet.defer import succeed
@@ -147,9 +146,11 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase):
return event
@defer.inlineCallbacks
- def inject_room_member(self, user_id, membership="join", extra_content={}):
+ def inject_room_member(
+ self, user_id, membership="join", extra_content: Optional[dict] = None
+ ):
content = {"membership": membership}
- content.update(extra_content)
+ content.update(extra_content or {})
builder = self.event_builder_factory.for_room_version(
RoomVersions.V1,
{
diff --git a/tests/unittest.py b/tests/unittest.py
index 58a4daa1..74db7c08 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector
# Copyright 2019 Matrix.org Federation C.I.C
@@ -19,10 +18,10 @@ import hashlib
import hmac
import inspect
import logging
+import secrets
import time
from typing import Callable, Dict, Iterable, Optional, Tuple, Type, TypeVar, Union
-
-from mock import Mock, patch
+from unittest.mock import Mock, patch
from canonicaljson import json
@@ -135,7 +134,7 @@ class TestCase(unittest.TestCase):
def assertObjectHasAttributes(self, attrs, obj):
"""Asserts that the given object has each of the attributes given, and
that the value of each matches according to assertEquals."""
- for (key, value) in attrs.items():
+ for key in attrs.keys():
if not hasattr(obj, key):
raise AssertionError("Expected obj to have a '.%s'" % key)
try:
@@ -249,6 +248,8 @@ class HomeserverTestCase(TestCase):
config=self.hs.config.server.listeners[0],
resource=self.resource,
server_version_string="1",
+ max_request_body_size=1234,
+ reactor=self.reactor,
)
from tests.rest.client.v1.utils import RestHelper
@@ -471,7 +472,7 @@ class HomeserverTestCase(TestCase):
kwargs["config"] = config_obj
async def run_bg_updates():
- with LoggingContext("run_bg_updates", request="run_bg_updates-1"):
+ with LoggingContext("run_bg_updates"):
while not await stor.db_pool.updates.has_completed_background_updates():
await stor.db_pool.updates.do_next_background_update(1)
@@ -626,7 +627,6 @@ class HomeserverTestCase(TestCase):
str: The new event's ID.
"""
event_creator = self.hs.get_event_creation_handler()
- secrets = self.hs.get_secrets()
requester = create_requester(user)
event, context = self.get_success(
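
The deleted line explains the `import secrets` added near the top of this file: the helper previously fetched a homeserver-level `Secrets` wrapper, and now calls the stdlib module of the same name directly. A sketch of the kind of call involved (the token lengths are illustrative):

    import secrets

    # stdlib replacement for the old hs.get_secrets() wrapper
    token = secrets.token_hex()   # 64 hex chars from 32 random bytes
    short = secrets.token_hex(8)  # 16 hex chars
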
diff --git a/tests/util/__init__.py b/tests/util/__init__.py
index bfebb0f6..5e83dba2 100644
--- a/tests/util/__init__.py
+++ b/tests/util/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/caches/__init__.py b/tests/util/caches/__init__.py
index 451dae3b..830e2dfe 100644
--- a/tests/util/caches/__init__.py
+++ b/tests/util/caches/__init__.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/caches/test_cached_call.py b/tests/util/caches/test_cached_call.py
index f349b5ce..80b97167 100644
--- a/tests/util/caches/test_cached_call.py
+++ b/tests/util/caches/test_cached_call.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/caches/test_deferred_cache.py b/tests/util/caches/test_deferred_cache.py
index c24c33ee..54a88a83 100644
--- a/tests/util/caches/test_deferred_cache.py
+++ b/tests/util/caches/test_deferred_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py
index afb11b9c..178ac8a6 100644
--- a/tests/util/caches/test_descriptors.py
+++ b/tests/util/caches/test_descriptors.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
@@ -15,8 +14,7 @@
# limitations under the License.
import logging
from typing import Set
-
-import mock
+from unittest import mock
from twisted.internet import defer, reactor
@@ -232,8 +230,7 @@ class DescriptorTestCase(unittest.TestCase):
@defer.inlineCallbacks
def do_lookup():
- with LoggingContext() as c1:
- c1.name = "c1"
+ with LoggingContext("c1") as c1:
r = yield obj.fn(1)
self.assertEqual(current_context(), c1)
return r
@@ -275,8 +272,7 @@ class DescriptorTestCase(unittest.TestCase):
@defer.inlineCallbacks
def do_lookup():
- with LoggingContext() as c1:
- c1.name = "c1"
+ with LoggingContext("c1") as c1:
try:
d = obj.fn(1)
self.assertEqual(
@@ -661,14 +657,13 @@ class CachedListDescriptorTestCase(unittest.TestCase):
@descriptors.cachedList("fn", "args1")
async def list_fn(self, args1, arg2):
- assert current_context().request == "c1"
+ assert current_context().name == "c1"
# we want this to behave like an asynchronous function
await run_on_reactor()
- assert current_context().request == "c1"
+ assert current_context().name == "c1"
return self.mock(args1, arg2)
- with LoggingContext() as c1:
- c1.request = "c1"
+ with LoggingContext("c1") as c1:
obj = Cls()
obj.mock.return_value = {10: "fish", 20: "chips"}
d1 = obj.list_fn([10, 20], 2)
diff --git a/tests/util/caches/test_ttlcache.py b/tests/util/caches/test_ttlcache.py
index 816795c1..fe831405 100644
--- a/tests/util/caches/test_ttlcache.py
+++ b/tests/util/caches/test_ttlcache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from synapse.util.caches.ttlcache import TTLCache
diff --git a/tests/util/test_async_utils.py b/tests/util/test_async_utils.py
index 17fd86d0..069f8759 100644
--- a/tests/util/test_async_utils.py
+++ b/tests/util/test_async_utils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py
index 2f41333f..bee66dee 100644
--- a/tests/util/test_dict_cache.py
+++ b/tests/util/test_dict_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py
index 49ffeebd..e6e13ba0 100644
--- a/tests/util/test_expiring_cache.py
+++ b/tests/util/test_expiring_cache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2017 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_file_consumer.py b/tests/util/test_file_consumer.py
index 20122631..3bb46954 100644
--- a/tests/util/test_file_consumer.py
+++ b/tests/util/test_file_consumer.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,8 +15,7 @@
import threading
from io import StringIO
-
-from mock import NonCallableMock
+from unittest.mock import NonCallableMock
from twisted.internet import defer, reactor
diff --git a/tests/util/test_glob_to_regex.py b/tests/util/test_glob_to_regex.py
new file mode 100644
index 00000000..220accb9
--- /dev/null
+++ b/tests/util/test_glob_to_regex.py
@@ -0,0 +1,59 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from synapse.util import glob_to_regex
+
+from tests.unittest import TestCase
+
+
+class GlobToRegexTestCase(TestCase):
+ def test_literal_match(self):
+ """patterns without wildcards should match"""
+ pat = glob_to_regex("foobaz")
+ self.assertTrue(
+ pat.match("FoobaZ"), "patterns should match and be case-insensitive"
+ )
+ self.assertFalse(
+ pat.match("x foobaz"), "pattern should not match at word boundaries"
+ )
+
+ def test_wildcard_match(self):
+ pat = glob_to_regex("f?o*baz")
+
+ self.assertTrue(
+ pat.match("FoobarbaZ"),
+ "* should match string and pattern should be case-insensitive",
+ )
+ self.assertTrue(pat.match("foobaz"), "* should match 0 characters")
+ self.assertFalse(pat.match("fooxaz"), "the character after * must match")
+ self.assertFalse(pat.match("fobbaz"), "? should not match 0 characters")
+ self.assertFalse(pat.match("fiiobaz"), "? should not match 2 characters")
+
+ def test_multi_wildcard(self):
+ """patterns with multiple wildcards in a row should match"""
+ pat = glob_to_regex("**baz")
+ self.assertTrue(pat.match("agsgsbaz"), "** should match any string")
+ self.assertTrue(pat.match("baz"), "** should match the empty string")
+ self.assertEqual(pat.pattern, r"\A.{0,}baz\Z")
+
+ pat = glob_to_regex("*?baz")
+ self.assertTrue(pat.match("agsgsbaz"), "*? should match any string")
+ self.assertTrue(pat.match("abaz"), "*? should match a single char")
+ self.assertFalse(pat.match("baz"), "*? should not match the empty string")
+ self.assertEqual(pat.pattern, r"\A.{1,}baz\Z")
+
+ pat = glob_to_regex("a?*?*?baz")
+ self.assertTrue(pat.match("a g baz"), "?*?*? should match 3 chars")
+ self.assertFalse(pat.match("a..baz"), "?*?*? should not match 2 chars")
+ self.assertTrue(pat.match("a.gg.baz"), "?*?*? should match 4 chars")
+ self.assertEqual(pat.pattern, r"\Aa.{3,}baz\Z")
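
Taken together, these new assertions pin down `glob_to_regex`'s contract: `?` is exactly one character, `*` is zero or more, consecutive wildcards collapse into a counted `.{n,}` form, and the compiled pattern is anchored (`\A`...`\Z`) and case-insensitive. A quick usage sketch under that contract:

    from synapse.util import glob_to_regex

    pat = glob_to_regex("*.example.com")
    assert pat.match("Media.Example.Com")        # case-insensitive
    assert pat.match("sub.domain.example.com")   # '*' spans dots too
    assert not pat.match("example.com.evil")     # anchored at both ends
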
diff --git a/tests/util/test_itertools.py b/tests/util/test_itertools.py
index e931a7ec..1bd0b45d 100644
--- a/tests/util/test_itertools.py
+++ b/tests/util/test_itertools.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py
index 0e528119..c4a3917b 100644
--- a/tests/util/test_linearizer.py
+++ b/tests/util/test_linearizer.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py
index 58ee918f..5d9c4665 100644
--- a/tests/util/test_logcontext.py
+++ b/tests/util/test_logcontext.py
@@ -17,11 +17,10 @@ from .. import unittest
class LoggingContextTestCase(unittest.TestCase):
def _check_test_key(self, value):
- self.assertEquals(current_context().request, value)
+ self.assertEquals(current_context().name, value)
def test_with_context(self):
- with LoggingContext() as context_one:
- context_one.request = "test"
+ with LoggingContext("test"):
self._check_test_key("test")
@defer.inlineCallbacks
@@ -30,15 +29,13 @@ class LoggingContextTestCase(unittest.TestCase):
@defer.inlineCallbacks
def competing_callback():
- with LoggingContext() as competing_context:
- competing_context.request = "competing"
+ with LoggingContext("competing"):
yield clock.sleep(0)
self._check_test_key("competing")
reactor.callLater(0, competing_callback)
- with LoggingContext() as context_one:
- context_one.request = "one"
+ with LoggingContext("one"):
yield clock.sleep(0)
self._check_test_key("one")
@@ -47,9 +44,7 @@ class LoggingContextTestCase(unittest.TestCase):
callback_completed = [False]
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
# fire off function, but don't wait on it.
d2 = run_in_background(function)
@@ -133,9 +128,7 @@ class LoggingContextTestCase(unittest.TestCase):
sentinel_context = current_context()
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
d1 = make_deferred_yieldable(blocking_function())
# make sure that the context was reset by make_deferred_yieldable
self.assertIs(current_context(), sentinel_context)
@@ -149,9 +142,7 @@ class LoggingContextTestCase(unittest.TestCase):
def test_make_deferred_yieldable_with_chained_deferreds(self):
sentinel_context = current_context()
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
d1 = make_deferred_yieldable(_chained_deferred_function())
# make sure that the context was reset by make_deferred_yieldable
self.assertIs(current_context(), sentinel_context)
@@ -166,9 +157,7 @@ class LoggingContextTestCase(unittest.TestCase):
"""Check that make_deferred_yieldable does the right thing when its
argument isn't actually a deferred"""
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
d1 = make_deferred_yieldable("bum")
self._check_test_key("one")
@@ -177,9 +166,9 @@ class LoggingContextTestCase(unittest.TestCase):
self._check_test_key("one")
def test_nested_logging_context(self):
- with LoggingContext(request="foo"):
+ with LoggingContext("foo"):
nested_context = nested_logging_context(suffix="bar")
- self.assertEqual(nested_context.request, "foo-bar")
+ self.assertEqual(nested_context.name, "foo-bar")
@defer.inlineCallbacks
def test_make_deferred_yieldable_with_await(self):
@@ -193,9 +182,7 @@ class LoggingContextTestCase(unittest.TestCase):
sentinel_context = current_context()
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
d1 = make_deferred_yieldable(blocking_function())
# make sure that the context was reset by make_deferred_yieldable
self.assertIs(current_context(), sentinel_context)
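
All of the edits in this file track one API change, also visible in `tests/test_federation.py` and `test_descriptors.py` above: `LoggingContext` now takes its name as a positional constructor argument, and reads of the old free-form `.request` attribute become `.name`. In short:

    from synapse.logging.context import LoggingContext, current_context

    # previously:
    #     with LoggingContext() as ctx:
    #         ctx.request = "one"
    with LoggingContext("one"):
        assert current_context().name == "one"
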
diff --git a/tests/util/test_logformatter.py b/tests/util/test_logformatter.py
index 0fb60caa..a2e08281 100644
--- a/tests/util/test_logformatter.py
+++ b/tests/util/test_logformatter.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py
index a739a6aa..df3e2777 100644
--- a/tests/util/test_lrucache.py
+++ b/tests/util/test_lrucache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,7 @@
# limitations under the License.
-from mock import Mock
+from unittest.mock import Mock
from synapse.util.caches.lrucache import LruCache
from synapse.util.caches.treecache import TreeCache
diff --git a/tests/util/test_ratelimitutils.py b/tests/util/test_ratelimitutils.py
index 4d1aee91..34aaffe8 100644
--- a/tests/util/test_ratelimitutils.py
+++ b/tests/util/test_ratelimitutils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,6 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import Optional
+
from synapse.config.homeserver import HomeServerConfig
from synapse.util.ratelimitutils import FederationRateLimiter
@@ -89,9 +90,9 @@ def _await_resolution(reactor, d):
return (reactor.seconds() - start_time) * 1000
-def build_rc_config(settings={}):
+def build_rc_config(settings: Optional[dict] = None):
config_dict = default_config("test")
- config_dict.update(settings)
+ config_dict.update(settings or {})
config = HomeServerConfig()
config.parse_config_dict(config_dict, "", "")
return config.rc_federation
diff --git a/tests/util/test_retryutils.py b/tests/util/test_retryutils.py
index 5f46ed0c..9b2be83a 100644
--- a/tests/util/test_retryutils.py
+++ b/tests/util/test_retryutils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py
index d3dea3b5..a10071c7 100644
--- a/tests/util/test_rwlock.py
+++ b/tests/util/test_rwlock.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_stringutils.py b/tests/util/test_stringutils.py
index 8491f7cc..f7fecd9c 100644
--- a/tests/util/test_stringutils.py
+++ b/tests/util/test_stringutils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_threepids.py b/tests/util/test_threepids.py
index 5513724d..d957b953 100644
--- a/tests/util/test_threepids.py
+++ b/tests/util/test_threepids.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2020 Dirk Klimpel
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_treecache.py b/tests/util/test_treecache.py
index a5f22612..3b077af2 100644
--- a/tests/util/test_treecache.py
+++ b/tests/util/test_treecache.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/util/test_wheel_timer.py b/tests/util/test_wheel_timer.py
index 03201a4d..0d5039de 100644
--- a/tests/util/test_wheel_timer.py
+++ b/tests/util/test_wheel_timer.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/utils.py b/tests/utils.py
index be80b137..6bd008dc 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
#
@@ -21,10 +20,9 @@ import time
import uuid
import warnings
from typing import Type
+from unittest.mock import Mock, patch
from urllib import parse as urlparse
-from mock import Mock, patch
-
from twisted.internet import defer
from synapse.api.constants import EventTypes
@@ -122,7 +120,6 @@ def default_config(name, parse=False):
"enable_registration_captcha": False,
"macaroon_secret_key": "not even a little secret",
"trusted_third_party_id_servers": [],
- "room_invite_state_types": [],
"password_providers": [],
"worker_replication_url": "",
"worker_app": None,
@@ -156,6 +153,10 @@ def default_config(name, parse=False):
"local": {"per_second": 10000, "burst_count": 10000},
"remote": {"per_second": 10000, "burst_count": 10000},
},
+ "rc_invites": {
+ "per_room": {"per_second": 10000, "burst_count": 10000},
+ "per_user": {"per_second": 10000, "burst_count": 10000},
+ },
"rc_3pid_validation": {"per_second": 10000, "burst_count": 10000},
"saml2_enabled": False,
"public_baseurl": None,
@@ -192,7 +193,7 @@ def setup_test_homeserver(
config=None,
reactor=None,
homeserver_to_use: Type[HomeServer] = TestHomeServer,
- **kwargs
+ **kwargs,
):
"""
Setup a homeserver suitable for running tests against. Keyword arguments
@@ -306,7 +307,7 @@ def setup_test_homeserver(
# database for a few more seconds due to flakiness, preventing
# us from dropping it when the test is over. If we can't drop
# it, warn and move on.
- for x in range(5):
+ for _ in range(5):
try:
cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
db_conn.commit()
diff --git a/tox.ini b/tox.ini
index 9ff70fe3..ecd60927 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,8 @@
[tox]
-envlist = packaging, py35, py36, py37, py38, py39, check_codestyle, check_isort
+envlist = packaging, py36, py37, py38, py39, check_codestyle, check_isort
+
+# we require tox>=2.3.2 for the fix to https://github.com/tox-dev/tox/issues/208
+minversion = 2.3.2
[base]
deps =
@@ -18,13 +21,11 @@ deps =
# installed on that).
#
# anyway, make sure that we have a recent enough setuptools.
- setuptools>=18.5 ; python_version >= '3.6'
- setuptools>=18.5,<51.0.0 ; python_version < '3.6'
+ setuptools>=18.5
# we also need a semi-recent version of pip, because old ones fail to
# install the "enum34" dependency of cryptography.
- pip>=10 ; python_version >= '3.6'
- pip>=10,<21.0 ; python_version < '3.6'
+ pip>=10
# directories/files we run the linters on.
# if you update this list, make sure to do the same in scripts-dev/lint.sh
@@ -48,6 +49,7 @@ deps =
extras =
# install the optional dependencies for tox environments without
# '-noextras' in their name
+ # (this requires tox 3)
!noextras: all
test
@@ -74,8 +76,6 @@ commands =
# we use "env" rather than putting a value in `setenv` so that it is not
# inherited by other tox environments.
#
- # keep this in sync with the copy in `testenv:py35-old`.
- #
/usr/bin/env COVERAGE_PROCESS_START={toxinidir}/.coveragerc "{envbindir}/trial" {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}
# As of twisted 16.4, trial tries to import the tests as a package (previously
@@ -103,8 +103,9 @@ usedevelop=true
# A test suite for the oldest supported versions of Python libraries, to catch
# any uses of APIs not available in them.
-[testenv:py35-old]
-skip_install=True
+[testenv:py3-old]
+skip_install = true
+usedevelop = false
deps =
# Old automat version for Twisted
Automat == 0.3.0
@@ -120,11 +121,7 @@ commands =
# Install Synapse itself. This won't update any libraries.
pip install -e ".[test]"
- # we have to duplicate the command from `testenv` rather than refer to it
- # as `{[testenv]commands}`, because we run on ubuntu xenial, which has
- # tox 2.3.1, and https://github.com/tox-dev/tox/issues/208.
- #
- /usr/bin/env COVERAGE_PROCESS_START={toxinidir}/.coveragerc "{envbindir}/trial" {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}
+ {[testenv]commands}
[testenv:benchmark]
deps =
@@ -136,7 +133,8 @@ commands =
python -m synmark {posargs:}
[testenv:packaging]
-skip_install=True
+skip_install = true
+usedevelop = false
deps =
check-manifest
commands =
@@ -154,7 +152,8 @@ extras = lint
commands = isort -c --df --sp setup.cfg {[base]lint_targets}
[testenv:check-newsfragment]
-skip_install = True
+skip_install = true
+usedevelop = false
deps = towncrier>=18.6.0rc1
commands =
python -m towncrier.check --compare-with=origin/develop
@@ -163,24 +162,26 @@ commands =
commands = {toxinidir}/scripts-dev/generate_sample_config --check
[testenv:combine]
-skip_install = True
+skip_install = true
+usedevelop = false
deps =
coverage
- pip>=10 ; python_version >= '3.6'
- pip>=10,<21.0 ; python_version < '3.6'
+ pip>=10
commands=
coverage combine
coverage report
[testenv:cov-erase]
-skip_install = True
+skip_install = true
+usedevelop = false
deps =
coverage
commands=
coverage erase
[testenv:cov-html]
-skip_install = True
+skip_install = true
+usedevelop = false
deps =
coverage
commands=