diff --git a/.ci/patch_for_twisted_trunk.sh b/.ci/patch_for_twisted_trunk.sh new file mode 100755 index 000000000000..f524581986a9 --- /dev/null +++ b/.ci/patch_for_twisted_trunk.sh @@ -0,0 +1,8 @@ +#!/bin/sh + +# replaces the dependency on Twisted in `python_dependencies` with trunk. + +set -e +cd "$(dirname "$0")"/.. + +sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py diff --git a/.ci/twisted_trunk_build_failed_issue_template.md b/.ci/twisted_trunk_build_failed_issue_template.md new file mode 100644 index 000000000000..2ead1dc39477 --- /dev/null +++ b/.ci/twisted_trunk_build_failed_issue_template.md @@ -0,0 +1,4 @@ +--- +title: CI run against Twisted trunk is failing +--- +See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}} diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml new file mode 100644 index 000000000000..b5c729888f57 --- /dev/null +++ b/.github/workflows/twisted_trunk.yml @@ -0,0 +1,90 @@ +name: Twisted Trunk + +on: + schedule: + - cron: 0 8 * * * + + workflow_dispatch: + +jobs: + mypy: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - run: .ci/patch_for_twisted_trunk.sh + - run: pip install tox + - run: tox -e mypy + + trial: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - run: sudo apt-get -qq install xmlsec1 + - uses: actions/setup-python@v2 + with: + python-version: 3.6 + - run: .ci/patch_for_twisted_trunk.sh + - run: pip install tox + - run: tox -e py + env: + TRIAL_FLAGS: "--jobs=2" + + - name: Dump logs + # Note: Dumps to workflow logs instead of using actions/upload-artifact + # This keeps logs colocated with failing jobs + # It also ignores find's exit code; this is a best effort affair + run: >- + find _trial_temp -name '*.log' + -exec echo "::group::{}" \; + -exec cat {} \; + -exec echo "::endgroup::" \; + || true + + sytest: + runs-on: ubuntu-latest + container: + image: matrixdotorg/sytest-synapse:buster + volumes: + - ${{ github.workspace }}:/src + + steps: + - uses: actions/checkout@v2 + - name: Patch dependencies + run: .ci/patch_for_twisted_trunk.sh + working-directory: /src + - name: Run SyTest + run: /bootstrap.sh synapse + working-directory: /src + - name: Summarise results.tap + if: ${{ always() }} + run: /sytest/scripts/tap_to_gha.pl /logs/results.tap + - name: Upload SyTest logs + uses: actions/upload-artifact@v2 + if: ${{ always() }} + with: + name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }}) + path: | + /logs/results.tap + /logs/**/*.log* + + # open an issue if the build fails, so we know about it. + open-issue: + if: failure() + needs: + - mypy + - trial + - sytest + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + update_existing: true + filename: .ci/twisted_trunk_build_failed_issue_template.md diff --git a/CHANGES.md b/CHANGES.md index 01766af39cc6..f8da8771aa6e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,17 @@ +Synapse 1.41.0 (2021-08-24) +=========================== + +This release adds support for Debian 12 (Bookworm), but **removes support for Ubuntu 20.10 (Groovy Gorilla)**, which reached End of Life last month. + +Note that when using workers the `/_synapse/admin/v1/users/{userId}/media` must now be handled by media workers. 
See the [upgrade notes](https://matrix-org.github.io/synapse/latest/upgrade.html) for more information. + + +Features +-------- + +- Enable room capabilities ([MSC3244](https://github.com/matrix-org/matrix-doc/pull/3244)) by default and set room version 8 as the preferred room version when creating restricted rooms. ([\#10571](https://github.com/matrix-org/synapse/issues/10571)) + + Synapse 1.41.0rc1 (2021-08-18) ============================== @@ -7,7 +21,7 @@ Features - Add `get_userinfo_by_id` method to ModuleApi. ([\#9581](https://github.com/matrix-org/synapse/issues/9581)) - Initial local support for [MSC3266](https://github.com/matrix-org/synapse/pull/10394), Room Summary over the unstable `/rooms/{roomIdOrAlias}/summary` API. ([\#10394](https://github.com/matrix-org/synapse/issues/10394)) - Experimental support for [MSC3288](https://github.com/matrix-org/matrix-doc/pull/3288), sending `room_type` to the identity server for 3pid invites over the `/store-invite` API. ([\#10435](https://github.com/matrix-org/synapse/issues/10435)) -- Add support for sending federation requests through a proxy. Contributed by @Bubu and @dklimpel. ([\#10475](https://github.com/matrix-org/synapse/issues/10475)) +- Add support for sending federation requests through a proxy. Contributed by @Bubu and @dklimpel. See the [upgrade notes](https://matrix-org.github.io/synapse/latest/upgrade.html) for more information. ([\#10596](https://github.com/matrix-org/synapse/issues/10596)). ([\#10475](https://github.com/matrix-org/synapse/issues/10475)) - Add support for "marker" events which makes historical events discoverable for servers that already have all of the scrollback history (part of [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716)). ([\#10498](https://github.com/matrix-org/synapse/issues/10498)) - Add a configuration setting for the time a `/sync` response is cached for. ([\#10513](https://github.com/matrix-org/synapse/issues/10513)) - The default logging handler for new installations is now `PeriodicallyFlushingMemoryHandler`, a buffered logging handler which periodically flushes itself. ([\#10518](https://github.com/matrix-org/synapse/issues/10518)) @@ -29,7 +43,7 @@ Bugfixes - Add some clarification to the sample config file. Contributed by @Kentokamoto. ([\#10129](https://github.com/matrix-org/synapse/issues/10129)) - Fix a long-standing bug where protocols which are not implemented by any appservices were incorrectly returned via `GET /_matrix/client/r0/thirdparty/protocols`. ([\#10532](https://github.com/matrix-org/synapse/issues/10532)) - Fix exceptions in logs when failing to get remote room list. ([\#10541](https://github.com/matrix-org/synapse/issues/10541)) -- Fix longstanding bug which caused the user "status" to be reset when the user went offline. Contributed by @dklimpel. ([\#10550](https://github.com/matrix-org/synapse/issues/10550)) +- Fix longstanding bug which caused the user's presence "status message" to be reset when the user went offline. Contributed by @dklimpel. ([\#10550](https://github.com/matrix-org/synapse/issues/10550)) - Allow public rooms to be previewed in the spaces summary APIs from [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946). 
([\#10580](https://github.com/matrix-org/synapse/issues/10580)) - Fix a bug introduced in v1.37.1 where an error could occur in the asynchronous processing of PDUs when the queue was empty. ([\#10592](https://github.com/matrix-org/synapse/issues/10592)) - Fix errors on /sync when read receipt data is a string. Only affects homeservers with the experimental flag for [MSC2285](https://github.com/matrix-org/matrix-doc/pull/2285) enabled. Contributed by @SimonBrandner. ([\#10606](https://github.com/matrix-org/synapse/issues/10606)) @@ -40,7 +54,7 @@ Bugfixes Improved Documentation ---------------------- -- Add documentation for configuration a forward proxy. ([\#10443](https://github.com/matrix-org/synapse/issues/10443)) +- Add documentation for configuring a forward proxy. ([\#10443](https://github.com/matrix-org/synapse/issues/10443)) - Updated the reverse proxy documentation to highlight the homserver configuration that is needed to make Synapse aware that is is intentionally reverse proxied. ([\#10551](https://github.com/matrix-org/synapse/issues/10551)) - Update CONTRIBUTING.md to fix index links and the instructions for SyTest in docker. ([\#10599](https://github.com/matrix-org/synapse/issues/10599)) @@ -48,7 +62,7 @@ Improved Documentation Deprecations and Removals ------------------------- -- No longer build `.deb` packages for Ubuntu 20.10 LTS Groovy Gorilla, which has now EOLed. ([\#10588](https://github.com/matrix-org/synapse/issues/10588)) +- No longer build `.deb` packages for Ubuntu 20.10 Groovy Gorilla, which has now EOLed. ([\#10588](https://github.com/matrix-org/synapse/issues/10588)) - The `template_dir` configuration settings in the `sso`, `account_validity` and `email` sections of the configuration file are now deprecated in favour of the global `templates.custom_template_directory` setting. See the [upgrade notes](https://matrix-org.github.io/synapse/latest/upgrade.html) for more information. ([\#10596](https://github.com/matrix-org/synapse/issues/10596)) diff --git a/changelog.d/10142.feature b/changelog.d/10142.feature new file mode 100644 index 000000000000..5353f6269d01 --- /dev/null +++ b/changelog.d/10142.feature @@ -0,0 +1 @@ +Add support for [MSC3231 - Token authenticated registration](https://github.com/matrix-org/matrix-doc/pull/3231). Users can be required to submit a token during registration to authenticate themselves. Contributed by Callum Brown. diff --git a/changelog.d/10192.doc b/changelog.d/10192.doc new file mode 100644 index 000000000000..3dd00537e8d9 --- /dev/null +++ b/changelog.d/10192.doc @@ -0,0 +1 @@ +Add documentation on how to connect Django with synapse using oidc and django-oauth-toolkit. Contributed by @HugoDelval. diff --git a/changelog.d/10593.bugfix b/changelog.d/10593.bugfix new file mode 100644 index 000000000000..af910bfa4d98 --- /dev/null +++ b/changelog.d/10593.bugfix @@ -0,0 +1 @@ +Reject Client-Server `/keys/query` requests which provide `device_ids` incorrectly. diff --git a/changelog.d/10613.feature b/changelog.d/10613.feature new file mode 100644 index 000000000000..ffc4e4289cfa --- /dev/null +++ b/changelog.d/10613.feature @@ -0,0 +1 @@ +Add pagination to the spaces summary based on updates to [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946). 
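For context on the pagination feature referenced above (implemented in the `room_summary.py` changes later in this diff): clients page through the room hierarchy by passing the returned `next_batch` token back as the `from` query parameter. A rough sketch of the exchange, using the unstable MSC2946 prefix — the exact path and response fields here are an assumption based on the MSC, not taken from this diff:

```sh
# First page (assumes the caller is authenticated as an ordinary user).
curl --header "Authorization: Bearer <access_token>" \
  "https://synapse.example.com/_matrix/client/unstable/org.matrix.msc2946/rooms/<room_id>/hierarchy?limit=20"
# The response contains "rooms" and, if there is more data, a "next_batch" token.

# Next page: feed next_batch back via the `from` parameter.
curl --header "Authorization: Bearer <access_token>" \
  "https://synapse.example.com/_matrix/client/unstable/org.matrix.msc2946/rooms/<room_id>/hierarchy?limit=20&from=<next_batch>"
```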
diff --git a/changelog.d/10621.misc b/changelog.d/10621.misc new file mode 100644 index 000000000000..b8de2e1911af --- /dev/null +++ b/changelog.d/10621.misc @@ -0,0 +1 @@ +Add a comment asking developers to leave a reason when bumping the database schema version. \ No newline at end of file diff --git a/changelog.d/10639.doc b/changelog.d/10639.doc new file mode 100644 index 000000000000..acbac4aad8ec --- /dev/null +++ b/changelog.d/10639.doc @@ -0,0 +1 @@ +Fix some of the titles not rendering in the OIDC documentation. diff --git a/changelog.d/10651.misc b/changelog.d/10651.misc new file mode 100644 index 000000000000..7104c121e02a --- /dev/null +++ b/changelog.d/10651.misc @@ -0,0 +1 @@ +Run a nightly CI build against Twisted trunk. diff --git a/changelog.d/10654.bugfix b/changelog.d/10654.bugfix new file mode 100644 index 000000000000..b0bd78453fab --- /dev/null +++ b/changelog.d/10654.bugfix @@ -0,0 +1 @@ +Enforce the maximum length for per-room display names and avatar URLs. \ No newline at end of file diff --git a/changelog.d/10662.misc b/changelog.d/10662.misc new file mode 100644 index 000000000000..593f9ceaad5a --- /dev/null +++ b/changelog.d/10662.misc @@ -0,0 +1 @@ +Do not print out stack traces for network errors when fetching data over federation. diff --git a/changelog.d/10664.misc b/changelog.d/10664.misc new file mode 100644 index 000000000000..cebd5e9a96c8 --- /dev/null +++ b/changelog.d/10664.misc @@ -0,0 +1 @@ +Simplify tests for device admin rest API. \ No newline at end of file diff --git a/changelog.d/10665.misc b/changelog.d/10665.misc new file mode 100644 index 000000000000..39a37b90b1b3 --- /dev/null +++ b/changelog.d/10665.misc @@ -0,0 +1 @@ +Add missing type hints to REST servlets. diff --git a/changelog.d/10666.misc b/changelog.d/10666.misc new file mode 100644 index 000000000000..39a37b90b1b3 --- /dev/null +++ b/changelog.d/10666.misc @@ -0,0 +1 @@ +Add missing type hints to REST servlets. diff --git a/changelog.d/10667.misc b/changelog.d/10667.misc new file mode 100644 index 000000000000..c92846ae260a --- /dev/null +++ b/changelog.d/10667.misc @@ -0,0 +1 @@ +Flatten the `tests.synapse.rests` package by moving the contents of `v1` and `v2_alpha` into the parent. \ No newline at end of file diff --git a/changelog.d/10672.misc b/changelog.d/10672.misc new file mode 100644 index 000000000000..7104c121e02a --- /dev/null +++ b/changelog.d/10672.misc @@ -0,0 +1 @@ +Run a nightly CI build against Twisted trunk. diff --git a/changelog.d/10677.bugfix b/changelog.d/10677.bugfix new file mode 100644 index 000000000000..9964afaaeea1 --- /dev/null +++ b/changelog.d/10677.bugfix @@ -0,0 +1 @@ +Fix a bug which caused the `synapse_user_logins_total` Prometheus metric not to be correctly initialised on restart. diff --git a/changelog.d/10684.bugfix b/changelog.d/10684.bugfix new file mode 100644 index 000000000000..311b17601a3e --- /dev/null +++ b/changelog.d/10684.bugfix @@ -0,0 +1 @@ +Fix long-standing issue which caused an error when a thumbnail is requested and there are multiple thumbnails with the same quality rating. diff --git a/changelog.d/10686.misc b/changelog.d/10686.misc new file mode 100644 index 000000000000..b76908d74ee7 --- /dev/null +++ b/changelog.d/10686.misc @@ -0,0 +1 @@ +Update `complement.sh` to rebuild the base Docker image when run with workers. 
diff --git a/debian/changelog b/debian/changelog index 68f309b0b25b..4da4bc018cf0 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.41.0) stable; urgency=medium + + * New synapse release 1.41.0. + + -- Synapse Packaging team Tue, 24 Aug 2021 15:31:45 +0100 + matrix-synapse-py3 (1.41.0~rc1) stable; urgency=medium * New synapse release 1.41.0~rc1. diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 634bc833ab92..4fcd2b78523a 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -53,6 +53,7 @@ - [Media](admin_api/media_admin_api.md) - [Purge History](admin_api/purge_history_api.md) - [Register Users](admin_api/register_api.md) + - [Registration Tokens](usage/administration/admin_api/registration_tokens.md) - [Manipulate Room Membership](admin_api/room_membership.md) - [Rooms](admin_api/rooms.md) - [Server Notices](admin_api/server_notices.md) diff --git a/docs/openid.md b/docs/openid.md index f685fd551acc..49180eec5293 100644 --- a/docs/openid.md +++ b/docs/openid.md @@ -79,7 +79,7 @@ oidc_providers: display_name_template: "{{ user.name }}" ``` -### [Dex][dex-idp] +### Dex [Dex][dex-idp] is a simple, open-source, certified OpenID Connect Provider. Although it is designed to help building a full-blown provider with an @@ -117,7 +117,7 @@ oidc_providers: localpart_template: "{{ user.name }}" display_name_template: "{{ user.name|capitalize }}" ``` -### [Keycloak][keycloak-idp] +### Keycloak [Keycloak][keycloak-idp] is an opensource IdP maintained by Red Hat. @@ -166,7 +166,9 @@ oidc_providers: localpart_template: "{{ user.preferred_username }}" display_name_template: "{{ user.name }}" ``` -### [Auth0][auth0] +### Auth0 + +[Auth0][auth0] is a hosted SaaS IdP solution. 1. Create a regular web application for Synapse 2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback` @@ -209,7 +211,7 @@ oidc_providers: ### GitHub -GitHub is a bit special as it is not an OpenID Connect compliant provider, but +[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but just a regular OAuth2 provider. The [`/user` API endpoint](https://developer.github.com/v3/users/#get-the-authenticated-user) @@ -242,11 +244,13 @@ oidc_providers: display_name_template: "{{ user.name }}" ``` -### [Google][google-idp] +### Google + +[Google][google-idp] is an OpenID certified authentication and authorisation provider. 1. Set up a project in the Google API Console (see https://developers.google.com/identity/protocols/oauth2/openid-connect#appsetup). -2. add an "OAuth Client ID" for a Web Application under "Credentials". +2. Add an "OAuth Client ID" for a Web Application under "Credentials". 3. Copy the Client ID and Client Secret, and add the following to your synapse config: ```yaml oidc_providers: @@ -446,3 +450,51 @@ The synapse config will look like this: config: email_template: "{{ user.email }}" ``` + +## Django OAuth Toolkit + +[django-oauth-toolkit](https://github.com/jazzband/django-oauth-toolkit) is a +Django application providing out of the box all the endpoints, data and logic +needed to add OAuth2 capabilities to your Django projects. It supports +[OpenID Connect too](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html). + +Configuration on Django's side: + +1. 
Add an application: https://example.com/admin/oauth2_provider/application/add/ and choose parameters like this:
+* `Redirect uris`: https://synapse.example.com/_synapse/client/oidc/callback
+* `Client type`: `Confidential`
+* `Authorization grant type`: `Authorization code`
+* `Algorithm`: `HMAC with SHA-2 256`
+2. You can [customize the claims](https://django-oauth-toolkit.readthedocs.io/en/latest/oidc.html#customizing-the-oidc-responses) Django gives to synapse (optional):
+   <details>
+    <summary>Code sample</summary>
+
+    ```python
+    class CustomOAuth2Validator(OAuth2Validator):
+
+        def get_additional_claims(self, request):
+            return {
+                "sub": request.user.email,
+                "email": request.user.email,
+                "first_name": request.user.first_name,
+                "last_name": request.user.last_name,
+            }
+    ```
+   </details>
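If you use a custom validator like the sample above, django-oauth-toolkit has to be pointed at it from your Django settings. A minimal sketch — the module path `yourapp.oauth_validators` is an assumption; adjust it to wherever the class actually lives:

```python
# settings.py
OAUTH2_PROVIDER = {
    # Dotted path to the CustomOAuth2Validator subclass shown above (assumed location).
    "OAUTH2_VALIDATOR_CLASS": "yourapp.oauth_validators.CustomOAuth2Validator",
    # OIDC support is off by default and must be enabled explicitly.
    "OIDC_ENABLED": True,
    # Synapse requests the "openid" scope, so it must be defined here.
    "SCOPES": {
        "openid": "OpenID Connect scope",
    },
}
```

With `HMAC with SHA-2 256` selected as the algorithm above, the ID token is signed with the client secret, so no RSA key needs to be configured.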
+Your synapse config is then: + +```yaml +oidc_providers: + - idp_id: django_example + idp_name: "Django Example" + issuer: "https://example.com/o/" + client_id: "your-client-id" # CHANGE ME + client_secret: "your-client-secret" # CHANGE ME + scopes: ["openid"] + user_profile_method: "userinfo_endpoint" # needed because oauth-toolkit does not include user information in the authorization response + user_mapping_provider: + config: + localpart_template: "{{ user.email.split('@')[0] }}" + display_name_template: "{{ user.first_name }} {{ user.last_name }}" + email_template: "{{ user.email }}" +``` diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 2b0c45324215..935841dbfa37 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -793,6 +793,8 @@ log_config: "CONFDIR/SERVERNAME.log.config" # is using # - one for registration that ratelimits registration requests based on the # client's IP address. +# - one for checking the validity of registration tokens that ratelimits +# requests based on the client's IP address. # - one for login that ratelimits login requests based on the client's IP # address. # - one for login that ratelimits login requests based on the account the @@ -821,6 +823,10 @@ log_config: "CONFDIR/SERVERNAME.log.config" # per_second: 0.17 # burst_count: 3 # +#rc_registration_token_validity: +# per_second: 0.1 +# burst_count: 5 +# #rc_login: # address: # per_second: 0.17 @@ -1169,6 +1175,15 @@ url_preview_accept_language: # #enable_3pid_lookup: true +# Require users to submit a token during registration. +# Tokens can be managed using the admin API: +# https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/registration_tokens.html +# Note that `enable_registration` must be set to `true`. +# Disabling this option will not delete any tokens previously generated. +# Defaults to false. Uncomment the following to require tokens: +# +#registration_requires_token: true + # If set, allows registration of standard or admin accounts by anyone who # has the shared secret, even if registration is otherwise disabled. # diff --git a/docs/usage/administration/admin_api/registration_tokens.md b/docs/usage/administration/admin_api/registration_tokens.md new file mode 100644 index 000000000000..828c0277d626 --- /dev/null +++ b/docs/usage/administration/admin_api/registration_tokens.md @@ -0,0 +1,295 @@ +# Registration Tokens + +This API allows you to manage tokens which can be used to authenticate +registration requests, as proposed in [MSC3231](https://github.com/govynnus/matrix-doc/blob/token-registration/proposals/3231-token-authenticated-registration.md). +To use it, you will need to enable the `registration_requires_token` config +option, and authenticate by providing an `access_token` for a server admin: +see [Admin API](../../usage/administration/admin_api). +Note that this API is still experimental; not all clients may support it yet. + + +## Registration token objects + +Most endpoints make use of JSON objects that contain details about tokens. +These objects have the following fields: +- `token`: The token which can be used to authenticate registration. +- `uses_allowed`: The number of times the token can be used to complete a + registration before it becomes invalid. +- `pending`: The number of pending uses the token has. When someone uses + the token to authenticate themselves, the pending counter is incremented + so that the token is not used more than the permitted number of times. 
+ When the person completes registration the pending counter is decremented, + and the completed counter is incremented. +- `completed`: The number of times the token has been used to successfully + complete a registration. +- `expiry_time`: The latest time the token is valid. Given as the number of + milliseconds since 1970-01-01 00:00:00 UTC (the start of the Unix epoch). + To convert this into a human-readable form you can remove the milliseconds + and use the `date` command. For example, `date -d '@1625394937'`. + + +## List all tokens + +Lists all tokens and details about them. If the request is successful, the top +level JSON object will have a `registration_tokens` key which is an array of +registration token objects. + +``` +GET /_synapse/admin/v1/registration_tokens +``` + +Optional query parameters: +- `valid`: `true` or `false`. If `true`, only valid tokens are returned. + If `false`, only tokens that have expired or have had all uses exhausted are + returned. If omitted, all tokens are returned regardless of validity. + +Example: + +``` +GET /_synapse/admin/v1/registration_tokens +``` +``` +200 OK + +{ + "registration_tokens": [ + { + "token": "abcd", + "uses_allowed": 3, + "pending": 0, + "completed": 1, + "expiry_time": null + }, + { + "token": "pqrs", + "uses_allowed": 2, + "pending": 1, + "completed": 1, + "expiry_time": null + }, + { + "token": "wxyz", + "uses_allowed": null, + "pending": 0, + "completed": 9, + "expiry_time": 1625394937000 // 2021-07-04 10:35:37 UTC + } + ] +} +``` + +Example using the `valid` query parameter: + +``` +GET /_synapse/admin/v1/registration_tokens?valid=false +``` +``` +200 OK + +{ + "registration_tokens": [ + { + "token": "pqrs", + "uses_allowed": 2, + "pending": 1, + "completed": 1, + "expiry_time": null + }, + { + "token": "wxyz", + "uses_allowed": null, + "pending": 0, + "completed": 9, + "expiry_time": 1625394937000 // 2021-07-04 10:35:37 UTC + } + ] +} +``` + + +## Get one token + +Get details about a single token. If the request is successful, the response +body will be a registration token object. + +``` +GET /_synapse/admin/v1/registration_tokens/ +``` + +Path parameters: +- `token`: The registration token to return details of. + +Example: + +``` +GET /_synapse/admin/v1/registration_tokens/abcd +``` +``` +200 OK + +{ + "token": "abcd", + "uses_allowed": 3, + "pending": 0, + "completed": 1, + "expiry_time": null +} +``` + + +## Create token + +Create a new registration token. If the request is successful, the newly created +token will be returned as a registration token object in the response body. + +``` +POST /_synapse/admin/v1/registration_tokens/new +``` + +The request body must be a JSON object and can contain the following fields: +- `token`: The registration token. A string of no more than 64 characters that + consists only of characters matched by the regex `[A-Za-z0-9-_]`. + Default: randomly generated. +- `uses_allowed`: The integer number of times the token can be used to complete + a registration before it becomes invalid. + Default: `null` (unlimited uses). +- `expiry_time`: The latest time the token is valid. Given as the number of + milliseconds since 1970-01-01 00:00:00 UTC (the start of the Unix epoch). + You could use, for example, `date '+%s000' -d 'tomorrow'`. + Default: `null` (token does not expire). +- `length`: The length of the token randomly generated if `token` is not + specified. Must be between 1 and 64 inclusive. Default: `16`. + +If a field is omitted the default is used. 
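In practice this endpoint is called with an admin access token; for example, with `curl` (assuming the homeserver is reachable at `http://localhost:8008` and `<admin_access_token>` belongs to a server admin):

```sh
curl -X POST \
  --header "Authorization: Bearer <admin_access_token>" \
  --header "Content-Type: application/json" \
  --data '{"uses_allowed": 1}' \
  "http://localhost:8008/_synapse/admin/v1/registration_tokens/new"
```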
+ +Example using defaults: + +``` +POST /_synapse/admin/v1/registration_tokens/new + +{} +``` +``` +200 OK + +{ + "token": "0M-9jbkf2t_Tgiw1", + "uses_allowed": null, + "pending": 0, + "completed": 0, + "expiry_time": null +} +``` + +Example specifying some fields: + +``` +POST /_synapse/admin/v1/registration_tokens/new + +{ + "token": "defg", + "uses_allowed": 1 +} +``` +``` +200 OK + +{ + "token": "defg", + "uses_allowed": 1, + "pending": 0, + "completed": 0, + "expiry_time": null +} +``` + + +## Update token + +Update the number of allowed uses or expiry time of a token. If the request is +successful, the updated token will be returned as a registration token object +in the response body. + +``` +PUT /_synapse/admin/v1/registration_tokens/ +``` + +Path parameters: +- `token`: The registration token to update. + +The request body must be a JSON object and can contain the following fields: +- `uses_allowed`: The integer number of times the token can be used to complete + a registration before it becomes invalid. By setting `uses_allowed` to `0` + the token can be easily made invalid without deleting it. + If `null` the token will have an unlimited number of uses. +- `expiry_time`: The latest time the token is valid. Given as the number of + milliseconds since 1970-01-01 00:00:00 UTC (the start of the Unix epoch). + If `null` the token will not expire. + +If a field is omitted its value is not modified. + +Example: + +``` +PUT /_synapse/admin/v1/registration_tokens/defg + +{ + "expiry_time": 4781243146000 // 2121-07-06 11:05:46 UTC +} +``` +``` +200 OK + +{ + "token": "defg", + "uses_allowed": 1, + "pending": 0, + "completed": 0, + "expiry_time": 4781243146000 +} +``` + + +## Delete token + +Delete a registration token. If the request is successful, the response body +will be an empty JSON object. + +``` +DELETE /_synapse/admin/v1/registration_tokens/ +``` + +Path parameters: +- `token`: The registration token to delete. + +Example: + +``` +DELETE /_synapse/admin/v1/registration_tokens/wxyz +``` +``` +200 OK + +{} +``` + + +## Errors + +If a request fails a "standard error response" will be returned as defined in +the [Matrix Client-Server API specification](https://matrix.org/docs/spec/client_server/r0.6.1#api-standards). + +For example, if the token specified in a path parameter does not exist a +`404 Not Found` error will be returned. 
+ +``` +GET /_synapse/admin/v1/registration_tokens/1234 +``` +``` +404 Not Found + +{ + "errcode": "M_NOT_FOUND", + "error": "No such registration token: 1234" +} +``` diff --git a/docs/workers.md b/docs/workers.md index 2e63f0345288..3121241894f2 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -236,6 +236,7 @@ expressions: # Registration/login requests ^/_matrix/client/(api/v1|r0|unstable)/login$ ^/_matrix/client/(r0|unstable)/register$ + ^/_matrix/client/unstable/org.matrix.msc3231/register/org.matrix.msc3231.login.registration_token/validity$ # Event sending requests ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/redact diff --git a/mypy.ini b/mypy.ini index 90ade37b3f9a..745e6b78eb62 100644 --- a/mypy.ini +++ b/mypy.ini @@ -57,6 +57,7 @@ files = synapse/storage/databases/main/keys.py, synapse/storage/databases/main/pusher.py, synapse/storage/databases/main/registration.py, + synapse/storage/databases/main/session.py, synapse/storage/databases/main/stream.py, synapse/storage/databases/main/ui_auth.py, synapse/storage/database.py, @@ -91,8 +92,8 @@ files = tests/handlers/test_password_providers.py, tests/handlers/test_room_summary.py, tests/handlers/test_sync.py, - tests/rest/client/v1/test_login.py, - tests/rest/client/v2_alpha/test_auth.py, + tests/rest/client/test_login.py, + tests/rest/client/test_auth.py, tests/util/test_itertools.py, tests/util/test_stream_change_cache.py diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index 5d0ef8dd3a73..89af7a4fde89 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -35,25 +35,25 @@ if [[ -z "$COMPLEMENT_DIR" ]]; then echo "Checkout available at 'complement-master'" fi +# Build the base Synapse image from the local checkout +docker build -t matrixdotorg/synapse -f "docker/Dockerfile" . + # If we're using workers, modify the docker files slightly. if [[ -n "$WORKERS" ]]; then - BASE_IMAGE=matrixdotorg/synapse-workers - BASE_DOCKERFILE=docker/Dockerfile-workers + # Build the workers docker image (from the base Synapse image). + docker build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" . + export COMPLEMENT_BASE_IMAGE=complement-synapse-workers COMPLEMENT_DOCKERFILE=SynapseWorkers.Dockerfile # And provide some more configuration to complement. export COMPLEMENT_CA=true export COMPLEMENT_VERSION_CHECK_ITERATIONS=500 else - BASE_IMAGE=matrixdotorg/synapse - BASE_DOCKERFILE=docker/Dockerfile export COMPLEMENT_BASE_IMAGE=complement-synapse COMPLEMENT_DOCKERFILE=Synapse.Dockerfile fi -# Build the base Synapse image from the local checkout -docker build -t $BASE_IMAGE -f "$BASE_DOCKERFILE" . -# Build the Synapse monolith image from Complement, based on the above image we just built +# Build the Complement image from the Synapse image we just built. 
docker build -t $COMPLEMENT_BASE_IMAGE -f "$COMPLEMENT_DIR/dockerfiles/$COMPLEMENT_DOCKERFILE" "$COMPLEMENT_DIR/dockerfiles" cd "$COMPLEMENT_DIR" diff --git a/synapse/__init__.py b/synapse/__init__.py index 6ada20a77f49..ef3770262e8f 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -47,7 +47,7 @@ except ImportError: pass -__version__ = "1.41.0rc1" +__version__ = "1.41.0" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when diff --git a/synapse/api/constants.py b/synapse/api/constants.py index e0e24fddac35..829061c870c4 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -79,6 +79,7 @@ class LoginType: TERMS = "m.login.terms" SSO = "m.login.sso" DUMMY = "m.login.dummy" + REGISTRATION_TOKEN = "org.matrix.msc3231.login.registration_token" # This is used in the `type` parameter for /register when called by diff --git a/synapse/api/errors.py b/synapse/api/errors.py index dc662bca8353..9480f448d7a5 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -147,6 +147,14 @@ def error_dict(self): return cs_error(self.msg, self.errcode) +class InvalidAPICallError(SynapseError): + """You called an existing API endpoint, but fed that endpoint + invalid or incomplete data.""" + + def __init__(self, msg: str): + super().__init__(HTTPStatus.BAD_REQUEST, msg, Codes.BAD_JSON) + + class ProxiedRequestError(SynapseError): """An error from a general matrix endpoint, eg. from a proxied Matrix API call. diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index 11280c446220..8abcdfd4fd9a 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -293,7 +293,7 @@ class RoomVersionCapability: ), RoomVersionCapability( "restricted", - None, + RoomVersions.V8, lambda room_version: room_version.msc3083_join_rules, ), ) diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 845e6a822046..9b71dd75e6d3 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -95,7 +95,10 @@ ProfileRestServlet, ) from synapse.rest.client.push_rule import PushRuleRestServlet -from synapse.rest.client.register import RegisterRestServlet +from synapse.rest.client.register import ( + RegisterRestServlet, + RegistrationTokenValidityRestServlet, +) from synapse.rest.client.sendtodevice import SendToDeviceRestServlet from synapse.rest.client.versions import VersionsRestServlet from synapse.rest.client.voip import VoipRestServlet @@ -115,6 +118,7 @@ from synapse.storage.databases.main.presence import PresenceStore from synapse.storage.databases.main.room import RoomWorkerStore from synapse.storage.databases.main.search import SearchStore +from synapse.storage.databases.main.session import SessionStore from synapse.storage.databases.main.stats import StatsStore from synapse.storage.databases.main.transactions import TransactionWorkerStore from synapse.storage.databases.main.ui_auth import UIAuthWorkerStore @@ -250,6 +254,7 @@ class GenericWorkerSlavedStore( SearchStore, TransactionWorkerStore, LockStore, + SessionStore, BaseSlavedStore, ): pass @@ -279,6 +284,7 @@ def _listen_http(self, listener_config: ListenerConfig): resource = JsonResource(self, canonical_json=False) RegisterRestServlet(self).register(resource) + RegistrationTokenValidityRestServlet(self).register(resource) login.register_servlets(self, resource) ThreepidRestServlet(self).register(resource) DevicesRestServlet(self).register(resource) diff --git 
a/synapse/config/experimental.py b/synapse/config/experimental.py index a85d8a5dc68b..95deda11a5d6 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -37,7 +37,7 @@ def read_config(self, config: JsonDict, **kwargs): self.msc2285_enabled: bool = experimental.get("msc2285_enabled", False) # MSC3244 (room version capabilities) - self.msc3244_enabled: bool = experimental.get("msc3244_enabled", False) + self.msc3244_enabled: bool = experimental.get("msc3244_enabled", True) # MSC3283 (set displayname, avatar_url and change 3pid capabilities) self.msc3283_enabled: bool = experimental.get("msc3283_enabled", False) diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py index 7a8d5851c40b..f856327bd821 100644 --- a/synapse/config/ratelimiting.py +++ b/synapse/config/ratelimiting.py @@ -79,6 +79,11 @@ def read_config(self, config, **kwargs): self.rc_registration = RateLimitConfig(config.get("rc_registration", {})) + self.rc_registration_token_validity = RateLimitConfig( + config.get("rc_registration_token_validity", {}), + defaults={"per_second": 0.1, "burst_count": 5}, + ) + rc_login_config = config.get("rc_login", {}) self.rc_login_address = RateLimitConfig(rc_login_config.get("address", {})) self.rc_login_account = RateLimitConfig(rc_login_config.get("account", {})) @@ -143,6 +148,8 @@ def generate_config_section(self, **kwargs): # is using # - one for registration that ratelimits registration requests based on the # client's IP address. + # - one for checking the validity of registration tokens that ratelimits + # requests based on the client's IP address. # - one for login that ratelimits login requests based on the client's IP # address. # - one for login that ratelimits login requests based on the account the @@ -171,6 +178,10 @@ def generate_config_section(self, **kwargs): # per_second: 0.17 # burst_count: 3 # + #rc_registration_token_validity: + # per_second: 0.1 + # burst_count: 5 + # #rc_login: # address: # per_second: 0.17 diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 0ad919b1394b..7cffdacfa5ce 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -33,6 +33,9 @@ def read_config(self, config, **kwargs): self.registrations_require_3pid = config.get("registrations_require_3pid", []) self.allowed_local_3pids = config.get("allowed_local_3pids", []) self.enable_3pid_lookup = config.get("enable_3pid_lookup", True) + self.registration_requires_token = config.get( + "registration_requires_token", False + ) self.registration_shared_secret = config.get("registration_shared_secret") self.bcrypt_rounds = config.get("bcrypt_rounds", 12) @@ -140,6 +143,9 @@ def read_config(self, config, **kwargs): "mechanism by removing the `access_token_lifetime` option." ) + # The fallback template used for authenticating using a registration token + self.registration_token_template = self.read_template("registration_token.html") + # The success template used during fallback auth. self.fallback_success_template = self.read_template("auth_success.html") @@ -199,6 +205,15 @@ def generate_config_section(self, generate_secrets=False, **kwargs): # #enable_3pid_lookup: true + # Require users to submit a token during registration. + # Tokens can be managed using the admin API: + # https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/registration_tokens.html + # Note that `enable_registration` must be set to `true`. 
+ # Disabling this option will not delete any tokens previously generated. + # Defaults to false. Uncomment the following to require tokens: + # + #registration_requires_token: true + # If set, allows registration of standard or admin accounts by anyone who # has the shared secret, even if registration is otherwise disabled. # diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 29979414e3d7..44d9e8a5c734 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -43,6 +43,7 @@ Codes, FederationDeniedError, HttpResponseException, + RequestSendFailed, SynapseError, UnsupportedRoomVersionError, ) @@ -558,7 +559,11 @@ async def _try_destination_list( try: return await callback(destination) - except InvalidResponseError as e: + except ( + RequestSendFailed, + InvalidResponseError, + NotRetryingDestination, + ) as e: logger.warning("Failed to %s via %s: %s", description, destination, e) except UnsupportedRoomVersionError: raise diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 8cf614136ebb..0ed59d757bf7 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -56,6 +56,22 @@ ) +def init_counters_for_auth_provider(auth_provider_id: str) -> None: + """Ensure the prometheus counters for the given auth provider are initialised + + This fixes a problem where the counters are not reported for a given auth provider + until the user first logs in/registers. + """ + for is_guest in (True, False): + login_counter.labels(guest=is_guest, auth_provider=auth_provider_id) + for shadow_banned in (True, False): + registration_counter.labels( + guest=is_guest, + shadow_banned=shadow_banned, + auth_provider=auth_provider_id, + ) + + class LoginDict(TypedDict): device_id: str access_token: str @@ -96,6 +112,8 @@ def __init__(self, hs: "HomeServer"): self.session_lifetime = hs.config.session_lifetime self.access_token_lifetime = hs.config.access_token_lifetime + init_counters_for_auth_provider("") + async def check_username( self, localpart: str, diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index ba131962185f..401b84aad1eb 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -36,6 +36,7 @@ from synapse.event_auth import get_named_level, get_power_level_event from synapse.events import EventBase from synapse.events.snapshot import EventContext +from synapse.handlers.profile import MAX_AVATAR_URL_LEN, MAX_DISPLAYNAME_LEN from synapse.types import ( JsonDict, Requester, @@ -79,7 +80,7 @@ def __init__(self, hs: "HomeServer"): self.account_data_handler = hs.get_account_data_handler() self.event_auth_handler = hs.get_event_auth_handler() - self.member_linearizer = Linearizer(name="member") + self.member_linearizer: Linearizer = Linearizer(name="member") self.clock = hs.get_clock() self.spam_checker = hs.get_spam_checker() @@ -556,6 +557,20 @@ async def update_membership_locked( content.pop("displayname", None) content.pop("avatar_url", None) + if len(content.get("displayname") or "") > MAX_DISPLAYNAME_LEN: + raise SynapseError( + 400, + f"Displayname is too long (max {MAX_DISPLAYNAME_LEN})", + errcode=Codes.BAD_JSON, + ) + + if len(content.get("avatar_url") or "") > MAX_AVATAR_URL_LEN: + raise SynapseError( + 400, + f"Avatar URL is too long (max {MAX_AVATAR_URL_LEN})", + errcode=Codes.BAD_JSON, + ) + effective_membership_state = action if action in ["kick", "unban"]: effective_membership_state = "leave" diff --git 
a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py index ac6cfc0da915..906985c754d9 100644 --- a/synapse/handlers/room_summary.py +++ b/synapse/handlers/room_summary.py @@ -28,12 +28,11 @@ Membership, RoomTypes, ) -from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError +from synapse.api.errors import AuthError, Codes, NotFoundError, StoreError, SynapseError from synapse.events import EventBase from synapse.events.utils import format_event_for_client_v2 from synapse.types import JsonDict from synapse.util.caches.response_cache import ResponseCache -from synapse.util.stringutils import random_string if TYPE_CHECKING: from synapse.server import HomeServer @@ -76,6 +75,9 @@ class _PaginationSession: class RoomSummaryHandler: + # A unique key used for pagination sessions for the room hierarchy endpoint. + _PAGINATION_SESSION_TYPE = "room_hierarchy_pagination" + # The time a pagination session remains valid for. _PAGINATION_SESSION_VALIDITY_PERIOD_MS = 5 * 60 * 1000 @@ -87,12 +89,6 @@ def __init__(self, hs: "HomeServer"): self._server_name = hs.hostname self._federation_client = hs.get_federation_client() - # A map of query information to the current pagination state. - # - # TODO Allow for multiple workers to share this data. - # TODO Expire pagination tokens. - self._pagination_sessions: Dict[_PaginationKey, _PaginationSession] = {} - # If a user tries to fetch the same page multiple times in quick succession, # only process the first attempt and return its result to subsequent requests. self._pagination_response_cache: ResponseCache[ @@ -102,21 +98,6 @@ def __init__(self, hs: "HomeServer"): "get_room_hierarchy", ) - def _expire_pagination_sessions(self): - """Expire pagination session which are old.""" - expire_before = ( - self._clock.time_msec() - self._PAGINATION_SESSION_VALIDITY_PERIOD_MS - ) - to_expire = [] - - for key, value in self._pagination_sessions.items(): - if value.creation_time_ms < expire_before: - to_expire.append(key) - - for key in to_expire: - logger.debug("Expiring pagination session id %s", key) - del self._pagination_sessions[key] - async def get_space_summary( self, requester: str, @@ -327,18 +308,29 @@ async def _get_room_hierarchy( # If this is continuing a previous session, pull the persisted data. if from_token: - self._expire_pagination_sessions() + try: + pagination_session = await self._store.get_session( + session_type=self._PAGINATION_SESSION_TYPE, + session_id=from_token, + ) + except StoreError: + raise SynapseError(400, "Unknown pagination token", Codes.INVALID_PARAM) - pagination_key = _PaginationKey( - requested_room_id, suggested_only, max_depth, from_token - ) - if pagination_key not in self._pagination_sessions: + # If the requester, room ID, suggested-only, or max depth were modified + # the session is invalid. + if ( + requester != pagination_session["requester"] + or requested_room_id != pagination_session["room_id"] + or suggested_only != pagination_session["suggested_only"] + or max_depth != pagination_session["max_depth"] + ): raise SynapseError(400, "Unknown pagination token", Codes.INVALID_PARAM) # Load the previous state. 
- pagination_session = self._pagination_sessions[pagination_key] - room_queue = pagination_session.room_queue - processed_rooms = pagination_session.processed_rooms + room_queue = [ + _RoomQueueEntry(*fields) for fields in pagination_session["room_queue"] + ] + processed_rooms = set(pagination_session["processed_rooms"]) else: # The queue of rooms to process, the next room is last on the stack. room_queue = [_RoomQueueEntry(requested_room_id, ())] @@ -456,13 +448,21 @@ async def _get_room_hierarchy( # If there's additional data, generate a pagination token (and persist state). if room_queue: - next_batch = random_string(24) - result["next_batch"] = next_batch - pagination_key = _PaginationKey( - requested_room_id, suggested_only, max_depth, next_batch - ) - self._pagination_sessions[pagination_key] = _PaginationSession( - self._clock.time_msec(), room_queue, processed_rooms + result["next_batch"] = await self._store.create_session( + session_type=self._PAGINATION_SESSION_TYPE, + value={ + # Information which must be identical across pagination. + "requester": requester, + "room_id": requested_room_id, + "suggested_only": suggested_only, + "max_depth": max_depth, + # The stored state. + "room_queue": [ + attr.astuple(room_entry) for room_entry in room_queue + ], + "processed_rooms": list(processed_rooms), + }, + expiry_ms=self._PAGINATION_SESSION_VALIDITY_PERIOD_MS, ) return result diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 1b855a685c4a..0e6ebb574ecf 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -37,6 +37,7 @@ from synapse.api.constants import LoginType from synapse.api.errors import Codes, NotFoundError, RedirectException, SynapseError from synapse.config.sso import SsoAttributeRequirement +from synapse.handlers.register import init_counters_for_auth_provider from synapse.handlers.ui_auth import UIAuthSessionDataConstants from synapse.http import get_request_user_agent from synapse.http.server import respond_with_html, respond_with_redirect @@ -213,6 +214,7 @@ def register_identity_provider(self, p: SsoIdentityProvider): p_id = p.idp_id assert p_id not in self._identity_providers self._identity_providers[p_id] = p + init_counters_for_auth_provider(p_id) def get_identity_providers(self) -> Mapping[str, SsoIdentityProvider]: """Get the configured identity providers""" diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 2203c45dcc9a..86c3c7f0df50 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -30,6 +30,7 @@ from synapse.api.constants import AccountDataTypes, EventTypes, Membership from synapse.api.filtering import FilterCollection +from synapse.api.presence import UserPresenceState from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.events import EventBase from synapse.logging.context import current_context @@ -231,7 +232,7 @@ class SyncResult: """ next_batch: StreamToken - presence: List[JsonDict] + presence: List[UserPresenceState] account_data: List[JsonDict] joined: List[JoinedSyncResult] invited: List[InvitedSyncResult] @@ -2177,14 +2178,14 @@ class SyncResultBuilder: joined_room_ids: List of rooms the user is joined to # The following mirror the fields in a sync response - presence (list) - account_data (list) - joined (list[JoinedSyncResult]) - invited (list[InvitedSyncResult]) - knocked (list[KnockedSyncResult]) - archived (list[ArchivedSyncResult]) - groups (GroupsSyncResult|None) - to_device (list) + presence + account_data + joined + invited + knocked + archived + groups 
+ to_device """ sync_config: SyncConfig @@ -2193,7 +2194,7 @@ class SyncResultBuilder: now_token: StreamToken joined_room_ids: FrozenSet[str] - presence: List[JsonDict] = attr.Factory(list) + presence: List[UserPresenceState] = attr.Factory(list) account_data: List[JsonDict] = attr.Factory(list) joined: List[JoinedSyncResult] = attr.Factory(list) invited: List[InvitedSyncResult] = attr.Factory(list) diff --git a/synapse/handlers/ui_auth/__init__.py b/synapse/handlers/ui_auth/__init__.py index 4c3b669faeef..13b0c61d2e20 100644 --- a/synapse/handlers/ui_auth/__init__.py +++ b/synapse/handlers/ui_auth/__init__.py @@ -34,3 +34,8 @@ class UIAuthSessionDataConstants: # used by validate_user_via_ui_auth to store the mxid of the user we are validating # for. REQUEST_USER_ID = "request_user_id" + + # used during registration to store the registration token used (if required) so that: + # - we can prevent a token being used twice by one session + # - we can 'use up' the token after registration has successfully completed + REGISTRATION_TOKEN = "org.matrix.msc3231.login.registration_token" diff --git a/synapse/handlers/ui_auth/checkers.py b/synapse/handlers/ui_auth/checkers.py index 270541cc7605..d3828dec6b3f 100644 --- a/synapse/handlers/ui_auth/checkers.py +++ b/synapse/handlers/ui_auth/checkers.py @@ -241,11 +241,76 @@ async def check_auth(self, authdict: dict, clientip: str) -> Any: return await self._check_threepid("msisdn", authdict) +class RegistrationTokenAuthChecker(UserInteractiveAuthChecker): + AUTH_TYPE = LoginType.REGISTRATION_TOKEN + + def __init__(self, hs: "HomeServer"): + super().__init__(hs) + self.hs = hs + self._enabled = bool(hs.config.registration_requires_token) + self.store = hs.get_datastore() + + def is_enabled(self) -> bool: + return self._enabled + + async def check_auth(self, authdict: dict, clientip: str) -> Any: + if "token" not in authdict: + raise LoginError(400, "Missing registration token", Codes.MISSING_PARAM) + if not isinstance(authdict["token"], str): + raise LoginError( + 400, "Registration token must be a string", Codes.INVALID_PARAM + ) + if "session" not in authdict: + raise LoginError(400, "Missing UIA session", Codes.MISSING_PARAM) + + # Get these here to avoid cyclic dependencies + from synapse.handlers.ui_auth import UIAuthSessionDataConstants + + auth_handler = self.hs.get_auth_handler() + + session = authdict["session"] + token = authdict["token"] + + # If the LoginType.REGISTRATION_TOKEN stage has already been completed, + # return early to avoid incrementing `pending` again. + stored_token = await auth_handler.get_session_data( + session, UIAuthSessionDataConstants.REGISTRATION_TOKEN + ) + if stored_token: + if token != stored_token: + raise LoginError( + 400, "Registration token has changed", Codes.INVALID_PARAM + ) + else: + return token + + if await self.store.registration_token_is_valid(token): + # Increment pending counter, so that if token has limited uses it + # can't be used up by someone else in the meantime. + await self.store.set_registration_token_pending(token) + # Store the token in the UIA session, so that once registration + # is complete `completed` can be incremented. + await auth_handler.set_session_data( + session, + UIAuthSessionDataConstants.REGISTRATION_TOKEN, + token, + ) + # The token will be stored as the result of the authentication stage + # in ui_auth_sessions_credentials. This allows the pending counter + # for tokens to be decremented when expired sessions are deleted. 
+ return token + else: + raise LoginError( + 401, "Invalid registration token", errcode=Codes.UNAUTHORIZED + ) + + INTERACTIVE_AUTH_CHECKERS = [ DummyAuthChecker, TermsAuthChecker, RecaptchaAuthChecker, EmailIdentityAuthChecker, MsisdnAuthChecker, + RegistrationTokenAuthChecker, ] """A list of UserInteractiveAuthChecker classes""" diff --git a/synapse/res/templates/registration_token.html b/synapse/res/templates/registration_token.html new file mode 100644 index 000000000000..4577ce17023e --- /dev/null +++ b/synapse/res/templates/registration_token.html @@ -0,0 +1,23 @@ + + +Authentication + + + + +
+<form id="registrationTokenForm" method="post" action="{{ myurl }}">
+    {% if error is defined %}
+    <p class="error"><strong>Error: {{ error }}</strong></p>
+    {% endif %}
+    <p>
+        Please enter a registration token.
+    </p>
+    <input type="hidden" name="session" value="{{ session }}" />
+    <input type="text" name="token" />
+    <input type="submit" value="Authenticate" />
+</form>
+ + diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 7f3051aef105..6e1c8736e1dd 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -36,6 +36,11 @@ ) from synapse.rest.admin.groups import DeleteGroupAdminRestServlet from synapse.rest.admin.media import ListMediaInRoom, register_servlets_for_media_repo +from synapse.rest.admin.registration_tokens import ( + ListRegistrationTokensRestServlet, + NewRegistrationTokenRestServlet, + RegistrationTokenRestServlet, +) from synapse.rest.admin.rooms import ( DeleteRoomRestServlet, ForwardExtremitiesRestServlet, @@ -238,6 +243,9 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: RoomEventContextServlet(hs).register(http_server) RateLimitRestServlet(hs).register(http_server) UsernameAvailableRestServlet(hs).register(http_server) + ListRegistrationTokensRestServlet(hs).register(http_server) + NewRegistrationTokenRestServlet(hs).register(http_server) + RegistrationTokenRestServlet(hs).register(http_server) def register_servlets_for_client_rest_resource( diff --git a/synapse/rest/admin/registration_tokens.py b/synapse/rest/admin/registration_tokens.py new file mode 100644 index 000000000000..5a1c929d85c4 --- /dev/null +++ b/synapse/rest/admin/registration_tokens.py @@ -0,0 +1,321 @@ +# Copyright 2021 Callum Brown +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import string +from typing import TYPE_CHECKING, Tuple + +from synapse.api.errors import Codes, NotFoundError, SynapseError +from synapse.http.servlet import ( + RestServlet, + parse_boolean, + parse_json_object_from_request, +) +from synapse.http.site import SynapseRequest +from synapse.rest.admin._base import admin_patterns, assert_requester_is_admin +from synapse.types import JsonDict + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +class ListRegistrationTokensRestServlet(RestServlet): + """List registration tokens. + + To list all tokens: + + GET /_synapse/admin/v1/registration_tokens + + 200 OK + + { + "registration_tokens": [ + { + "token": "abcd", + "uses_allowed": 3, + "pending": 0, + "completed": 1, + "expiry_time": null + }, + { + "token": "wxyz", + "uses_allowed": null, + "pending": 0, + "completed": 9, + "expiry_time": 1625394937000 + } + ] + } + + The optional query parameter `valid` can be used to filter the response. + If it is `true`, only valid tokens are returned. If it is `false`, only + tokens that have expired or have had all uses exhausted are returned. + If it is omitted, all tokens are returned regardless of validity. 
+ """ + + PATTERNS = admin_patterns("/registration_tokens$") + + def __init__(self, hs: "HomeServer"): + self.hs = hs + self.auth = hs.get_auth() + self.store = hs.get_datastore() + + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + await assert_requester_is_admin(self.auth, request) + valid = parse_boolean(request, "valid") + token_list = await self.store.get_registration_tokens(valid) + return 200, {"registration_tokens": token_list} + + +class NewRegistrationTokenRestServlet(RestServlet): + """Create a new registration token. + + For example, to create a token specifying some fields: + + POST /_synapse/admin/v1/registration_tokens/new + + { + "token": "defg", + "uses_allowed": 1 + } + + 200 OK + + { + "token": "defg", + "uses_allowed": 1, + "pending": 0, + "completed": 0, + "expiry_time": null + } + + Defaults are used for any fields not specified. + """ + + PATTERNS = admin_patterns("/registration_tokens/new$") + + def __init__(self, hs: "HomeServer"): + self.hs = hs + self.auth = hs.get_auth() + self.store = hs.get_datastore() + self.clock = hs.get_clock() + # A string of all the characters allowed to be in a registration_token + self.allowed_chars = string.ascii_letters + string.digits + "-_" + self.allowed_chars_set = set(self.allowed_chars) + + async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + await assert_requester_is_admin(self.auth, request) + body = parse_json_object_from_request(request) + + if "token" in body: + token = body["token"] + if not isinstance(token, str): + raise SynapseError(400, "token must be a string", Codes.INVALID_PARAM) + if not (0 < len(token) <= 64): + raise SynapseError( + 400, + "token must not be empty and must not be longer than 64 characters", + Codes.INVALID_PARAM, + ) + if not set(token).issubset(self.allowed_chars_set): + raise SynapseError( + 400, + "token must consist only of characters matched by the regex [A-Za-z0-9-_]", + Codes.INVALID_PARAM, + ) + + else: + # Get length of token to generate (default is 16) + length = body.get("length", 16) + if not isinstance(length, int): + raise SynapseError( + 400, "length must be an integer", Codes.INVALID_PARAM + ) + if not (0 < length <= 64): + raise SynapseError( + 400, + "length must be greater than zero and not greater than 64", + Codes.INVALID_PARAM, + ) + + # Generate token + token = await self.store.generate_registration_token( + length, self.allowed_chars + ) + + uses_allowed = body.get("uses_allowed", None) + if not ( + uses_allowed is None + or (isinstance(uses_allowed, int) and uses_allowed >= 0) + ): + raise SynapseError( + 400, + "uses_allowed must be a non-negative integer or null", + Codes.INVALID_PARAM, + ) + + expiry_time = body.get("expiry_time", None) + if not isinstance(expiry_time, (int, type(None))): + raise SynapseError( + 400, "expiry_time must be an integer or null", Codes.INVALID_PARAM + ) + if isinstance(expiry_time, int) and expiry_time < self.clock.time_msec(): + raise SynapseError( + 400, "expiry_time must not be in the past", Codes.INVALID_PARAM + ) + + created = await self.store.create_registration_token( + token, uses_allowed, expiry_time + ) + if not created: + raise SynapseError( + 400, f"Token already exists: {token}", Codes.INVALID_PARAM + ) + + resp = { + "token": token, + "uses_allowed": uses_allowed, + "pending": 0, + "completed": 0, + "expiry_time": expiry_time, + } + return 200, resp + + +class RegistrationTokenRestServlet(RestServlet): + """Retrieve, update, or delete the given token. 
+ + For example, + + to retrieve a token: + + GET /_synapse/admin/v1/registration_tokens/abcd + + 200 OK + + { + "token": "abcd", + "uses_allowed": 3, + "pending": 0, + "completed": 1, + "expiry_time": null + } + + + to update a token: + + PUT /_synapse/admin/v1/registration_tokens/defg + + { + "uses_allowed": 5, + "expiry_time": 4781243146000 + } + + 200 OK + + { + "token": "defg", + "uses_allowed": 5, + "pending": 0, + "completed": 0, + "expiry_time": 4781243146000 + } + + + to delete a token: + + DELETE /_synapse/admin/v1/registration_tokens/wxyz + + 200 OK + + {} + """ + + PATTERNS = admin_patterns("/registration_tokens/(?P[^/]*)$") + + def __init__(self, hs: "HomeServer"): + self.hs = hs + self.clock = hs.get_clock() + self.auth = hs.get_auth() + self.store = hs.get_datastore() + + async def on_GET(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDict]: + """Retrieve a registration token.""" + await assert_requester_is_admin(self.auth, request) + token_info = await self.store.get_one_registration_token(token) + + # If no result return a 404 + if token_info is None: + raise NotFoundError(f"No such registration token: {token}") + + return 200, token_info + + async def on_PUT(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDict]: + """Update a registration token.""" + await assert_requester_is_admin(self.auth, request) + body = parse_json_object_from_request(request) + new_attributes = {} + + # Only add uses_allowed to new_attributes if it is present and valid + if "uses_allowed" in body: + uses_allowed = body["uses_allowed"] + if not ( + uses_allowed is None + or (isinstance(uses_allowed, int) and uses_allowed >= 0) + ): + raise SynapseError( + 400, + "uses_allowed must be a non-negative integer or null", + Codes.INVALID_PARAM, + ) + new_attributes["uses_allowed"] = uses_allowed + + if "expiry_time" in body: + expiry_time = body["expiry_time"] + if not isinstance(expiry_time, (int, type(None))): + raise SynapseError( + 400, "expiry_time must be an integer or null", Codes.INVALID_PARAM + ) + if isinstance(expiry_time, int) and expiry_time < self.clock.time_msec(): + raise SynapseError( + 400, "expiry_time must not be in the past", Codes.INVALID_PARAM + ) + new_attributes["expiry_time"] = expiry_time + + if len(new_attributes) == 0: + # Nothing to update, get token info to return + token_info = await self.store.get_one_registration_token(token) + else: + token_info = await self.store.update_registration_token( + token, new_attributes + ) + + # If no result return a 404 + if token_info is None: + raise NotFoundError(f"No such registration token: {token}") + + return 200, token_info + + async def on_DELETE( + self, request: SynapseRequest, token: str + ) -> Tuple[int, JsonDict]: + """Delete a registration token.""" + await assert_requester_is_admin(self.auth, request) + + if await self.store.delete_registration_token(token): + return 200, {} + + raise NotFoundError(f"No such registration token: {token}") diff --git a/synapse/rest/client/account_validity.py b/synapse/rest/client/account_validity.py index 3ebe40186153..6c24b96c547d 100644 --- a/synapse/rest/client/account_validity.py +++ b/synapse/rest/client/account_validity.py @@ -13,24 +13,27 @@ # limitations under the License. 
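The retrieve, update, and delete handlers shown above can be exercised the same way from an admin client. A hedged sketch follows; the homeserver URL, admin token, and token names are placeholders.

```python
# Hedged sketch: update then delete a token via the per-token admin endpoints.
import requests

BASE_URL = "https://homeserver.example.com"                 # hypothetical
HEADERS = {"Authorization": "Bearer <admin access token>"}  # hypothetical

# Raise the use limit and clear the expiry on an existing token.
resp = requests.put(
    f"{BASE_URL}/_synapse/admin/v1/registration_tokens/defg",
    json={"uses_allowed": 5, "expiry_time": None},
    headers=HEADERS,
)
print(resp.status_code, resp.json())  # 200 and the updated token info, or 404

# Delete a token that is no longer needed.
resp = requests.delete(
    f"{BASE_URL}/_synapse/admin/v1/registration_tokens/wxyz",
    headers=HEADERS,
)
print(resp.status_code)  # 200 on success, 404 if the token does not exist
```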
import logging +from typing import TYPE_CHECKING, Tuple -from synapse.api.errors import SynapseError -from synapse.http.server import respond_with_html -from synapse.http.servlet import RestServlet +from twisted.web.server import Request + +from synapse.http.server import HttpServer, respond_with_html +from synapse.http.servlet import RestServlet, parse_string +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict from ._base import client_patterns +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) class AccountValidityRenewServlet(RestServlet): PATTERNS = client_patterns("/account_validity/renew$") - def __init__(self, hs): - """ - Args: - hs (synapse.server.HomeServer): server - """ + def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs @@ -46,18 +49,14 @@ def __init__(self, hs): hs.config.account_validity.account_validity_invalid_token_template ) - async def on_GET(self, request): - if b"token" not in request.args: - raise SynapseError(400, "Missing renewal token") - renewal_token = request.args[b"token"][0] + async def on_GET(self, request: Request) -> None: + renewal_token = parse_string(request, "token", required=True) ( token_valid, token_stale, expiration_ts, - ) = await self.account_activity_handler.renew_account( - renewal_token.decode("utf8") - ) + ) = await self.account_activity_handler.renew_account(renewal_token) if token_valid: status_code = 200 @@ -77,11 +76,7 @@ async def on_GET(self, request): class AccountValiditySendMailServlet(RestServlet): PATTERNS = client_patterns("/account_validity/send_mail$") - def __init__(self, hs): - """ - Args: - hs (synapse.server.HomeServer): server - """ + def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs @@ -91,7 +86,7 @@ def __init__(self, hs): hs.config.account_validity.account_validity_renew_by_email_enabled ) - async def on_POST(self, request): + async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_expired=True) user_id = requester.user.to_string() await self.account_activity_handler.send_renewal_email_to_user(user_id) @@ -99,6 +94,6 @@ async def on_POST(self, request): return 200, {} -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: AccountValidityRenewServlet(hs).register(http_server) AccountValiditySendMailServlet(hs).register(http_server) diff --git a/synapse/rest/client/auth.py b/synapse/rest/client/auth.py index 73284e48ec03..91800c02784c 100644 --- a/synapse/rest/client/auth.py +++ b/synapse/rest/client/auth.py @@ -46,6 +46,7 @@ def __init__(self, hs: "HomeServer"): self.registration_handler = hs.get_registration_handler() self.recaptcha_template = hs.config.recaptcha_template self.terms_template = hs.config.terms_template + self.registration_token_template = hs.config.registration_token_template self.success_template = hs.config.fallback_success_template async def on_GET(self, request, stagetype): @@ -74,6 +75,12 @@ async def on_GET(self, request, stagetype): # re-authenticate with their SSO provider. 
html = await self.auth_handler.start_sso_ui_auth(request, session) + elif stagetype == LoginType.REGISTRATION_TOKEN: + html = self.registration_token_template.render( + session=session, + myurl=f"{CLIENT_API_PREFIX}/r0/auth/{LoginType.REGISTRATION_TOKEN}/fallback/web", + ) + else: raise SynapseError(404, "Unknown auth stage type") @@ -140,6 +147,23 @@ async def on_POST(self, request, stagetype): # The SSO fallback workflow should not post here, raise SynapseError(404, "Fallback SSO auth does not support POST requests.") + elif stagetype == LoginType.REGISTRATION_TOKEN: + token = parse_string(request, "token", required=True) + authdict = {"session": session, "token": token} + + try: + await self.auth_handler.add_oob_auth( + LoginType.REGISTRATION_TOKEN, authdict, request.getClientIP() + ) + except LoginError as e: + html = self.registration_token_template.render( + session=session, + myurl=f"{CLIENT_API_PREFIX}/r0/auth/{LoginType.REGISTRATION_TOKEN}/fallback/web", + error=e.msg, + ) + else: + html = self.success_template.render() + else: raise SynapseError(404, "Unknown auth stage type") diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py index 093549512ebc..65b3b5ce2cc5 100644 --- a/synapse/rest/client/capabilities.py +++ b/synapse/rest/client/capabilities.py @@ -15,6 +15,7 @@ from typing import TYPE_CHECKING, Tuple from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, MSC3244_CAPABILITIES +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet from synapse.http.site import SynapseRequest from synapse.types import JsonDict @@ -75,5 +76,5 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: return 200, response -def register_servlets(hs: "HomeServer", http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: CapabilitiesRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/directory.py b/synapse/rest/client/directory.py index ffa075c8e5f6..ee247e3d1e0d 100644 --- a/synapse/rest/client/directory.py +++ b/synapse/rest/client/directory.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
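The registration-token fallback handlers added to `auth.py` above render a form on GET and accept the token back on POST. The following is a hedged sketch of what a submission to that fallback endpoint looks like; in practice the flow runs in a browser. The stage identifier is assumed to be the unstable MSC3231 login type used elsewhere in this patch, and the homeserver URL and UIA session ID are placeholders.

```python
# Hedged sketch: the path mirrors the `myurl` built in AuthRestServlet above;
# the stage identifier, URL, session ID, and token value are assumptions.
import requests

BASE_URL = "https://homeserver.example.com"           # hypothetical
SESSION = "<UIA session id>"                          # from the 401 UIA response
STAGE = "org.matrix.msc3231.login.registration_token"  # assumed LoginType value

resp = requests.post(
    f"{BASE_URL}/_matrix/client/r0/auth/{STAGE}/fallback/web",
    params={"session": SESSION, "token": "abcd"},
)
print(resp.status_code)  # 200 with a success page if the token was accepted
```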
- import logging +from typing import TYPE_CHECKING, Tuple + +from twisted.web.server import Request from synapse.api.errors import ( AuthError, @@ -22,14 +24,19 @@ NotFoundError, SynapseError, ) +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.http.site import SynapseRequest from synapse.rest.client._base import client_patterns -from synapse.types import RoomAlias +from synapse.types import JsonDict, RoomAlias + +if TYPE_CHECKING: + from synapse.server import HomeServer logger = logging.getLogger(__name__) -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ClientDirectoryServer(hs).register(http_server) ClientDirectoryListServer(hs).register(http_server) ClientAppserviceDirectoryListServer(hs).register(http_server) @@ -38,21 +45,23 @@ def register_servlets(hs, http_server): class ClientDirectoryServer(RestServlet): PATTERNS = client_patterns("/directory/room/(?P[^/]*)$", v1=True) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.store = hs.get_datastore() self.directory_handler = hs.get_directory_handler() self.auth = hs.get_auth() - async def on_GET(self, request, room_alias): - room_alias = RoomAlias.from_string(room_alias) + async def on_GET(self, request: Request, room_alias: str) -> Tuple[int, JsonDict]: + room_alias_obj = RoomAlias.from_string(room_alias) - res = await self.directory_handler.get_association(room_alias) + res = await self.directory_handler.get_association(room_alias_obj) return 200, res - async def on_PUT(self, request, room_alias): - room_alias = RoomAlias.from_string(room_alias) + async def on_PUT( + self, request: SynapseRequest, room_alias: str + ) -> Tuple[int, JsonDict]: + room_alias_obj = RoomAlias.from_string(room_alias) content = parse_json_object_from_request(request) if "room_id" not in content: @@ -61,7 +70,7 @@ async def on_PUT(self, request, room_alias): ) logger.debug("Got content: %s", content) - logger.debug("Got room name: %s", room_alias.to_string()) + logger.debug("Got room name: %s", room_alias_obj.to_string()) room_id = content["room_id"] servers = content["servers"] if "servers" in content else None @@ -78,22 +87,25 @@ async def on_PUT(self, request, room_alias): requester = await self.auth.get_user_by_req(request) await self.directory_handler.create_association( - requester, room_alias, room_id, servers + requester, room_alias_obj, room_id, servers ) return 200, {} - async def on_DELETE(self, request, room_alias): + async def on_DELETE( + self, request: SynapseRequest, room_alias: str + ) -> Tuple[int, JsonDict]: + room_alias_obj = RoomAlias.from_string(room_alias) + try: service = self.auth.get_appservice_by_req(request) - room_alias = RoomAlias.from_string(room_alias) await self.directory_handler.delete_appservice_association( - service, room_alias + service, room_alias_obj ) logger.info( "Application service at %s deleted alias %s", service.url, - room_alias.to_string(), + room_alias_obj.to_string(), ) return 200, {} except InvalidClientCredentialsError: @@ -103,12 +115,10 @@ async def on_DELETE(self, request, room_alias): requester = await self.auth.get_user_by_req(request) user = requester.user - room_alias = RoomAlias.from_string(room_alias) - - await self.directory_handler.delete_association(requester, room_alias) + await self.directory_handler.delete_association(requester, room_alias_obj) logger.info( - "User %s deleted alias %s", user.to_string(), 
room_alias.to_string() + "User %s deleted alias %s", user.to_string(), room_alias_obj.to_string() ) return 200, {} @@ -117,20 +127,22 @@ async def on_DELETE(self, request, room_alias): class ClientDirectoryListServer(RestServlet): PATTERNS = client_patterns("/directory/list/room/(?P[^/]*)$", v1=True) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.store = hs.get_datastore() self.directory_handler = hs.get_directory_handler() self.auth = hs.get_auth() - async def on_GET(self, request, room_id): + async def on_GET(self, request: Request, room_id: str) -> Tuple[int, JsonDict]: room = await self.store.get_room(room_id) if room is None: raise NotFoundError("Unknown room") return 200, {"visibility": "public" if room["is_public"] else "private"} - async def on_PUT(self, request, room_id): + async def on_PUT( + self, request: SynapseRequest, room_id: str + ) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) content = parse_json_object_from_request(request) @@ -142,7 +154,9 @@ async def on_PUT(self, request, room_id): return 200, {} - async def on_DELETE(self, request, room_id): + async def on_DELETE( + self, request: SynapseRequest, room_id: str + ) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await self.directory_handler.edit_published_room_list( @@ -157,21 +171,27 @@ class ClientAppserviceDirectoryListServer(RestServlet): "/directory/list/appservice/(?P[^/]*)/(?P[^/]*)$", v1=True ) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.store = hs.get_datastore() self.directory_handler = hs.get_directory_handler() self.auth = hs.get_auth() - def on_PUT(self, request, network_id, room_id): + async def on_PUT( + self, request: SynapseRequest, network_id: str, room_id: str + ) -> Tuple[int, JsonDict]: content = parse_json_object_from_request(request) visibility = content.get("visibility", "public") - return self._edit(request, network_id, room_id, visibility) + return await self._edit(request, network_id, room_id, visibility) - def on_DELETE(self, request, network_id, room_id): - return self._edit(request, network_id, room_id, "private") + async def on_DELETE( + self, request: SynapseRequest, network_id: str, room_id: str + ) -> Tuple[int, JsonDict]: + return await self._edit(request, network_id, room_id, "private") - async def _edit(self, request, network_id, room_id, visibility): + async def _edit( + self, request: SynapseRequest, network_id: str, room_id: str, visibility: str + ) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if not requester.app_service: raise AuthError( diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index d0d9d30d40a3..012491f59736 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -15,8 +15,9 @@ # limitations under the License. 
import logging +from typing import Any -from synapse.api.errors import SynapseError +from synapse.api.errors import InvalidAPICallError, SynapseError from synapse.http.servlet import ( RestServlet, parse_integer, @@ -163,6 +164,19 @@ async def on_POST(self, request): device_id = requester.device_id timeout = parse_integer(request, "timeout", 10 * 1000) body = parse_json_object_from_request(request) + + device_keys = body.get("device_keys") + if not isinstance(device_keys, dict): + raise InvalidAPICallError("'device_keys' must be a JSON object") + + def is_list_of_strings(values: Any) -> bool: + return isinstance(values, list) and all(isinstance(v, str) for v in values) + + if any(not is_list_of_strings(keys) for keys in device_keys.values()): + raise InvalidAPICallError( + "'device_keys' values must be a list of strings", + ) + result = await self.e2e_keys_handler.query_devices( body, timeout, user_id, device_id ) diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index 0c8d8967b7ee..11d07776b2ff 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -104,6 +104,12 @@ def __init__(self, hs: "HomeServer"): burst_count=self.hs.config.rc_login_account.burst_count, ) + # ensure the CAS/SAML/OIDC handlers are loaded on this worker instance. + # The reason for this is to ensure that the auth_provider_ids are registered + # with SsoHandler, which in turn ensures that the login/registration prometheus + # counters are initialised for the auth_provider_ids. + _load_sso_handlers(hs) + def on_GET(self, request: SynapseRequest): flows = [] if self.jwt_enabled: @@ -499,12 +505,7 @@ class SsoRedirectServlet(RestServlet): def __init__(self, hs: "HomeServer"): # make sure that the relevant handlers are instantiated, so that they # register themselves with the main SSOHandler. - if hs.config.cas_enabled: - hs.get_cas_handler() - if hs.config.saml2_enabled: - hs.get_saml_handler() - if hs.config.oidc_enabled: - hs.get_oidc_handler() + _load_sso_handlers(hs) self._sso_handler = hs.get_sso_handler() self._msc2858_enabled = hs.config.experimental.msc2858_enabled self._public_baseurl = hs.config.public_baseurl @@ -598,3 +599,19 @@ def register_servlets(hs, http_server): SsoRedirectServlet(hs).register(http_server) if hs.config.cas_enabled: CasTicketServlet(hs).register(http_server) + + +def _load_sso_handlers(hs: "HomeServer"): + """Ensure that the SSO handlers are loaded, if they are enabled by configuration. + + This is mostly useful to ensure that the CAS/SAML/OIDC handlers register themselves + with the main SsoHandler. + + It's safe to call this multiple times. + """ + if hs.config.cas.cas_enabled: + hs.get_cas_handler() + if hs.config.saml2.saml2_enabled: + hs.get_saml_handler() + if hs.config.oidc.oidc_enabled: + hs.get_oidc_handler() diff --git a/synapse/rest/client/pusher.py b/synapse/rest/client/pusher.py index 84619c5e4184..98604a93887f 100644 --- a/synapse/rest/client/pusher.py +++ b/synapse/rest/client/pusher.py @@ -13,17 +13,23 @@ # limitations under the License. 
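The stricter `/keys/query` body validation added above requires `device_keys` to be a mapping of user IDs to lists of device ID strings. A self-contained sketch of the same check, using hypothetical request bodies:

```python
# Standalone restatement of the validation added to the keys servlet above.
from typing import Any

def is_list_of_strings(values: Any) -> bool:
    return isinstance(values, list) and all(isinstance(v, str) for v in values)

good_body = {"device_keys": {"@alice:example.com": [], "@bob:example.com": ["DEVICE1"]}}
bad_body = {"device_keys": {"@alice:example.com": "DEVICE1"}}  # string, not a list

for body in (good_body, bad_body):
    device_keys = body.get("device_keys")
    ok = isinstance(device_keys, dict) and not any(
        not is_list_of_strings(keys) for keys in device_keys.values()
    )
    print(ok)  # True for good_body, False for bad_body
```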
import logging +from typing import TYPE_CHECKING, Tuple from synapse.api.errors import Codes, StoreError, SynapseError -from synapse.http.server import respond_with_html_bytes +from synapse.http.server import HttpServer, respond_with_html_bytes from synapse.http.servlet import ( RestServlet, assert_params_in_dict, parse_json_object_from_request, parse_string, ) +from synapse.http.site import SynapseRequest from synapse.push import PusherConfigException from synapse.rest.client._base import client_patterns +from synapse.types import JsonDict + +if TYPE_CHECKING: + from synapse.server import HomeServer logger = logging.getLogger(__name__) @@ -31,12 +37,12 @@ class PushersRestServlet(RestServlet): PATTERNS = client_patterns("/pushers$", v1=True) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs self.auth = hs.get_auth() - async def on_GET(self, request): + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user = requester.user @@ -50,14 +56,14 @@ async def on_GET(self, request): class PushersSetRestServlet(RestServlet): PATTERNS = client_patterns("/pushers/set$", v1=True) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs self.auth = hs.get_auth() self.notifier = hs.get_notifier() self.pusher_pool = self.hs.get_pusherpool() - async def on_POST(self, request): + async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user = requester.user @@ -132,14 +138,14 @@ class PushersRemoveRestServlet(RestServlet): PATTERNS = client_patterns("/pushers/remove$", v1=True) SUCCESS_HTML = b"You have been unsubscribed" - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs self.notifier = hs.get_notifier() self.auth = hs.get_auth() self.pusher_pool = self.hs.get_pusherpool() - async def on_GET(self, request): + async def on_GET(self, request: SynapseRequest) -> None: requester = await self.auth.get_user_by_req(request, rights="delete_pusher") user = requester.user @@ -165,7 +171,7 @@ async def on_GET(self, request): return None -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: PushersRestServlet(hs).register(http_server) PushersSetRestServlet(hs).register(http_server) PushersRemoveRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/read_marker.py b/synapse/rest/client/read_marker.py index 027f8b81fa93..43c04fac6fdb 100644 --- a/synapse/rest/client/read_marker.py +++ b/synapse/rest/client/read_marker.py @@ -13,27 +13,36 @@ # limitations under the License. 
import logging +from typing import TYPE_CHECKING, Tuple from synapse.api.constants import ReadReceiptEventFields from synapse.api.errors import Codes, SynapseError +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict from ._base import client_patterns +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) class ReadMarkerRestServlet(RestServlet): PATTERNS = client_patterns("/rooms/(?P[^/]*)/read_markers$") - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() self.receipts_handler = hs.get_receipts_handler() self.read_marker_handler = hs.get_read_marker_handler() self.presence_handler = hs.get_presence_handler() - async def on_POST(self, request, room_id): + async def on_POST( + self, request: SynapseRequest, room_id: str + ) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await self.presence_handler.bump_presence_active_time(requester.user) @@ -70,5 +79,5 @@ async def on_POST(self, request, room_id): return 200, {} -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ReadMarkerRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 58b8e8f2614f..2781a0ea96df 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -28,6 +28,7 @@ ThreepidValidationError, UnrecognizedRequestError, ) +from synapse.api.ratelimiting import Ratelimiter from synapse.config import ConfigError from synapse.config.captcha import CaptchaConfig from synapse.config.consent import ConsentConfig @@ -379,6 +380,55 @@ async def on_GET(self, request): return 200, {"available": True} +class RegistrationTokenValidityRestServlet(RestServlet): + """Check the validity of a registration token. 
+ + Example: + + GET /_matrix/client/unstable/org.matrix.msc3231/register/org.matrix.msc3231.login.registration_token/validity?token=abcd + + 200 OK + + { + "valid": true + } + """ + + PATTERNS = client_patterns( + f"/org.matrix.msc3231/register/{LoginType.REGISTRATION_TOKEN}/validity", + releases=(), + unstable=True, + ) + + def __init__(self, hs): + """ + Args: + hs (synapse.server.HomeServer): server + """ + super().__init__() + self.hs = hs + self.store = hs.get_datastore() + self.ratelimiter = Ratelimiter( + store=self.store, + clock=hs.get_clock(), + rate_hz=hs.config.ratelimiting.rc_registration_token_validity.per_second, + burst_count=hs.config.ratelimiting.rc_registration_token_validity.burst_count, + ) + + async def on_GET(self, request): + await self.ratelimiter.ratelimit(None, (request.getClientIP(),)) + + if not self.hs.config.enable_registration: + raise SynapseError( + 403, "Registration has been disabled", errcode=Codes.FORBIDDEN + ) + + token = parse_string(request, "token", required=True) + valid = await self.store.registration_token_is_valid(token) + + return 200, {"valid": valid} + + class RegisterRestServlet(RestServlet): PATTERNS = client_patterns("/register$") @@ -686,6 +736,22 @@ async def on_POST(self, request): ) if registered: + # Check if a token was used to authenticate registration + registration_token = await self.auth_handler.get_session_data( + session_id, + UIAuthSessionDataConstants.REGISTRATION_TOKEN, + ) + if registration_token: + # Increment the `completed` counter for the token + await self.store.use_registration_token(registration_token) + # Indicate that the token has been successfully used so that + # pending is not decremented again when expiring old UIA sessions. + await self.store.mark_ui_auth_stage_complete( + session_id, + LoginType.REGISTRATION_TOKEN, + True, + ) + await self.registration_handler.post_registration_actions( user_id=registered_user_id, auth_result=auth_result, @@ -868,6 +934,11 @@ def _calculate_registration_flows( for flow in flows: flow.insert(0, LoginType.RECAPTCHA) + # Prepend registration token to all flows if we're requiring a token + if config.registration_requires_token: + for flow in flows: + flow.insert(0, LoginType.REGISTRATION_TOKEN) + return flows @@ -876,4 +947,5 @@ def register_servlets(hs, http_server): MsisdnRegisterRequestTokenRestServlet(hs).register(http_server) UsernameAvailabilityRestServlet(hs).register(http_server) RegistrationSubmitTokenServlet(hs).register(http_server) + RegistrationTokenValidityRestServlet(hs).register(http_server) RegisterRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/room_upgrade_rest_servlet.py b/synapse/rest/client/room_upgrade_rest_servlet.py index 6d1b083acb47..6a7792e18b2e 100644 --- a/synapse/rest/client/room_upgrade_rest_servlet.py +++ b/synapse/rest/client/room_upgrade_rest_servlet.py @@ -13,18 +13,25 @@ # limitations under the License. 
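The validity endpoint documented above lets a client pre-check a token before starting registration. A minimal sketch of such a check follows; the homeserver URL and token are placeholders, and the literal MSC3231 login-type segment is an assumption about what `LoginType.REGISTRATION_TOKEN` expands to.

```python
# Hedged sketch of a client-side validity check against the unstable endpoint.
import requests

BASE_URL = "https://homeserver.example.com"  # hypothetical

resp = requests.get(
    f"{BASE_URL}/_matrix/client/unstable/org.matrix.msc3231/register/"
    "org.matrix.msc3231.login.registration_token/validity",
    params={"token": "abcd"},
)
resp.raise_for_status()
print(resp.json()["valid"])  # True if the token can still be used
```

Note that the endpoint is rate limited per client IP and returns 403 if registration is disabled, so a client should treat failures as "try registration anyway or ask the user".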
import logging +from typing import TYPE_CHECKING, Tuple from synapse.api.errors import Codes, ShadowBanError, SynapseError from synapse.api.room_versions import KNOWN_ROOM_VERSIONS +from synapse.http.server import HttpServer from synapse.http.servlet import ( RestServlet, assert_params_in_dict, parse_json_object_from_request, ) +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict from synapse.util import stringutils from ._base import client_patterns +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -41,9 +48,6 @@ class RoomUpgradeRestServlet(RestServlet): } Creates a new room and shuts down the old one. Returns the ID of the new room. - - Args: - hs (synapse.server.HomeServer): """ PATTERNS = client_patterns( @@ -51,13 +55,15 @@ class RoomUpgradeRestServlet(RestServlet): "/rooms/(?P[^/]*)/upgrade$" ) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self._hs = hs self._room_creation_handler = hs.get_room_creation_handler() self._auth = hs.get_auth() - async def on_POST(self, request, room_id): + async def on_POST( + self, request: SynapseRequest, room_id: str + ) -> Tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) content = parse_json_object_from_request(request) @@ -84,5 +90,5 @@ async def on_POST(self, request, room_id): return 200, ret -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: RoomUpgradeRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/shared_rooms.py b/synapse/rest/client/shared_rooms.py index d2e7f04b406c..1d90493eb082 100644 --- a/synapse/rest/client/shared_rooms.py +++ b/synapse/rest/client/shared_rooms.py @@ -12,13 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging +from typing import TYPE_CHECKING, Tuple from synapse.api.errors import Codes, SynapseError +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet -from synapse.types import UserID +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict, UserID from ._base import client_patterns +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -32,13 +38,15 @@ class UserSharedRoomsServlet(RestServlet): releases=(), # This is an unstable feature ) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() self.store = hs.get_datastore() self.user_directory_active = hs.config.update_user_directory - async def on_GET(self, request, user_id): + async def on_GET( + self, request: SynapseRequest, user_id: str + ) -> Tuple[int, JsonDict]: if not self.user_directory_active: raise SynapseError( @@ -63,5 +71,5 @@ async def on_GET(self, request, user_id): return 200, {"joined": list(rooms)} -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: UserSharedRoomsServlet(hs).register(http_server) diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index e18f4d01b375..65c37be3e96c 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -14,17 +14,26 @@ import itertools import logging from collections import defaultdict -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Tuple +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union from synapse.api.constants import Membership, PresenceState from synapse.api.errors import Codes, StoreError, SynapseError from synapse.api.filtering import DEFAULT_FILTER_COLLECTION, FilterCollection +from synapse.api.presence import UserPresenceState from synapse.events.utils import ( format_event_for_client_v2_without_room_id, format_event_raw, ) from synapse.handlers.presence import format_user_presence_state -from synapse.handlers.sync import KnockedSyncResult, SyncConfig +from synapse.handlers.sync import ( + ArchivedSyncResult, + InvitedSyncResult, + JoinedSyncResult, + KnockedSyncResult, + SyncConfig, + SyncResult, +) +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string from synapse.http.site import SynapseRequest from synapse.types import JsonDict, StreamToken @@ -192,6 +201,8 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: return 200, {} time_now = self.clock.time_msec() + # We know that the the requester has an access token since appservices + # cannot use sync. 
response_content = await self.encode_response( time_now, sync_result, requester.access_token_id, filter_collection ) @@ -199,7 +210,13 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: logger.debug("Event formatting complete") return 200, response_content - async def encode_response(self, time_now, sync_result, access_token_id, filter): + async def encode_response( + self, + time_now: int, + sync_result: SyncResult, + access_token_id: Optional[int], + filter: FilterCollection, + ) -> JsonDict: logger.debug("Formatting events in sync response") if filter.event_format == "client": event_formatter = format_event_for_client_v2_without_room_id @@ -234,7 +251,7 @@ async def encode_response(self, time_now, sync_result, access_token_id, filter): logger.debug("building sync response dict") - response: dict = defaultdict(dict) + response: JsonDict = defaultdict(dict) response["next_batch"] = await sync_result.next_batch.to_string(self.store) if sync_result.account_data: @@ -274,6 +291,8 @@ async def encode_response(self, time_now, sync_result, access_token_id, filter): if archived: response["rooms"][Membership.LEAVE] = archived + # By the time we get here groups is no longer optional. + assert sync_result.groups is not None if sync_result.groups.join: response["groups"][Membership.JOIN] = sync_result.groups.join if sync_result.groups.invite: @@ -284,7 +303,7 @@ async def encode_response(self, time_now, sync_result, access_token_id, filter): return response @staticmethod - def encode_presence(events, time_now): + def encode_presence(events: List[UserPresenceState], time_now: int) -> JsonDict: return { "events": [ { @@ -299,25 +318,27 @@ def encode_presence(events, time_now): } async def encode_joined( - self, rooms, time_now, token_id, event_fields, event_formatter - ): + self, + rooms: List[JoinedSyncResult], + time_now: int, + token_id: Optional[int], + event_fields: List[str], + event_formatter: Callable[[JsonDict], JsonDict], + ) -> JsonDict: """ Encode the joined rooms in a sync result Args: - rooms(list[synapse.handlers.sync.JoinedSyncResult]): list of sync - results for rooms this user is joined to - time_now(int): current time - used as a baseline for age - calculations - token_id(int): ID of the user's auth token - used for namespacing + rooms: list of sync results for rooms this user is joined to + time_now: current time - used as a baseline for age calculations + token_id: ID of the user's auth token - used for namespacing of transaction IDs - event_fields(list): List of event fields to include. If empty, + event_fields: List of event fields to include. If empty, all fields will be returned. 
- event_formatter (func[dict]): function to convert from federation format + event_formatter: function to convert from federation format to client format Returns: - dict[str, dict[str, object]]: the joined rooms list, in our - response format + The joined rooms list, in our response format """ joined = {} for room in rooms: @@ -332,23 +353,26 @@ async def encode_joined( return joined - async def encode_invited(self, rooms, time_now, token_id, event_formatter): + async def encode_invited( + self, + rooms: List[InvitedSyncResult], + time_now: int, + token_id: Optional[int], + event_formatter: Callable[[JsonDict], JsonDict], + ) -> JsonDict: """ Encode the invited rooms in a sync result Args: - rooms(list[synapse.handlers.sync.InvitedSyncResult]): list of - sync results for rooms this user is invited to - time_now(int): current time - used as a baseline for age - calculations - token_id(int): ID of the user's auth token - used for namespacing + rooms: list of sync results for rooms this user is invited to + time_now: current time - used as a baseline for age calculations + token_id: ID of the user's auth token - used for namespacing of transaction IDs - event_formatter (func[dict]): function to convert from federation format + event_formatter: function to convert from federation format to client format Returns: - dict[str, dict[str, object]]: the invited rooms list, in our - response format + The invited rooms list, in our response format """ invited = {} for room in rooms: @@ -371,7 +395,7 @@ async def encode_knocked( self, rooms: List[KnockedSyncResult], time_now: int, - token_id: int, + token_id: Optional[int], event_formatter: Callable[[Dict], Dict], ) -> Dict[str, Dict[str, Any]]: """ @@ -422,25 +446,26 @@ async def encode_knocked( return knocked async def encode_archived( - self, rooms, time_now, token_id, event_fields, event_formatter - ): + self, + rooms: List[ArchivedSyncResult], + time_now: int, + token_id: Optional[int], + event_fields: List[str], + event_formatter: Callable[[JsonDict], JsonDict], + ) -> JsonDict: """ Encode the archived rooms in a sync result Args: - rooms (list[synapse.handlers.sync.ArchivedSyncResult]): list of - sync results for rooms this user is joined to - time_now(int): current time - used as a baseline for age - calculations - token_id(int): ID of the user's auth token - used for namespacing + rooms: list of sync results for rooms this user is joined to + time_now: current time - used as a baseline for age calculations + token_id: ID of the user's auth token - used for namespacing of transaction IDs - event_fields(list): List of event fields to include. If empty, + event_fields: List of event fields to include. If empty, all fields will be returned. 
- event_formatter (func[dict]): function to convert from federation format - to client format + event_formatter: function to convert from federation format to client format Returns: - dict[str, dict[str, object]]: The invited rooms list, in our - response format + The archived rooms list, in our response format """ joined = {} for room in rooms: @@ -456,23 +481,27 @@ async def encode_archived( return joined async def encode_room( - self, room, time_now, token_id, joined, only_fields, event_formatter - ): + self, + room: Union[JoinedSyncResult, ArchivedSyncResult], + time_now: int, + token_id: Optional[int], + joined: bool, + only_fields: Optional[List[str]], + event_formatter: Callable[[JsonDict], JsonDict], + ) -> JsonDict: """ Args: - room (JoinedSyncResult|ArchivedSyncResult): sync result for a - single room - time_now (int): current time - used as a baseline for age - calculations - token_id (int): ID of the user's auth token - used for namespacing + room: sync result for a single room + time_now: current time - used as a baseline for age calculations + token_id: ID of the user's auth token - used for namespacing of transaction IDs - joined (bool): True if the user is joined to this room - will mean + joined: True if the user is joined to this room - will mean we handle ephemeral events - only_fields(list): Optional. The list of event fields to include. - event_formatter (func[dict]): function to convert from federation format + only_fields: Optional. The list of event fields to include. + event_formatter: function to convert from federation format to client format Returns: - dict[str, object]: the room, encoded in our response format + The room, encoded in our response format """ def serialize(events): @@ -508,7 +537,7 @@ def serialize(events): account_data = room.account_data - result = { + result: JsonDict = { "timeline": { "events": serialized_timeline, "prev_batch": await room.timeline.prev_batch.to_string(self.store), @@ -519,6 +548,7 @@ def serialize(events): } if joined: + assert isinstance(room, JoinedSyncResult) ephemeral_events = room.ephemeral result["ephemeral"] = {"events": ephemeral_events} result["unread_notifications"] = room.unread_notifications @@ -528,5 +558,5 @@ def serialize(events): return result -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: SyncRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/tags.py b/synapse/rest/client/tags.py index c14f83be1878..c88cb9367c5f 100644 --- a/synapse/rest/client/tags.py +++ b/synapse/rest/client/tags.py @@ -13,12 +13,19 @@ # limitations under the License. 
import logging +from typing import TYPE_CHECKING, Tuple from synapse.api.errors import AuthError +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict from ._base import client_patterns +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) @@ -29,12 +36,14 @@ class TagListServlet(RestServlet): PATTERNS = client_patterns("/user/(?P[^/]*)/rooms/(?P[^/]*)/tags") - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() self.store = hs.get_datastore() - async def on_GET(self, request, user_id, room_id): + async def on_GET( + self, request: SynapseRequest, user_id: str, room_id: str + ) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot get tags for other users.") @@ -54,12 +63,14 @@ class TagServlet(RestServlet): "/user/(?P[^/]*)/rooms/(?P[^/]*)/tags/(?P[^/]*)" ) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() self.handler = hs.get_account_data_handler() - async def on_PUT(self, request, user_id, room_id, tag): + async def on_PUT( + self, request: SynapseRequest, user_id: str, room_id: str, tag: str + ) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add tags for other users.") @@ -70,7 +81,9 @@ async def on_PUT(self, request, user_id, room_id, tag): return 200, {} - async def on_DELETE(self, request, user_id, room_id, tag): + async def on_DELETE( + self, request: SynapseRequest, user_id: str, room_id: str, tag: str + ) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add tags for other users.") @@ -80,6 +93,6 @@ async def on_DELETE(self, request, user_id, room_id, tag): return 200, {} -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: TagListServlet(hs).register(http_server) TagServlet(hs).register(http_server) diff --git a/synapse/rest/client/thirdparty.py b/synapse/rest/client/thirdparty.py index b5c67c9bb67e..b895c73acf2c 100644 --- a/synapse/rest/client/thirdparty.py +++ b/synapse/rest/client/thirdparty.py @@ -12,27 +12,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
- import logging +from typing import TYPE_CHECKING, Dict, List, Tuple from synapse.api.constants import ThirdPartyEntityKind +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict from ._base import client_patterns +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) class ThirdPartyProtocolsServlet(RestServlet): PATTERNS = client_patterns("/thirdparty/protocols") - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() self.appservice_handler = hs.get_application_service_handler() - async def on_GET(self, request): + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) protocols = await self.appservice_handler.get_3pe_protocols() @@ -42,13 +48,15 @@ async def on_GET(self, request): class ThirdPartyProtocolServlet(RestServlet): PATTERNS = client_patterns("/thirdparty/protocol/(?P[^/]+)$") - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() self.appservice_handler = hs.get_application_service_handler() - async def on_GET(self, request, protocol): + async def on_GET( + self, request: SynapseRequest, protocol: str + ) -> Tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) protocols = await self.appservice_handler.get_3pe_protocols( @@ -63,16 +71,18 @@ async def on_GET(self, request, protocol): class ThirdPartyUserServlet(RestServlet): PATTERNS = client_patterns("/thirdparty/user(/(?P[^/]+))?$") - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() self.appservice_handler = hs.get_application_service_handler() - async def on_GET(self, request, protocol): + async def on_GET( + self, request: SynapseRequest, protocol: str + ) -> Tuple[int, List[JsonDict]]: await self.auth.get_user_by_req(request, allow_guest=True) - fields = request.args + fields: Dict[bytes, List[bytes]] = request.args # type: ignore[assignment] fields.pop(b"access_token", None) results = await self.appservice_handler.query_3pe( @@ -85,16 +95,18 @@ async def on_GET(self, request, protocol): class ThirdPartyLocationServlet(RestServlet): PATTERNS = client_patterns("/thirdparty/location(/(?P[^/]+))?$") - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() self.appservice_handler = hs.get_application_service_handler() - async def on_GET(self, request, protocol): + async def on_GET( + self, request: SynapseRequest, protocol: str + ) -> Tuple[int, List[JsonDict]]: await self.auth.get_user_by_req(request, allow_guest=True) - fields = request.args + fields: Dict[bytes, List[bytes]] = request.args # type: ignore[assignment] fields.pop(b"access_token", None) results = await self.appservice_handler.query_3pe( @@ -104,7 +116,7 @@ async def on_GET(self, request, protocol): return 200, results -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ThirdPartyProtocolsServlet(hs).register(http_server) ThirdPartyProtocolServlet(hs).register(http_server) ThirdPartyUserServlet(hs).register(http_server) diff --git a/synapse/rest/client/tokenrefresh.py b/synapse/rest/client/tokenrefresh.py index b2f858545cbe..c8c3b25bd36f 100644 --- a/synapse/rest/client/tokenrefresh.py +++ 
b/synapse/rest/client/tokenrefresh.py @@ -12,11 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import TYPE_CHECKING + +from twisted.web.server import Request + from synapse.api.errors import AuthError +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet from ._base import client_patterns +if TYPE_CHECKING: + from synapse.server import HomeServer + class TokenRefreshRestServlet(RestServlet): """ @@ -26,12 +34,12 @@ class TokenRefreshRestServlet(RestServlet): PATTERNS = client_patterns("/tokenrefresh") - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() - async def on_POST(self, request): + async def on_POST(self, request: Request) -> None: raise AuthError(403, "tokenrefresh is no longer supported.") -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: TokenRefreshRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/user_directory.py b/synapse/rest/client/user_directory.py index 7e8912f0b919..885281111438 100644 --- a/synapse/rest/client/user_directory.py +++ b/synapse/rest/client/user_directory.py @@ -13,29 +13,32 @@ # limitations under the License. import logging +from typing import TYPE_CHECKING, Tuple from synapse.api.errors import SynapseError +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.http.site import SynapseRequest +from synapse.types import JsonDict from ._base import client_patterns +if TYPE_CHECKING: + from synapse.server import HomeServer + logger = logging.getLogger(__name__) class UserDirectorySearchRestServlet(RestServlet): PATTERNS = client_patterns("/user_directory/search$") - def __init__(self, hs): - """ - Args: - hs (synapse.server.HomeServer): server - """ + def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs self.auth = hs.get_auth() self.user_directory_handler = hs.get_user_directory_handler() - async def on_POST(self, request): + async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: """Searches for users in directory Returns: @@ -75,5 +78,5 @@ async def on_POST(self, request): return 200, results -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: UserDirectorySearchRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index fa2e4e9cba48..a1a815cf8256 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -17,9 +17,17 @@ import logging import re +from typing import TYPE_CHECKING, Tuple + +from twisted.web.server import Request from synapse.api.constants import RoomCreationPreset +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet +from synapse.types import JsonDict + +if TYPE_CHECKING: + from synapse.server import HomeServer logger = logging.getLogger(__name__) @@ -27,7 +35,7 @@ class VersionsRestServlet(RestServlet): PATTERNS = [re.compile("^/_matrix/client/versions$")] - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.config = hs.config @@ -45,7 +53,7 @@ def __init__(self, hs): in self.config.encryption_enabled_by_default_for_room_presets ) - def on_GET(self, request): + def on_GET(self, request: Request) -> Tuple[int, JsonDict]: return ( 200, { @@ -89,5 +97,5 @@ def on_GET(self, 
request): ) -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: VersionsRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/voip.py b/synapse/rest/client/voip.py index f53020520d37..9d46ed3af3f5 100644 --- a/synapse/rest/client/voip.py +++ b/synapse/rest/client/voip.py @@ -15,20 +15,27 @@ import base64 import hashlib import hmac +from typing import TYPE_CHECKING, Tuple +from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet +from synapse.http.site import SynapseRequest from synapse.rest.client._base import client_patterns +from synapse.types import JsonDict + +if TYPE_CHECKING: + from synapse.server import HomeServer class VoipRestServlet(RestServlet): PATTERNS = client_patterns("/voip/turnServer$", v1=True) - def __init__(self, hs): + def __init__(self, hs: "HomeServer"): super().__init__() self.hs = hs self.auth = hs.get_auth() - async def on_GET(self, request): + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: requester = await self.auth.get_user_by_req( request, self.hs.config.turn_allow_guests ) @@ -69,5 +76,5 @@ async def on_GET(self, request): ) -def register_servlets(hs, http_server): +def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: VoipRestServlet(hs).register(http_server) diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py index a029d426f0b6..12bd745cb21c 100644 --- a/synapse/rest/media/v1/thumbnail_resource.py +++ b/synapse/rest/media/v1/thumbnail_resource.py @@ -15,7 +15,7 @@ import logging -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple from twisted.web.server import Request @@ -414,9 +414,9 @@ def _select_thumbnail( if desired_method == "crop": # Thumbnails that match equal or larger sizes of desired width/height. - crop_info_list = [] + crop_info_list: List[Tuple[int, int, int, bool, int, Dict[str, Any]]] = [] # Other thumbnails. - crop_info_list2 = [] + crop_info_list2: List[Tuple[int, int, int, bool, int, Dict[str, Any]]] = [] for info in thumbnail_infos: # Skip thumbnails generated with different methods. if info["thumbnail_method"] != "crop": @@ -451,15 +451,19 @@ def _select_thumbnail( info, ) ) + # Pick the most appropriate thumbnail. Some values of `desired_width` and + # `desired_height` may result in a tie, in which case we avoid comparing on + # the thumbnail info dictionary and pick the thumbnail that appears earlier + # in the list of candidates. if crop_info_list: - thumbnail_info = min(crop_info_list)[-1] + thumbnail_info = min(crop_info_list, key=lambda t: t[:-1])[-1] elif crop_info_list2: - thumbnail_info = min(crop_info_list2)[-1] + thumbnail_info = min(crop_info_list2, key=lambda t: t[:-1])[-1] elif desired_method == "scale": # Thumbnails that match equal or larger sizes of desired width/height. - info_list = [] + info_list: List[Tuple[int, bool, int, Dict[str, Any]]] = [] # Other thumbnails. - info_list2 = [] + info_list2: List[Tuple[int, bool, int, Dict[str, Any]]] = [] for info in thumbnail_infos: # Skip thumbnails generated with different methods. @@ -477,10 +481,14 @@ def _select_thumbnail( info_list2.append( (size_quality, type_quality, length_quality, info) ) + # Pick the most appropriate thumbnail. 
Some values of `desired_width` and + # `desired_height` may result in a tie, in which case we avoid comparing on + # the thumbnail info dictionary and pick the thumbnail that appears earlier + # in the list of candidates. if info_list: - thumbnail_info = min(info_list)[-1] + thumbnail_info = min(info_list, key=lambda t: t[:-1])[-1] elif info_list2: - thumbnail_info = min(info_list2)[-1] + thumbnail_info = min(info_list2, key=lambda t: t[:-1])[-1] if thumbnail_info: return FileInfo( diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index 01b918e12e10..00a644e8f71c 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -63,6 +63,7 @@ from .room import RoomStore from .roommember import RoomMemberStore from .search import SearchStore +from .session import SessionStore from .signatures import SignatureStore from .state import StateStore from .stats import StatsStore @@ -121,6 +122,7 @@ class DataStore( ServerMetricsStore, EventForwardExtremitiesStore, LockStore, + SessionStore, ): def __init__(self, database: DatabasePool, db_conn, hs): self.hs = hs diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index 469dd53e0ce0..a6517962f619 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -1168,6 +1168,322 @@ async def update_access_token_last_validated(self, token_id: int) -> None: desc="update_access_token_last_validated", ) + async def registration_token_is_valid(self, token: str) -> bool: + """Checks if a token can be used to authenticate a registration. + + Args: + token: The registration token to be checked + Returns: + True if the token is valid, False otherwise. + """ + res = await self.db_pool.simple_select_one( + "registration_tokens", + keyvalues={"token": token}, + retcols=["uses_allowed", "pending", "completed", "expiry_time"], + allow_none=True, + ) + + # Check if the token exists + if res is None: + return False + + # Check if the token has expired + now = self._clock.time_msec() + if res["expiry_time"] and res["expiry_time"] < now: + return False + + # Check if the token has been used up + if ( + res["uses_allowed"] + and res["pending"] + res["completed"] >= res["uses_allowed"] + ): + return False + + # Otherwise, the token is valid + return True + + async def set_registration_token_pending(self, token: str) -> None: + """Increment the pending registrations counter for a token. + + Args: + token: The registration token pending use + """ + + def _set_registration_token_pending_txn(txn): + pending = self.db_pool.simple_select_one_onecol_txn( + txn, + "registration_tokens", + keyvalues={"token": token}, + retcol="pending", + ) + self.db_pool.simple_update_one_txn( + txn, + "registration_tokens", + keyvalues={"token": token}, + updatevalues={"pending": pending + 1}, + ) + + return await self.db_pool.runInteraction( + "set_registration_token_pending", _set_registration_token_pending_txn + ) + + async def use_registration_token(self, token: str) -> None: + """Complete a use of the given registration token. + + The `pending` counter will be decremented, and the `completed` + counter will be incremented. + + Args: + token: The registration token to be 'used' + """ + + def _use_registration_token_txn(txn): + # Normally, res is Optional[Dict[str, Any]]. 
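To make the rules in `registration_token_is_valid` above easier to follow, here is a standalone restatement of the same checks. The helper name and sample values are illustrative only.

```python
# Hedged restatement: a token is invalid once it has expired or once
# pending + completed uses reach uses_allowed; a null/zero uses_allowed or
# expiry_time is treated as "no limit" / "no expiry", matching the code above.
from typing import Optional

def token_is_valid_sketch(
    uses_allowed: Optional[int],
    pending: int,
    completed: int,
    expiry_time: Optional[int],
    now_ms: int,
) -> bool:
    if expiry_time and expiry_time < now_ms:
        return False
    if uses_allowed and pending + completed >= uses_allowed:
        return False
    return True

print(token_is_valid_sketch(3, 0, 1, None, 1_625_000_000_000))     # True: uses remaining
print(token_is_valid_sketch(None, 0, 9, 1_625_394_937_000, 1_700_000_000_000))  # False: expired
print(token_is_valid_sketch(1, 1, 0, None, 1_625_000_000_000))     # False: last use is pending
```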
+ # Override type because the return type is only optional if + # allow_none is True, and we don't want mypy throwing errors + # about None not being indexable. + res: Dict[str, Any] = self.db_pool.simple_select_one_txn( + txn, + "registration_tokens", + keyvalues={"token": token}, + retcols=["pending", "completed"], + ) # type: ignore + + # Decrement pending and increment completed + self.db_pool.simple_update_one_txn( + txn, + "registration_tokens", + keyvalues={"token": token}, + updatevalues={ + "completed": res["completed"] + 1, + "pending": res["pending"] - 1, + }, + ) + + return await self.db_pool.runInteraction( + "use_registration_token", _use_registration_token_txn + ) + + async def get_registration_tokens( + self, valid: Optional[bool] = None + ) -> List[Dict[str, Any]]: + """List all registration tokens. Used by the admin API. + + Args: + valid: If True, only valid tokens are returned. + If False, only invalid tokens are returned. + Default is None: return all tokens regardless of validity. + + Returns: + A list of dicts, each containing details of a token. + """ + + def select_registration_tokens_txn(txn, now: int, valid: Optional[bool]): + if valid is None: + # Return all tokens regardless of validity + txn.execute("SELECT * FROM registration_tokens") + + elif valid: + # Select valid tokens only + sql = ( + "SELECT * FROM registration_tokens WHERE " + "(uses_allowed > pending + completed OR uses_allowed IS NULL) " + "AND (expiry_time > ? OR expiry_time IS NULL)" + ) + txn.execute(sql, [now]) + + else: + # Select invalid tokens only + sql = ( + "SELECT * FROM registration_tokens WHERE " + "uses_allowed <= pending + completed OR expiry_time <= ?" + ) + txn.execute(sql, [now]) + + return self.db_pool.cursor_to_dict(txn) + + return await self.db_pool.runInteraction( + "select_registration_tokens", + select_registration_tokens_txn, + self._clock.time_msec(), + valid, + ) + + async def get_one_registration_token(self, token: str) -> Optional[Dict[str, Any]]: + """Get info about the given registration token. Used by the admin API. + + Args: + token: The token to retrieve information about. + + Returns: + A dict, or None if token doesn't exist. + """ + return await self.db_pool.simple_select_one( + "registration_tokens", + keyvalues={"token": token}, + retcols=["token", "uses_allowed", "pending", "completed", "expiry_time"], + allow_none=True, + desc="get_one_registration_token", + ) + + async def generate_registration_token( + self, length: int, chars: str + ) -> Optional[str]: + """Generate a random registration token. Used by the admin API. + + Args: + length: The length of the token to generate. + chars: A string of the characters allowed in the generated token. + + Returns: + The generated token. + + Raises: + SynapseError if a unique registration token could still not be + generated after a few tries. + """ + # Make a few attempts at generating a unique token of the required + # length before failing. + for _i in range(3): + # Generate token + token = "".join(random.choices(chars, k=length)) + + # Check if the token already exists + existing_token = await self.db_pool.simple_select_one_onecol( + "registration_tokens", + keyvalues={"token": token}, + retcol="token", + allow_none=True, + desc="check_if_registration_token_exists", + ) + + if existing_token is None: + # The generated token doesn't exist yet, return it + return token + + raise SynapseError( + 500, + "Unable to generate a unique registration token. 
Try again with a greater length", + Codes.UNKNOWN, + ) + + async def create_registration_token( + self, token: str, uses_allowed: Optional[int], expiry_time: Optional[int] + ) -> bool: + """Create a new registration token. Used by the admin API. + + Args: + token: The token to create. + uses_allowed: The number of times the token can be used to complete + a registration before it becomes invalid. A value of None indicates + unlimited uses. + expiry_time: The latest time the token is valid. Given as the + number of milliseconds since 1970-01-01 00:00:00 UTC. A value of + None indicates that the token does not expire. + + Returns: + Whether the row was inserted or not. + """ + + def _create_registration_token_txn(txn): + row = self.db_pool.simple_select_one_txn( + txn, + "registration_tokens", + keyvalues={"token": token}, + retcols=["token"], + allow_none=True, + ) + + if row is not None: + # Token already exists + return False + + self.db_pool.simple_insert_txn( + txn, + "registration_tokens", + values={ + "token": token, + "uses_allowed": uses_allowed, + "pending": 0, + "completed": 0, + "expiry_time": expiry_time, + }, + ) + + return True + + return await self.db_pool.runInteraction( + "create_registration_token", _create_registration_token_txn + ) + + async def update_registration_token( + self, token: str, updatevalues: Dict[str, Optional[int]] + ) -> Optional[Dict[str, Any]]: + """Update a registration token. Used by the admin API. + + Args: + token: The token to update. + updatevalues: A dict with the fields to update. E.g.: + `{"uses_allowed": 3}` to update just uses_allowed, or + `{"uses_allowed": 3, "expiry_time": None}` to update both. + This is passed straight to simple_update_one. + + Returns: + A dict with all info about the token, or None if token doesn't exist. + """ + + def _update_registration_token_txn(txn): + try: + self.db_pool.simple_update_one_txn( + txn, + "registration_tokens", + keyvalues={"token": token}, + updatevalues=updatevalues, + ) + except StoreError: + # Update failed because token does not exist + return None + + # Get all info about the token so it can be sent in the response + return self.db_pool.simple_select_one_txn( + txn, + "registration_tokens", + keyvalues={"token": token}, + retcols=[ + "token", + "uses_allowed", + "pending", + "completed", + "expiry_time", + ], + allow_none=True, + ) + + return await self.db_pool.runInteraction( + "update_registration_token", _update_registration_token_txn + ) + + async def delete_registration_token(self, token: str) -> bool: + """Delete a registration token. Used by the admin API. + + Args: + token: The token to delete. + + Returns: + Whether the token was successfully deleted or not. + """ + try: + await self.db_pool.simple_delete_one( + "registration_tokens", + keyvalues={"token": token}, + desc="delete_registration_token", + ) + except StoreError: + # Deletion failed because token does not exist + return False + + return True + @cached() async def mark_access_token_as_used(self, token_id: int) -> None: """ diff --git a/synapse/storage/databases/main/session.py b/synapse/storage/databases/main/session.py new file mode 100644 index 000000000000..172f27d109ad --- /dev/null +++ b/synapse/storage/databases/main/session.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import TYPE_CHECKING + +import synapse.util.stringutils as stringutils +from synapse.api.errors import StoreError +from synapse.metrics.background_process_metrics import wrap_as_background_process +from synapse.storage._base import SQLBaseStore, db_to_json +from synapse.storage.database import ( + DatabasePool, + LoggingDatabaseConnection, + LoggingTransaction, +) +from synapse.types import JsonDict +from synapse.util import json_encoder + +if TYPE_CHECKING: + from synapse.server import HomeServer + + +class SessionStore(SQLBaseStore): + """ + A store for generic session data. + + Each type of session should provide a unique type (to separate sessions). + + Sessions are automatically removed when they expire. + """ + + def __init__( + self, + database: DatabasePool, + db_conn: LoggingDatabaseConnection, + hs: "HomeServer", + ): + super().__init__(database, db_conn, hs) + + # Create a background job for culling expired sessions. + if hs.config.run_background_tasks: + self._clock.looping_call(self._delete_expired_sessions, 30 * 60 * 1000) + + async def create_session( + self, session_type: str, value: JsonDict, expiry_ms: int + ) -> str: + """ + Creates a new pagination session for the room hierarchy endpoint. + + Args: + session_type: The type for this session. + value: The value to store. + expiry_ms: How long before an item is evicted from the cache + in milliseconds. Default is 0, indicating items never get + evicted based on time. + + Returns: + The newly created session ID. + + Raises: + StoreError if a unique session ID cannot be generated. + """ + # autogen a session ID and try to create it. We may clash, so just + # try a few times till one goes through, giving up eventually. + attempts = 0 + while attempts < 5: + session_id = stringutils.random_string(24) + + try: + await self.db_pool.simple_insert( + table="sessions", + values={ + "session_id": session_id, + "session_type": session_type, + "value": json_encoder.encode(value), + "expiry_time_ms": self.hs.get_clock().time_msec() + expiry_ms, + }, + desc="create_session", + ) + + return session_id + except self.db_pool.engine.module.IntegrityError: + attempts += 1 + raise StoreError(500, "Couldn't generate a session ID.") + + async def get_session(self, session_type: str, session_id: str) -> JsonDict: + """ + Retrieve data stored with create_session + + Args: + session_type: The type for this session. + session_id: The session ID returned from create_session. + + Raises: + StoreError if the session cannot be found. + """ + + def _get_session( + txn: LoggingTransaction, session_type: str, session_id: str, ts: int + ) -> JsonDict: + # This includes the expiry time since items are only periodically + # deleted, not upon expiry. + select_sql = """ + SELECT value FROM sessions WHERE + session_type = ? AND session_id = ? AND expiry_time_ms > ? 
+ """ + txn.execute(select_sql, [session_type, session_id, ts]) + row = txn.fetchone() + + if not row: + raise StoreError(404, "No session") + + return db_to_json(row[0]) + + return await self.db_pool.runInteraction( + "get_session", + _get_session, + session_type, + session_id, + self._clock.time_msec(), + ) + + @wrap_as_background_process("delete_expired_sessions") + async def _delete_expired_sessions(self) -> None: + """Remove sessions with expiry dates that have passed.""" + + def _delete_expired_sessions_txn(txn: LoggingTransaction, ts: int) -> None: + sql = "DELETE FROM sessions WHERE expiry_time_ms <= ?" + txn.execute(sql, (ts,)) + + await self.db_pool.runInteraction( + "delete_expired_sessions", + _delete_expired_sessions_txn, + self._clock.time_msec(), + ) diff --git a/synapse/storage/databases/main/ui_auth.py b/synapse/storage/databases/main/ui_auth.py index 38bfdf5dad3a..4d6bbc94c774 100644 --- a/synapse/storage/databases/main/ui_auth.py +++ b/synapse/storage/databases/main/ui_auth.py @@ -15,6 +15,7 @@ import attr +from synapse.api.constants import LoginType from synapse.api.errors import StoreError from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage.database import LoggingTransaction @@ -329,6 +330,48 @@ def _delete_old_ui_auth_sessions_txn( keyvalues={}, ) + # If a registration token was used, decrement the pending counter + # before deleting the session. + rows = self.db_pool.simple_select_many_txn( + txn, + table="ui_auth_sessions_credentials", + column="session_id", + iterable=session_ids, + keyvalues={"stage_type": LoginType.REGISTRATION_TOKEN}, + retcols=["result"], + ) + + # Get the tokens used and how much pending needs to be decremented by. + token_counts: Dict[str, int] = {} + for r in rows: + # If registration was successfully completed, the result of the + # registration token stage for that session will be True. + # If a token was used to authenticate, but registration was + # never completed, the result will be the token used. + token = db_to_json(r["result"]) + if isinstance(token, str): + token_counts[token] = token_counts.get(token, 0) + 1 + + # Update the `pending` counters. + if len(token_counts) > 0: + token_rows = self.db_pool.simple_select_many_txn( + txn, + table="registration_tokens", + column="token", + iterable=list(token_counts.keys()), + keyvalues={}, + retcols=["token", "pending"], + ) + for token_row in token_rows: + token = token_row["token"] + new_pending = token_row["pending"] - token_counts[token] + self.db_pool.simple_update_one_txn( + txn, + table="registration_tokens", + keyvalues={"token": token}, + updatevalues={"pending": new_pending}, + ) + # Delete the corresponding completed credentials. self.db_pool.simple_delete_many_txn( txn, diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index a5bc0ee8a560..af9cc69949c3 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +# When updating these values, please leave a short summary of the changes below. 
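+#
+# 62: add the `sessions` table for generic session data (delta 62/02).
+# 63: add the `registration_tokens` table (delta 63/01).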
+ SCHEMA_VERSION = 63 """Represents the expectations made by the codebase about the database schema diff --git a/synapse/storage/schema/main/delta/62/02session_store.sql b/synapse/storage/schema/main/delta/62/02session_store.sql new file mode 100644 index 000000000000..535fb34c109c --- /dev/null +++ b/synapse/storage/schema/main/delta/62/02session_store.sql @@ -0,0 +1,23 @@ +/* + * Copyright 2021 The Matrix.org Foundation C.I.C. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +CREATE TABLE IF NOT EXISTS sessions( + session_type TEXT NOT NULL, -- The unique key for this type of session. + session_id TEXT NOT NULL, -- The session ID passed to the client. + value TEXT NOT NULL, -- A JSON dictionary to persist. + expiry_time_ms BIGINT NOT NULL, -- The time this session will expire (epoch time in milliseconds). + UNIQUE (session_type, session_id) +); diff --git a/synapse/storage/schema/main/delta/63/01create_registration_tokens.sql b/synapse/storage/schema/main/delta/63/01create_registration_tokens.sql new file mode 100644 index 000000000000..ee6cf958f4f3 --- /dev/null +++ b/synapse/storage/schema/main/delta/63/01create_registration_tokens.sql @@ -0,0 +1,23 @@ +/* Copyright 2021 Callum Brown + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +CREATE TABLE IF NOT EXISTS registration_tokens( + token TEXT NOT NULL, -- The token that can be used for authentication. + uses_allowed INT, -- The total number of times this token can be used. NULL if no limit. + pending INT NOT NULL, -- The number of in progress registrations using this token. + completed INT NOT NULL, -- The number of times this token has been used to complete a registration. + expiry_time BIGINT, -- The latest time this token will be valid (epoch time in milliseconds). NULL if token doesn't expire. + UNIQUE (token) +); diff --git a/tests/rest/admin/test_device.py b/tests/rest/admin/test_device.py index c4afe5c3d90b..a3679be20539 100644 --- a/tests/rest/admin/test_device.py +++ b/tests/rest/admin/test_device.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import json import urllib.parse +from parameterized import parameterized + import synapse.rest.admin from synapse.api.errors import Codes from synapse.rest.client import login @@ -45,49 +46,23 @@ def prepare(self, reactor, clock, hs): self.other_user_device_id, ) - def test_no_auth(self): + @parameterized.expand(["GET", "PUT", "DELETE"]) + def test_no_auth(self, method: str): """ Try to get a device of an user without authentication. """ - channel = self.make_request("GET", self.url, b"{}") - - self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"]) - - channel = self.make_request("PUT", self.url, b"{}") - - self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"]) - - channel = self.make_request("DELETE", self.url, b"{}") + channel = self.make_request(method, self.url, b"{}") self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"]) - def test_requester_is_no_admin(self): + @parameterized.expand(["GET", "PUT", "DELETE"]) + def test_requester_is_no_admin(self, method: str): """ If the user is not a server admin, an error is returned. """ channel = self.make_request( - "GET", - self.url, - access_token=self.other_user_token, - ) - - self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) - - channel = self.make_request( - "PUT", - self.url, - access_token=self.other_user_token, - ) - - self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) - self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) - - channel = self.make_request( - "DELETE", + method, self.url, access_token=self.other_user_token, ) @@ -95,7 +70,8 @@ def test_requester_is_no_admin(self): self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) - def test_user_does_not_exist(self): + @parameterized.expand(["GET", "PUT", "DELETE"]) + def test_user_does_not_exist(self, method: str): """ Tests that a lookup for a user that does not exist returns a 404 """ @@ -105,7 +81,7 @@ def test_user_does_not_exist(self): ) channel = self.make_request( - "GET", + method, url, access_token=self.admin_user_tok, ) @@ -113,25 +89,8 @@ def test_user_does_not_exist(self): self.assertEqual(404, channel.code, msg=channel.json_body) self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"]) - channel = self.make_request( - "PUT", - url, - access_token=self.admin_user_tok, - ) - - self.assertEqual(404, channel.code, msg=channel.json_body) - self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"]) - - channel = self.make_request( - "DELETE", - url, - access_token=self.admin_user_tok, - ) - - self.assertEqual(404, channel.code, msg=channel.json_body) - self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"]) - - def test_user_is_not_local(self): + @parameterized.expand(["GET", "PUT", "DELETE"]) + def test_user_is_not_local(self, method: str): """ Tests that a lookup for a user that is not a local returns a 400 """ @@ -141,25 +100,7 @@ def test_user_is_not_local(self): ) channel = self.make_request( - "GET", - url, - access_token=self.admin_user_tok, - ) - - self.assertEqual(400, channel.code, msg=channel.json_body) - self.assertEqual("Can only lookup local users", 
channel.json_body["error"]) - - channel = self.make_request( - "PUT", - url, - access_token=self.admin_user_tok, - ) - - self.assertEqual(400, channel.code, msg=channel.json_body) - self.assertEqual("Can only lookup local users", channel.json_body["error"]) - - channel = self.make_request( - "DELETE", + method, url, access_token=self.admin_user_tok, ) @@ -219,12 +160,11 @@ def test_update_device_too_long_display_name(self): * (synapse.handlers.device.MAX_DEVICE_DISPLAY_NAME_LEN + 1) } - body = json.dumps(update) channel = self.make_request( "PUT", self.url, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + content=update, ) self.assertEqual(400, channel.code, msg=channel.json_body) @@ -275,12 +215,11 @@ def test_update_display_name(self): Tests a normal successful update of display name """ # Set new display_name - body = json.dumps({"display_name": "new displayname"}) channel = self.make_request( "PUT", self.url, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + content={"display_name": "new displayname"}, ) self.assertEqual(200, channel.code, msg=channel.json_body) @@ -529,12 +468,11 @@ def test_unknown_devices(self): """ Tests that a remove of a device that does not exist returns 200. """ - body = json.dumps({"devices": ["unknown_device1", "unknown_device2"]}) channel = self.make_request( "POST", self.url, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + content={"devices": ["unknown_device1", "unknown_device2"]}, ) # Delete unknown devices returns status 200 @@ -560,12 +498,11 @@ def test_delete_devices(self): device_ids.append(str(d["device_id"])) # Delete devices - body = json.dumps({"devices": device_ids}) channel = self.make_request( "POST", self.url, access_token=self.admin_user_tok, - content=body.encode(encoding="utf_8"), + content={"devices": device_ids}, ) self.assertEqual(200, channel.code, msg=channel.json_body) diff --git a/tests/rest/admin/test_registration_tokens.py b/tests/rest/admin/test_registration_tokens.py new file mode 100644 index 000000000000..4927321e5a4f --- /dev/null +++ b/tests/rest/admin/test_registration_tokens.py @@ -0,0 +1,710 @@ +# Copyright 2021 Callum Brown +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
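+
+# Tests for the registration token admin API under
+# /_synapse/admin/v1/registration_tokens:
+#   POST   /registration_tokens/new       - create a token
+#   GET    /registration_tokens           - list tokens (optionally ?valid=true|false)
+#   GET    /registration_tokens/<token>   - get details of a single token
+#   PUT    /registration_tokens/<token>   - update uses_allowed and/or expiry_time
+#   DELETE /registration_tokens/<token>   - delete a token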
+ +import random +import string + +import synapse.rest.admin +from synapse.api.errors import Codes +from synapse.rest.client import login + +from tests import unittest + + +class ManageRegistrationTokensTestCase(unittest.HomeserverTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + ] + + def prepare(self, reactor, clock, hs): + self.store = hs.get_datastore() + self.admin_user = self.register_user("admin", "pass", admin=True) + self.admin_user_tok = self.login("admin", "pass") + + self.other_user = self.register_user("user", "pass") + self.other_user_tok = self.login("user", "pass") + + self.url = "/_synapse/admin/v1/registration_tokens" + + def _new_token(self, **kwargs): + """Helper function to create a token.""" + token = kwargs.get( + "token", + "".join(random.choices(string.ascii_letters, k=8)), + ) + self.get_success( + self.store.db_pool.simple_insert( + "registration_tokens", + { + "token": token, + "uses_allowed": kwargs.get("uses_allowed", None), + "pending": kwargs.get("pending", 0), + "completed": kwargs.get("completed", 0), + "expiry_time": kwargs.get("expiry_time", None), + }, + ) + ) + return token + + # CREATION + + def test_create_no_auth(self): + """Try to create a token without authentication.""" + channel = self.make_request("POST", self.url + "/new", {}) + self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"]) + + def test_create_requester_not_admin(self): + """Try to create a token while not an admin.""" + channel = self.make_request( + "POST", + self.url + "/new", + {}, + access_token=self.other_user_tok, + ) + self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + def test_create_using_defaults(self): + """Create a token using all the defaults.""" + channel = self.make_request( + "POST", + self.url + "/new", + {}, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(len(channel.json_body["token"]), 16) + self.assertIsNone(channel.json_body["uses_allowed"]) + self.assertIsNone(channel.json_body["expiry_time"]) + self.assertEqual(channel.json_body["pending"], 0) + self.assertEqual(channel.json_body["completed"], 0) + + def test_create_specifying_fields(self): + """Create a token specifying the value of all fields.""" + data = { + "token": "abcd", + "uses_allowed": 1, + "expiry_time": self.clock.time_msec() + 1000000, + } + + channel = self.make_request( + "POST", + self.url + "/new", + data, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["token"], "abcd") + self.assertEqual(channel.json_body["uses_allowed"], 1) + self.assertEqual(channel.json_body["expiry_time"], data["expiry_time"]) + self.assertEqual(channel.json_body["pending"], 0) + self.assertEqual(channel.json_body["completed"], 0) + + def test_create_with_null_value(self): + """Create a token specifying unlimited uses and no expiry.""" + data = { + "uses_allowed": None, + "expiry_time": None, + } + + channel = self.make_request( + "POST", + self.url + "/new", + data, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(len(channel.json_body["token"]), 16) + 
self.assertIsNone(channel.json_body["uses_allowed"]) + self.assertIsNone(channel.json_body["expiry_time"]) + self.assertEqual(channel.json_body["pending"], 0) + self.assertEqual(channel.json_body["completed"], 0) + + def test_create_token_too_long(self): + """Check token longer than 64 chars is invalid.""" + data = {"token": "a" * 65} + + channel = self.make_request( + "POST", + self.url + "/new", + data, + access_token=self.admin_user_tok, + ) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + def test_create_token_invalid_chars(self): + """Check you can't create token with invalid characters.""" + data = { + "token": "abc/def", + } + + channel = self.make_request( + "POST", + self.url + "/new", + data, + access_token=self.admin_user_tok, + ) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + def test_create_token_already_exists(self): + """Check you can't create token that already exists.""" + data = { + "token": "abcd", + } + + channel1 = self.make_request( + "POST", + self.url + "/new", + data, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, int(channel1.result["code"]), msg=channel1.result["body"]) + + channel2 = self.make_request( + "POST", + self.url + "/new", + data, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel2.result["code"]), msg=channel2.result["body"]) + self.assertEqual(channel2.json_body["errcode"], Codes.INVALID_PARAM) + + def test_create_unable_to_generate_token(self): + """Check right error is raised when server can't generate unique token.""" + # Create all possible single character tokens + tokens = [] + for c in string.ascii_letters + string.digits + "-_": + tokens.append( + { + "token": c, + "uses_allowed": None, + "pending": 0, + "completed": 0, + "expiry_time": None, + } + ) + self.get_success( + self.store.db_pool.simple_insert_many( + "registration_tokens", + tokens, + "create_all_registration_tokens", + ) + ) + + # Check creating a single character token fails with a 500 status code + channel = self.make_request( + "POST", + self.url + "/new", + {"length": 1}, + access_token=self.admin_user_tok, + ) + self.assertEqual(500, int(channel.result["code"]), msg=channel.result["body"]) + + def test_create_uses_allowed(self): + """Check you can only create a token with good values for uses_allowed.""" + # Should work with 0 (token is invalid from the start) + channel = self.make_request( + "POST", + self.url + "/new", + {"uses_allowed": 0}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["uses_allowed"], 0) + + # Should fail with negative integer + channel = self.make_request( + "POST", + self.url + "/new", + {"uses_allowed": -5}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + # Should fail with float + channel = self.make_request( + "POST", + self.url + "/new", + {"uses_allowed": 1.5}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + def test_create_expiry_time(self): + """Check you can't create a token with an invalid 
expiry_time.""" + # Should fail with a time in the past + channel = self.make_request( + "POST", + self.url + "/new", + {"expiry_time": self.clock.time_msec() - 10000}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + # Should fail with float + channel = self.make_request( + "POST", + self.url + "/new", + {"expiry_time": self.clock.time_msec() + 1000000.5}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + def test_create_length(self): + """Check you can only generate a token with a valid length.""" + # Should work with 64 + channel = self.make_request( + "POST", + self.url + "/new", + {"length": 64}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(len(channel.json_body["token"]), 64) + + # Should fail with 0 + channel = self.make_request( + "POST", + self.url + "/new", + {"length": 0}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + # Should fail with a negative integer + channel = self.make_request( + "POST", + self.url + "/new", + {"length": -5}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + # Should fail with a float + channel = self.make_request( + "POST", + self.url + "/new", + {"length": 8.5}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + # Should fail with 65 + channel = self.make_request( + "POST", + self.url + "/new", + {"length": 65}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + # UPDATING + + def test_update_no_auth(self): + """Try to update a token without authentication.""" + channel = self.make_request( + "PUT", + self.url + "/1234", # Token doesn't exist but that doesn't matter + {}, + ) + self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"]) + + def test_update_requester_not_admin(self): + """Try to update a token while not an admin.""" + channel = self.make_request( + "PUT", + self.url + "/1234", # Token doesn't exist but that doesn't matter + {}, + access_token=self.other_user_tok, + ) + self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + def test_update_non_existent(self): + """Try to update a token that doesn't exist.""" + channel = self.make_request( + "PUT", + self.url + "/1234", + {"uses_allowed": 1}, + access_token=self.admin_user_tok, + ) + + self.assertEqual(404, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.NOT_FOUND) + + def test_update_uses_allowed(self): + """Test updating just uses_allowed.""" + # Create new token using default values + token = 
self._new_token() + + # Should succeed with 1 + channel = self.make_request( + "PUT", + self.url + "/" + token, + {"uses_allowed": 1}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["uses_allowed"], 1) + self.assertIsNone(channel.json_body["expiry_time"]) + + # Should succeed with 0 (makes token invalid) + channel = self.make_request( + "PUT", + self.url + "/" + token, + {"uses_allowed": 0}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["uses_allowed"], 0) + self.assertIsNone(channel.json_body["expiry_time"]) + + # Should succeed with null + channel = self.make_request( + "PUT", + self.url + "/" + token, + {"uses_allowed": None}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertIsNone(channel.json_body["uses_allowed"]) + self.assertIsNone(channel.json_body["expiry_time"]) + + # Should fail with a float + channel = self.make_request( + "PUT", + self.url + "/" + token, + {"uses_allowed": 1.5}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + # Should fail with a negative integer + channel = self.make_request( + "PUT", + self.url + "/" + token, + {"uses_allowed": -5}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + def test_update_expiry_time(self): + """Test updating just expiry_time.""" + # Create new token using default values + token = self._new_token() + new_expiry_time = self.clock.time_msec() + 1000000 + + # Should succeed with a time in the future + channel = self.make_request( + "PUT", + self.url + "/" + token, + {"expiry_time": new_expiry_time}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["expiry_time"], new_expiry_time) + self.assertIsNone(channel.json_body["uses_allowed"]) + + # Should succeed with null + channel = self.make_request( + "PUT", + self.url + "/" + token, + {"expiry_time": None}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertIsNone(channel.json_body["expiry_time"]) + self.assertIsNone(channel.json_body["uses_allowed"]) + + # Should fail with a time in the past + past_time = self.clock.time_msec() - 10000 + channel = self.make_request( + "PUT", + self.url + "/" + token, + {"expiry_time": past_time}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + # Should fail a float + channel = self.make_request( + "PUT", + self.url + "/" + token, + {"expiry_time": new_expiry_time + 0.5}, + access_token=self.admin_user_tok, + ) + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + def test_update_both(self): + """Test updating both uses_allowed and expiry_time.""" + # Create new token using default values + token = self._new_token() + new_expiry_time = 
self.clock.time_msec() + 1000000 + + data = { + "uses_allowed": 1, + "expiry_time": new_expiry_time, + } + + channel = self.make_request( + "PUT", + self.url + "/" + token, + data, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["uses_allowed"], 1) + self.assertEqual(channel.json_body["expiry_time"], new_expiry_time) + + def test_update_invalid_type(self): + """Test using invalid types doesn't work.""" + # Create new token using default values + token = self._new_token() + + data = { + "uses_allowed": False, + "expiry_time": "1626430124000", + } + + channel = self.make_request( + "PUT", + self.url + "/" + token, + data, + access_token=self.admin_user_tok, + ) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.INVALID_PARAM) + + # DELETING + + def test_delete_no_auth(self): + """Try to delete a token without authentication.""" + channel = self.make_request( + "DELETE", + self.url + "/1234", # Token doesn't exist but that doesn't matter + {}, + ) + self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"]) + + def test_delete_requester_not_admin(self): + """Try to delete a token while not an admin.""" + channel = self.make_request( + "DELETE", + self.url + "/1234", # Token doesn't exist but that doesn't matter + {}, + access_token=self.other_user_tok, + ) + self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + def test_delete_non_existent(self): + """Try to delete a token that doesn't exist.""" + channel = self.make_request( + "DELETE", + self.url + "/1234", + {}, + access_token=self.admin_user_tok, + ) + + self.assertEqual(404, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.NOT_FOUND) + + def test_delete(self): + """Test deleting a token.""" + # Create new token using default values + token = self._new_token() + + channel = self.make_request( + "DELETE", + self.url + "/" + token, + {}, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + + # GETTING ONE + + def test_get_no_auth(self): + """Try to get a token without authentication.""" + channel = self.make_request( + "GET", + self.url + "/1234", # Token doesn't exist but that doesn't matter + {}, + ) + self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"]) + + def test_get_requester_not_admin(self): + """Try to get a token while not an admin.""" + channel = self.make_request( + "GET", + self.url + "/1234", # Token doesn't exist but that doesn't matter + {}, + access_token=self.other_user_tok, + ) + self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + def test_get_non_existent(self): + """Try to get a token that doesn't exist.""" + channel = self.make_request( + "GET", + self.url + "/1234", + {}, + access_token=self.admin_user_tok, + ) + + self.assertEqual(404, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["errcode"], Codes.NOT_FOUND) + + def test_get(self): + """Test getting a token.""" + # 
Create new token using default values + token = self._new_token() + + channel = self.make_request( + "GET", + self.url + "/" + token, + {}, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(channel.json_body["token"], token) + self.assertIsNone(channel.json_body["uses_allowed"]) + self.assertIsNone(channel.json_body["expiry_time"]) + self.assertEqual(channel.json_body["pending"], 0) + self.assertEqual(channel.json_body["completed"], 0) + + # LISTING + + def test_list_no_auth(self): + """Try to list tokens without authentication.""" + channel = self.make_request("GET", self.url, {}) + self.assertEqual(401, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"]) + + def test_list_requester_not_admin(self): + """Try to list tokens while not an admin.""" + channel = self.make_request( + "GET", + self.url, + {}, + access_token=self.other_user_tok, + ) + self.assertEqual(403, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + def test_list_all(self): + """Test listing all tokens.""" + # Create new token using default values + token = self._new_token() + + channel = self.make_request( + "GET", + self.url, + {}, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(len(channel.json_body["registration_tokens"]), 1) + token_info = channel.json_body["registration_tokens"][0] + self.assertEqual(token_info["token"], token) + self.assertIsNone(token_info["uses_allowed"]) + self.assertIsNone(token_info["expiry_time"]) + self.assertEqual(token_info["pending"], 0) + self.assertEqual(token_info["completed"], 0) + + def test_list_invalid_query_parameter(self): + """Test with `valid` query parameter not `true` or `false`.""" + channel = self.make_request( + "GET", + self.url + "?valid=x", + {}, + access_token=self.admin_user_tok, + ) + + self.assertEqual(400, int(channel.result["code"]), msg=channel.result["body"]) + + def _test_list_query_parameter(self, valid: str): + """Helper used to test both valid=true and valid=false.""" + # Create 2 valid and 2 invalid tokens. 
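+        # A token counts as valid while it has uses remaining (`uses_allowed`
+        # is NULL or greater than `pending` + `completed`) and has not passed
+        # its `expiry_time`; otherwise it is listed as invalid.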
+ now = self.hs.get_clock().time_msec() + # Create always valid token + valid1 = self._new_token() + # Create token that hasn't been used up + valid2 = self._new_token(uses_allowed=1) + # Create token that has expired + invalid1 = self._new_token(expiry_time=now - 10000) + # Create token that has been used up but hasn't expired + invalid2 = self._new_token( + uses_allowed=2, + pending=1, + completed=1, + expiry_time=now + 1000000, + ) + + if valid == "true": + tokens = [valid1, valid2] + else: + tokens = [invalid1, invalid2] + + channel = self.make_request( + "GET", + self.url + "?valid=" + valid, + {}, + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) + self.assertEqual(len(channel.json_body["registration_tokens"]), 2) + token_info_1 = channel.json_body["registration_tokens"][0] + token_info_2 = channel.json_body["registration_tokens"][1] + self.assertIn(token_info_1["token"], tokens) + self.assertIn(token_info_2["token"], tokens) + + def test_list_valid(self): + """Test listing just valid tokens.""" + self._test_list_query_parameter(valid="true") + + def test_list_invalid(self): + """Test listing just invalid tokens.""" + self._test_list_query_parameter(valid="false") diff --git a/tests/rest/client/v2_alpha/test_account.py b/tests/rest/client/test_account.py similarity index 100% rename from tests/rest/client/v2_alpha/test_account.py rename to tests/rest/client/test_account.py diff --git a/tests/rest/client/v2_alpha/test_auth.py b/tests/rest/client/test_auth.py similarity index 99% rename from tests/rest/client/v2_alpha/test_auth.py rename to tests/rest/client/test_auth.py index cf5cfb910c8c..e2fcbdc63ac6 100644 --- a/tests/rest/client/v2_alpha/test_auth.py +++ b/tests/rest/client/test_auth.py @@ -25,7 +25,7 @@ from tests import unittest from tests.handlers.test_oidc import HAS_OIDC -from tests.rest.client.v1.utils import TEST_OIDC_CONFIG +from tests.rest.client.utils import TEST_OIDC_CONFIG from tests.server import FakeChannel from tests.unittest import override_config, skip_unless diff --git a/tests/rest/client/v2_alpha/test_capabilities.py b/tests/rest/client/test_capabilities.py similarity index 99% rename from tests/rest/client/v2_alpha/test_capabilities.py rename to tests/rest/client/test_capabilities.py index ac31e5ceaf7d..422361b62a5a 100644 --- a/tests/rest/client/v2_alpha/test_capabilities.py +++ b/tests/rest/client/test_capabilities.py @@ -171,7 +171,8 @@ def test_change_3pid_capabilities_3pid_disabled(self): self.assertEqual(channel.code, 200) self.assertFalse(capabilities["org.matrix.msc3283.3pid_changes"]["enabled"]) - def test_get_does_not_include_msc3244_fields_by_default(self): + @override_config({"experimental_features": {"msc3244_enabled": False}}) + def test_get_does_not_include_msc3244_fields_when_disabled(self): access_token = self.get_success( self.auth_handler.get_access_token_for_user_id( self.user, device_id=None, valid_until_ms=None @@ -186,7 +187,6 @@ def test_get_does_not_include_msc3244_fields_by_default(self): "org.matrix.msc3244.room_capabilities", capabilities["m.room_versions"] ) - @override_config({"experimental_features": {"msc3244_enabled": True}}) def test_get_does_include_msc3244_fields_when_enabled(self): access_token = self.get_success( self.auth_handler.get_access_token_for_user_id( diff --git a/tests/rest/client/v1/test_directory.py b/tests/rest/client/test_directory.py similarity index 100% rename from tests/rest/client/v1/test_directory.py rename to 
tests/rest/client/test_directory.py diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/test_events.py similarity index 100% rename from tests/rest/client/v1/test_events.py rename to tests/rest/client/test_events.py diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/test_filter.py similarity index 100% rename from tests/rest/client/v2_alpha/test_filter.py rename to tests/rest/client/test_filter.py diff --git a/tests/rest/client/test_keys.py b/tests/rest/client/test_keys.py new file mode 100644 index 000000000000..d7fa635eae1d --- /dev/null +++ b/tests/rest/client/test_keys.py @@ -0,0 +1,91 @@ +# Copyright 2021 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License + +from http import HTTPStatus + +from synapse.api.errors import Codes +from synapse.rest import admin +from synapse.rest.client import keys, login + +from tests import unittest + + +class KeyQueryTestCase(unittest.HomeserverTestCase): + servlets = [ + keys.register_servlets, + admin.register_servlets_for_client_rest_resource, + login.register_servlets, + ] + + def test_rejects_device_id_ice_key_outside_of_list(self): + self.register_user("alice", "wonderland") + alice_token = self.login("alice", "wonderland") + bob = self.register_user("bob", "uncle") + channel = self.make_request( + "POST", + "/_matrix/client/r0/keys/query", + { + "device_keys": { + bob: "device_id1", + }, + }, + alice_token, + ) + self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result) + self.assertEqual( + channel.json_body["errcode"], + Codes.BAD_JSON, + channel.result, + ) + + def test_rejects_device_key_given_as_map_to_bool(self): + self.register_user("alice", "wonderland") + alice_token = self.login("alice", "wonderland") + bob = self.register_user("bob", "uncle") + channel = self.make_request( + "POST", + "/_matrix/client/r0/keys/query", + { + "device_keys": { + bob: { + "device_id1": True, + }, + }, + }, + alice_token, + ) + + self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result) + self.assertEqual( + channel.json_body["errcode"], + Codes.BAD_JSON, + channel.result, + ) + + def test_requires_device_key(self): + """`device_keys` is required. 
We should complain if it's missing.""" + self.register_user("alice", "wonderland") + alice_token = self.login("alice", "wonderland") + channel = self.make_request( + "POST", + "/_matrix/client/r0/keys/query", + {}, + alice_token, + ) + self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.result) + self.assertEqual( + channel.json_body["errcode"], + Codes.BAD_JSON, + channel.result, + ) diff --git a/tests/rest/client/v1/test_login.py b/tests/rest/client/test_login.py similarity index 99% rename from tests/rest/client/v1/test_login.py rename to tests/rest/client/test_login.py index eba3552b19ac..5b2243fe5205 100644 --- a/tests/rest/client/v1/test_login.py +++ b/tests/rest/client/test_login.py @@ -32,7 +32,7 @@ from tests import unittest from tests.handlers.test_oidc import HAS_OIDC from tests.handlers.test_saml import has_saml2 -from tests.rest.client.v1.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG +from tests.rest.client.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG from tests.test_utils.html_parsers import TestHtmlParser from tests.unittest import HomeserverTestCase, override_config, skip_unless diff --git a/tests/rest/client/v2_alpha/test_password_policy.py b/tests/rest/client/test_password_policy.py similarity index 100% rename from tests/rest/client/v2_alpha/test_password_policy.py rename to tests/rest/client/test_password_policy.py diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/test_presence.py similarity index 100% rename from tests/rest/client/v1/test_presence.py rename to tests/rest/client/test_presence.py diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/test_profile.py similarity index 100% rename from tests/rest/client/v1/test_profile.py rename to tests/rest/client/test_profile.py diff --git a/tests/rest/client/v1/test_push_rule_attrs.py b/tests/rest/client/test_push_rule_attrs.py similarity index 100% rename from tests/rest/client/v1/test_push_rule_attrs.py rename to tests/rest/client/test_push_rule_attrs.py diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/test_register.py similarity index 63% rename from tests/rest/client/v2_alpha/test_register.py rename to tests/rest/client/test_register.py index fecda037a54b..9f3ab2c9858a 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/test_register.py @@ -24,6 +24,7 @@ from synapse.api.errors import Codes from synapse.appservice import ApplicationService from synapse.rest.client import account, account_validity, login, logout, register, sync +from synapse.storage._base import db_to_json from tests import unittest from tests.unittest import override_config @@ -204,6 +205,371 @@ def test_POST_ratelimiting(self): self.assertEquals(channel.result["code"], b"200", channel.result) + @override_config({"registration_requires_token": True}) + def test_POST_registration_requires_token(self): + username = "kermit" + device_id = "frogfone" + token = "abcd" + store = self.hs.get_datastore() + self.get_success( + store.db_pool.simple_insert( + "registration_tokens", + { + "token": token, + "uses_allowed": None, + "pending": 0, + "completed": 0, + "expiry_time": None, + }, + ) + ) + params = { + "username": username, + "password": "monkey", + "device_id": device_id, + } + + # Request without auth to get flows and session + channel = self.make_request(b"POST", self.url, json.dumps(params)) + self.assertEquals(channel.result["code"], b"401", channel.result) + flows = channel.json_body["flows"] + # Synapse adds a dummy stage to 
differentiate flows where otherwise one + # flow would be a subset of another flow. + self.assertCountEqual( + [[LoginType.REGISTRATION_TOKEN, LoginType.DUMMY]], + (f["stages"] for f in flows), + ) + session = channel.json_body["session"] + + # Do the registration token stage and check it has completed + params["auth"] = { + "type": LoginType.REGISTRATION_TOKEN, + "token": token, + "session": session, + } + request_data = json.dumps(params) + channel = self.make_request(b"POST", self.url, request_data) + self.assertEquals(channel.result["code"], b"401", channel.result) + completed = channel.json_body["completed"] + self.assertCountEqual([LoginType.REGISTRATION_TOKEN], completed) + + # Do the m.login.dummy stage and check registration was successful + params["auth"] = { + "type": LoginType.DUMMY, + "session": session, + } + request_data = json.dumps(params) + channel = self.make_request(b"POST", self.url, request_data) + det_data = { + "user_id": f"@{username}:{self.hs.hostname}", + "home_server": self.hs.hostname, + "device_id": device_id, + } + self.assertEquals(channel.result["code"], b"200", channel.result) + self.assertDictContainsSubset(det_data, channel.json_body) + + # Check the `completed` counter has been incremented and pending is 0 + res = self.get_success( + store.db_pool.simple_select_one( + "registration_tokens", + keyvalues={"token": token}, + retcols=["pending", "completed"], + ) + ) + self.assertEquals(res["completed"], 1) + self.assertEquals(res["pending"], 0) + + @override_config({"registration_requires_token": True}) + def test_POST_registration_token_invalid(self): + params = { + "username": "kermit", + "password": "monkey", + } + # Request without auth to get session + channel = self.make_request(b"POST", self.url, json.dumps(params)) + session = channel.json_body["session"] + + # Test with token param missing (invalid) + params["auth"] = { + "type": LoginType.REGISTRATION_TOKEN, + "session": session, + } + channel = self.make_request(b"POST", self.url, json.dumps(params)) + self.assertEquals(channel.result["code"], b"401", channel.result) + self.assertEquals(channel.json_body["errcode"], Codes.MISSING_PARAM) + self.assertEquals(channel.json_body["completed"], []) + + # Test with non-string (invalid) + params["auth"]["token"] = 1234 + channel = self.make_request(b"POST", self.url, json.dumps(params)) + self.assertEquals(channel.result["code"], b"401", channel.result) + self.assertEquals(channel.json_body["errcode"], Codes.INVALID_PARAM) + self.assertEquals(channel.json_body["completed"], []) + + # Test with unknown token (invalid) + params["auth"]["token"] = "1234" + channel = self.make_request(b"POST", self.url, json.dumps(params)) + self.assertEquals(channel.result["code"], b"401", channel.result) + self.assertEquals(channel.json_body["errcode"], Codes.UNAUTHORIZED) + self.assertEquals(channel.json_body["completed"], []) + + @override_config({"registration_requires_token": True}) + def test_POST_registration_token_limit_uses(self): + token = "abcd" + store = self.hs.get_datastore() + # Create token that can be used once + self.get_success( + store.db_pool.simple_insert( + "registration_tokens", + { + "token": token, + "uses_allowed": 1, + "pending": 0, + "completed": 0, + "expiry_time": None, + }, + ) + ) + params1 = {"username": "bert", "password": "monkey"} + params2 = {"username": "ernie", "password": "monkey"} + # Do 2 requests without auth to get two session IDs + channel1 = self.make_request(b"POST", self.url, json.dumps(params1)) + session1 = 
channel1.json_body["session"] + channel2 = self.make_request(b"POST", self.url, json.dumps(params2)) + session2 = channel2.json_body["session"] + + # Use token with session1 and check `pending` is 1 + params1["auth"] = { + "type": LoginType.REGISTRATION_TOKEN, + "token": token, + "session": session1, + } + self.make_request(b"POST", self.url, json.dumps(params1)) + # Repeat request to make sure pending isn't increased again + self.make_request(b"POST", self.url, json.dumps(params1)) + pending = self.get_success( + store.db_pool.simple_select_one_onecol( + "registration_tokens", + keyvalues={"token": token}, + retcol="pending", + ) + ) + self.assertEquals(pending, 1) + + # Check auth fails when using token with session2 + params2["auth"] = { + "type": LoginType.REGISTRATION_TOKEN, + "token": token, + "session": session2, + } + channel = self.make_request(b"POST", self.url, json.dumps(params2)) + self.assertEquals(channel.result["code"], b"401", channel.result) + self.assertEquals(channel.json_body["errcode"], Codes.UNAUTHORIZED) + self.assertEquals(channel.json_body["completed"], []) + + # Complete registration with session1 + params1["auth"]["type"] = LoginType.DUMMY + self.make_request(b"POST", self.url, json.dumps(params1)) + # Check pending=0 and completed=1 + res = self.get_success( + store.db_pool.simple_select_one( + "registration_tokens", + keyvalues={"token": token}, + retcols=["pending", "completed"], + ) + ) + self.assertEquals(res["pending"], 0) + self.assertEquals(res["completed"], 1) + + # Check auth still fails when using token with session2 + channel = self.make_request(b"POST", self.url, json.dumps(params2)) + self.assertEquals(channel.result["code"], b"401", channel.result) + self.assertEquals(channel.json_body["errcode"], Codes.UNAUTHORIZED) + self.assertEquals(channel.json_body["completed"], []) + + @override_config({"registration_requires_token": True}) + def test_POST_registration_token_expiry(self): + token = "abcd" + now = self.hs.get_clock().time_msec() + store = self.hs.get_datastore() + # Create token that expired yesterday + self.get_success( + store.db_pool.simple_insert( + "registration_tokens", + { + "token": token, + "uses_allowed": None, + "pending": 0, + "completed": 0, + "expiry_time": now - 24 * 60 * 60 * 1000, + }, + ) + ) + params = {"username": "kermit", "password": "monkey"} + # Request without auth to get session + channel = self.make_request(b"POST", self.url, json.dumps(params)) + session = channel.json_body["session"] + + # Check authentication fails with expired token + params["auth"] = { + "type": LoginType.REGISTRATION_TOKEN, + "token": token, + "session": session, + } + channel = self.make_request(b"POST", self.url, json.dumps(params)) + self.assertEquals(channel.result["code"], b"401", channel.result) + self.assertEquals(channel.json_body["errcode"], Codes.UNAUTHORIZED) + self.assertEquals(channel.json_body["completed"], []) + + # Update token so it expires tomorrow + self.get_success( + store.db_pool.simple_update_one( + "registration_tokens", + keyvalues={"token": token}, + updatevalues={"expiry_time": now + 24 * 60 * 60 * 1000}, + ) + ) + + # Check authentication succeeds + channel = self.make_request(b"POST", self.url, json.dumps(params)) + completed = channel.json_body["completed"] + self.assertCountEqual([LoginType.REGISTRATION_TOKEN], completed) + + @override_config({"registration_requires_token": True}) + def test_POST_registration_token_session_expiry(self): + """Test `pending` is decremented when an uncompleted session expires.""" + 
token = "abcd" + store = self.hs.get_datastore() + self.get_success( + store.db_pool.simple_insert( + "registration_tokens", + { + "token": token, + "uses_allowed": None, + "pending": 0, + "completed": 0, + "expiry_time": None, + }, + ) + ) + + # Do 2 requests without auth to get two session IDs + params1 = {"username": "bert", "password": "monkey"} + params2 = {"username": "ernie", "password": "monkey"} + channel1 = self.make_request(b"POST", self.url, json.dumps(params1)) + session1 = channel1.json_body["session"] + channel2 = self.make_request(b"POST", self.url, json.dumps(params2)) + session2 = channel2.json_body["session"] + + # Use token with both sessions + params1["auth"] = { + "type": LoginType.REGISTRATION_TOKEN, + "token": token, + "session": session1, + } + self.make_request(b"POST", self.url, json.dumps(params1)) + + params2["auth"] = { + "type": LoginType.REGISTRATION_TOKEN, + "token": token, + "session": session2, + } + self.make_request(b"POST", self.url, json.dumps(params2)) + + # Complete registration with session1 + params1["auth"]["type"] = LoginType.DUMMY + self.make_request(b"POST", self.url, json.dumps(params1)) + + # Check `result` of registration token stage for session1 is `True` + result1 = self.get_success( + store.db_pool.simple_select_one_onecol( + "ui_auth_sessions_credentials", + keyvalues={ + "session_id": session1, + "stage_type": LoginType.REGISTRATION_TOKEN, + }, + retcol="result", + ) + ) + self.assertTrue(db_to_json(result1)) + + # Check `result` for session2 is the token used + result2 = self.get_success( + store.db_pool.simple_select_one_onecol( + "ui_auth_sessions_credentials", + keyvalues={ + "session_id": session2, + "stage_type": LoginType.REGISTRATION_TOKEN, + }, + retcol="result", + ) + ) + self.assertEquals(db_to_json(result2), token) + + # Delete both sessions (mimics expiry) + self.get_success( + store.delete_old_ui_auth_sessions(self.hs.get_clock().time_msec()) + ) + + # Check pending is now 0 + pending = self.get_success( + store.db_pool.simple_select_one_onecol( + "registration_tokens", + keyvalues={"token": token}, + retcol="pending", + ) + ) + self.assertEquals(pending, 0) + + @override_config({"registration_requires_token": True}) + def test_POST_registration_token_session_expiry_deleted_token(self): + """Test session expiry doesn't break when the token is deleted. + + 1. Start but don't complete UIA with a registration token + 2. Delete the token from the database + 3. 
Expire the session + """ + token = "abcd" + store = self.hs.get_datastore() + self.get_success( + store.db_pool.simple_insert( + "registration_tokens", + { + "token": token, + "uses_allowed": None, + "pending": 0, + "completed": 0, + "expiry_time": None, + }, + ) + ) + + # Do request without auth to get a session ID + params = {"username": "kermit", "password": "monkey"} + channel = self.make_request(b"POST", self.url, json.dumps(params)) + session = channel.json_body["session"] + + # Use token + params["auth"] = { + "type": LoginType.REGISTRATION_TOKEN, + "token": token, + "session": session, + } + self.make_request(b"POST", self.url, json.dumps(params)) + + # Delete token + self.get_success( + store.db_pool.simple_delete_one( + "registration_tokens", + keyvalues={"token": token}, + ) + ) + + # Delete session (mimics expiry) + self.get_success( + store.delete_old_ui_auth_sessions(self.hs.get_clock().time_msec()) + ) + def test_advertised_flows(self): channel = self.make_request(b"POST", self.url, b"{}") self.assertEquals(channel.result["code"], b"401", channel.result) @@ -744,3 +1110,71 @@ def test_background_job(self): self.assertGreaterEqual(res, now_ms + self.validity_period - self.max_delta) self.assertLessEqual(res, now_ms + self.validity_period) + + +class RegistrationTokenValidityRestServletTestCase(unittest.HomeserverTestCase): + servlets = [register.register_servlets] + url = "/_matrix/client/unstable/org.matrix.msc3231/register/org.matrix.msc3231.login.registration_token/validity" + + def default_config(self): + config = super().default_config() + config["registration_requires_token"] = True + return config + + def test_GET_token_valid(self): + token = "abcd" + store = self.hs.get_datastore() + self.get_success( + store.db_pool.simple_insert( + "registration_tokens", + { + "token": token, + "uses_allowed": None, + "pending": 0, + "completed": 0, + "expiry_time": None, + }, + ) + ) + + channel = self.make_request( + b"GET", + f"{self.url}?token={token}", + ) + self.assertEquals(channel.result["code"], b"200", channel.result) + self.assertEquals(channel.json_body["valid"], True) + + def test_GET_token_invalid(self): + token = "1234" + channel = self.make_request( + b"GET", + f"{self.url}?token={token}", + ) + self.assertEquals(channel.result["code"], b"200", channel.result) + self.assertEquals(channel.json_body["valid"], False) + + @override_config( + {"rc_registration_token_validity": {"per_second": 0.1, "burst_count": 5}} + ) + def test_GET_ratelimiting(self): + token = "1234" + + for i in range(0, 6): + channel = self.make_request( + b"GET", + f"{self.url}?token={token}", + ) + + if i == 5: + self.assertEquals(channel.result["code"], b"429", channel.result) + retry_after_ms = int(channel.json_body["retry_after_ms"]) + else: + self.assertEquals(channel.result["code"], b"200", channel.result) + + self.reactor.advance(retry_after_ms / 1000.0 + 1.0) + + channel = self.make_request( + b"GET", + f"{self.url}?token={token}", + ) + self.assertEquals(channel.result["code"], b"200", channel.result) diff --git a/tests/rest/client/v2_alpha/test_relations.py b/tests/rest/client/test_relations.py similarity index 100% rename from tests/rest/client/v2_alpha/test_relations.py rename to tests/rest/client/test_relations.py diff --git a/tests/rest/client/v2_alpha/test_report_event.py b/tests/rest/client/test_report_event.py similarity index 100% rename from tests/rest/client/v2_alpha/test_report_event.py rename to tests/rest/client/test_report_event.py diff --git 
a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/test_rooms.py similarity index 100% rename from tests/rest/client/v1/test_rooms.py rename to tests/rest/client/test_rooms.py diff --git a/tests/rest/client/v2_alpha/test_sendtodevice.py b/tests/rest/client/test_sendtodevice.py similarity index 100% rename from tests/rest/client/v2_alpha/test_sendtodevice.py rename to tests/rest/client/test_sendtodevice.py diff --git a/tests/rest/client/v2_alpha/test_shared_rooms.py b/tests/rest/client/test_shared_rooms.py similarity index 100% rename from tests/rest/client/v2_alpha/test_shared_rooms.py rename to tests/rest/client/test_shared_rooms.py diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/test_sync.py similarity index 100% rename from tests/rest/client/v2_alpha/test_sync.py rename to tests/rest/client/test_sync.py diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/test_typing.py similarity index 100% rename from tests/rest/client/v1/test_typing.py rename to tests/rest/client/test_typing.py diff --git a/tests/rest/client/v2_alpha/test_upgrade_room.py b/tests/rest/client/test_upgrade_room.py similarity index 100% rename from tests/rest/client/v2_alpha/test_upgrade_room.py rename to tests/rest/client/test_upgrade_room.py diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/utils.py similarity index 100% rename from tests/rest/client/v1/utils.py rename to tests/rest/client/utils.py diff --git a/tests/rest/client/v1/__init__.py b/tests/rest/client/v1/__init__.py deleted file mode 100644 index 5e83dba2ed6f..000000000000 --- a/tests/rest/client/v1/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2014-2016 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/tests/rest/client/v2_alpha/__init__.py b/tests/rest/client/v2_alpha/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py index 6085444b9da8..2f7eebfe6931 100644 --- a/tests/rest/media/v1/test_media_storage.py +++ b/tests/rest/media/v1/test_media_storage.py @@ -21,7 +21,7 @@ from urllib import parse import attr -from parameterized import parameterized_class +from parameterized import parameterized, parameterized_class from PIL import Image as Image from twisted.internet import defer @@ -473,6 +473,43 @@ def _test_thumbnail(self, method, expected_body, expected_found): }, ) + @parameterized.expand([("crop", 16), ("crop", 64), ("scale", 16), ("scale", 64)]) + def test_same_quality(self, method, desired_size): + """Test that choosing between thumbnails with the same quality rating succeeds. + + We are not particular about which thumbnail is chosen.""" + self.assertIsNotNone( + self.thumbnail_resource._select_thumbnail( + desired_width=desired_size, + desired_height=desired_size, + desired_method=method, + desired_type=self.test_image.content_type, + # Provide two identical thumbnails which are guaranteed to have the same + # quality rating. 
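+            # Identical width/height/method/type/length gives an identical
+            # quality rating, so _select_thumbnail just has to pick one of
+            # them rather than fail.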
+ thumbnail_infos=[ + { + "thumbnail_width": 32, + "thumbnail_height": 32, + "thumbnail_method": method, + "thumbnail_type": self.test_image.content_type, + "thumbnail_length": 256, + "filesystem_id": f"thumbnail1{self.test_image.extension}", + }, + { + "thumbnail_width": 32, + "thumbnail_height": 32, + "thumbnail_method": method, + "thumbnail_type": self.test_image.content_type, + "thumbnail_length": 256, + "filesystem_id": f"thumbnail2{self.test_image.extension}", + }, + ], + file_id=f"image{self.test_image.extension}", + url_cache=None, + server_name=None, + ) + ) + def test_x_robots_tag_header(self): """ Tests that the `X-Robots-Tag` header is present, which informs web crawlers diff --git a/tests/unittest.py b/tests/unittest.py index 3eec9c4d5b6f..f2c90cc47b53 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -252,7 +252,7 @@ def setUp(self): reactor=self.reactor, ) - from tests.rest.client.v1.utils import RestHelper + from tests.rest.client.utils import RestHelper self.helper = RestHelper(self.hs, self.site, getattr(self, "user_id", None))
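
The renames above flatten `tests/rest/client/v1` and `tests/rest/client/v2_alpha` into `tests/rest/client`, and the `tests/unittest.py` hunk shows the corresponding import fix. As a rough sketch of what this means for a downstream test module (the test case and assertion below are illustrative, not part of this change):

    from tests import unittest

    # Old location, removed by this change:
    #     from tests.rest.client.v1.utils import RestHelper
    # New, flattened location:
    from tests.rest.client.utils import RestHelper


    class ExampleClientTestCase(unittest.HomeserverTestCase):
        def test_helper_is_wired_up(self):
            # HomeserverTestCase.setUp constructs a RestHelper as self.helper,
            # so tests normally use that instead of building their own.
            self.assertIsInstance(self.helper, RestHelper)

Any out-of-tree tests that import from `tests.rest.client.v1.*` or `tests.rest.client.v2_alpha.*` would need the same one-line adjustment to their imports.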