Merge branch 'develop' into erikj/less_state_membership

Erik Johnston, 1 year ago (commit cbb1575486)

100 changed files with 2027 additions and 1128 deletions
    1. .github/workflows/latest_deps.yml (+34, -3)
    2. .github/workflows/tests.yml (+16, -25)
    3. CHANGES.md (+280, -3)
    4. README.rst (+107, -343)
    5. changelog.d/12942.misc (+0, -1)
    6. changelog.d/12943.misc (+0, -1)
    7. changelog.d/12967.removal (+0, -1)
    8. changelog.d/13038.feature (+0, -1)
    9. changelog.d/13094.misc (+0, -1)
   10. changelog.d/13172.misc (+0, -1)
   11. changelog.d/13175.misc (+0, -1)
   12. changelog.d/13192.removal (+0, -1)
   13. changelog.d/13198.misc (+0, -1)
   14. changelog.d/13208.feature (+0, -1)
   15. changelog.d/13215.misc (+0, -1)
   16. changelog.d/13218.misc (+0, -1)
   17. changelog.d/13220.feature (+0, -1)
   18. changelog.d/13224.misc (+0, -1)
   19. changelog.d/13231.doc (+0, -1)
   20. changelog.d/13233.doc (+0, -1)
   21. changelog.d/13239.removal (+0, -1)
   22. changelog.d/13240.misc (+0, -1)
   23. changelog.d/13242.misc (+0, -1)
   24. changelog.d/13251.misc (+0, -1)
   25. changelog.d/13253.misc (+0, -1)
   26. changelog.d/13254.misc (+0, -1)
   27. changelog.d/13255.misc (+0, -1)
   28. changelog.d/13257.misc (+0, -1)
   29. changelog.d/13258.misc (+0, -1)
   30. changelog.d/13260.misc (+0, -1)
   31. changelog.d/13261.doc (+0, -1)
   32. changelog.d/13263.bugfix (+0, -1)
   33. changelog.d/13266.misc (+0, -1)
   34. changelog.d/13267.misc (+0, -1)
   35. changelog.d/13270.bugfix (+0, -1)
   36. changelog.d/13271.doc (+0, -1)
   37. changelog.d/13274.misc (+0, -1)
   38. changelog.d/13276.feature (+0, -1)
   39. changelog.d/13278.bugfix (+0, -1)
   40. changelog.d/13279.misc (+0, -1)
   41. changelog.d/13281.misc (+0, -1)
   42. changelog.d/13284.misc (+0, -1)
   43. changelog.d/13285.misc (+0, -1)
   44. changelog.d/13296.bugfix (+0, -1)
   45. changelog.d/13297.misc (+0, -1)
   46. changelog.d/13299.misc (+0, -1)
   47. changelog.d/13300.misc (+0, -1)
   48. changelog.d/13303.misc (+0, -1)
   49. changelog.d/13307.misc (+0, -1)
   50. changelog.d/13308.misc (+0, -1)
   51. changelog.d/13310.misc (+0, -1)
   52. changelog.d/13311.misc (+0, -1)
   53. changelog.d/13314.doc (+0, -1)
   54. changelog.d/13318.misc (+0, -1)
   55. changelog.d/13320.misc (+0, -1)
   56. changelog.d/13323.misc (+0, -1)
   57. changelog.d/13326.removal (+0, -1)
   58. changelog.d/13328.misc (+0, -1)
   59. changelog.d/13573.misc (+1, -0)
   60. changelog.d/13588.misc (+1, -0)
   61. changelog.d/13591.misc (+1, -0)
   62. changelog.d/13592.misc (+1, -0)
   63. changelog.d/13600.misc (+1, -0)
   64. changelog.d/13602.doc (+1, -0)
   65. contrib/grafana/synapse.json (+232, -181)
   66. debian/changelog (+42, -0)
   67. docker/Dockerfile (+24, -3)
   68. docker/Dockerfile-workers (+59, -35)
   69. docker/README-testing.md (+4, -0)
   70. docker/README.md (+1, -1)
   71. docker/complement/Dockerfile (+1, -0)
   72. docker/conf-workers/supervisord.conf.j2 (+1, -1)
   73. docs/admin_api/register_api.md (+19, -2)
   74. docs/admin_api/rooms.md (+6, -1)
   75. docs/admin_api/user_admin_api.md (+7, -0)
   76. docs/auth_chain_difference_algorithm.md (+42, -9)
   77. docs/message_retention_policies.md (+2, -1)
   78. docs/modules/password_auth_provider_callbacks.md (+2, -2)
   79. docs/openid.md (+3, -1)
   80. docs/reverse_proxy.md (+11, -42)
   81. docs/sso_mapping_providers.md (+8, -6)
   82. docs/templates.md (+1, -1)
   83. docs/upgrade.md (+29, -4)
   84. docs/usage/administration/admin_api/README.md (+2, -1)
   85. docs/usage/administration/admin_api/registration_tokens.md (+3, -3)
   86. docs/usage/administration/admin_faq.md (+86, -4)
   87. docs/usage/configuration/config_documentation.md (+152, -174)
   88. mypy.ini (+1, -4)
   89. poetry.lock (+83, -31)
   90. pyproject.toml (+4, -1)
   91. scripts-dev/check_pydantic_models.py (+425, -0)
   92. scripts-dev/complement.sh (+1, -0)
   93. scripts-dev/lint.sh (+1, -0)
   94. scripts-dev/release.py (+130, -32)
   95. synapse/api/auth.py (+110, -111)
   96. synapse/api/constants.py (+6, -5)
   97. synapse/api/errors.py (+48, -10)
   98. synapse/api/ratelimiting.py (+3, -3)
   99. synapse/api/room_versions.py (+19, -19)
  100. synapse/app/admin_cmd.py (+16, -12)

+ 34 - 3
.github/workflows/latest_deps.yml

@@ -135,11 +135,42 @@ jobs:
             /logs/**/*.log*
 
 
-  # TODO: run complement (as with twisted trunk, see #12473).
+  complement:
+    if: "${{ !failure() && !cancelled() }}"
+    runs-on: ubuntu-latest
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - arrangement: monolith
+            database: SQLite
+
+          - arrangement: monolith
+            database: Postgres
+
+          - arrangement: workers
+            database: Postgres
+
+    steps:
+      - name: Run actions/checkout@v2 for synapse
+        uses: actions/checkout@v2
+        with:
+          path: synapse
+
+      - name: Prepare Complement's Prerequisites
+        run: synapse/.ci/scripts/setup_complement_prerequisites.sh
+
+      - run: |
+          set -o pipefail
+          TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
+        shell: bash
+        name: Run Complement Tests
 
-  # open an issue if the build fails, so we know about it.
+  # Open an issue if the build fails, so we know about it.
+  # Only do this if we're not experimenting with this action in a PR.
   open-issue:
-    if: failure()
+    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
     needs:
       # TODO: should mypy be included here? It feels more brittle than the other two.
       - mypy
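
This enables the Complement suite against the latest (unpinned) dependencies. As a rough local equivalent of the workers/Postgres cell above (a sketch only: it assumes Synapse is checked out under ./synapse, Complement under ./complement, and that gotestfmt is on the PATH, as the prerequisites script arranges in CI):

    set -o pipefail
    # POSTGRES / WORKERS mirror the matrix expressions above: they expand to
    # "1" for the Postgres/workers cells and to an empty string otherwise.
    TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 \
      POSTGRES=1 \
      WORKERS=1 \
      COMPLEMENT_DIR="$(pwd)/complement" \
      synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
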

+ 16 - 25
.github/workflows/tests.yml

@@ -53,10 +53,22 @@ jobs:
         env:
           PULL_REQUEST_NUMBER: ${{ github.event.number }}
 
+  lint-pydantic:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+          fetch-depth: 0
+      - uses: matrix-org/setup-python-poetry@v1
+        with:
+          extras: "all"
+      - run: poetry run scripts-dev/check_pydantic_models.py
+
   # Dummy step to gate other tests on without repeating the whole list
   linting-done:
     if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
-    needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig, check-schema-delta]
+    needs: [lint, lint-crlf, lint-newsfile, lint-pydantic, check-sampleconfig, check-schema-delta]
     runs-on: ubuntu-latest
     steps:
       - run: "true"
@@ -328,29 +340,8 @@ jobs:
           - arrangement: monolith
             database: Postgres
 
-    steps:
-      - name: Run actions/checkout@v2 for synapse
-        uses: actions/checkout@v2
-        with:
-          path: synapse
-
-      - name: Prepare Complement's Prerequisites
-        run: synapse/.ci/scripts/setup_complement_prerequisites.sh
-
-      - run: |
-          set -o pipefail
-          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
-        shell: bash
-        name: Run Complement Tests
-
-  # XXX When complement with workers is stable, move this back into the standard
-  #     "complement" matrix above.
-  #
-  # See https://github.com/matrix-org/synapse/issues/13161
-  complement-workers:
-    if: "${{ !failure() && !cancelled() }}"
-    needs: linting-done
-    runs-on: ubuntu-latest
+          - arrangement: workers
+            database: Postgres
 
     steps:
       - name: Run actions/checkout@v2 for synapse
@@ -363,7 +354,7 @@ jobs:
 
       - run: |
           set -o pipefail
-          POSTGRES=1 WORKERS=1 COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
+          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
         shell: bash
         name: Run Complement Tests
 

+ 280 - 3
CHANGES.md

@@ -1,7 +1,284 @@
-Synapse vNext
-=============
+Synapse 1.66.0rc1 (2022-08-23)
+==============================
+
+This release removes the ability for homeservers to delegate email ownership
+verification and password reset confirmation to identity servers. This removal
+was originally planned for Synapse 1.64, but was later deferred until now.
+
+See the [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details.
+
+Features
+--------
+
+- Improve validation of request bodies for the following client-server API endpoints: [`/account/password`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountpassword), [`/account/password/email/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountpasswordemailrequesttoken), [`/account/deactivate`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountdeactivate) and [`/account/3pid/email/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidemailrequesttoken). ([\#13188](https://github.com/matrix-org/synapse/issues/13188), [\#13563](https://github.com/matrix-org/synapse/issues/13563))
+- Add forgotten status to [Room Details Admin API](https://matrix-org.github.io/synapse/latest/admin_api/rooms.html#room-details-api). ([\#13503](https://github.com/matrix-org/synapse/issues/13503))
+- Add an experimental implementation for [MSC3852 (Expose user agents on `Device`)](https://github.com/matrix-org/matrix-spec-proposals/pull/3852). ([\#13549](https://github.com/matrix-org/synapse/issues/13549))
+- Add `org.matrix.msc2716v4` experimental room version with updated content fields. Part of [MSC2716 (Importing history)](https://github.com/matrix-org/matrix-spec-proposals/pull/2716).  ([\#13551](https://github.com/matrix-org/synapse/issues/13551))
+- Add support for compression to federation responses. ([\#13537](https://github.com/matrix-org/synapse/issues/13537))
+- Improve performance of sending messages in rooms with thousands of local users. ([\#13522](https://github.com/matrix-org/synapse/issues/13522), [\#13547](https://github.com/matrix-org/synapse/issues/13547))
+
+
+Bugfixes
+--------
+
+- Faster room joins: make `/joined_members` block whilst the room is partial stated. ([\#13514](https://github.com/matrix-org/synapse/issues/13514))
+- Fix a bug introduced in Synapse 1.21.0 where the [`/event_reports` Admin API](https://matrix-org.github.io/synapse/develop/admin_api/event_reports.html) could return a total count which was larger than the number of results you can actually query for. ([\#13525](https://github.com/matrix-org/synapse/issues/13525))
+- Fix a bug introduced in Synapse 1.52.0 where sending server notices fails if `max_avatar_size` or `allowed_avatar_mimetypes` is set and not `system_mxid_avatar_url`. ([\#13566](https://github.com/matrix-org/synapse/issues/13566))
+- Fix a bug where the `opentracing.force_tracing_for_users` config option would not apply to [`/sendToDevice`](https://spec.matrix.org/v1.3/client-server-api/#put_matrixclientv3sendtodeviceeventtypetxnid) and [`/keys/upload`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3keysupload) requests. ([\#13574](https://github.com/matrix-org/synapse/issues/13574))
+
+
+Improved Documentation
+----------------------
+
+- Add `openssl` example for generating registration HMAC digest. ([\#13472](https://github.com/matrix-org/synapse/issues/13472))
+- Tidy up Synapse's README. ([\#13491](https://github.com/matrix-org/synapse/issues/13491))
+- Document that event purging related to the `redaction_retention_period` config option is executed only every 5 minutes. ([\#13492](https://github.com/matrix-org/synapse/issues/13492))
+- Add a warning to retention documentation regarding the possibility of database corruption. ([\#13497](https://github.com/matrix-org/synapse/issues/13497))
+- Document that the `DOCKER_BUILDKIT=1` flag is needed to build the docker image. ([\#13515](https://github.com/matrix-org/synapse/issues/13515))
+- Add missing links in `user_consent` section of configuration manual. ([\#13536](https://github.com/matrix-org/synapse/issues/13536))
+- Fix the doc and some warnings that were referring to the nonexistent `custom_templates_directory` setting (instead of `custom_template_directory`). ([\#13538](https://github.com/matrix-org/synapse/issues/13538))
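
For the `openssl` registration-HMAC example mentioned in the documentation entry above (#13472), a hedged sketch of the underlying recipe (the NUL-separated field order and the `admin`/`notadmin` flag are stated from memory of the shared-secret registration Admin API; the canonical walkthrough is in `docs/admin_api/register_api.md`):

    # Compute the MAC for POST /_synapse/admin/v1/register.
    nonce="..."        # obtained from GET /_synapse/admin/v1/register
    user="alice"
    password="hunter2"
    admin="notadmin"   # or "admin"
    secret="the registration_shared_secret from homeserver.yaml"
    printf '%s\0%s\0%s\0%s' "$nonce" "$user" "$password" "$admin" |
      openssl sha1 -hmac "$secret" | awk '{print $2}'
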
+
+
+Deprecations and Removals
+-------------------------
+
+- Remove the ability for homeservers to delegate email ownership verification
+  and password reset confirmation to identity servers. See [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details.
+
+Internal Changes
+----------------
+
+### Faster room joins
+
+- Update the rejected state of events during de-partial-stating. ([\#13459](https://github.com/matrix-org/synapse/issues/13459))
+- Avoid blocking lazy-loading `/sync`s during partial joins due to remote memberships. Pull remote memberships from auth events instead of the room state. ([\#13477](https://github.com/matrix-org/synapse/issues/13477))
+- Refuse to start when faster joins is enabled on a deployment with workers, since worker configurations are not currently supported. ([\#13531](https://github.com/matrix-org/synapse/issues/13531))
+
+### Metrics and tracing
+
+- Allow use of both `@trace` and `@tag_args` stacked on the same function. ([\#13453](https://github.com/matrix-org/synapse/issues/13453))
+- Instrument the federation/backfill part of `/messages` for understandable traces in Jaeger. ([\#13489](https://github.com/matrix-org/synapse/issues/13489))
+- Instrument `FederationStateIdsServlet` (`/state_ids`) for understandable traces in Jaeger. ([\#13499](https://github.com/matrix-org/synapse/issues/13499), [\#13554](https://github.com/matrix-org/synapse/issues/13554))
+- Track HTTP response times over 10 seconds from `/messages` (`synapse_room_message_list_rest_servlet_response_time_seconds`). ([\#13533](https://github.com/matrix-org/synapse/issues/13533))
+- Add metrics to track how the rate limiter is affecting requests (sleep/reject). ([\#13534](https://github.com/matrix-org/synapse/issues/13534), [\#13541](https://github.com/matrix-org/synapse/issues/13541))
+- Add metrics to time how long it takes us to do backfill processing (`synapse_federation_backfill_processing_before_time_seconds`, `synapse_federation_backfill_processing_after_time_seconds`). ([\#13535](https://github.com/matrix-org/synapse/issues/13535), [\#13584](https://github.com/matrix-org/synapse/issues/13584))
+- Add metrics to track rate limiter queue timing (`synapse_rate_limit_queue_wait_time_seconds`). ([\#13544](https://github.com/matrix-org/synapse/issues/13544))
+- Update metrics to track `/messages` response time by room size. ([\#13545](https://github.com/matrix-org/synapse/issues/13545))
+
+### Everything else
+
+- Refactor methods in `synapse.api.auth.Auth` to use `Requester` objects everywhere instead of user IDs. ([\#13024](https://github.com/matrix-org/synapse/issues/13024))
+- Clean-up tests for notifications. ([\#13471](https://github.com/matrix-org/synapse/issues/13471))
+- Add some miscellaneous comments to document sync, especially around `compute_state_delta`. ([\#13474](https://github.com/matrix-org/synapse/issues/13474))
+- Use literals in place of `HTTPStatus` constants in tests. ([\#13479](https://github.com/matrix-org/synapse/issues/13479), [\#13488](https://github.com/matrix-org/synapse/issues/13488))
+- Add comments about how event push actions are rotated. ([\#13485](https://github.com/matrix-org/synapse/issues/13485))
+- Modify HTML template content to better support mobile devices' screen sizes. ([\#13493](https://github.com/matrix-org/synapse/issues/13493))
+- Add a linter script which will reject non-strict types in Pydantic models. ([\#13502](https://github.com/matrix-org/synapse/issues/13502))
+- Reduce the number of tests using legacy TCP replication. ([\#13543](https://github.com/matrix-org/synapse/issues/13543))
+- Allow specifying additional request fields when using the `HomeServerTestCase.login` helper method. ([\#13549](https://github.com/matrix-org/synapse/issues/13549))
+- Make `HomeServerTestCase` load any configured homeserver modules automatically. ([\#13558](https://github.com/matrix-org/synapse/issues/13558))
+
+
+Synapse 1.65.0 (2022-08-16)
+===========================
+
+No significant changes since 1.65.0rc2.
+
+
+Synapse 1.65.0rc2 (2022-08-11)
+==============================
+
+Internal Changes
+----------------
+
+- Revert 'Remove the unspecced `room_id` field in the `/hierarchy` response. ([\#13365](https://github.com/matrix-org/synapse/issues/13365))' to give more time for clients to update. ([\#13501](https://github.com/matrix-org/synapse/issues/13501))
+
+
+Synapse 1.65.0rc1 (2022-08-09)
+==============================
+
+Features
+--------
+
+- Add support for stable prefixes for [MSC2285 (private read receipts)](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). ([\#13273](https://github.com/matrix-org/synapse/issues/13273))
+- Add new unstable error codes `ORG.MATRIX.MSC3848.ALREADY_JOINED`, `ORG.MATRIX.MSC3848.NOT_JOINED`, and `ORG.MATRIX.MSC3848.INSUFFICIENT_POWER` described in [MSC3848](https://github.com/matrix-org/matrix-spec-proposals/pull/3848). ([\#13343](https://github.com/matrix-org/synapse/issues/13343))
+- Use stable prefixes for [MSC3827](https://github.com/matrix-org/matrix-spec-proposals/pull/3827). ([\#13370](https://github.com/matrix-org/synapse/issues/13370))
+- Add a new module API method to translate a room alias into a room ID. ([\#13428](https://github.com/matrix-org/synapse/issues/13428))
+- Add a new module API method to create a room. ([\#13429](https://github.com/matrix-org/synapse/issues/13429))
+- Add remote join capability to the module API's `update_room_membership` method (in a backwards compatible manner). ([\#13441](https://github.com/matrix-org/synapse/issues/13441))
+
+
+Bugfixes
+--------
+
+- Update the version of the LDAP3 auth provider module included in the `matrixdotorg/synapse` DockerHub images and the Debian packages hosted on packages.matrix.org to 0.2.2. This version fixes a regression in the module. ([\#13470](https://github.com/matrix-org/synapse/issues/13470))
+- Fix a bug introduced in Synapse v1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`) (this was reverted in v1.65.0rc2, see changelog notes above). ([\#13365](https://github.com/matrix-org/synapse/issues/13365))
+- Fix a bug introduced in Synapse 0.24.0 that would respond with the wrong error status code to `/joined_members` requests when the requester is not a current member of the room. Contributed by @andrewdoh. ([\#13374](https://github.com/matrix-org/synapse/issues/13374))
+- Fix bug in handling of typing events for appservices. Contributed by Nick @ Beeper (@fizzadar). ([\#13392](https://github.com/matrix-org/synapse/issues/13392))
+- Fix a bug introduced in Synapse 1.57.0 where rooms listed in `exclude_rooms_from_sync` in the configuration file would not be properly excluded from incremental syncs. ([\#13408](https://github.com/matrix-org/synapse/issues/13408))
+- Fix a bug in the experimental faster-room-joins support which could cause it to get stuck in an infinite loop. ([\#13353](https://github.com/matrix-org/synapse/issues/13353))
+- Faster room joins: fix a bug which caused rejected events to become un-rejected during state syncing. ([\#13413](https://github.com/matrix-org/synapse/issues/13413))
+- Faster room joins: fix error when running out of servers to sync partial state with, so that Synapse raises the intended error instead. ([\#13432](https://github.com/matrix-org/synapse/issues/13432))
+
+
+Updates to the Docker image
+---------------------------
+
+- Make Docker images build on armv7 by installing cryptography dependencies in the 'requirements' stage. Contributed by Jasper Spaans. ([\#13372](https://github.com/matrix-org/synapse/issues/13372))
+
+
+Improved Documentation
+----------------------
+
+- Update the 'registration tokens' page to acknowledge that the relevant MSC was merged into version 1.2 of the Matrix specification. Contributed by @moan0s. ([\#11897](https://github.com/matrix-org/synapse/issues/11897))
+- Document which HTTP resources support gzip compression. ([\#13221](https://github.com/matrix-org/synapse/issues/13221))
+- Add steps describing how to elevate an existing user to administrator by manipulating the database. ([\#13230](https://github.com/matrix-org/synapse/issues/13230))
+- Fix wrong headline for `url_preview_accept_language` in documentation. ([\#13437](https://github.com/matrix-org/synapse/issues/13437))
+- Remove redundant 'Contents' section from the Configuration Manual. Contributed by @dklimpel. ([\#13438](https://github.com/matrix-org/synapse/issues/13438))
+- Update documentation for config setting `macaroon_secret_key`. ([\#13443](https://github.com/matrix-org/synapse/issues/13443))
+- Update outdated information on `sso_mapping_providers` documentation. ([\#13449](https://github.com/matrix-org/synapse/issues/13449))
+- Fix example code in module documentation of `password_auth_provider_callbacks`. ([\#13450](https://github.com/matrix-org/synapse/issues/13450))
+- Make the configuration for the cache clearer. ([\#13481](https://github.com/matrix-org/synapse/issues/13481))
+
+
+Internal Changes
+----------------
+
+- Extend the release script to automatically push a new SyTest branch, rather than having that be a manual process. ([\#12978](https://github.com/matrix-org/synapse/issues/12978))
+- Make minor clarifications to the error messages given when we fail to join a room via any server. ([\#13160](https://github.com/matrix-org/synapse/issues/13160))
+- Enable Complement CI tests in the 'latest deps' test run. ([\#13213](https://github.com/matrix-org/synapse/issues/13213))
+- Fix long-standing bugged logic which was never hit in `get_pdu` asking every remote destination even after it finds an event. ([\#13346](https://github.com/matrix-org/synapse/issues/13346))
+- Faster room joins: avoid blocking when pulling events with partially missing prev events. ([\#13355](https://github.com/matrix-org/synapse/issues/13355))
+- Instrument `/messages` for understandable traces in Jaeger. ([\#13368](https://github.com/matrix-org/synapse/issues/13368))
+- Remove an unused argument to `get_relations_for_event`. ([\#13383](https://github.com/matrix-org/synapse/issues/13383))
+- Add a `merge-back` command to the release script, which automates merging the correct branches after a release. ([\#13393](https://github.com/matrix-org/synapse/issues/13393))
+- Adding missing type hints to tests. ([\#13397](https://github.com/matrix-org/synapse/issues/13397))
+- Faster Room Joins: don't leave a stuck room partial state flag if the join fails. ([\#13403](https://github.com/matrix-org/synapse/issues/13403))
+- Refactor `_resolve_state_at_missing_prevs` to compute an `EventContext` instead. ([\#13404](https://github.com/matrix-org/synapse/issues/13404), [\#13431](https://github.com/matrix-org/synapse/issues/13431))
+- Faster Room Joins: prevent Synapse from answering federated join requests for a room which it has not fully joined yet. ([\#13416](https://github.com/matrix-org/synapse/issues/13416))
+- Re-enable running Complement tests against Synapse with workers. ([\#13420](https://github.com/matrix-org/synapse/issues/13420))
+- Prevent unnecessary lookups to any external `get_event` cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13435](https://github.com/matrix-org/synapse/issues/13435))
+- Add some tracing to give more insight into local room joins. ([\#13439](https://github.com/matrix-org/synapse/issues/13439))
+- Rename class `RateLimitConfig` to `RatelimitSettings` and `FederationRateLimitConfig` to `FederationRatelimitSettings`. ([\#13442](https://github.com/matrix-org/synapse/issues/13442))
+- Add some comments about how event push actions are stored. ([\#13445](https://github.com/matrix-org/synapse/issues/13445), [\#13455](https://github.com/matrix-org/synapse/issues/13455))
+- Improve rebuild speed for the "synapse-workers" docker image. ([\#13447](https://github.com/matrix-org/synapse/issues/13447))
+- Fix `@tag_args` being off-by-one with the arguments when tagging a span (tracing). ([\#13452](https://github.com/matrix-org/synapse/issues/13452))
+- Update type of `EventContext.rejected`. ([\#13460](https://github.com/matrix-org/synapse/issues/13460))
+- Use literals in place of `HTTPStatus` constants in tests. ([\#13463](https://github.com/matrix-org/synapse/issues/13463), [\#13469](https://github.com/matrix-org/synapse/issues/13469))
+- Correct a misnamed argument in state res v2 internals. ([\#13467](https://github.com/matrix-org/synapse/issues/13467))
+
+
+Synapse 1.64.0 (2022-08-02)
+===========================
+
+No significant changes since 1.64.0rc2.
+
+
+Deprecation Warning
+-------------------
+
+Synapse v1.66.0 will remove the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server.
+
+If you require your homeserver to verify e-mail addresses or to support password resets via e-mail, please configure your homeserver with SMTP access so that it can send e-mails on its own behalf.
+[Consult the configuration documentation for more information.](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#email)
+
+
+Synapse 1.64.0rc2 (2022-07-29)
+==============================
+
+This RC reintroduces support for `account_threepid_delegates.email`, which was removed in 1.64.0rc1. It remains deprecated and will be removed altogether in Synapse v1.66.0. ([\#13406](https://github.com/matrix-org/synapse/issues/13406))
+
+
+Synapse 1.64.0rc1 (2022-07-26)
+==============================
+
+This RC removed the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server.
+
+We have also stopped building `.deb` packages for Ubuntu 21.10 as it is no longer an active version of Ubuntu.
+
+
+Features
+--------
+
+- Improve error messages when media thumbnails cannot be served. ([\#13038](https://github.com/matrix-org/synapse/issues/13038))
+- Allow pagination from remote event after discovering it from [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event`. ([\#13205](https://github.com/matrix-org/synapse/issues/13205))
+- Add a `room_type` field in the responses for the list room and room details admin APIs. Contributed by @andrewdoh. ([\#13208](https://github.com/matrix-org/synapse/issues/13208))
+- Add support for room version 10. ([\#13220](https://github.com/matrix-org/synapse/issues/13220))
+- Add per-room rate limiting for room joins. For each room, Synapse now monitors the rate of join events in that room, and throttles additional joins if that rate grows too large. ([\#13253](https://github.com/matrix-org/synapse/issues/13253), [\#13254](https://github.com/matrix-org/synapse/issues/13254), [\#13255](https://github.com/matrix-org/synapse/issues/13255), [\#13276](https://github.com/matrix-org/synapse/issues/13276))
+- Support Implicit TLS (TLS without using a STARTTLS upgrade, typically on port 465) for sending emails, enabled by the new option `force_tls`. Contributed by Jan Schär. ([\#13317](https://github.com/matrix-org/synapse/issues/13317))
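
Since `force_tls` is a new config option, here is a hedged sketch of what enabling it could look like in `homeserver.yaml` (the surrounding `email` keys are assumed from the standard configuration manual rather than from this changelog entry):

    # Send outbound email over Implicit TLS on port 465 (no STARTTLS upgrade).
    cat >> homeserver.yaml <<'EOF'
    email:
      smtp_host: mail.example.com
      smtp_port: 465
      force_tls: true
      notif_from: "Your %(app)s homeserver <noreply@example.com>"
    EOF
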
+
+
+Bugfixes
+--------
+
+- Fix a bug introduced in Synapse 1.15.0 where adding a user through the Synapse Admin API with a phone number would fail if the `enable_email_notifs` and `email_notifs_for_new_users` options were enabled. Contributed by @thomasweston12. ([\#13263](https://github.com/matrix-org/synapse/issues/13263))
+- Fix a bug introduced in Synapse 1.40.0 where a user invited to a restricted room would be briefly unable to join. ([\#13270](https://github.com/matrix-org/synapse/issues/13270))
+- Fix a long-standing bug where, in rare instances, Synapse could store the incorrect state for a room after a state resolution. ([\#13278](https://github.com/matrix-org/synapse/issues/13278))
+- Fix a bug introduced in v1.18.0 where the `synapse_pushers` metric would overcount pushers when they are replaced. ([\#13296](https://github.com/matrix-org/synapse/issues/13296))
+- Disable autocorrection and autocapitalisation on the username text field shown during registration when using SSO. ([\#13350](https://github.com/matrix-org/synapse/issues/13350))
+- Update locked version of `frozendict` to 2.3.3, which has fixes for memory leaks affecting `/sync`. ([\#13284](https://github.com/matrix-org/synapse/issues/13284), [\#13352](https://github.com/matrix-org/synapse/issues/13352))
+
+
+Improved Documentation
+----------------------
+
+- Provide an example of using the Admin API. Contributed by @jejo86. ([\#13231](https://github.com/matrix-org/synapse/issues/13231))
+- Move the documentation for how URL previews work to the URL preview module. ([\#13233](https://github.com/matrix-org/synapse/issues/13233), [\#13261](https://github.com/matrix-org/synapse/issues/13261))
+- Add another `contrib` script to help set up worker processes. Contributed by @villepeh. ([\#13271](https://github.com/matrix-org/synapse/issues/13271))
+- Document that certain config options were added or changed in Synapse 1.62. Contributed by @behrmann. ([\#13314](https://github.com/matrix-org/synapse/issues/13314))
+- Document the new `rc_invites.per_issuer` throttling option added in Synapse 1.63. ([\#13333](https://github.com/matrix-org/synapse/issues/13333))
+- Mention that BuildKit is needed when building Docker images for tests. ([\#13338](https://github.com/matrix-org/synapse/issues/13338))
+- Improve Caddy reverse proxy documentation. ([\#13344](https://github.com/matrix-org/synapse/issues/13344))
+
+
+Deprecations and Removals
+-------------------------
+
+- Drop tables that were formerly used for groups/communities. ([\#12967](https://github.com/matrix-org/synapse/issues/12967))
+- Drop support for delegating email verification to an external server. ([\#13192](https://github.com/matrix-org/synapse/issues/13192))
+- Drop support for calling `/_matrix/client/v3/account/3pid/bind` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu. ([\#13239](https://github.com/matrix-org/synapse/issues/13239))
+- Stop building `.deb` packages for Ubuntu 21.10 (Impish Indri), which has reached end of life. ([\#13326](https://github.com/matrix-org/synapse/issues/13326))
+
+
+Internal Changes
+----------------
 
-As of this release, Synapse no longer allows the tasks of verifying email address ownership, and password reset confirmation, to be delegated to an identity server. For more information, see the [upgrade notes](https://matrix-org.github.io/synapse/v1.64/upgrade.html#upgrading-to-v1640).
+- Use lower transaction isolation level when purging rooms to avoid serialization errors. Contributed by Nick @ Beeper. ([\#12942](https://github.com/matrix-org/synapse/issues/12942))
+- Remove code which incorrectly attempted to reconcile state with remote servers when processing incoming events. ([\#12943](https://github.com/matrix-org/synapse/issues/12943))
+- Make the AS login method call `Auth.get_user_by_req` for checking the AS token. ([\#13094](https://github.com/matrix-org/synapse/issues/13094))
+- Always use a version of canonicaljson that supports the C implementation of frozendict. ([\#13172](https://github.com/matrix-org/synapse/issues/13172))
+- Add prometheus counters for ephemeral events and to device messages pushed to app services. Contributed by Brad @ Beeper. ([\#13175](https://github.com/matrix-org/synapse/issues/13175))
+- Refactor receipts servlet logic to avoid duplicated code. ([\#13198](https://github.com/matrix-org/synapse/issues/13198))
+- Preparation for database schema simplifications: populate `state_key` and `rejection_reason` for existing rows in the `events` table. ([\#13215](https://github.com/matrix-org/synapse/issues/13215))
+- Remove unused database table `event_reference_hashes`. ([\#13218](https://github.com/matrix-org/synapse/issues/13218))
+- Further reduce queries used sending events when creating new rooms. Contributed by Nick @ Beeper (@fizzadar). ([\#13224](https://github.com/matrix-org/synapse/issues/13224))
+- Call the v2 identity service `/3pid/unbind` endpoint, rather than v1. Contributed by @Vetchu. ([\#13240](https://github.com/matrix-org/synapse/issues/13240))
+- Use an asynchronous cache wrapper for the get event cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13242](https://github.com/matrix-org/synapse/issues/13242), [\#13308](https://github.com/matrix-org/synapse/issues/13308))
+- Optimise federation sender and appservice pusher event stream processing queries. Contributed by Nick @ Beeper (@fizzadar). ([\#13251](https://github.com/matrix-org/synapse/issues/13251))
+- Log the stack when waiting for an entire room to be un-partial stated. ([\#13257](https://github.com/matrix-org/synapse/issues/13257))
+- Fix spurious warning when fetching state after a missing prev event. ([\#13258](https://github.com/matrix-org/synapse/issues/13258))
+- Clean-up tests for notifications. ([\#13260](https://github.com/matrix-org/synapse/issues/13260))
+- Do not fail build if complement with workers fails. ([\#13266](https://github.com/matrix-org/synapse/issues/13266))
+- Don't pull out state in `compute_event_context` for unconflicted state. ([\#13267](https://github.com/matrix-org/synapse/issues/13267), [\#13274](https://github.com/matrix-org/synapse/issues/13274))
+- Reduce the rebuild time for the complement-synapse docker image. ([\#13279](https://github.com/matrix-org/synapse/issues/13279))
+- Don't pull out the full state when creating an event. ([\#13281](https://github.com/matrix-org/synapse/issues/13281), [\#13307](https://github.com/matrix-org/synapse/issues/13307))
+- Upgrade from Poetry 1.1.12 to 1.1.14, to fix bugs when locking packages. ([\#13285](https://github.com/matrix-org/synapse/issues/13285))
+- Make `DictionaryCache` expire full entries if they haven't been queried in a while, even if specific keys have been queried recently. ([\#13292](https://github.com/matrix-org/synapse/issues/13292))
+- Use `HTTPStatus` constants in place of literals in tests. ([\#13297](https://github.com/matrix-org/synapse/issues/13297))
+- Improve performance of query  `_get_subset_users_in_room_with_profiles`. ([\#13299](https://github.com/matrix-org/synapse/issues/13299))
+- Up batch size of `bulk_get_push_rules` and `_get_joined_profiles_from_event_ids`. ([\#13300](https://github.com/matrix-org/synapse/issues/13300))
+- Remove unnecessary `json.dumps` from tests. ([\#13303](https://github.com/matrix-org/synapse/issues/13303))
+- Reduce memory usage of sending dummy events. ([\#13310](https://github.com/matrix-org/synapse/issues/13310))
+- Prevent formatting changes of [#3679](https://github.com/matrix-org/synapse/pull/3679) from appearing in `git blame`. ([\#13311](https://github.com/matrix-org/synapse/issues/13311))
+- Change `get_users_in_room` and `get_rooms_for_user` caches to enable pruning of old entries. ([\#13313](https://github.com/matrix-org/synapse/issues/13313))
+- Validate federation destinations and log an error if a destination is invalid. ([\#13318](https://github.com/matrix-org/synapse/issues/13318))
+- Fix `FederationClient.get_pdu()` returning events from the cache as `outliers` instead of original events we saw over federation. ([\#13320](https://github.com/matrix-org/synapse/issues/13320))
+- Reduce memory usage of state caches. ([\#13323](https://github.com/matrix-org/synapse/issues/13323))
+- Reduce the amount of state we store in the `state_cache`. ([\#13324](https://github.com/matrix-org/synapse/issues/13324))
+- Add missing type hints to open tracing module. ([\#13328](https://github.com/matrix-org/synapse/issues/13328), [\#13345](https://github.com/matrix-org/synapse/issues/13345), [\#13362](https://github.com/matrix-org/synapse/issues/13362))
+- Remove old base slaved store and de-duplicate cache ID generators. Contributed by Nick @ Beeper (@fizzadar). ([\#13329](https://github.com/matrix-org/synapse/issues/13329), [\#13349](https://github.com/matrix-org/synapse/issues/13349))
+- When reporting metrics is enabled, use ~8x less data to describe DB transaction metrics. ([\#13342](https://github.com/matrix-org/synapse/issues/13342))
+- Faster room joins: skip soft fail checks while Synapse only has partial room state, since the current membership of event senders may not be accurately known. ([\#13354](https://github.com/matrix-org/synapse/issues/13354))
 
 
 Synapse 1.63.1 (2022-07-20)

+ 107 - 343
README.rst

@@ -2,107 +2,111 @@
 Synapse |support| |development| |documentation| |license| |pypi| |python|
 =========================================================================
 
+Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver written and
+maintained by the Matrix.org Foundation. We began rapid development in 2014,
+reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues
+in earnest today.
+
+Briefly, Matrix is an open standard for communications on the internet, supporting
+federation, encryption and VoIP. Matrix.org has more to say about the `goals of the
+Matrix project <https://matrix.org/docs/guides/introduction>`_, and the `formal specification
+<https://spec.matrix.org/>`_ describes the technical details.
+
 .. contents::
 
-Introduction
-============
+Installing and configuration
+============================
 
-Matrix is an ambitious new ecosystem for open federated Instant Messaging and
-VoIP.  The basics you need to know to get up and running are:
+The Synapse documentation describes `how to install Synapse <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_. We recommend using
+`Docker images <https://matrix-org.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
+<https://matrix-org.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
 
-- Everything in Matrix happens in a room.  Rooms are distributed and do not
-  exist on any single server.  Rooms can be located using convenience aliases
-  like ``#matrix:matrix.org`` or ``#test:localhost:8448``.
+.. _federation:
 
-- Matrix user IDs look like ``@matthew:matrix.org`` (although in the future
-  you will normally refer to yourself and others using a third party identifier
-  (3PID): email address, phone number, etc rather than manipulating Matrix user IDs)
+Synapse has a variety of `config options
+<https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
+which can be used to customise its behaviour after installation.
+There are additional details on how to `configure Synapse for federation here
+<https://matrix-org.github.io/synapse/latest/federate.html>`_.
 
-The overall architecture is::
+.. _reverse-proxy:
 
-      client <----> homeserver <=====================> homeserver <----> client
-             https://somewhere.org/_matrix      https://elsewhere.net/_matrix
+Using a reverse proxy with Synapse
+----------------------------------
 
-``#matrix:matrix.org`` is the official support room for Matrix, and can be
-accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html or
-via IRC bridge at irc://irc.libera.chat/matrix.
+It is recommended to put a reverse proxy such as
+`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
+`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
+`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
+`HAProxy <https://www.haproxy.org/>`_ or
+`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
+doing so is that it means that you can expose the default https port (443) to
+Matrix clients without needing to run Synapse with root privileges.
+For information on configuring one, see `the reverse proxy docs
+<https://matrix-org.github.io/synapse/latest/reverse_proxy.html>`_.
 
-Synapse is currently in rapid development, but as of version 0.5 we believe it
-is sufficiently stable to be run as an internet-facing service for real usage!
+Upgrading an existing Synapse
+-----------------------------
 
-About Matrix
-============
+The instructions for upgrading Synapse are in `the upgrade notes`_.
+Please check these instructions as upgrading may require extra steps for some
+versions of Synapse.
 
-Matrix specifies a set of pragmatic RESTful HTTP JSON APIs as an open standard,
-which handle:
+.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
 
-- Creating and managing fully distributed chat rooms with no
-  single points of control or failure
-- Eventually-consistent cryptographically secure synchronisation of room
-  state across a global open network of federated servers and services
-- Sending and receiving extensible messages in a room with (optional)
-  end-to-end encryption
-- Inviting, joining, leaving, kicking, banning room members
-- Managing user accounts (registration, login, logout)
-- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
-  Facebook accounts to authenticate, identify and discover users on Matrix.
-- Placing 1:1 VoIP and Video calls
 
-These APIs are intended to be implemented on a wide range of servers, services
-and clients, letting developers build messaging and VoIP functionality on top
-of the entirely open Matrix ecosystem rather than using closed or proprietary
-solutions. The hope is for Matrix to act as the building blocks for a new
-generation of fully open and interoperable messaging and VoIP apps for the
-internet.
+Platform dependencies
+---------------------
 
-Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
-team, written in Python 3/Twisted.
+Synapse uses a number of platform dependencies such as Python and PostgreSQL,
+and aims to follow supported upstream versions. See the
+`deprecation policy <https://matrix-org.github.io/synapse/latest/deprecation_policy.html>`_
+for more details.
 
-In Matrix, every user runs one or more Matrix clients, which connect through to
-a Matrix homeserver. The homeserver stores all their personal chat history and
-user account information - much as a mail client connects through to an
-IMAP/SMTP server. Just like email, you can either run your own Matrix
-homeserver and control and own your own communications and history or use one
-hosted by someone else (e.g. matrix.org) - there is no single point of control
-or mandatory service provider in Matrix, unlike WhatsApp, Facebook, Hangouts,
-etc.
 
-We'd like to invite you to join #matrix:matrix.org (via
-https://matrix.org/docs/projects/try-matrix-now.html), run a homeserver, take a look
-at the `Matrix spec <https://matrix.org/docs/spec>`_, and experiment with the
-`APIs <https://matrix.org/docs/api>`_ and `Client SDKs
-<https://matrix.org/docs/projects/try-matrix-now.html#client-sdks>`_.
+Security note
+-------------
 
-Thanks for using Matrix!
+Matrix serves raw, user-supplied data in some APIs -- specifically the `content
+repository endpoints`_.
 
-Support
-=======
+.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid
 
-For support installing or managing Synapse, please join |room|_ (from a matrix.org
-account if necessary) and ask questions there. We do not use GitHub issues for
-support requests, only for bug reports and feature requests.
+Whilst we make a reasonable effort to mitigate against XSS attacks (for
+instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
+domain hosting other web applications. This especially applies to sharing
+the domain with Matrix web clients and other sensitive applications like
+webmail. See
+https://developer.github.com/changes/2014-04-25-user-content-security for more
+information.
 
-Synapse's documentation is `nicely rendered on GitHub Pages <https://matrix-org.github.io/synapse>`_,
-with its source available in |docs|_.
+.. _CSP: https://github.com/matrix-org/synapse/pull/1021
 
-.. |room| replace:: ``#synapse:matrix.org``
-.. _room: https://matrix.to/#/#synapse:matrix.org
+Ideally, the homeserver should not simply be on a different subdomain, but on
+a completely different `registered domain`_ (also known as top-level site or
+eTLD+1). This is because `some attacks`_ are still possible as long as the two
+applications share the same registered domain.
 
-.. |docs| replace:: ``docs``
-.. _docs: docs
+.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3
 
-Synapse Installation
-====================
+.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie
 
-.. _federation:
+To illustrate this with an example, if your Element Web or other sensitive web
+application is hosted on ``A.example1.com``, you should ideally host Synapse on
+``example2.com``. Some amount of protection is offered by hosting on
+``B.example1.com`` instead, so this is also acceptable in some scenarios.
+However, you should *not* host your Synapse on ``A.example1.com``.
+
+Note that all of the above refers exclusively to the domain used in Synapse's
+``public_baseurl`` setting. In particular, it has no bearing on the domain
+mentioned in MXIDs hosted on that server.
 
-* For details on how to install synapse, see
-  `Installation Instructions <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_.
-* For specific details on how to configure Synapse for federation see `docs/federate.md <docs/federate.md>`_
+Following this advice ensures that even if an XSS is found in Synapse, the
+impact to other applications will be minimal.
 
 
-Connecting to Synapse from a client
-===================================
+Testing a new installation
+==========================
 
 The easiest way to try out your new Synapse installation is by connecting to it
 from a web client.
@@ -129,11 +133,20 @@ Registering a new user from a client
 ------------------------------------
 
 By default, registration of new users via Matrix clients is disabled. To enable
-it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
-recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.md>`_.)
+it:
+
+1. In the
+   `registration config section <https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
+   set ``enable_registration: true`` in ``homeserver.yaml``.
+2. Then **either**:
 
-Once ``enable_registration`` is set to ``true``, it is possible to register a
-user via a Matrix client.
+   a. set up a `CAPTCHA <https://matrix-org.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
+   b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.
+
+We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
+the public internet. Without it, anyone can freely register accounts on your homeserver.
+This can be exploited by attackers to create spambots targeting the rest of the Matrix
+federation.
 
 Your new user name will be formed partly from the ``server_name``, and partly
 from a localpart you specify when you create the account. Your name will take
@@ -146,71 +159,22 @@ the form of::
 As when logging in, you will need to specify a "Custom server".  Specify your
 desired ``localpart`` in the 'User name' box.
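
A minimal sketch of the resulting `homeserver.yaml` changes for step 1 plus option (a) above (the reCAPTCHA key names are assumed from the configuration manual, not from this README text):

    cat >> homeserver.yaml <<'EOF'
    enable_registration: true
    # Option (a): gate registration behind a CAPTCHA.
    enable_registration_captcha: true
    recaptcha_public_key: "YOUR_RECAPTCHA_SITE_KEY"
    recaptcha_private_key: "YOUR_RECAPTCHA_SECRET_KEY"
    EOF
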
 
-Security note
-=============
+Troubleshooting and support
+===========================
 
-Matrix serves raw, user-supplied data in some APIs -- specifically the `content
-repository endpoints`_.
+The `Admin FAQ <https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html>`_
+includes tips on dealing with some common problems. For more details, see
+`Synapse's wider documentation <https://matrix-org.github.io/synapse/latest/>`_.
 
-.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid
-
-Whilst we make a reasonable effort to mitigate against XSS attacks (for
-instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
-domain hosting other web applications. This especially applies to sharing
-the domain with Matrix web clients and other sensitive applications like
-webmail. See
-https://developer.github.com/changes/2014-04-25-user-content-security for more
-information.
-
-.. _CSP: https://github.com/matrix-org/synapse/pull/1021
-
-Ideally, the homeserver should not simply be on a different subdomain, but on
-a completely different `registered domain`_ (also known as top-level site or
-eTLD+1). This is because `some attacks`_ are still possible as long as the two
-applications share the same registered domain.
-
-.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3
-
-.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie
+For additional support installing or managing Synapse, please ask in the community
+support room |room|_ (from a matrix.org account if necessary). We do not use GitHub
+issues for support requests, only for bug reports and feature requests.
 
-To illustrate this with an example, if your Element Web or other sensitive web
-application is hosted on ``A.example1.com``, you should ideally host Synapse on
-``example2.com``. Some amount of protection is offered by hosting on
-``B.example1.com`` instead, so this is also acceptable in some scenarios.
-However, you should *not* host your Synapse on ``A.example1.com``.
-
-Note that all of the above refers exclusively to the domain used in Synapse's
-``public_baseurl`` setting. In particular, it has no bearing on the domain
-mentioned in MXIDs hosted on that server.
-
-Following this advice ensures that even if an XSS is found in Synapse, the
-impact to other applications will be minimal.
-
-
-Upgrading an existing Synapse
-=============================
-
-The instructions for upgrading synapse are in `the upgrade notes`_.
-Please check these instructions as upgrading may require extra steps for some
-versions of synapse.
-
-.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
-
-.. _reverse-proxy:
-
-Using a reverse proxy with Synapse
-==================================
-
-It is recommended to put a reverse proxy such as
-`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
-`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
-`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
-`HAProxy <https://www.haproxy.org/>`_ or
-`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
-doing so is that it means that you can expose the default https port (443) to
-Matrix clients without needing to run Synapse with root privileges.
+.. |room| replace:: ``#synapse:matrix.org``
+.. _room: https://matrix.to/#/#synapse:matrix.org
 
-For information on configuring one, see `<docs/reverse_proxy.md>`_.
+.. |docs| replace:: ``docs``
+.. _docs: docs
 
 Identity Servers
 ================
@@ -242,34 +206,15 @@ an email address with your account, or send an invite to another user via their
 email address.
 
 
-Password reset
-==============
-
-Users can reset their password through their client. Alternatively, a server admin
-can reset a users password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
-or by directly editing the database as shown below.
-
-First calculate the hash of the new password::
-
-    $ ~/synapse/env/bin/hash_password
-    Password:
-    Confirm password:
-    $2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
-
-Then update the ``users`` table in the database::
-
-    UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
-        WHERE name='@test:test.com';
-
-
-Synapse Development
-===================
+Development
+===========
 
+We welcome contributions to Synapse from the community!
 The best place to get started is our
 `guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
 This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
-information for synapse developers as well as synapse administrators.
+information for Synapse developers as well as Synapse administrators.
 
 Developers might be particularly interested in:
 
 * `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
@@ -280,187 +225,6 @@ Alongside all that, join our developer community on Matrix:
 `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
 
 
-Quick start
------------
-
-Before setting up a development environment for synapse, make sure you have the
-system dependencies (such as the python header files) installed - see
-`Platform-specific prerequisites <https://matrix-org.github.io/synapse/latest/setup/installation.html#platform-specific-prerequisites>`_.
-
-To check out a synapse for development, clone the git repo into a working
-directory of your choice::
-
-    git clone https://github.com/matrix-org/synapse.git
-    cd synapse
-
-Synapse has a number of external dependencies. We maintain a fixed development
-environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
-
-    pip install --user pipx
-    pipx install poetry
-
-as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
-(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
-for other installation methods.) Then ask poetry to create a virtual environment
-from the project and install Synapse's dependencies::
-
-    poetry install --extras "all test"
-
-This will run a process of downloading and installing all the needed
-dependencies into a virtual env.
-
-We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
-
-    poetry run ./demo/start.sh
-
-(to stop, you can use ``poetry run ./demo/stop.sh``)
-
-See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
-for more information.
-
-If you just want to start a single instance of the app and run it directly::
-
-    # Create the homeserver.yaml config once
-    poetry run synapse_homeserver \
-      --server-name my.domain.name \
-      --config-path homeserver.yaml \
-      --generate-config \
-      --report-stats=[yes|no]
-
-    # Start the app
-    poetry run synapse_homeserver --config-path homeserver.yaml
-
-
-Running the unit tests
-----------------------
-
-After getting up and running, you may wish to run Synapse's unit tests to
-check that everything is installed correctly::
-
-    poetry run trial tests
-
-This should end with a 'PASSED' result (note that exact numbers will
-differ)::
-
-    Ran 1337 tests in 716.064s
-
-    PASSED (skips=15, successes=1322)
-
-For more tips on running the unit tests, like running a specific test or
-to see the logging output, see the `CONTRIBUTING doc <CONTRIBUTING.md#run-the-unit-tests>`_.
-
-
-Running the Integration Tests
------------------------------
-
-Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
-a Matrix homeserver integration testing suite, which uses HTTP requests to
-access the API as a Matrix client would. It is able to run Synapse directly from
-the source tree, so installation of the server is not required.
-
-Testing with SyTest is recommended for verifying that changes related to the
-Client-Server API are functioning correctly. See the `SyTest installation
-instructions <https://github.com/matrix-org/sytest#installing>`_ for details.
-
-
-Platform dependencies
-=====================
-
-Synapse uses a number of platform dependencies such as Python and PostgreSQL,
-and aims to follow supported upstream versions. See the
-`<docs/deprecation_policy.md>`_ document for more details.
-
-
-Troubleshooting
-===============
-
-Need help? Join our community support room on Matrix:
-`#synapse:matrix.org <https://matrix.to/#/#synapse:matrix.org>`_
-
-Running out of File Handles
----------------------------
-
-If synapse runs out of file handles, it typically fails badly - live-locking
-at 100% CPU, and/or failing to accept new TCP connections (blocking the
-connecting client).  Matrix currently can legitimately use a lot of file handles,
-thanks to busy rooms like #matrix:matrix.org containing hundreds of participating
-servers.  The first time a server talks in a room it will try to connect
-simultaneously to all participating servers, which could exhaust the available
-file descriptors between DNS queries & HTTPS sockets, especially if DNS is slow
-to respond. (We need to improve the routing algorithm used to be better than
-full mesh, but as of March 2019 this hasn't happened yet).
-
-If you hit this failure mode, we recommend increasing the maximum number of
-open file handles to be at least 4096 (assuming a default of 1024 or 256).
-This is typically done by editing ``/etc/security/limits.conf``
-
-Separately, Synapse may leak file handles if inbound HTTP requests get stuck
-during processing - e.g. blocked behind a lock or talking to a remote server etc.
-This is best diagnosed by matching up the 'Received request' and 'Processed request'
-log lines and looking for any 'Processed request' lines which take more than
-a few seconds to execute. Please let us know at #synapse:matrix.org if
-you see this failure mode so we can help debug it, however.
-
-Help!! Synapse is slow and eats all my RAM/CPU!
------------------------------------------------
-
-First, ensure you are running the latest version of Synapse, using Python 3
-with a PostgreSQL database.
-
-Synapse's architecture is quite RAM hungry currently - we deliberately
-cache a lot of recent room data and metadata in RAM in order to speed up
-common requests. We'll improve this in the future, but for now the easiest
-way to reduce the RAM usage (at the risk of slowing things down)
-is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
-variable. The default is 0.5, which can be decreased to reduce RAM usage
-in memory constrained environments, or increased if performance starts to
-degrade.
-
-However, degraded performance due to a low cache factor, common on
-machines with slow disks, often leads to explosions in memory use due to
-backlogged requests. In this case, reducing the cache factor will make
-things worse. Instead, try increasing it drastically. 2.0 is a good
-starting value.
-
-Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
-improvement in overall memory use, and especially in terms of giving back
-RAM to the OS. To use it, the library must simply be put in the
-LD_PRELOAD environment variable when launching Synapse. On Debian, this
-can be done by installing the ``libjemalloc1`` package and adding this
-line to ``/etc/default/matrix-synapse``::
-
-    LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
-
-This can make a significant difference on Python 2.7 - it's unclear how
-much of an improvement it provides on Python 3.x.
-
-If you're encountering high CPU use by the Synapse process itself, you
-may be affected by a bug with presence tracking that leads to a
-massive excess of outgoing federation requests (see `discussion
-<https://github.com/matrix-org/synapse/issues/3971>`_). If metrics
-indicate that your server is also issuing far more outgoing federation
-requests than can be accounted for by your users' activity, this is a
-likely cause. The misbehavior can be worked around by setting
-the following in the Synapse config file:
-
-.. code-block:: yaml
-
-   presence:
-       enabled: false
-
-People can't accept room invitations from me
---------------------------------------------
-
-The typical failure mode here is that you send an invitation to someone
-to join a room or direct chat, but when they go to accept it, they get an
-error (typically along the lines of "Invalid signature"). They might see
-something like the following in their logs::
-
-    2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
-
-This is normally caused by a misconfiguration in your reverse-proxy. See
-`<docs/reverse_proxy.md>`_ and double-check that your settings are correct.
-
 .. |support| image:: https://img.shields.io/matrix/synapse:matrix.org?label=support&logo=matrix
   :alt: (get support on #synapse:matrix.org)
   :target: https://matrix.to/#/#synapse:matrix.org

+ 0 - 1
changelog.d/12942.misc

@@ -1 +0,0 @@
-Use lower isolation level when purging rooms to avoid serialization errors. Contributed by Nick @ Beeper.

+ 0 - 1
changelog.d/12943.misc

@@ -1 +0,0 @@
-Remove code which incorrectly attempted to reconcile state with remote servers when processing incoming events.

+ 0 - 1
changelog.d/12967.removal

@@ -1 +0,0 @@
-Drop tables used for groups/communities.

+ 0 - 1
changelog.d/13038.feature

@@ -1 +0,0 @@
-Provide more info about why we don't have any thumbnails to serve.

+ 0 - 1
changelog.d/13094.misc

@@ -1 +0,0 @@
-Make the AS login method call `Auth.get_user_by_req` for checking the AS token.

+ 0 - 1
changelog.d/13172.misc

@@ -1 +0,0 @@
-Always use a version of canonicaljson that supports the C implementation of frozendict.

+ 0 - 1
changelog.d/13175.misc

@@ -1 +0,0 @@
-Add Prometheus counters for ephemeral events and to-device messages pushed to app services. Contributed by Brad @ Beeper.

+ 0 - 1
changelog.d/13192.removal

@@ -1 +0,0 @@
-Drop support for delegating email verification to an external server.

+ 0 - 1
changelog.d/13198.misc

@@ -1 +0,0 @@
-Refactor receipts servlet logic to avoid duplicated code.

+ 0 - 1
changelog.d/13208.feature

@@ -1 +0,0 @@
-Add a `room_type` field in the responses for the list room and room details admin API. Contributed by @andrewdoh.

+ 0 - 1
changelog.d/13215.misc

@@ -1 +0,0 @@
-Preparation for database schema simplifications: populate `state_key` and `rejection_reason` for existing rows in the `events` table.

+ 0 - 1
changelog.d/13218.misc

@@ -1 +0,0 @@
-Remove unused database table `event_reference_hashes`.

+ 0 - 1
changelog.d/13220.feature

@@ -1 +0,0 @@
-Add support for room version 10.

+ 0 - 1
changelog.d/13224.misc

@@ -1 +0,0 @@
-Further reduce the number of queries used when sending events while creating new rooms. Contributed by Nick @ Beeper (@fizzadar).

+ 0 - 1
changelog.d/13231.doc

@@ -1 +0,0 @@
-Provide an example of using the Admin API. Contributed by @jejo86.

+ 0 - 1
changelog.d/13233.doc

@@ -1 +0,0 @@
-Move the documentation for how URL previews work to the URL preview module.

+ 0 - 1
changelog.d/13239.removal

@@ -1 +0,0 @@
-Drop support for calling `/_matrix/client/v3/account/3pid/bind` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu.

+ 0 - 1
changelog.d/13240.misc

@@ -1 +0,0 @@
-Call the v2 identity service `/3pid/unbind` endpoint, rather than v1.

+ 0 - 1
changelog.d/13242.misc

@@ -1 +0,0 @@
-Use an asynchronous cache wrapper for the get event cache. Contributed by Nick @ Beeper (@fizzadar).

+ 0 - 1
changelog.d/13251.misc

@@ -1 +0,0 @@
-Optimise federation sender and appservice pusher event stream processing queries. Contributed by Nick @ Beeper (@fizzadar).

+ 0 - 1
changelog.d/13253.misc

@@ -1 +0,0 @@
-Preparatory work for a per-room rate limiter on joins.

+ 0 - 1
changelog.d/13254.misc

@@ -1 +0,0 @@
-Preparatory work for a per-room rate limiter on joins.

+ 0 - 1
changelog.d/13255.misc

@@ -1 +0,0 @@
-Preparatory work for a per-room rate limiter on joins.

+ 0 - 1
changelog.d/13257.misc

@@ -1 +0,0 @@
-Log the stack when waiting for an entire room to be un-partial stated.

+ 0 - 1
changelog.d/13258.misc

@@ -1 +0,0 @@
-Fix spurious warning when fetching state after a missing prev event.

+ 0 - 1
changelog.d/13260.misc

@@ -1 +0,0 @@
-Clean-up tests for notifications.

+ 0 - 1
changelog.d/13261.doc

@@ -1 +0,0 @@
-Move the documentation for how URL previews work to the URL preview module.

+ 0 - 1
changelog.d/13263.bugfix

@@ -1 +0,0 @@
-Fix a bug introduced in Synapse 1.15.0 where adding a user through the Synapse Admin API with a phone number would fail if the "enable_email_notifs" and "email_notifs_for_new_users" options were enabled. Contributed by @thomasweston12.

+ 0 - 1
changelog.d/13266.misc

@@ -1 +0,0 @@
-Do not fail build if complement with workers fails.

+ 0 - 1
changelog.d/13267.misc

@@ -1 +0,0 @@
-Don't pull out state in `compute_event_context` for unconflicted state.

+ 0 - 1
changelog.d/13270.bugfix

@@ -1 +0,0 @@
-Fix a bug introduced in Synapse 1.40 where a user invited to a restricted room would be briefly unable to join.

+ 0 - 1
changelog.d/13271.doc

@@ -1 +0,0 @@
-Add another `contrib` script to help set up worker processes. Contributed by @villepeh.

+ 0 - 1
changelog.d/13274.misc

@@ -1 +0,0 @@
-Don't pull out state in `compute_event_context` for unconflicted state.

+ 0 - 1
changelog.d/13276.feature

@@ -1 +0,0 @@
-Add per-room rate limiting for room joins. For each room, Synapse now monitors the rate of join events in that room, and throttles additional joins if that rate grows too large.

+ 0 - 1
changelog.d/13278.bugfix

@@ -1 +0,0 @@
-Fix long-standing bug where in rare instances Synapse could store the incorrect state for a room after a state resolution.

+ 0 - 1
changelog.d/13279.misc

@@ -1 +0,0 @@
-Reduce the rebuild time for the complement-synapse docker image.

+ 0 - 1
changelog.d/13281.misc

@@ -1 +0,0 @@
-Don't pull out the full state when creating an event.

+ 0 - 1
changelog.d/13284.misc

@@ -1 +0,0 @@
-Update locked version of `frozendict` to 2.3.2, which has a fix for a memory leak.

+ 0 - 1
changelog.d/13285.misc

@@ -1 +0,0 @@
-Upgrade from Poetry 1.1.12 to 1.1.14, to fix bugs when locking packages.

+ 0 - 1
changelog.d/13296.bugfix

@@ -1 +0,0 @@
-Fix a bug introduced in v1.18.0 where the `synapse_pushers` metric would overcount pushers when they are replaced.

+ 0 - 1
changelog.d/13297.misc

@@ -1 +0,0 @@
-Use `HTTPStatus` constants in place of literals in tests.

+ 0 - 1
changelog.d/13299.misc

@@ -1 +0,0 @@
-Improve performance of query `_get_subset_users_in_room_with_profiles`.

+ 0 - 1
changelog.d/13300.misc

@@ -1 +0,0 @@
-Up batch size of `bulk_get_push_rules` and `_get_joined_profiles_from_event_ids`.

+ 0 - 1
changelog.d/13303.misc

@@ -1 +0,0 @@
-Remove unnecessary `json.dumps` from tests.

+ 0 - 1
changelog.d/13307.misc

@@ -1 +0,0 @@
-Don't pull out the full state when creating an event.

+ 0 - 1
changelog.d/13308.misc

@@ -1 +0,0 @@
-Use an asynchronous cache wrapper for the get event cache. Contributed by Nick @ Beeper (@fizzadar).

+ 0 - 1
changelog.d/13310.misc

@@ -1 +0,0 @@
-Reduce memory usage of sending dummy events.

+ 0 - 1
changelog.d/13311.misc

@@ -1 +0,0 @@
-Prevent formatting changes of [#3679](https://github.com/matrix-org/synapse/pull/3679) from appearing in `git blame`.

+ 0 - 1
changelog.d/13314.doc

@@ -1 +0,0 @@
-Add notes when config options were changed. Contributed by @behrmann.

+ 0 - 1
changelog.d/13318.misc

@@ -1 +0,0 @@
-Validate federation destinations and log an error if a destination is invalid.

+ 0 - 1
changelog.d/13320.misc

@@ -1 +0,0 @@
-Fix `FederationClient.get_pdu()` returning events from the cache as `outliers` instead of original events we saw over federation.

+ 0 - 1
changelog.d/13323.misc

@@ -1 +0,0 @@
-Reduce memory usage of state caches.

+ 0 - 1
changelog.d/13326.removal

@@ -1 +0,0 @@
-Stop building `.deb` packages for Ubuntu 21.10 (Impish Indri), which has reached end of life.

+ 0 - 1
changelog.d/13328.misc

@@ -1 +0,0 @@
-Add type hints to `trace` decorator.

+ 1 - 0
changelog.d/13573.misc

@@ -0,0 +1 @@
+Cache user IDs instead of profiles to reduce cache memory usage. Contributed by Nick @ Beeper (@fizzadar).

+ 1 - 0
changelog.d/13588.misc

@@ -0,0 +1 @@
+Instrument `_check_sigs_and_hash_and_fetch` to trace time spent in child concurrent calls for understandable traces in Jaeger.

+ 1 - 0
changelog.d/13591.misc

@@ -0,0 +1 @@
+Improve performance of `@cachedList`.

+ 1 - 0
changelog.d/13592.misc

@@ -0,0 +1 @@
+Minor speed up of fetching large numbers of push rules.

+ 1 - 0
changelog.d/13600.misc

@@ -0,0 +1 @@
+Cache user IDs instead of profiles to reduce cache memory usage. Contributed by Nick @ Beeper (@fizzadar).

+ 1 - 0
changelog.d/13602.doc

@@ -0,0 +1 @@
+Improve the description of the ["chain cover index"](https://matrix-org.github.io/synapse/latest/auth_chain_difference_algorithm.html) used internally by Synapse.

File diff suppressed because it is too large
+ 232 - 181
contrib/grafana/synapse.json


+ 42 - 0
debian/changelog

@@ -1,3 +1,45 @@
+matrix-synapse-py3 (1.66.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.66.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Aug 2022 09:48:55 +0100
+
+matrix-synapse-py3 (1.65.0) stable; urgency=medium
+
+  * New Synapse release 1.65.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Aug 2022 16:51:26 +0100
+
+matrix-synapse-py3 (1.65.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.65.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 11 Aug 2022 11:38:18 +0100
+
+matrix-synapse-py3 (1.65.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.65.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 09 Aug 2022 11:39:29 +0100
+
+matrix-synapse-py3 (1.64.0) stable; urgency=medium
+
+  * New Synapse release 1.64.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Aug 2022 10:32:30 +0100
+
+matrix-synapse-py3 (1.64.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.64.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Fri, 29 Jul 2022 12:22:53 +0100
+
+matrix-synapse-py3 (1.64.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.64.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Jul 2022 12:11:49 +0100
+
 matrix-synapse-py3 (1.63.1) stable; urgency=medium
 
   * New Synapse release 1.63.1.

+ 24 - 3
docker/Dockerfile

@@ -40,7 +40,8 @@ FROM docker.io/python:${PYTHON_VERSION}-slim as requirements
 RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
- apt-get update -qq && apt-get install -yqq git \
+    apt-get update -qq && apt-get install -yqq \
+      build-essential cargo git libffi-dev libssl-dev \
     && rm -rf /var/lib/apt/lists/*
 
 # We install poetry in its own build stage to avoid its dependencies conflicting with
@@ -68,7 +69,18 @@ COPY pyproject.toml poetry.lock /synapse/
 # reason, such as when a git repository is used directly as a dependency.
 ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION
 
-RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}
+# If specified, we won't use the Poetry lockfile.
+# Instead, we'll just install what a regular `pip install` would from PyPI.
+ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
+
+# Export the dependencies, but only if we're actually going to use the Poetry lockfile.
+# Otherwise, just create an empty requirements file so that the Dockerfile can
+# proceed.
+RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
+    /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
+  else \
+    touch /synapse/requirements.txt; \
+  fi
 
 ###
 ### Stage 1: builder
@@ -108,8 +120,17 @@ COPY synapse /synapse/synapse/
 # ... and what we need to `pip install`.
 COPY pyproject.toml README.rst /synapse/
 
+# Repeat of earlier build argument declaration, as this is a new build stage.
+ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
+
 # Install the synapse package itself.
-RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
+# If we have populated requirements.txt, we don't install any dependencies
+# as we should already have those from the previous `pip install` step.
+RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
+    pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
+  else \
+    pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
+  fi
 
 ###
 ### Stage 2: runtime

+ 59 - 35
docker/Dockerfile-workers

@@ -1,38 +1,62 @@
-# Inherit from the official Synapse docker image
-ARG SYNAPSE_VERSION=latest
-FROM matrixdotorg/synapse:$SYNAPSE_VERSION
-
-# Install deps
-RUN \
-   --mount=type=cache,target=/var/cache/apt,sharing=locked \
-   --mount=type=cache,target=/var/lib/apt,sharing=locked \
-  apt-get update -qq && \
-  DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
-     redis-server nginx-light
-
-# Install supervisord with pip instead of apt, to avoid installing a second
-# copy of python.
-RUN --mount=type=cache,target=/root/.cache/pip \
-    pip install supervisor~=4.2
-
-# Disable the default nginx sites
-RUN rm /etc/nginx/sites-enabled/default
+# syntax=docker/dockerfile:1
 
-# Copy Synapse worker, nginx and supervisord configuration template files
-COPY ./docker/conf-workers/* /conf/
-
-# Copy a script to prefix log lines with the supervisor program name
-COPY ./docker/prefix-log /usr/local/bin/
-
-# Expose nginx listener port
-EXPOSE 8080/tcp
+ARG SYNAPSE_VERSION=latest
 
-# A script to read environment variables and create the necessary
-# files to run the desired worker configuration. Will start supervisord.
-COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
-ENTRYPOINT ["/configure_workers_and_start.py"]
+# first of all, we create a base image with an nginx which we can copy into the
+# target image. For repeated rebuilds, this is much faster than apt installing
+# each time.
+
+FROM debian:bullseye-slim AS deps_base
+    RUN \
+       --mount=type=cache,target=/var/cache/apt,sharing=locked \
+       --mount=type=cache,target=/var/lib/apt,sharing=locked \
+      apt-get update -qq && \
+      DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
+          redis-server nginx-light
+
+# Similarly, a base to copy the redis server from.
+#
+# The redis docker image has fewer dynamic libraries than the debian package,
+# which makes it much easier to copy (but we need to make sure we use an image
+# based on the same debian version as the synapse image, to make sure we get
+# the expected version of libc).
+FROM redis:6-bullseye AS redis_base
+
+# now build the final image, based on the regular Synapse docker image
+FROM matrixdotorg/synapse:$SYNAPSE_VERSION
 
-# Replace the healthcheck with one which checks *all* the workers. The script
-# is generated by configure_workers_and_start.py.
-HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
-    CMD /bin/sh /healthcheck.sh
+    # Install supervisord with pip instead of apt, to avoid installing a second
+    # copy of python.
+    RUN --mount=type=cache,target=/root/.cache/pip \
+        pip install supervisor~=4.2
+    RUN mkdir -p /etc/supervisor/conf.d
+
+    # Copy over redis and nginx
+    COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
+
+    COPY --from=deps_base /usr/sbin/nginx /usr/sbin
+    COPY --from=deps_base /usr/share/nginx /usr/share/nginx
+    COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
+    COPY --from=deps_base /etc/nginx /etc/nginx
+    RUN rm /etc/nginx/sites-enabled/default
+    RUN mkdir /var/log/nginx /var/lib/nginx
+    RUN chown www-data /var/log/nginx /var/lib/nginx
+
+    # Copy Synapse worker, nginx and supervisord configuration template files
+    COPY ./docker/conf-workers/* /conf/
+
+    # Copy a script to prefix log lines with the supervisor program name
+    COPY ./docker/prefix-log /usr/local/bin/
+
+    # Expose nginx listener port
+    EXPOSE 8080/tcp
+
+    # A script to read environment variables and create the necessary
+    # files to run the desired worker configuration. Will start supervisord.
+    COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
+    ENTRYPOINT ["/configure_workers_and_start.py"]
+
+    # Replace the healthcheck with one which checks *all* the workers. The script
+    # is generated by configure_workers_and_start.py.
+    HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
+        CMD /bin/sh /healthcheck.sh

+ 4 - 0
docker/README-testing.md

@@ -22,6 +22,10 @@ Consult the [contributing guide][guideComplementSh] for instructions on how to u
 Under some circumstances, you may wish to build the images manually.
 The instructions below will lead you to doing that.
 
+Note that these images can only be built using [BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/),
+therefore BuildKit needs to be enabled when calling `docker build`. This can be done by
+setting `DOCKER_BUILDKIT=1` in your environment.
+
 Start by building the base Synapse docker image. If you wish to run tests with the latest
 release of Synapse, instead of your current checkout, you can skip this step. From the
 root of the repository:

+ 1 - 1
docker/README.md

@@ -191,7 +191,7 @@ If you need to build the image from a Synapse checkout, use the following `docke
  build` command from the repo's root:
 
 ```
-docker build -t matrixdotorg/synapse -f docker/Dockerfile .
+DOCKER_BUILDKIT=1 docker build -t matrixdotorg/synapse -f docker/Dockerfile .
 ```
 
 You can choose to build a different docker image by changing the value of the `-f` flag to

+ 1 - 0
docker/complement/Dockerfile

@@ -1,3 +1,4 @@
+# syntax=docker/dockerfile:1
 # This dockerfile builds on top of 'docker/Dockerfile-workers' in matrix-org/synapse
 # by including a built-in postgres instance, as well as setting up the homeserver so
 # that it is ready for testing via Complement.

+ 1 - 1
docker/conf-workers/supervisord.conf.j2

@@ -19,7 +19,7 @@ username=www-data
 autorestart=true
 
 [program:redis]
-command=/usr/local/bin/prefix-log /usr/bin/redis-server /etc/redis/redis.conf --daemonize no
+command=/usr/local/bin/prefix-log /usr/local/bin/redis-server
 priority=1
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0

+ 19 - 2
docs/admin_api/register_api.md

@@ -46,7 +46,24 @@ As an example:
 The MAC is the hex digest output of the HMAC-SHA1 algorithm, with the key being
 the shared secret and the content being the nonce, user, password, either the
 string "admin" or "notadmin", and optionally the user_type
-each separated by NULs. For an example of generation in Python:
+each separated by NULs.
+
+Here is an easy way to generate the HMAC digest if you have Bash and OpenSSL:
+
+```bash
+# Update these values and then paste this code block into a bash terminal
+nonce='thisisanonce'
+username='pepper_roni'
+password='pizza'
+admin='admin'
+secret='shared_secret'
+
+printf '%s\0%s\0%s\0%s' "$nonce" "$username" "$password" "$admin" |
+  openssl sha1 -hmac "$secret" |
+  awk '{print $2}'
+```
+
+For an example of generation in Python:
 
 ```python
 import hmac, hashlib
@@ -70,4 +87,4 @@ def generate_mac(nonce, user, password, admin=False, user_type=None):
         mac.update(user_type.encode('utf8'))
 
     return mac.hexdigest()
-```
+```

+ 6 - 1
docs/admin_api/rooms.md

@@ -302,6 +302,8 @@ The following fields are possible in the JSON response body:
 * `state_events` - Total number of state_events of a room. Complexity of the room.
 * `room_type` - The type of the room taken from the room's creation event; for example "m.space" if the room is a space.
   If the room does not define a type, the value will be `null`.
+* `forgotten` - Whether all local users have
+  [forgotten](https://spec.matrix.org/latest/client-server-api/#leaving-rooms) the room.
 
 The API is:
 
@@ -330,10 +332,13 @@ A response body like the following is returned:
   "guest_access": null,
   "history_visibility": "shared",
   "state_events": 93534,
-  "room_type": "m.space"
+  "room_type": "m.space",
+  "forgotten": false
 }
 ```
 
+_Changed in Synapse 1.66:_ Added the `forgotten` key to the response body.
+
 # Room Members API
 
 The Room Members admin API allows server admins to get a list of all members of a room.

+ 7 - 0
docs/admin_api/user_admin_api.md

@@ -753,6 +753,7 @@ A response body like the following is returned:
       "device_id": "QBUAZIFURK",
       "display_name": "android",
       "last_seen_ip": "1.2.3.4",
+      "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
       "last_seen_ts": 1474491775024,
       "user_id": "<user_id>"
     },
@@ -760,6 +761,7 @@ A response body like the following is returned:
       "device_id": "AUIECTSRND",
       "display_name": "ios",
       "last_seen_ip": "1.2.3.5",
+      "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
       "last_seen_ts": 1474491775025,
       "user_id": "<user_id>"
     }
@@ -786,6 +788,8 @@ The following fields are returned in the JSON response body:
     Absent if no name has been set.
   - `last_seen_ip` - The IP address where this device was last seen.
     (May be a few minutes out of date, for efficiency reasons).
+  - `last_seen_user_agent` - The user agent of the device when it was last seen.
+    (May be a few minutes out of date, for efficiency reasons).
   - `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
    device was last seen. (May be a few minutes out of date, for efficiency reasons).
  - `user_id` - Owner of device.
@@ -837,6 +841,7 @@ A response body like the following is returned:
   "device_id": "<device_id>",
   "display_name": "android",
   "last_seen_ip": "1.2.3.4",
+  "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
   "last_seen_ts": 1474491775024,
   "user_id": "<user_id>"
 }
@@ -858,6 +863,8 @@ The following fields are returned in the JSON response body:
   Absent if no name has been set.
 - `last_seen_ip` - The IP address where this device was last seen.
   (May be a few minutes out of date, for efficiency reasons).
+- `last_seen_user_agent` - The user agent of the device when it was last seen.
+  (May be a few minutes out of date, for efficiency reasons).
 - `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
   device was last seen. (May be a few minutes out of date, for efficiency reasons).
 - `user_id` - Owner of device.

+ 42 - 9
docs/auth_chain_difference_algorithm.md

@@ -34,13 +34,45 @@ the process of indexing it).
 ## Chain Cover Index
 
 Synapse computes auth chain differences by pre-computing a "chain cover" index
-for the auth chain in a room, allowing efficient reachability queries like "is
-event A in the auth chain of event B". This is done by assigning every event a
-*chain ID* and *sequence number* (e.g. `(5,3)`), and having a map of *links*
-between chains (e.g. `(5,3) -> (2,4)`) such that A is reachable by B (i.e. `A`
-is in the auth chain of `B`) if and only if either:
-
-1. A and B have the same chain ID and `A`'s sequence number is less than `B`'s
+for the auth chain in a room, allowing us to efficiently make reachability queries
+like "is event `A` in the auth chain of event `B`?". We could do this with an index
+that tracks all pairs `(A, B)` such that `A` is in the auth chain of `B`. However, this
+would be prohibitively large, scaling poorly as the room accumulates more state
+events.
+
+Instead, we break down the graph into *chains*. A chain is a subset of a DAG
+with the following property: for any pair of events `E` and `F` in the chain,
+the chain contains a path `E -> F` or a path `F -> E`. This forces a chain to be
+linear (without forks), e.g. `E -> F -> G -> ... -> H`. Each event in the chain
+is given a *sequence number* local to that chain. The oldest event `E` in the
+chain has sequence number 1. If `E` has a child `F` in the chain, then `F` has
+sequence number 2. If `E` has a grandchild `G` in the chain, then `G` has
+sequence number 3; and so on.
+
+Synapse ensures that each persisted event belongs to exactly one chain, and
+tracks how the chains are connected to one another. This allows us to
+efficiently answer reachability queries. Doing so uses less storage than
+tracking reachability on an event-by-event basis, particularly when we have
+fewer and longer chains. See
+
+> Jagadish, H. (1990). [A compression technique to materialize transitive closure](https://doi.org/10.1145/99935.99944).
+> *ACM Transactions on Database Systems (TODS)*, 15*(4)*, 558-598.
+
+for the original idea or
+
+> Y. Chen, Y. Chen, [An efficient algorithm for answering graph
+> reachability queries](https://doi.org/10.1109/ICDE.2008.4497498),
+> in: 2008 IEEE 24th International Conference on Data Engineering, April 2008,
+> pp. 893–902. (PDF available via [Google Scholar](https://scholar.google.com/scholar?q=Y.%20Chen,%20Y.%20Chen,%20An%20efficient%20algorithm%20for%20answering%20graph%20reachability%20queries,%20in:%202008%20IEEE%2024th%20International%20Conference%20on%20Data%20Engineering,%20April%202008,%20pp.%20893902.).)
+
+for a more modern take.
+
+In practical terms, the chain cover assigns every event a
+*chain ID* and *sequence number* (e.g. `(5,3)`), and maintains a map of *links*
+between events in chains (e.g. `(5,3) -> (2,4)`) such that `A` is reachable by `B`
+(i.e. `A` is in the auth chain of `B`) if and only if either:
+
+1. `A` and `B` have the same chain ID and `A`'s sequence number is less than `B`'s
    sequence number; or
 2. there is a link `L` between `B`'s chain ID and `A`'s chain ID such that
    `L.start_seq_no` <= `B.seq_no` and `A.seq_no` <= `L.end_seq_no`.
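As a minimal sketch (not Synapse's actual code), the two rules above can be expressed as follows, assuming links are stored per origin chain as `(start_seq_no, target_chain_id, end_seq_no)` tuples:

```python
# Illustrative sketch of the reachability test described above; the data
# layout (a dict of per-chain link tuples) is assumed for the example only.
from typing import Dict, List, Tuple

Position = Tuple[int, int]   # (chain_id, sequence_number)
Link = Tuple[int, int, int]  # (start_seq_no, target_chain_id, end_seq_no)


def in_auth_chain(a: Position, b: Position, links: Dict[int, List[Link]]) -> bool:
    """Return True if event A is in the auth chain of event B."""
    a_chain, a_seq = a
    b_chain, b_seq = b

    # Rule 1: same chain, and A comes earlier in it than B.
    if a_chain == b_chain:
        return a_seq < b_seq

    # Rule 2: a link from B's chain to A's chain that B "passes through".
    for start_seq, target_chain, end_seq in links.get(b_chain, []):
        if target_chain == a_chain and start_seq <= b_seq and a_seq <= end_seq:
            return True
    return False


# Using the link (5,3) -> (2,4) from the text:
links = {5: [(3, 2, 4)]}
assert in_auth_chain((2, 4), (5, 3), links)      # (2,4) is in the auth chain of (5,3)
assert not in_auth_chain((2, 4), (5, 2), links)  # ... but not of (5,2)
```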
@@ -49,8 +81,9 @@ There are actually two potential implementations, one where we store links from
 each chain to every other reachable chain (the transitive closure of the links
 graph), and one where we remove redundant links (the transitive reduction of the
 links graph) e.g. if we have chains `C3 -> C2 -> C1` then the link `C3 -> C1`
-would not be stored. Synapse uses the former implementations so that it doesn't
-need to recurse to test reachability between chains.
+would not be stored. Synapse uses the former implementation so that it doesn't
+need to recurse to test reachability between chains. This trades off extra storage
+in order to save CPU cycles and DB queries.
 
 ### Example
 

+ 2 - 1
docs/message_retention_policies.md

@@ -8,7 +8,8 @@ and allow server and room admins to configure how long messages should
 be kept in a homeserver's database before being purged from it.
 **Please note that, as this feature isn't part of the Matrix
 specification yet, this implementation is to be considered as
-experimental.** 
+experimental. There are known bugs which may cause database corruption.
+Proceed with caution.** 
 
 A message retention policy is mainly defined by its `max_lifetime`
 parameter, which defines how long a message can be kept around after

+ 2 - 2
docs/modules/password_auth_provider_callbacks.md

@@ -263,7 +263,7 @@ class MyAuthProvider:
             return None
 
         if self.credentials.get(username) == login_dict.get("my_field"):
-            return self.api.get_qualified_user_id(username)
+            return (self.api.get_qualified_user_id(username), None)
 
     async def check_pass(
         self,
@@ -280,5 +280,5 @@ class MyAuthProvider:
             return None
 
         if self.credentials.get(username) == login_dict.get("password"):
-            return self.api.get_qualified_user_id(username)
+            return (self.api.get_qualified_user_id(username), None)
 ```

+ 3 - 1
docs/openid.md

@@ -174,7 +174,9 @@ oidc_providers:
 
 1. Create a regular web application for Synapse
 2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback`
-3. Add a rule to add the `preferred_username` claim.
+3. Add a rule with any name to add the `preferred_username` claim.
+   (See https://auth0.com/docs/customize/rules/create-rules for more information on how to create rules.)
+
    <details>
     <summary>Code sample</summary>
 

+ 11 - 42
docs/reverse_proxy.md

@@ -79,63 +79,32 @@ server {
 }
 ```
 
-### Caddy v1
-
-```
-matrix.example.com {
-  proxy /_matrix http://localhost:8008 {
-    transparent
-  }
-
-  proxy /_synapse/client http://localhost:8008 {
-    transparent
-  }
-}
-
-example.com:8448 {
-  proxy / http://localhost:8008 {
-    transparent
-  }
-}
-```
-
 ### Caddy v2
 
 ```
 matrix.example.com {
-  reverse_proxy /_matrix/* http://localhost:8008
-  reverse_proxy /_synapse/client/* http://localhost:8008
+  reverse_proxy /_matrix/* localhost:8008
+  reverse_proxy /_synapse/client/* localhost:8008
 }
 
 example.com:8448 {
-  reverse_proxy http://localhost:8008
+  reverse_proxy localhost:8008
 }
 ```
+
 [Delegation](delegate.md) example:
-```
-(matrix-well-known-header) {
-    # Headers
-    header Access-Control-Allow-Origin "*"
-    header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS"
-    header Access-Control-Allow-Headers "Origin, X-Requested-With, Content-Type, Accept, Authorization"
-    header Content-Type "application/json"
-}
 
+```
 example.com {
-    handle /.well-known/matrix/server {
-        import matrix-well-known-header
-        respond `{"m.server":"matrix.example.com:443"}`
-    }
-
-    handle /.well-known/matrix/client {
-        import matrix-well-known-header
-        respond `{"m.homeserver":{"base_url":"https://matrix.example.com"},"m.identity_server":{"base_url":"https://identity.example.com"}}`
-    }
+	header /.well-known/matrix/* Content-Type application/json
+	header /.well-known/matrix/* Access-Control-Allow-Origin *
+	respond /.well-known/matrix/server `{"m.server": "matrix.example.com:443"}`
+	respond /.well-known/matrix/client `{"m.homeserver":{"base_url":"https://matrix.example.com"},"m.identity_server":{"base_url":"https://identity.example.com"}}`
 }
 
 matrix.example.com {
-    reverse_proxy /_matrix/* http://localhost:8008
-    reverse_proxy /_synapse/client/* http://localhost:8008
+    reverse_proxy /_matrix/* localhost:8008
+    reverse_proxy /_synapse/client/* localhost:8008
 }
 ```
 

+ 8 - 6
docs/sso_mapping_providers.md

@@ -22,7 +22,7 @@ choose their own username.
 In the first case - where users are automatically allocated a Matrix ID - it is
 the responsibility of the mapping provider to normalise the SSO attributes and
 map them to a valid Matrix ID. The [specification for Matrix
-IDs](https://matrix.org/docs/spec/appendices#user-identifiers) has some
+IDs](https://spec.matrix.org/latest/appendices/#user-identifiers) has some
 information about what is considered valid.
 
 If the mapping provider does not assign a Matrix ID, then Synapse will
@@ -37,9 +37,10 @@ as Synapse). The Synapse config is then modified to point to the mapping provide
 ## OpenID Mapping Providers
 
 The OpenID mapping provider can be customized by editing the
-`oidc_config.user_mapping_provider.module` config option.
+[`oidc_providers.user_mapping_provider.module`](usage/configuration/config_documentation.md#oidc_providers)
+config option.
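
For orientation, a hypothetical `homeserver.yaml` fragment wiring in a custom module might look roughly like this (the module path and its option are placeholders, not real Synapse names):

```yaml
oidc_providers:
  - idp_id: example
    idp_name: "Example IdP"
    issuer: "https://idp.example.com/"
    client_id: "synapse"
    client_secret: "verysecret"
    user_mapping_provider:
      # Placeholder custom module; it must be importable by Synapse.
      module: my_package.MyOidcMappingProvider
      config:
        # Arbitrary options, passed through to the module's config parser.
        my_option: "some value"
```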
 
-`oidc_config.user_mapping_provider.config` allows you to provide custom
+`oidc_providers.user_mapping_provider.config` allows you to provide custom
 configuration options to the module. Check with the module's documentation for
 what options it provides (if any). The options listed by default are for the
 user mapping provider built in to Synapse. If using a custom module, you should
@@ -58,7 +59,7 @@ A custom mapping provider must specify the following methods:
     - This method should have the `@staticmethod` decoration.
     - Arguments:
         - `config` - A `dict` representing the parsed content of the
-          `oidc_config.user_mapping_provider.config` homeserver config option.
+          `oidc_providers.user_mapping_provider.config` homeserver config option.
            Runs on homeserver startup. Providers should extract and validate
            any option values they need here.
     - Whatever is returned will be passed back to the user mapping provider module's
@@ -102,7 +103,7 @@ A custom mapping provider must specify the following methods:
       will be returned as part of the response during a successful login.
 
       Note that care should be taken to not overwrite any of the parameters
-      usually returned as part of the [login response](https://matrix.org/docs/spec/client_server/latest#post-matrix-client-r0-login).
+      usually returned as part of the [login response](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3login).
 
 ### Default OpenID Mapping Provider
 
@@ -113,7 +114,8 @@ specified in the config. It is located at
 ## SAML Mapping Providers
 
 The SAML mapping provider can be customized by editing the
-`saml2_config.user_mapping_provider.module` config option.
+[`saml2_config.user_mapping_provider.module`](usage/configuration/config_documentation.md#saml2_config)
+config option.
 
 `saml2_config.user_mapping_provider.config` allows you to provide custom
 configuration options to the module. Check with the module's documentation for

+ 1 - 1
docs/templates.md

@@ -9,7 +9,7 @@ in, allowing them to specify custom templates:
 
 ```yaml
 templates:
-  custom_templates_directory: /path/to/custom/templates/
+  custom_template_directory: /path/to/custom/templates/
 ```
 
 If this setting is not set, or the files named below are not found within the directory,

+ 29 - 4
docs/upgrade.md

@@ -89,21 +89,37 @@ process, for example:
     dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
     ```
 
-# Upgrading to v1.64.0
+# Upgrading to v1.66.0
 
 ## Delegation of email validation no longer supported
 
 As of this version, Synapse no longer allows the tasks of verifying email address
 ownership, and password reset confirmation, to be delegated to an identity server.
+This removal was previously planned for Synapse 1.64.0, but was
+[delayed](https://github.com/matrix-org/synapse/issues/13421) until now to give
+homeserver administrators more notice of the change.
 
 To continue to allow users to add email addresses to their homeserver accounts,
-and perform password resets, make sure that Synapse is configured with a
-working email server in the `email` configuration section (including, at a
-minimum, a `notif_from` setting.)
+and perform password resets, make sure that Synapse is configured with a working
+email server in the [`email` configuration
+section](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#email)
+(including, at a minimum, a `notif_from` setting.)
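
A minimal sketch of such a configuration is shown below; the hostname and credentials are placeholders, and the linked documentation lists the full set of options:

```yaml
email:
  smtp_host: mail.example.com
  smtp_port: 587
  smtp_user: "synapse"
  smtp_pass: "changeme"
  require_transport_security: true
  notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
```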
 
 Specifying an `email` setting under `account_threepid_delegates` will now cause
 an error at startup.
 
+# Upgrading to v1.64.0
+
+## Deprecation of the ability to delegate e-mail verification to identity servers
+
+Synapse v1.66.0 will remove the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server.
+
+If you require your homeserver to verify e-mail addresses or to support password resets via e-mail, please configure your homeserver with SMTP access so that it can send e-mails on its own behalf.
+[Consult the configuration documentation for more information.](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#email)
+
+The option that will be removed is `account_threepid_delegates.email`.
+
+
 ## Changes to the event replication streams
 
 Synapse now includes a flag indicating if an event is an outlier when
@@ -114,6 +130,15 @@ vice versa.
 Once all workers are upgraded to v1.64 (or downgraded to v1.63), event
 replication will resume as normal.
 
+## frozendict release
+
+[frozendict 2.3.3](https://github.com/Marco-Sulla/python-frozendict/releases/tag/v2.3.3)
+has recently been released, which fixes a memory leak that occurs during `/sync`
+requests. We advise server administrators who installed Synapse via pip to upgrade
+frozendict with `pip install --upgrade frozendict`. The Docker image
+`matrixdotorg/synapse` and the Debian packages from `packages.matrix.org` already
+include the updated library.
+
 # Upgrading to v1.62.0
 
 ## New signatures for spam checker callbacks

+ 2 - 1
docs/usage/administration/admin_api/README.md

@@ -5,8 +5,9 @@
 Many of the API calls in the admin api will require an `access_token` for a
 server admin. (Note that a server admin is distinct from a room admin.)
 
-A user can be marked as a server admin by updating the database directly, e.g.:
+An existing user can be marked as a server admin by updating the database directly.
 
+Check your [database settings](config_documentation.md#database) in the configuration file, connect to the correct database using either `psql [database name]` (if using PostgreSQL) or `sqlite3 path/to/your/database.db` (if using SQLite) and elevate the user `@foo:bar.com` to administrator.
 ```sql
 UPDATE users SET admin = 1 WHERE name = '@foo:bar.com';
 ```

+ 3 - 3
docs/usage/administration/admin_api/registration_tokens.md

@@ -2,11 +2,11 @@
 
 This API allows you to manage tokens which can be used to authenticate
 registration requests, as proposed in
-[MSC3231](https://github.com/matrix-org/matrix-doc/blob/main/proposals/3231-token-authenticated-registration.md).
+[MSC3231](https://github.com/matrix-org/matrix-doc/blob/main/proposals/3231-token-authenticated-registration.md)
+and stabilised in version 1.2 of the Matrix specification.
 To use it, you will need to enable the `registration_requires_token` config
 option, and authenticate by providing an `access_token` for a server admin:
-see [Admin API](../../usage/administration/admin_api).
-Note that this API is still experimental; not all clients may support it yet.
+see [Admin API](../admin_api).
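
For reference, enabling the option amounts to the following line in `homeserver.yaml` (a minimal sketch; the tokens themselves are then managed via this API):

```yaml
registration_requires_token: true
```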
 
 
 ## Registration token objects

+ 86 - 4
docs/usage/administration/admin_faq.md

@@ -2,9 +2,9 @@
 
 How do I become a server admin?
 ---
-If your server already has an admin account you should use the user admin API to promote other accounts to become admins. See [User Admin API](../../admin_api/user_admin_api.md#Change-whether-a-user-is-a-server-administrator-or-not)
+If your server already has an admin account you should use the [User Admin API](../../admin_api/user_admin_api.md#Change-whether-a-user-is-a-server-administrator-or-not) to promote other accounts to become admins.
 
-If you don't have any admin accounts yet you won't be able to use the admin API so you'll have to edit the database manually. Manually editing the database is generally not recommended so once you have an admin account, use the admin APIs to make further changes.
+If you don't have any admin accounts yet you won't be able to use the admin API, so you'll have to edit the database manually. Manually editing the database is generally not recommended, so once you have an admin account, use the admin APIs to make further changes.
 
 ```sql
 UPDATE users SET admin = 1 WHERE name = '@foo:bar.com';
@@ -32,9 +32,11 @@ What users are registered on my server?
 SELECT NAME from users;
 ```
 
-Manually resetting passwords:
+Manually resetting passwords
 ---
-See https://github.com/matrix-org/synapse/blob/master/README.rst#password-reset
+Users can reset their password through their client. Alternatively, a server admin
+can reset a user's password using the [admin API](../../admin_api/user_admin_api.md#reset-password).
+
 
 I have a problem with my server. Can I just delete my database and start again?
 ---
@@ -101,3 +103,83 @@ LIMIT 10;
 
 You can also use the [List Room API](../../admin_api/rooms.md#list-room-api)
 and `order_by` `state_events`.
+
+
+People can't accept room invitations from me
+---
+
+The typical failure mode here is that you send an invitation to someone
+to join a room or direct chat, but when they go to accept it, they get an
+error (typically along the lines of "Invalid signature"). They might see
+something like the following in their logs:
+
+    2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
+
+This is normally caused by a misconfiguration in your reverse-proxy. See [the reverse proxy docs](../../reverse_proxy.md) and double-check that your settings are correct.
+
+
+Help!! Synapse is slow and eats all my RAM/CPU!
+-----------------------------------------------
+
+First, ensure you are running the latest version of Synapse, using Python 3
+with a [PostgreSQL database](../../postgres.md).
+
+Synapse's architecture is quite RAM hungry currently - we deliberately
+cache a lot of recent room data and metadata in RAM in order to speed up
+common requests. We'll improve this in the future, but for now the easiest
+way to reduce the RAM usage (at the risk of slowing things down)
+is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
+variable. The default is 0.5, which can be decreased to reduce RAM usage
+in memory constrained environments, or increased if performance starts to
+degrade.
+
+However, degraded performance due to a low cache factor, common on
+machines with slow disks, often leads to explosions in memory use due to
+backlogged requests. In this case, reducing the cache factor will make
+things worse. Instead, try increasing it drastically. 2.0 is a good
+starting value.
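
A minimal sketch of setting it, assuming the Debian packaging's environment file mentioned below (adjust for other deployments):

```
# In /etc/default/matrix-synapse, or exported in the environment that
# launches Synapse:
SYNAPSE_CACHE_FACTOR=2.0
```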
+
+Using [libjemalloc](https://jemalloc.net) can also yield a significant
+improvement in overall memory use, and especially in terms of giving back
+RAM to the OS. To use it, the library must simply be put in the
+LD_PRELOAD environment variable when launching Synapse. On Debian, this
+can be done by installing the `libjemalloc1` package and adding this
+line to `/etc/default/matrix-synapse`:
+
+    LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
+
+This made a significant difference on Python 2.7 - it's unclear how
+much of an improvement it provides on Python 3.x.
+
+If you're encountering high CPU use by the Synapse process itself, you
+may be affected by a bug with presence tracking that leads to a
+massive excess of outgoing federation requests (see [discussion](https://github.com/matrix-org/synapse/issues/3971)). If metrics
+indicate that your server is also issuing far more outgoing federation
+requests than can be accounted for by your users' activity, this is a
+likely cause. The misbehavior can be worked around by disabling presence
+in the Synapse config file: [see here](../configuration/config_documentation.md#presence).
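
For reference, the workaround amounts to the following in `homeserver.yaml`:

```yaml
presence:
  enabled: false
```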
+
+
+Running out of File Handles
+---------------------------
+
+If Synapse runs out of file handles, it typically fails badly - live-locking
+at 100% CPU, and/or failing to accept new TCP connections (blocking the
+connecting client).  Matrix currently can legitimately use a lot of file handles,
+thanks to busy rooms like `#matrix:matrix.org` containing hundreds of participating
+servers.  The first time a server talks in a room it will try to connect
+simultaneously to all participating servers, which could exhaust the available
+file descriptors between DNS queries & HTTPS sockets, especially if DNS is slow
+to respond. (We need to improve the routing algorithm used to be better than
+full mesh, but as of March 2019 this hasn't happened yet).
+
+If you hit this failure mode, we recommend increasing the maximum number of
+open file handles to be at least 4096 (assuming a default of 1024 or 256).
+This is typically done by editing ``/etc/security/limits.conf``
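
As a sketch, the added entries might look like this; the `matrix-synapse` user name is an assumption based on the Debian packages, so use whichever account runs Synapse:

```
matrix-synapse  soft  nofile  4096
matrix-synapse  hard  nofile  4096
```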
+
+Separately, Synapse may leak file handles if inbound HTTP requests get stuck
+during processing - e.g. blocked behind a lock or talking to a remote server etc.
+This is best diagnosed by matching up the 'Received request' and 'Processed request'
+log lines and looking for any 'Processed request' lines which take more than
+a few seconds to execute. Please let us know at [`#synapse:matrix.org`](https://matrix.to/#/#synapse:matrix.org) if
+you see this failure mode so we can help debug it, however.

File diff suppressed because it is too large
+ 152 - 174
docs/usage/configuration/config_documentation.md


+ 1 - 4
mypy.ini

@@ -1,6 +1,6 @@
 [mypy]
 namespace_packages = True
-plugins = mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
+plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
 follow_imports = normal
 check_untyped_defs = True
 show_error_codes = True
@@ -84,9 +84,6 @@ disallow_untyped_defs = False
 [mypy-synapse.http.matrixfederationclient]
 disallow_untyped_defs = False
 
-[mypy-synapse.logging.opentracing]
-disallow_untyped_defs = False
-
 [mypy-synapse.metrics._reactor_metrics]
 disallow_untyped_defs = False
 # This module imports select.epoll. That exists on Linux, but doesn't on macOS.

+ 83 - 31
poetry.lock

@@ -177,7 +177,7 @@ optional = false
 python-versions = "*"
 
 [package.extras]
-test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
+test = ["hypothesis (==3.55.3)", "flake8 (==3.7.8)"]
 
 [[package]]
 name = "constantly"
@@ -290,7 +290,7 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
 
 [[package]]
 name = "frozendict"
-version = "2.3.2"
+version = "2.3.3"
 description = "A simple immutable dictionary"
 category = "main"
 optional = false
@@ -435,8 +435,8 @@ optional = false
 python-versions = ">=3.6"
 
 [package.extras]
-test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio", "async-timeout"]
-trio = ["trio", "async-generator"]
+trio = ["async-generator", "trio"]
+test = ["async-timeout", "trio", "testpath", "pytest-asyncio", "pytest-trio", "pytest"]
 
 [[package]]
 name = "jinja2"
@@ -535,12 +535,12 @@ attrs = "*"
 importlib-metadata = {version = ">=1.4", markers = "python_version < \"3.8\""}
 
 [package.extras]
-dev = ["tox", "twisted", "aiounittest", "mypy (==0.910)", "black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "build (==0.8.0)", "twine (==4.0.1)"]
-test = ["tox", "twisted", "aiounittest"]
+test = ["aiounittest", "twisted", "tox"]
+dev = ["twine (==4.0.1)", "build (==0.8.0)", "isort (==5.9.3)", "flake8 (==4.0.1)", "black (==22.3.0)", "mypy (==0.910)", "aiounittest", "twisted", "tox"]
 
 [[package]]
 name = "matrix-synapse-ldap3"
-version = "0.2.1"
+version = "0.2.2"
 description = "An LDAP3 auth provider for Synapse"
 category = "main"
 optional = true
@@ -552,7 +552,7 @@ service-identity = "*"
 Twisted = ">=15.1.0"
 
 [package.extras]
-dev = ["matrix-synapse", "tox", "ldaptor", "mypy (==0.910)", "types-setuptools", "black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)"]
+dev = ["isort (==5.9.3)", "flake8 (==4.0.1)", "black (==22.3.0)", "types-setuptools", "mypy (==0.910)", "ldaptor", "tox", "matrix-synapse"]
 
 [[package]]
 name = "mccabe"
@@ -778,6 +778,21 @@ category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 
+[[package]]
+name = "pydantic"
+version = "1.9.1"
+description = "Data validation and settings management using python type hints"
+category = "main"
+optional = false
+python-versions = ">=3.6.1"
+
+[package.dependencies]
+typing-extensions = ">=3.7.4.3"
+
+[package.extras]
+dotenv = ["python-dotenv (>=0.10.4)"]
+email = ["email-validator (>=1.0.3)"]
+
 [[package]]
 name = "pyflakes"
 version = "2.4.0"
@@ -820,10 +835,10 @@ optional = false
 python-versions = ">=3.6"
 
 [package.extras]
+tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
+docs = ["zope.interface", "sphinx-rtd-theme", "sphinx"]
+dev = ["pre-commit", "mypy", "coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)", "cryptography (>=3.3.1)", "zope.interface", "sphinx-rtd-theme", "sphinx"]
 crypto = ["cryptography (>=3.3.1)"]
-dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"]
-docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
-tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"]
 
 [[package]]
 name = "pymacaroons"
@@ -1563,7 +1578,7 @@ url_preview = ["lxml"]
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.7.1"
-content-hash = "c24bbcee7e86dbbe7cdbf49f91a25b310bf21095452641e7440129f59b077f78"
+content-hash = "7de518bf27967b3547eab8574342cfb67f87d6b47b4145c13de11112141dbf2d"
 
 [metadata.files]
 attrs = [
@@ -1753,23 +1768,23 @@ flake8-comprehensions = [
     {file = "flake8_comprehensions-3.8.0-py3-none-any.whl", hash = "sha256:9406314803abe1193c064544ab14fdc43c58424c0882f6ff8a581eb73fc9bb58"},
 ]
 frozendict = [
-    {file = "frozendict-2.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fb171d1e84d17335365877e19d17440373b47ca74a73c06f65ac0b16d01e87f"},
-    {file = "frozendict-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a3640e9d7533d164160b758351aa49d9e85bbe0bd76d219d4021e90ffa6a52"},
-    {file = "frozendict-2.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:87cfd00fafbc147d8cd2590d1109b7db8ac8d7d5bdaa708ba46caee132b55d4d"},
-    {file = "frozendict-2.3.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fb09761e093cfabb2f179dbfdb2521e1ec5701df714d1eb5c51fa7849027be19"},
-    {file = "frozendict-2.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82176dc7adf01cf8f0193e909401939415a230a1853f4a672ec1629a06ceae18"},
-    {file = "frozendict-2.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:c1c70826aa4a50fa283fe161834ac4a3ac7c753902c980bb8b595b0998a38ddb"},
-    {file = "frozendict-2.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1db5035ddbed995badd1a62c4102b5e207b5aeb24472df2c60aba79639d7996b"},
-    {file = "frozendict-2.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4246fc4cb1413645ba4d3513939b90d979a5bae724be605a10b2b26ee12f839c"},
-    {file = "frozendict-2.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:680cd42fb0a255da1ce45678ccbd7f69da750d5243809524ebe8f45b2eda6e6b"},
-    {file = "frozendict-2.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a7f3a181d6722c92a9fab12d0c5c2b006a18ca5666098531f316d1e1c8984e3"},
-    {file = "frozendict-2.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1cb866eabb3c1384a7fe88e1e1033e2b6623073589012ab637c552bf03f6364"},
-    {file = "frozendict-2.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:952c5e5e664578c5c2ce8489ee0ab6a1855da02b58ef593ee728fc10d672641a"},
-    {file = "frozendict-2.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:608b77904cd0117cd816df605a80d0043a5326ee62529327d2136c792165a823"},
-    {file = "frozendict-2.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0eed41fd326f0bcc779837d8d9e1374da1bc9857fe3b9f2910195bbd5fff3aeb"},
-    {file = "frozendict-2.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:bde28db6b5868dd3c45b3555f9d1dc5a1cca6d93591502fa5dcecce0dde6a335"},
-    {file = "frozendict-2.3.2-py3-none-any.whl", hash = "sha256:6882a9bbe08ab9b5ff96ce11bdff3fe40b114b9813bc6801261e2a7b45e20012"},
-    {file = "frozendict-2.3.2.tar.gz", hash = "sha256:7fac4542f0a13fbe704db4942f41ba3abffec5af8b100025973e59dff6a09d0d"},
+    {file = "frozendict-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39942914c1217a5a49c7551495a103b3dbd216e19413687e003b859c6b0ebc12"},
+    {file = "frozendict-2.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5589256058b31f2b91419fa30b8dc62dbdefe7710e688a3fd5b43849161eecc9"},
+    {file = "frozendict-2.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:35eb7e59e287c41f4f712d4d3d2333354175b155d217b97c99c201d2d8920790"},
+    {file = "frozendict-2.3.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:310aaf81793abf4f471895e6fe65e0e74a28a2aaf7b25c2ba6ccd4e35af06842"},
+    {file = "frozendict-2.3.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c353c11010a986566a0cb37f9a783c560ffff7d67d5e7fd52221fb03757cdc43"},
+    {file = "frozendict-2.3.3-cp36-cp36m-win_amd64.whl", hash = "sha256:15b5f82aad108125336593cec1b6420c638bf45f449c57e50949fc7654ea5a41"},
+    {file = "frozendict-2.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a4737e5257756bd6b877504ff50185b705db577b5330d53040a6cf6417bb3cdb"},
+    {file = "frozendict-2.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a14c11e33e8b0bc09e07bba3732c77a502c39edb8c3959fd9a0e490e031158"},
+    {file = "frozendict-2.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:027952d1698ac9c766ef43711226b178cdd49d2acbdff396936639ad1d2a5615"},
+    {file = "frozendict-2.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ef818d66c85098a37cf42509545a4ba7dd0c4c679d6262123a8dc14cc474bab7"},
+    {file = "frozendict-2.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:812279f2b270c980112dc4e367b168054f937108f8044eced4199e0ab2945a37"},
+    {file = "frozendict-2.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:c1fb7efbfebc2075f781be3d9774e4ba6ce4fc399148b02097f68d4b3c4bc00a"},
+    {file = "frozendict-2.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0b46d4bf95bce843c0151959d54c3e5b8d0ce29cb44794e820b3ec980d63eee"},
+    {file = "frozendict-2.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38c4660f37fcc70a32ff997fe58e40b3fcc60b2017b286e33828efaa16b01308"},
+    {file = "frozendict-2.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:919e3609844fece11ab18bcbf28a3ed20f8108ad4149d7927d413687f281c6c9"},
+    {file = "frozendict-2.3.3-py3-none-any.whl", hash = "sha256:f988b482d08972a196664718167a993a61c9e9f6fe7b0ca2443570b5f20ca44a"},
+    {file = "frozendict-2.3.3.tar.gz", hash = "sha256:398539c52af3c647d103185bbaa1291679f0507ad035fe3bab2a8b0366d52cf1"},
 ]
 gitdb = [
     {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
@@ -2055,8 +2070,8 @@ matrix-common = [
     {file = "matrix_common-1.2.1.tar.gz", hash = "sha256:a99dcf02a6bd95b24a5a61b354888a2ac92bf2b4b839c727b8dd9da2cdfa3853"},
 ]
 matrix-synapse-ldap3 = [
-    {file = "matrix-synapse-ldap3-0.2.1.tar.gz", hash = "sha256:bfb4390f4a262ffb0d6f057ff3aeb1e46d4e52ff420a064d795fb4f555f00285"},
-    {file = "matrix_synapse_ldap3-0.2.1-py3-none-any.whl", hash = "sha256:1b3310a60f1d06466f35905a269b6df95747fd1305f2b7fe638f373963b2aa2c"},
+    {file = "matrix-synapse-ldap3-0.2.2.tar.gz", hash = "sha256:b388d95693486eef69adaefd0fd9e84463d52fe17b0214a00efcaa669b73cb74"},
+    {file = "matrix_synapse_ldap3-0.2.2-py3-none-any.whl", hash = "sha256:66ee4c85d7952c6c27fd04c09cdfdf4847b8e8b7d6a7ada6ba1100013bda060f"},
 ]
 mccabe = [
     {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
@@ -2260,6 +2275,43 @@ pycparser = [
     {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
     {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
 ]
+pydantic = [
+    {file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"},
+    {file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"},
+    {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"},
+    {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"},
+    {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"},
+    {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"},
+    {file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"},
+    {file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"},
+    {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"},
+    {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"},
+    {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"},
+    {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"},
+    {file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"},
+    {file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"},
+    {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"},
+    {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"},
+    {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"},
+    {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"},
+    {file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"},
+    {file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"},
+    {file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"},
+    {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"},
+    {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"},
+    {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"},
+    {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"},
+    {file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"},
+    {file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"},
+    {file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"},
+    {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"},
+    {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"},
+    {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"},
+    {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"},
+    {file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"},
+    {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"},
+    {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"},
+]
 pyflakes = [
     {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"},
     {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},

+ 4 - 1
pyproject.toml

@@ -54,7 +54,7 @@ skip_gitignore = true
 
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.63.1"
+version = "1.66.0rc1"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"
@@ -158,6 +158,9 @@ packaging = ">=16.1"
 # At the time of writing, we only use functions from the version `importlib.metadata`
 # which shipped in Python 3.8. This corresponds to version 1.4 of the backport.
 importlib_metadata = { version = ">=1.4", python = "<3.8" }
+# This is the most recent version of Pydantic available on common distros.
+pydantic = ">=1.7.4"
+
 
 
 # Optional Dependencies

+ 425 - 0
scripts-dev/check_pydantic_models.py

@@ -0,0 +1,425 @@
+#! /usr/bin/env python
+# Copyright 2022 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+A script which enforces that Synapse always uses strict types when defining a Pydantic
+model.
+
+Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. See
+
+    https://github.com/pydantic/pydantic/issues/1098
+    https://pydantic-docs.helpmanual.io/blog/pydantic-v2/#strict-mode
+
+Until then, this script is a best-effort attempt to stop us from introducing type coercion bugs
+(like the infamous stringy power levels fixed in room version 10).
+"""
+import argparse
+import contextlib
+import functools
+import importlib
+import logging
+import os
+import pkgutil
+import sys
+import textwrap
+import traceback
+import unittest.mock
+from contextlib import contextmanager
+from typing import Any, Callable, Dict, Generator, List, Set, Type, TypeVar
+
+from parameterized import parameterized
+from pydantic import BaseModel as PydanticBaseModel, conbytes, confloat, conint, constr
+from pydantic.typing import get_args
+from typing_extensions import ParamSpec
+
+logger = logging.getLogger(__name__)
+
+CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
+    constr,
+    conbytes,
+    conint,
+    confloat,
+]
+
+TYPES_THAT_PYDANTIC_WILL_COERCE_TO = [
+    str,
+    bytes,
+    int,
+    float,
+    bool,
+]
+
+
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
+class ModelCheckerException(Exception):
+    """Dummy exception. Allows us to detect unwanted types during a module import."""
+
+
+class MissingStrictInConstrainedTypeException(ModelCheckerException):
+    factory_name: str
+
+    def __init__(self, factory_name: str):
+        self.factory_name = factory_name
+
+
+class FieldHasUnwantedTypeException(ModelCheckerException):
+    message: str
+
+    def __init__(self, message: str):
+        self.message = message
+
+
+def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]:
+    """We patch `constr` and friends with wrappers that enforce strict=True."""
+
+    @functools.wraps(factory)
+    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+        # type-ignore: should be redundant once we can use https://github.com/python/mypy/pull/12668
+        if "strict" not in kwargs:  # type: ignore[attr-defined]
+            raise MissingStrictInConstrainedTypeException(factory.__name__)
+        if not kwargs["strict"]:  # type: ignore[index]
+            raise MissingStrictInConstrainedTypeException(factory.__name__)
+        return factory(*args, **kwargs)
+
+    return wrapper
+
+
+def field_type_unwanted(type_: Any) -> bool:
+    """Very rough attempt to detect if a type is unwanted as a Pydantic annotation.
+
+    At present, we exclude types which will coerce, or any generic type involving types
+    which will coerce."""
+    logger.debug("Is %s unwanted?", type_)
+    if type_ in TYPES_THAT_PYDANTIC_WILL_COERCE_TO:
+        logger.debug("yes")
+        return True
+    logger.debug("Maybe. Subargs are %s", get_args(type_))
+    rv = any(field_type_unwanted(t) for t in get_args(type_))
+    logger.debug("Conclusion: %s %s unwanted", type_, "is" if rv else "is not")
+    return rv
+
+
+class PatchedBaseModel(PydanticBaseModel):
+    """A patched version of BaseModel that inspects fields after models are defined.
+
+    We complain loudly if we see an unwanted type.
+
+    Beware: ModelField.type_ is presumably private; this is likely to be very brittle.
+    """
+
+    @classmethod
+    def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object):
+        for field in cls.__fields__.values():
+            # Note that field.type_ and field.outer_type_ are computed based on the
+            # annotation type, see pydantic.fields.ModelField._type_analysis
+            if field_type_unwanted(field.outer_type_):
+                # TODO: this only reports the first bad field. Can we find all bad ones
+                #  and report them all?
+                raise FieldHasUnwantedTypeException(
+                    f"{cls.__module__}.{cls.__qualname__} has field '{field.name}' "
+                    f"with unwanted type `{field.outer_type_}`"
+                )
+
+
+@contextmanager
+def monkeypatch_pydantic() -> Generator[None, None, None]:
+    """Patch pydantic with our snooping versions of BaseModel and the con* functions.
+
+    If the snooping functions see something they don't like, they'll raise a
+    ModelCheckerException instance.
+    """
+    with contextlib.ExitStack() as patches:
+        # Most Synapse code ought to import the patched objects directly from
+        # `pydantic`. But we also patch their containing modules `pydantic.main` and
+        # `pydantic.types` for completeness.
+        patch_basemodel1 = unittest.mock.patch(
+            "pydantic.BaseModel", new=PatchedBaseModel
+        )
+        patch_basemodel2 = unittest.mock.patch(
+            "pydantic.main.BaseModel", new=PatchedBaseModel
+        )
+        patches.enter_context(patch_basemodel1)
+        patches.enter_context(patch_basemodel2)
+        for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
+            wrapper: Callable = make_wrapper(factory)
+            patch1 = unittest.mock.patch(f"pydantic.{factory.__name__}", new=wrapper)
+            patch2 = unittest.mock.patch(
+                f"pydantic.types.{factory.__name__}", new=wrapper
+            )
+            patches.enter_context(patch1)
+            patches.enter_context(patch2)
+        yield
+
+
+def format_model_checker_exception(e: ModelCheckerException) -> str:
+    """Work out which line of code caused e. Format the line in a human-friendly way."""
+    # TODO. FieldHasUnwantedTypeException gives better error messages. Can we ditch the
+    #   patches of constr() etc, and instead inspect fields to look for ConstrainedStr
+    #   with strict=False? There is some difficulty with the inheritance hierarchy
+    #   because StrictStr < ConstrainedStr < str.
+    if isinstance(e, FieldHasUnwantedTypeException):
+        return e.message
+    elif isinstance(e, MissingStrictInConstrainedTypeException):
+        frame_summary = traceback.extract_tb(e.__traceback__)[-2]
+        return (
+            f"Missing `strict=True` from {e.factory_name}() call \n"
+            + traceback.format_list([frame_summary])[0].lstrip()
+        )
+    else:
+        raise ValueError(f"Unknown exception {e}") from e
+
+
+def lint() -> int:
+    """Try to import all of Synapse and see if we spot any Pydantic type coercions.
+
+    Print any problems, then return a status code suitable for sys.exit."""
+    failures = do_lint()
+    if failures:
+        print(f"Found {len(failures)} problem(s)")
+    for failure in sorted(failures):
+        print(failure)
+    return os.EX_DATAERR if failures else os.EX_OK
+
+
+def do_lint() -> Set[str]:
+    """Try to import all of Synapse and see if we spot any Pydantic type coercions."""
+    failures = set()
+
+    with monkeypatch_pydantic():
+        logger.debug("Importing synapse")
+        try:
+            # TODO: make "synapse" an argument so we can target this script at
+            # a subpackage
+            module = importlib.import_module("synapse")
+        except ModelCheckerException as e:
+            logger.warning("Bad annotation found when importing synapse")
+            failures.add(format_model_checker_exception(e))
+            return failures
+
+        try:
+            logger.debug("Fetching subpackages")
+            module_infos = list(
+                pkgutil.walk_packages(module.__path__, f"{module.__name__}.")
+            )
+        except ModelCheckerException as e:
+            logger.warning("Bad annotation found when looking for modules to import")
+            failures.add(format_model_checker_exception(e))
+            return failures
+
+        for module_info in module_infos:
+            logger.debug("Importing %s", module_info.name)
+            try:
+                importlib.import_module(module_info.name)
+            except ModelCheckerException as e:
+                logger.warning(
+                    f"Bad annotation found when importing {module_info.name}"
+                )
+                failures.add(format_model_checker_exception(e))
+
+    return failures
+
+
+def run_test_snippet(source: str) -> None:
+    """Exec a snippet of source code in an isolated environment."""
+    # To emulate `source` being called at the top level of the module,
+    # the globals and locals we provide apparently have to be the same mapping.
+    #
+    # > Remember that at the module level, globals and locals are the same dictionary.
+    # > If exec gets two separate objects as globals and locals, the code will be
+    # > executed as if it were embedded in a class definition.
+    globals_: Dict[str, object]
+    locals_: Dict[str, object]
+    globals_ = locals_ = {}
+    exec(textwrap.dedent(source), globals_, locals_)
+
+
+class TestConstrainedTypesPatch(unittest.TestCase):
+    def test_expression_without_strict_raises(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                from pydantic import constr
+                constr()
+                """
+            )
+
+    def test_called_as_module_attribute_raises(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                import pydantic
+                pydantic.constr()
+                """
+            )
+
+    def test_wildcard_import_raises(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                from pydantic import *
+                constr()
+                """
+            )
+
+    def test_alternative_import_raises(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                from pydantic.types import constr
+                constr()
+                """
+            )
+
+    def test_alternative_import_attribute_raises(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                import pydantic.types
+                pydantic.types.constr()
+                """
+            )
+
+    def test_kwarg_but_no_strict_raises(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                from pydantic import constr
+                constr(min_length=10)
+                """
+            )
+
+    def test_kwarg_strict_False_raises(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                from pydantic import constr
+                constr(strict=False)
+                """
+            )
+
+    def test_kwarg_strict_True_doesnt_raise(self) -> None:
+        with monkeypatch_pydantic():
+            run_test_snippet(
+                """
+                from pydantic import constr
+                constr(strict=True)
+                """
+            )
+
+    def test_annotation_without_strict_raises(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                from pydantic import constr
+                x: constr()
+                """
+            )
+
+    def test_field_annotation_without_strict_raises(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                from pydantic import BaseModel, conint
+                class C:
+                    x: conint()
+                """
+            )
+
+
+class TestFieldTypeInspection(unittest.TestCase):
+    @parameterized.expand(
+        [
+            ("str",),
+            ("bytes",),
+            ("int",),
+            ("float",),
+            ("bool",),
+            ("Optional[str]",),
+            ("Union[None, str]",),
+            ("List[str]",),
+            ("List[List[str]]",),
+            ("Dict[StrictStr, str]",),
+            ("Dict[str, StrictStr]",),
+            ("TypedDict('D', x=int)",),
+        ]
+    )
+    def test_field_holding_unwanted_type_raises(self, annotation: str) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                f"""
+                from typing import *
+                from pydantic import *
+                class C(BaseModel):
+                    f: {annotation}
+                """
+            )
+
+    @parameterized.expand(
+        [
+            ("StrictStr",),
+            ("StrictBytes",),
+            ("StrictInt",),
+            ("StrictFloat",),
+            ("StrictBool",),
+            ("constr(strict=True, min_length=10)",),
+            ("Optional[StrictStr]",),
+            ("Union[None, StrictStr]",),
+            ("List[StrictStr]",),
+            ("List[List[StrictStr]]",),
+            ("Dict[StrictStr, StrictStr]",),
+            ("TypedDict('D', x=StrictInt)",),
+        ]
+    )
+    def test_field_holding_accepted_type_doesnt_raise(self, annotation: str) -> None:
+        with monkeypatch_pydantic():
+            run_test_snippet(
+                f"""
+                from typing import *
+                from pydantic import *
+                class C(BaseModel):
+                    f: {annotation}
+                """
+            )
+
+    def test_field_holding_str_raises_with_alternative_import(self) -> None:
+        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
+            run_test_snippet(
+                """
+                from pydantic.main import BaseModel
+                class C(BaseModel):
+                    f: str
+                """
+            )
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument("mode", choices=["lint", "test"], default="lint", nargs="?")
+parser.add_argument("-v", "--verbose", action="store_true")
+
+
+if __name__ == "__main__":
+    args = parser.parse_args(sys.argv[1:])
+    logging.basicConfig(
+        format="%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s",
+        level=logging.DEBUG if args.verbose else logging.INFO,
+    )
+    # suppress logs we don't care about
+    logging.getLogger("xmlschema").setLevel(logging.WARNING)
+    if args.mode == "lint":
+        sys.exit(lint())
+    elif args.mode == "test":
+        unittest.main(argv=sys.argv[:1])

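For context, a minimal sketch of the kind of model the new checker accepts and rejects; it is not part of this diff, and the class and field names are purely illustrative:

    # Hypothetical models: plain `str`/`int` fields are flagged because pydantic
    # would silently coerce values (e.g. "100" -> 100); the Strict* variants and
    # constrained types declared with strict=True are not.
    from pydantic import BaseModel, StrictInt, StrictStr, constr

    class GoodConfig(BaseModel):
        name: StrictStr                           # accepted: no coercion
        level: StrictInt                          # accepted: no coercion
        token: constr(strict=True, min_length=1)  # accepted: strict=True supplied

    class BadConfig(BaseModel):
        name: str    # rejected by the checker: pydantic would coerce values to str
        level: int   # rejected: e.g. the string "100" would be coerced to 100
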
+ 1 - 0
scripts-dev/complement.sh

@@ -101,6 +101,7 @@ if [ -z "$skip_docker_build" ]; then
     echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
     docker build -t matrixdotorg/synapse \
       --build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
+      --build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
       -f "docker/Dockerfile" .
     echo_if_github "::endgroup::"
 

+ 1 - 0
scripts-dev/lint.sh

@@ -106,4 +106,5 @@ isort "${files[@]}"
 python3 -m black "${files[@]}"
 ./scripts-dev/config-lint.sh
 flake8 "${files[@]}"
+./scripts-dev/check_pydantic_models.py lint
 mypy

+ 130 - 32
scripts-dev/release.py

@@ -32,6 +32,7 @@ import click
 import commonmark
 import git
 from click.exceptions import ClickException
+from git import GitCommandError, Repo
 from github import Github
 from packaging import version
 
@@ -55,9 +56,12 @@ def run_until_successful(
 def cli() -> None:
     """An interactive script to walk through the parts of creating a release.
 
-    Requires the dev dependencies be installed, which can be done via:
+    Requirements:
+      - The dev dependencies to be installed, which can be done via:
 
-        pip install -e .[dev]
+            pip install -e .[dev]
+
+      - A checkout of the sytest repository at ../sytest
 
     Then to use:
 
@@ -75,6 +79,8 @@ def cli() -> None:
 
         # Optional: generate some nice links for the announcement
 
+        ./scripts-dev/release.py merge-back
+
         ./scripts-dev/release.py announce
 
     If the env var GH_TOKEN (or GITHUB_TOKEN) is set, or passed into the
@@ -89,10 +95,12 @@ def prepare() -> None:
     """
 
     # Make sure we're in a git repo.
-    repo = get_repo_and_check_clean_checkout()
+    synapse_repo = get_repo_and_check_clean_checkout()
+    sytest_repo = get_repo_and_check_clean_checkout("../sytest", "sytest")
 
-    click.secho("Updating git repo...")
-    repo.remote().fetch()
+    click.secho("Updating Synapse and Sytest git repos...")
+    synapse_repo.remote().fetch()
+    sytest_repo.remote().fetch()
 
     # Get the current version and AST from root Synapse module.
     current_version = get_package_version()
@@ -166,12 +174,12 @@ def prepare() -> None:
     assert not parsed_new_version.is_postrelease
 
     release_branch_name = get_release_branch_name(parsed_new_version)
-    release_branch = find_ref(repo, release_branch_name)
+    release_branch = find_ref(synapse_repo, release_branch_name)
     if release_branch:
         if release_branch.is_remote():
             # If the release branch only exists on the remote we check it out
             # locally.
-            repo.git.checkout(release_branch_name)
+            synapse_repo.git.checkout(release_branch_name)
     else:
         # If a branch doesn't exist we create one. We ask which one branch it
         # should be based off, defaulting to sensible values depending on the
@@ -187,25 +195,34 @@ def prepare() -> None:
             "Which branch should the release be based on?", default=default
         )
 
-        base_branch = find_ref(repo, branch_name)
-        if not base_branch:
-            print(f"Could not find base branch {branch_name}!")
-            click.get_current_context().abort()
+        for repo_name, repo in {"synapse": synapse_repo, "sytest": sytest_repo}.items():
+            base_branch = find_ref(repo, branch_name)
+            if not base_branch:
+                print(f"Could not find base branch {branch_name} for {repo_name}!")
+                click.get_current_context().abort()
+
+            # Check out the base branch and ensure it's up to date
+            repo.head.set_reference(
+                base_branch, f"check out the base branch for {repo_name}"
+            )
+            repo.head.reset(index=True, working_tree=True)
+            if not base_branch.is_remote():
+                update_branch(repo)
 
-        # Check out the base branch and ensure it's up to date
-        repo.head.set_reference(base_branch, "check out the base branch")
-        repo.head.reset(index=True, working_tree=True)
-        if not base_branch.is_remote():
-            update_branch(repo)
+            # Create the new release branch
+            # Type ignore will no longer be needed after GitPython 3.1.28.
+            # See https://github.com/gitpython-developers/GitPython/pull/1419
+            repo.create_head(release_branch_name, commit=base_branch)  # type: ignore[arg-type]
 
-        # Create the new release branch
-        # Type ignore will no longer be needed after GitPython 3.1.28.
-        # See https://github.com/gitpython-developers/GitPython/pull/1419
-        repo.create_head(release_branch_name, commit=base_branch)  # type: ignore[arg-type]
+        # Special-case SyTest: we don't actually prepare any files so we may
+        # as well push it now (and only when we create a release branch;
+        # not on subsequent RCs or full releases).
+        if click.confirm("Push new SyTest branch?", default=True):
+            sytest_repo.git.push("-u", sytest_repo.remote().name, release_branch_name)
 
     # Switch to the release branch and ensure it's up to date.
-    repo.git.checkout(release_branch_name)
-    update_branch(repo)
+    synapse_repo.git.checkout(release_branch_name)
+    update_branch(synapse_repo)
 
     # Update the version specified in pyproject.toml.
     subprocess.check_output(["poetry", "version", new_version])
@@ -230,15 +247,15 @@ def prepare() -> None:
     run_until_successful('dch -M -r -D stable ""', shell=True)
 
     # Show the user the changes and ask if they want to edit the change log.
-    repo.git.add("-u")
+    synapse_repo.git.add("-u")
     subprocess.run("git diff --cached", shell=True)
 
     if click.confirm("Edit changelog?", default=False):
         click.edit(filename="CHANGES.md")
 
     # Commit the changes.
-    repo.git.add("-u")
-    repo.git.commit("-m", new_version)
+    synapse_repo.git.add("-u")
+    synapse_repo.git.commit("-m", new_version)
 
     # We give the option to bail here in case the user wants to make sure things
     # are OK before pushing.
@@ -246,17 +263,21 @@ def prepare() -> None:
         print("")
         print("Run when ready to push:")
         print("")
-        print(f"\tgit push -u {repo.remote().name} {repo.active_branch.name}")
+        print(
+            f"\tgit push -u {synapse_repo.remote().name} {synapse_repo.active_branch.name}"
+        )
         print("")
         sys.exit(0)
 
     # Otherwise, push and open the changelog in the browser.
-    repo.git.push("-u", repo.remote().name, repo.active_branch.name)
+    synapse_repo.git.push(
+        "-u", synapse_repo.remote().name, synapse_repo.active_branch.name
+    )
 
     print("Opening the changelog in your browser...")
     print("Please ask others to give it a check.")
     click.launch(
-        f"https://github.com/matrix-org/synapse/blob/{repo.active_branch.name}/CHANGES.md"
+        f"https://github.com/matrix-org/synapse/blob/{synapse_repo.active_branch.name}/CHANGES.md"
     )
 
 
@@ -423,6 +444,79 @@ def upload() -> None:
     )
 
 
+def _merge_into(repo: Repo, source: str, target: str) -> None:
+    """
+    Merges branch `source` into branch `target`.
+    Pulls both before merging and pushes the result.
+    """
+
+    # Update our branches and switch to the target branch
+    for branch in [source, target]:
+        click.echo(f"Switching to {branch} and pulling...")
+        repo.heads[branch].checkout()
+        # Pull so we're up to date
+        repo.remote().pull()
+
+    assert repo.active_branch.name == target
+
+    try:
+        # TODO This seemed easier than using GitPython directly
+        click.echo(f"Merging {source}...")
+        repo.git.merge(source)
+    except GitCommandError as exc:
+        # If a merge conflict occurs, give some context and try to
+        # make it easy to abort if necessary.
+        click.echo(exc)
+        if not click.confirm(
+            f"Likely merge conflict whilst merging ({source} → {target}). "
+            f"Have you resolved it?"
+        ):
+            repo.git.merge("--abort")
+            return
+
+    # Push result.
+    click.echo("Pushing...")
+    repo.remote().push()
+
+
+@cli.command()
+def merge_back() -> None:
+    """Merge the release branch back into the appropriate branches.
+    All branches will be automatically pulled from the remote and the results
+    will be pushed to the remote."""
+
+    synapse_repo = get_repo_and_check_clean_checkout()
+    branch_name = synapse_repo.active_branch.name
+
+    if not branch_name.startswith("release-v"):
+        raise RuntimeError("Not on a release branch. This does not seem sensible.")
+
+    # Pull so we're up to date
+    synapse_repo.remote().pull()
+
+    current_version = get_package_version()
+
+    if current_version.is_prerelease:
+        # Release candidate
+        if click.confirm(f"Merge {branch_name} → develop?", default=True):
+            _merge_into(synapse_repo, branch_name, "develop")
+    else:
+        # Full release
+        sytest_repo = get_repo_and_check_clean_checkout("../sytest", "sytest")
+
+        if click.confirm(f"Merge {branch_name} → master?", default=True):
+            _merge_into(synapse_repo, branch_name, "master")
+
+        if click.confirm("Merge master → develop?", default=True):
+            _merge_into(synapse_repo, "master", "develop")
+
+        if click.confirm(f"On SyTest, merge {branch_name} → master?", default=True):
+            _merge_into(sytest_repo, branch_name, "master")
+
+        if click.confirm("On SyTest, merge master → develop?", default=True):
+            _merge_into(sytest_repo, "master", "develop")
+
+
 @cli.command()
 def announce() -> None:
     """Generate markdown to announce the release."""
@@ -469,14 +563,18 @@ def get_release_branch_name(version_number: version.Version) -> str:
     return f"release-v{version_number.major}.{version_number.minor}"
 
 
-def get_repo_and_check_clean_checkout() -> git.Repo:
+def get_repo_and_check_clean_checkout(
+    path: str = ".", name: str = "synapse"
+) -> git.Repo:
     """Get the project repo and check it's not got any uncommitted changes."""
     try:
-        repo = git.Repo()
+        repo = git.Repo(path=path)
     except git.InvalidGitRepositoryError:
-        raise click.ClickException("Not in Synapse repo.")
+        raise click.ClickException(
+            f"{path} is not a git repository (expecting a {name} repository)."
+        )
     if repo.is_dirty():
-        raise click.ClickException("Uncommitted changes exist.")
+        raise click.ClickException(f"Uncommitted changes exist in {path}.")
     return repo
 
 

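As an aside, a self-contained sketch (not from this diff) of the GitPython merge-and-abort pattern that `_merge_into` builds on; the repository path and branch names here are assumptions:

    from git import GitCommandError, Repo

    repo = Repo(".")                     # assumes a clean local checkout
    repo.heads["develop"].checkout()     # switch to the target branch
    repo.remote().pull()                 # make sure it is up to date
    try:
        repo.git.merge("release-v1.66")  # hypothetical release branch name
    except GitCommandError as exc:
        # A merge conflict (or other git failure): show it and back out cleanly.
        print(exc)
        repo.git.merge("--abort")
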
+ 110 - 111
synapse/api/auth.py

@@ -26,13 +26,18 @@ from synapse.api.errors import (
     Codes,
     InvalidClientTokenError,
     MissingClientTokenError,
+    UnstableSpecAuthError,
 )
 from synapse.appservice import ApplicationService
 from synapse.http import get_request_user_agent
 from synapse.http.site import SynapseRequest
-from synapse.logging.opentracing import active_span, force_tracing, start_active_span
-from synapse.storage.databases.main.registration import TokenLookupResult
-from synapse.types import Requester, UserID, create_requester
+from synapse.logging.opentracing import (
+    active_span,
+    force_tracing,
+    start_active_span,
+    trace,
+)
+from synapse.types import Requester, create_requester
 
 if TYPE_CHECKING:
     from synapse.server import HomeServer
@@ -64,14 +69,14 @@ class Auth:
     async def check_user_in_room(
         self,
         room_id: str,
-        user_id: str,
+        requester: Requester,
         allow_departed_users: bool = False,
     ) -> Tuple[str, Optional[str]]:
         """Check if the user is in the room, or was at some point.
         Args:
             room_id: The room to check.
 
-            user_id: The user to check.
+            requester: The user making the request, according to the access token.
 
             current_state: Optional map of the current state of the room.
                 If provided then that map is used to check whether they are a
@@ -88,6 +93,7 @@ class Auth:
             membership event ID of the user.
         """
 
+        user_id = requester.user.to_string()
         (
             membership,
             member_event_id,
@@ -106,8 +112,11 @@ class Auth:
                 forgot = await self.store.did_forget(user_id, room_id)
                 if not forgot:
                     return membership, member_event_id
-
-        raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
+        raise UnstableSpecAuthError(
+            403,
+            "User %s not in room %s" % (user_id, room_id),
+            errcode=Codes.NOT_JOINED,
+        )
 
     async def get_user_by_req(
         self,
@@ -173,96 +182,69 @@ class Auth:
 
             access_token = self.get_access_token_from_request(request)
 
-            (
-                user_id,
-                device_id,
-                app_service,
-            ) = await self._get_appservice_user_id_and_device_id(request)
-            if user_id and app_service:
-                if ip_addr and self._track_appservice_user_ips:
-                    await self.store.insert_client_ip(
-                        user_id=user_id,
-                        access_token=access_token,
-                        ip=ip_addr,
-                        user_agent=user_agent,
-                        device_id="dummy-device"
-                        if device_id is None
-                        else device_id,  # stubbed
-                    )
-
-                requester = create_requester(
-                    user_id, app_service=app_service, device_id=device_id
+            # First check if it could be a request from an appservice
+            requester = await self._get_appservice_user(request)
+            if not requester:
+                # If not, it should be from a regular user
+                requester = await self.get_user_by_access_token(
+                    access_token, allow_expired=allow_expired
                 )
 
-                request.requester = user_id
-                return requester
-
-            user_info = await self.get_user_by_access_token(
-                access_token, allow_expired=allow_expired
-            )
-            token_id = user_info.token_id
-            is_guest = user_info.is_guest
-            shadow_banned = user_info.shadow_banned
-
-            # Deny the request if the user account has expired.
-            if not allow_expired:
-                if await self._account_validity_handler.is_user_expired(
-                    user_info.user_id
-                ):
-                    # Raise the error if either an account validity module has determined
-                    # the account has expired, or the legacy account validity
-                    # implementation is enabled and determined the account has expired
-                    raise AuthError(
-                        403,
-                        "User account has expired",
-                        errcode=Codes.EXPIRED_ACCOUNT,
-                    )
-
-            device_id = user_info.device_id
-
-            if access_token and ip_addr:
+                # Deny the request if the user account has expired.
+                # This check is only done for regular users, not appservice ones.
+                if not allow_expired:
+                    if await self._account_validity_handler.is_user_expired(
+                        requester.user.to_string()
+                    ):
+                        # Raise the error if either an account validity module has determined
+                        # the account has expired, or the legacy account validity
+                        # implementation is enabled and determined the account has expired
+                        raise AuthError(
+                            403,
+                            "User account has expired",
+                            errcode=Codes.EXPIRED_ACCOUNT,
+                        )
+
+            if ip_addr and (
+                not requester.app_service or self._track_appservice_user_ips
+            ):
+                # XXX(quenting): I'm 95% confident that we could skip setting the
+                # device_id to "dummy-device" for appservices, and that the only impact
+                # would be some rows which would not deduplicate in the 'user_ips'
+                # table during the transition
+                recorded_device_id = (
+                    "dummy-device"
+                    if requester.device_id is None and requester.app_service is not None
+                    else requester.device_id
+                )
                 await self.store.insert_client_ip(
-                    user_id=user_info.token_owner,
+                    user_id=requester.authenticated_entity,
                     access_token=access_token,
                     ip=ip_addr,
                     user_agent=user_agent,
-                    device_id=device_id,
+                    device_id=recorded_device_id,
                 )
+
                 # Track also the puppeted user client IP if enabled and the user is puppeting
                 if (
-                    user_info.user_id != user_info.token_owner
+                    requester.user.to_string() != requester.authenticated_entity
                     and self._track_puppeted_user_ips
                 ):
                     await self.store.insert_client_ip(
-                        user_id=user_info.user_id,
+                        user_id=requester.user.to_string(),
                         access_token=access_token,
                         ip=ip_addr,
                         user_agent=user_agent,
-                        device_id=device_id,
+                        device_id=requester.device_id,
                     )
 
-            if is_guest and not allow_guest:
+            if requester.is_guest and not allow_guest:
                 raise AuthError(
                     403,
                     "Guest access not allowed",
                     errcode=Codes.GUEST_ACCESS_FORBIDDEN,
                 )
 
-            # Mark the token as used. This is used to invalidate old refresh
-            # tokens after some time.
-            if not user_info.token_used and token_id is not None:
-                await self.store.mark_access_token_as_used(token_id)
-
-            requester = create_requester(
-                user_info.user_id,
-                token_id,
-                is_guest,
-                shadow_banned,
-                device_id,
-                app_service=app_service,
-                authenticated_entity=user_info.token_owner,
-            )
-
             request.requester = requester
             return requester
         except KeyError:
@@ -299,9 +281,7 @@ class Auth:
                 403, "Application service has not registered this user (%s)" % user_id
             )
 
-    async def _get_appservice_user_id_and_device_id(
-        self, request: Request
-    ) -> Tuple[Optional[str], Optional[str], Optional[ApplicationService]]:
+    async def _get_appservice_user(self, request: Request) -> Optional[Requester]:
         """
         Given a request, reads the request parameters to determine:
         - whether it's an application service that's making this request
@@ -316,15 +296,13 @@ class Auth:
              Must use `org.matrix.msc3202.device_id` in place of `device_id` for now.
 
         Returns:
-            3-tuple of
-            (user ID?, device ID?, application service?)
+            the application service `Requester` of that request
 
         Postconditions:
-        - If an application service is returned, so is a user ID
-        - A user ID is never returned without an application service
-        - A device ID is never returned without a user ID or an application service
-        - The returned application service, if present, is permitted to control the
-          returned user ID.
+        - The `app_service` field in the returned `Requester` is set
+        - The `user_id` field in the returned `Requester` is either the application
+          service sender or the controlled user set by the `user_id` URI parameter
+        - The returned application service is permitted to control the returned user ID.
         - The returned device ID, if present, has been checked to be a valid device ID
           for the returned user ID.
         """
@@ -334,12 +312,12 @@ class Auth:
             self.get_access_token_from_request(request)
         )
         if app_service is None:
-            return None, None, None
+            return None
 
         if app_service.ip_range_whitelist:
             ip_address = IPAddress(request.getClientAddress().host)
             if ip_address not in app_service.ip_range_whitelist:
-                return None, None, None
+                return None
 
         # This will always be set by the time Twisted calls us.
         assert request.args is not None
@@ -373,13 +351,15 @@ class Auth:
                     Codes.EXCLUSIVE,
                 )
 
-        return effective_user_id, effective_device_id, app_service
+        return create_requester(
+            effective_user_id, app_service=app_service, device_id=effective_device_id
+        )
 
     async def get_user_by_access_token(
         self,
         token: str,
         allow_expired: bool = False,
-    ) -> TokenLookupResult:
+    ) -> Requester:
         """Validate access token and get user_id from it
 
         Args:
@@ -396,9 +376,9 @@ class Auth:
 
         # First look in the database to see if the access token is present
         # as an opaque token.
-        r = await self.store.get_user_by_access_token(token)
-        if r:
-            valid_until_ms = r.valid_until_ms
+        user_info = await self.store.get_user_by_access_token(token)
+        if user_info:
+            valid_until_ms = user_info.valid_until_ms
             if (
                 not allow_expired
                 and valid_until_ms is not None
@@ -410,7 +390,20 @@ class Auth:
                     msg="Access token has expired", soft_logout=True
                 )
 
-            return r
+            # Mark the token as used. This is used to invalidate old refresh
+            # tokens after some time.
+            await self.store.mark_access_token_as_used(user_info.token_id)
+
+            requester = create_requester(
+                user_id=user_info.user_id,
+                access_token_id=user_info.token_id,
+                is_guest=user_info.is_guest,
+                shadow_banned=user_info.shadow_banned,
+                device_id=user_info.device_id,
+                authenticated_entity=user_info.token_owner,
+            )
+
+            return requester
 
         # If the token isn't found in the database, then it could still be a
         # macaroon for a guest, so we check that here.
@@ -436,11 +429,12 @@ class Auth:
                     "Guest access token used for regular user"
                 )
 
-            return TokenLookupResult(
+            return create_requester(
                 user_id=user_id,
                 is_guest=True,
                 # all guests get the same device id
                 device_id=GUEST_DEVICE_ID,
+                authenticated_entity=user_id,
             )
         except (
             pymacaroons.exceptions.MacaroonException,
@@ -463,32 +457,33 @@ class Auth:
         request.requester = create_requester(service.sender, app_service=service)
         return service
 
-    async def is_server_admin(self, user: UserID) -> bool:
+    async def is_server_admin(self, requester: Requester) -> bool:
         """Check if the given user is a local server admin.
 
         Args:
-            user: user to check
+            requester: The user making the request, according to the access token.
 
         Returns:
             True if the user is an admin
         """
-        return await self.store.is_server_admin(user)
+        return await self.store.is_server_admin(requester.user)
 
-    async def check_can_change_room_list(self, room_id: str, user: UserID) -> bool:
+    async def check_can_change_room_list(
+        self, room_id: str, requester: Requester
+    ) -> bool:
         """Determine whether the user is allowed to edit the room's entry in the
         published room list.
 
         Args:
-            room_id
-            user
+            room_id: The room to check.
+            requester: The user making the request, according to the access token.
         """
 
-        is_admin = await self.is_server_admin(user)
+        is_admin = await self.is_server_admin(requester)
         if is_admin:
             return True
 
-        user_id = user.to_string()
-        await self.check_user_in_room(room_id, user_id)
+        await self.check_user_in_room(room_id, requester)
 
         # We currently require the user is a "moderator" in the room. We do this
         # by checking if they would (theoretically) be able to change the
@@ -507,7 +502,9 @@ class Auth:
         send_level = event_auth.get_send_level(
             EventTypes.CanonicalAlias, "", power_level_event
         )
-        user_level = event_auth.get_user_power_level(user_id, auth_events)
+        user_level = event_auth.get_user_power_level(
+            requester.user.to_string(), auth_events
+        )
 
         return user_level >= send_level
 
@@ -563,17 +560,18 @@ class Auth:
 
             return query_params[0].decode("ascii")
 
+    @trace
     async def check_user_in_room_or_world_readable(
-        self, room_id: str, user_id: str, allow_departed_users: bool = False
+        self, room_id: str, requester: Requester, allow_departed_users: bool = False
     ) -> Tuple[str, Optional[str]]:
         """Checks that the user is or was in the room or the room is world
         readable. If it isn't then an exception is raised.
 
         Args:
-            room_id: room to check
-            user_id: user to check
-            allow_departed_users: if True, accept users that were previously
-                members but have now departed
+            room_id: The room to check.
+            requester: The user making the request, according to the access token.
+            allow_departed_users: If True, accept users that were previously
+                members but have now departed.
 
         Returns:
             Resolves to the current membership of the user in the room and the
@@ -588,7 +586,7 @@ class Auth:
             #  * The user is a guest user, and has joined the room
             # else it will throw.
             return await self.check_user_in_room(
-                room_id, user_id, allow_departed_users=allow_departed_users
+                room_id, requester, allow_departed_users=allow_departed_users
             )
         except AuthError:
             visibility = await self._storage_controllers.state.get_current_state_event(
@@ -600,8 +598,9 @@ class Auth:
                 == HistoryVisibility.WORLD_READABLE
             ):
                 return Membership.JOIN, None
-            raise AuthError(
+            raise UnstableSpecAuthError(
                 403,
                 "User %s not in room %s, and room previews are disabled"
-                % (user_id, room_id),
+                % (requester.user, room_id),
+                errcode=Codes.NOT_JOINED,
             )

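To illustrate the new calling convention in `Auth`, a hedged sketch of a caller (the servlet class is hypothetical, and it assumes a Synapse install providing these imports):

    from synapse.http.site import SynapseRequest
    from synapse.types import JsonDict

    class ExampleRestServlet:
        # Illustrative only: `self.auth` is assumed to be an instance of the Auth
        # class changed above.
        async def on_GET(self, request: SynapseRequest, room_id: str) -> JsonDict:
            # get_user_by_req now always yields a Requester, whether the caller is
            # a regular user or an application service.
            requester = await self.auth.get_user_by_req(request)
            # check_user_in_room now takes the Requester itself rather than a
            # user_id string.
            await self.auth.check_user_in_room(
                room_id, requester, allow_departed_users=True
            )
            return {}
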
+ 6 - 5
synapse/api/constants.py

@@ -216,11 +216,11 @@ class EventContentFields:
     MSC2716_HISTORICAL: Final = "org.matrix.msc2716.historical"
     # For "insertion" events to indicate what the next batch ID should be in
     # order to connect to it
-    MSC2716_NEXT_BATCH_ID: Final = "org.matrix.msc2716.next_batch_id"
+    MSC2716_NEXT_BATCH_ID: Final = "next_batch_id"
     # Used on "batch" events to indicate which insertion event it connects to
-    MSC2716_BATCH_ID: Final = "org.matrix.msc2716.batch_id"
+    MSC2716_BATCH_ID: Final = "batch_id"
     # For "marker" events
-    MSC2716_MARKER_INSERTION: Final = "org.matrix.msc2716.marker.insertion"
+    MSC2716_INSERTION_EVENT_REFERENCE: Final = "insertion_event_reference"
 
     # The authorising user for joining a restricted room.
     AUTHORISING_USER: Final = "join_authorised_via_users_server"
@@ -257,7 +257,8 @@ class GuestAccess:
 
 class ReceiptTypes:
     READ: Final = "m.read"
-    READ_PRIVATE: Final = "org.matrix.msc2285.read.private"
+    READ_PRIVATE: Final = "m.read.private"
+    UNSTABLE_READ_PRIVATE: Final = "org.matrix.msc2285.read.private"
     FULLY_READ: Final = "m.fully_read"
 
 
@@ -268,4 +269,4 @@ class PublicRoomsFilterFields:
     """
 
     GENERIC_SEARCH_TERM: Final = "generic_search_term"
-    ROOM_TYPES: Final = "org.matrix.msc3827.room_types"
+    ROOM_TYPES: Final = "room_types"

+ 48 - 10
synapse/api/errors.py

@@ -26,6 +26,7 @@ from twisted.web import http
 from synapse.util import json_decoder
 
 if typing.TYPE_CHECKING:
+    from synapse.config.homeserver import HomeServerConfig
     from synapse.types import JsonDict
 
 logger = logging.getLogger(__name__)
@@ -80,6 +81,12 @@ class Codes(str, Enum):
     INVALID_SIGNATURE = "M_INVALID_SIGNATURE"
     USER_DEACTIVATED = "M_USER_DEACTIVATED"
 
+    # Part of MSC3848
+    # https://github.com/matrix-org/matrix-spec-proposals/pull/3848
+    ALREADY_JOINED = "ORG.MATRIX.MSC3848.ALREADY_JOINED"
+    NOT_JOINED = "ORG.MATRIX.MSC3848.NOT_JOINED"
+    INSUFFICIENT_POWER = "ORG.MATRIX.MSC3848.INSUFFICIENT_POWER"
+
     # The account has been suspended on the server.
     # By opposition to `USER_DEACTIVATED`, this is a reversible measure
     # that can possibly be appealed and reverted.
@@ -167,7 +174,7 @@ class SynapseError(CodeMessageException):
         else:
             self._additional_fields = dict(additional_fields)
 
-    def error_dict(self) -> "JsonDict":
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
         return cs_error(self.msg, self.errcode, **self._additional_fields)
 
 
@@ -213,7 +220,7 @@ class ConsentNotGivenError(SynapseError):
         )
         self._consent_uri = consent_uri
 
-    def error_dict(self) -> "JsonDict":
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
         return cs_error(self.msg, self.errcode, consent_uri=self._consent_uri)
 
 
@@ -307,6 +314,37 @@ class AuthError(SynapseError):
         super().__init__(code, msg, errcode, additional_fields)
 
 
+class UnstableSpecAuthError(AuthError):
+    """An error raised when a new error code is being proposed to replace a previous one.
+
+    When MSC3848 support is enabled, the new error code is returned in the
+    `org.matrix.msc3848.unstable.errcode` property of the C-S error body, while the
+    previous error code is still returned in the `errcode` property.
+    """
+
+    def __init__(
+        self,
+        code: int,
+        msg: str,
+        errcode: str,
+        previous_errcode: str = Codes.FORBIDDEN,
+        additional_fields: Optional[dict] = None,
+    ):
+        self.previous_errcode = previous_errcode
+        super().__init__(code, msg, errcode, additional_fields)
+
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
+        fields = {}
+        if config is not None and config.experimental.msc3848_enabled:
+            fields["org.matrix.msc3848.unstable.errcode"] = self.errcode
+        return cs_error(
+            self.msg,
+            self.previous_errcode,
+            **fields,
+            **self._additional_fields,
+        )
+
+
 class InvalidClientCredentialsError(SynapseError):
     """An error raised when there was a problem with the authorisation credentials
     in a client request.
@@ -338,8 +376,8 @@ class InvalidClientTokenError(InvalidClientCredentialsError):
         super().__init__(msg=msg, errcode="M_UNKNOWN_TOKEN")
         self._soft_logout = soft_logout
 
-    def error_dict(self) -> "JsonDict":
-        d = super().error_dict()
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
+        d = super().error_dict(config)
         d["soft_logout"] = self._soft_logout
         return d
 
@@ -362,7 +400,7 @@ class ResourceLimitError(SynapseError):
         self.limit_type = limit_type
         super().__init__(code, msg, errcode=errcode)
 
-    def error_dict(self) -> "JsonDict":
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
         return cs_error(
             self.msg,
             self.errcode,
@@ -397,7 +435,7 @@ class InvalidCaptchaError(SynapseError):
         super().__init__(code, msg, errcode)
         self.error_url = error_url
 
-    def error_dict(self) -> "JsonDict":
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
         return cs_error(self.msg, self.errcode, error_url=self.error_url)
 
 
@@ -414,7 +452,7 @@ class LimitExceededError(SynapseError):
         super().__init__(code, msg, errcode)
         self.retry_after_ms = retry_after_ms
 
-    def error_dict(self) -> "JsonDict":
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
         return cs_error(self.msg, self.errcode, retry_after_ms=self.retry_after_ms)
 
 
@@ -429,7 +467,7 @@ class RoomKeysVersionError(SynapseError):
         super().__init__(403, "Wrong room_keys version", Codes.WRONG_ROOM_KEYS_VERSION)
         self.current_version = current_version
 
-    def error_dict(self) -> "JsonDict":
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
         return cs_error(self.msg, self.errcode, current_version=self.current_version)
 
 
@@ -469,7 +507,7 @@ class IncompatibleRoomVersionError(SynapseError):
 
         self._room_version = room_version
 
-    def error_dict(self) -> "JsonDict":
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
         return cs_error(self.msg, self.errcode, room_version=self._room_version)
 
 
@@ -515,7 +553,7 @@ class UnredactedContentDeletedError(SynapseError):
         )
         self.content_keep_ms = content_keep_ms
 
-    def error_dict(self) -> "JsonDict":
+    def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
         extra = {}
         if self.content_keep_ms is not None:
             extra = {"fi.mau.msc2815.content_keep_ms": self.content_keep_ms}

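Taken together, these hunks thread the homeserver config into error serialisation so that MSC3848's finer-grained auth codes can be surfaced behind an experimental flag. The snippet below is a minimal sketch of the resulting behaviour, not part of the diff: the SimpleNamespace stand-in for HomeServerConfig and the example message are assumptions made purely for illustration.

    from types import SimpleNamespace

    from synapse.api.errors import Codes, UnstableSpecAuthError

    err = UnstableSpecAuthError(
        403,
        "You don't have permission to send that event",
        errcode=Codes.INSUFFICIENT_POWER,  # new MSC3848 code
        # previous_errcode defaults to Codes.FORBIDDEN, the code older clients expect
    )

    # With no config (or with msc3848_enabled off) only the legacy code is serialised,
    # i.e. a JSON body like {"errcode": "M_FORBIDDEN", "error": "You don't have ..."}.
    legacy_body = err.error_dict(None)

    # With the experimental flag on, the unstable code is added alongside it:
    # "org.matrix.msc3848.unstable.errcode": "ORG.MATRIX.MSC3848.INSUFFICIENT_POWER".
    fake_config = SimpleNamespace(experimental=SimpleNamespace(msc3848_enabled=True))
    unstable_body = err.error_dict(fake_config)
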
+ 3 - 3
synapse/api/ratelimiting.py

@@ -17,7 +17,7 @@ from collections import OrderedDict
 from typing import Hashable, Optional, Tuple
 
 from synapse.api.errors import LimitExceededError
-from synapse.config.ratelimiting import RateLimitConfig
+from synapse.config.ratelimiting import RatelimitSettings
 from synapse.storage.databases.main import DataStore
 from synapse.types import Requester
 from synapse.util import Clock
@@ -314,8 +314,8 @@ class RequestRatelimiter:
         self,
         store: DataStore,
         clock: Clock,
-        rc_message: RateLimitConfig,
-        rc_admin_redaction: Optional[RateLimitConfig],
+        rc_message: RatelimitSettings,
+        rc_admin_redaction: Optional[RatelimitSettings],
     ):
         self.store = store
         self.clock = clock

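The ratelimiting change is a pure rename: the per-action settings object RateLimitConfig becomes RatelimitSettings, presumably to avoid confusion with the RatelimitConfig config section. The sketch below shows what callers see; it assumes the renamed class keeps the old constructor (a dict of "per_second"/"burst_count" values), which this diff does not show.

    from synapse.config.ratelimiting import RatelimitSettings

    # Same shape as the old RateLimitConfig (assumed): a dict of per_second /
    # burst_count values, with library defaults applied for anything omitted.
    rc_message = RatelimitSettings({"per_second": 0.2, "burst_count": 10.0})

    # RequestRatelimiter (constructor shown in the hunk above) is then built from
    # these settings; the store and clock come from the running homeserver and are
    # elided here, so this last line is indicative only:
    # limiter = RequestRatelimiter(store, clock, rc_message, rc_admin_redaction=None)
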
+ 19 - 19
synapse/api/room_versions.py

@@ -269,24 +269,6 @@ class RoomVersions:
         msc3787_knock_restricted_join_rule=False,
         msc3667_int_only_power_levels=False,
     )
-    MSC2716v3 = RoomVersion(
-        "org.matrix.msc2716v3",
-        RoomDisposition.UNSTABLE,
-        EventFormatVersions.V3,
-        StateResolutionVersions.V2,
-        enforce_key_validity=True,
-        special_case_aliases_auth=False,
-        strict_canonicaljson=True,
-        limit_notifications_power_levels=True,
-        msc2176_redaction_rules=False,
-        msc3083_join_rules=False,
-        msc3375_redaction_rules=False,
-        msc2403_knocking=True,
-        msc2716_historical=True,
-        msc2716_redactions=True,
-        msc3787_knock_restricted_join_rule=False,
-        msc3667_int_only_power_levels=False,
-    )
     MSC3787 = RoomVersion(
         "org.matrix.msc3787",
         RoomDisposition.UNSTABLE,
@@ -323,6 +305,24 @@ class RoomVersions:
         msc3787_knock_restricted_join_rule=True,
         msc3667_int_only_power_levels=True,
     )
+    MSC2716v4 = RoomVersion(
+        "org.matrix.msc2716v4",
+        RoomDisposition.UNSTABLE,
+        EventFormatVersions.V3,
+        StateResolutionVersions.V2,
+        enforce_key_validity=True,
+        special_case_aliases_auth=False,
+        strict_canonicaljson=True,
+        limit_notifications_power_levels=True,
+        msc2176_redaction_rules=False,
+        msc3083_join_rules=False,
+        msc3375_redaction_rules=False,
+        msc2403_knocking=True,
+        msc2716_historical=True,
+        msc2716_redactions=True,
+        msc3787_knock_restricted_join_rule=False,
+        msc3667_int_only_power_levels=False,
+    )
 
 
 KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
@@ -338,9 +338,9 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
         RoomVersions.V7,
         RoomVersions.V8,
         RoomVersions.V9,
-        RoomVersions.MSC2716v3,
         RoomVersions.MSC3787,
         RoomVersions.V10,
+        RoomVersions.MSC2716v4,
     )
 }
 

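In room_versions.py the unstable MSC2716 room version moves from v3 to v4: the old entry is deleted and org.matrix.msc2716v4 is registered in KNOWN_ROOM_VERSIONS with the same capability flags. A short sketch of how the registry is typically consumed, assuming RoomVersion exposes the keyword arguments above as attributes:

    from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, EventFormatVersions

    # The old unstable identifier no longer resolves after this change...
    assert "org.matrix.msc2716v3" not in KNOWN_ROOM_VERSIONS

    # ...while the v4 identifier does, carrying the historical-batch flags MSC2716 needs.
    v4 = KNOWN_ROOM_VERSIONS["org.matrix.msc2716v4"]
    assert v4.msc2716_historical and v4.msc2716_redactions
    assert v4.event_format == EventFormatVersions.V3
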
+ 16 - 12
synapse/app/admin_cmd.py

@@ -28,19 +28,22 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.events import EventBase
 from synapse.handlers.admin import ExfiltrationWriter
-from synapse.replication.slave.storage._base import BaseSlavedStore
-from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
-from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
-from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore
 from synapse.replication.slave.storage.devices import SlavedDeviceStore
 from synapse.replication.slave.storage.events import SlavedEventStore
 from synapse.replication.slave.storage.filtering import SlavedFilteringStore
 from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
-from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
-from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.server import HomeServer
 from synapse.storage.database import DatabasePool, LoggingDatabaseConnection
+from synapse.storage.databases.main.account_data import AccountDataWorkerStore
+from synapse.storage.databases.main.appservice import (
+    ApplicationServiceTransactionWorkerStore,
+    ApplicationServiceWorkerStore,
+)
+from synapse.storage.databases.main.deviceinbox import DeviceInboxWorkerStore
+from synapse.storage.databases.main.receipts import ReceiptsWorkerStore
+from synapse.storage.databases.main.registration import RegistrationWorkerStore
 from synapse.storage.databases.main.room import RoomWorkerStore
+from synapse.storage.databases.main.tags import TagsWorkerStore
 from synapse.types import StateMap
 from synapse.util import SYNAPSE_VERSION
 from synapse.util.logcontext import LoggingContext
@@ -49,16 +52,17 @@ logger = logging.getLogger("synapse.app.admin_cmd")
 
 
 class AdminCmdSlavedStore(
-    SlavedReceiptsStore,
-    SlavedAccountDataStore,
-    SlavedApplicationServiceStore,
-    SlavedRegistrationStore,
     SlavedFilteringStore,
-    SlavedDeviceInboxStore,
     SlavedDeviceStore,
     SlavedPushRuleStore,
     SlavedEventStore,
-    BaseSlavedStore,
+    TagsWorkerStore,
+    DeviceInboxWorkerStore,
+    AccountDataWorkerStore,
+    ApplicationServiceTransactionWorkerStore,
+    ApplicationServiceWorkerStore,
+    RegistrationWorkerStore,
+    ReceiptsWorkerStore,
     RoomWorkerStore,
 ):
     def __init__(

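The admin_cmd change continues the migration away from the replication Slaved* wrappers: AdminCmdSlavedStore now mixes in the generic *WorkerStore classes directly (plus TagsWorkerStore). The toy sketch below deliberately avoids Synapse classes and only illustrates the composition pattern: swapping a base class changes where each method is inherited from, and Python's MRO does the rest.

    # Stand-ins for the real worker stores; names and methods are illustrative only.
    class ReceiptsWorkerStoreDemo:
        def get_receipts_for_user(self, user_id: str) -> list:
            return []  # pretend this reads the receipts tables

    class RoomWorkerStoreDemo:
        def get_room(self, room_id: str) -> dict:
            return {"room_id": room_id}

    class AdminCmdStoreDemo(ReceiptsWorkerStoreDemo, RoomWorkerStoreDemo):
        """Toy analogue of AdminCmdSlavedStore: everything is inherited from the mixins."""

    store = AdminCmdStoreDemo()
    assert store.get_room("!abc:example.org")["room_id"] == "!abc:example.org"
    assert store.get_receipts_for_user("@alice:example.org") == []
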
Some files were not shown because too many files changed in this diff