
Merge branch 'develop' into e2e_cross-signing

Hubert Chathi
commit 3e180d8f14
100 changed files with 1758 additions and 943 deletions
  1. .circleci/config.yml (+52 -49)
  2. .circleci/merge_base_branch.sh (+2 -2)
  3. .coveragerc (+12 -0)
  4. .editorconfig (+9 -0)
  5. .github/ISSUE_TEMPLATE/BUG_REPORT.md (+28 -10)
  6. .github/ISSUE_TEMPLATE/FEATURE_REQUEST.md (+9 -0)
  7. .github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md (+9 -0)
  8. .github/PULL_REQUEST_TEMPLATE.md (+7 -0)
  9. .github/SUPPORT.md (+3 -0)
  10. .travis.yml (+38 -17)
  11. CHANGES.md (+124 -4)
  12. MANIFEST.in (+2 -0)
  13. README.rst (+41 -56)
  14. UPGRADE.rst (+68 -4)
  15. changelog.d/3830.feature (+1 -0)
  16. changelog.d/3831.feature (+1 -0)
  17. changelog.d/4165.bugfix (+1 -0)
  18. changelog.d/4176.bugfix (+1 -0)
  19. changelog.d/4180.misc (+1 -0)
  20. changelog.d/4182.misc (+1 -0)
  21. changelog.d/4183.bugfix (+1 -0)
  22. changelog.d/4188.misc (+1 -0)
  23. changelog.d/4189.misc (+2 -0)
  24. changelog.d/4190.misc (+1 -0)
  25. changelog.d/4192.bugfix (+1 -0)
  26. changelog.d/4193.misc (+1 -0)
  27. changelog.d/4197.bugfix (+1 -0)
  28. changelog.d/4200.misc (+1 -0)
  29. changelog.d/4204.misc (+1 -0)
  30. changelog.d/4205.misc (+1 -0)
  31. changelog.d/4207.bugfix (+1 -0)
  32. changelog.d/4209.misc (+1 -0)
  33. changelog.d/4211.bugfix (+2 -0)
  34. changelog.d/4214.misc (+1 -0)
  35. changelog.d/4220.feature (+1 -0)
  36. changelog.d/4223.bugfix (+1 -0)
  37. changelog.d/4224.misc (+1 -0)
  38. changelog.d/4225.misc (+1 -0)
  39. changelog.d/4227.misc (+1 -0)
  40. changelog.d/4230.feature (+1 -0)
  41. changelog.d/4234.misc (+1 -0)
  42. changelog.d/4241.bugfix (+1 -0)
  43. changelog.d/4244.misc (+1 -0)
  44. changelog.d/4250.bugfix (+1 -0)
  45. changelog.d/4253.bugfix (+1 -0)
  46. changelog.d/4257.misc (+1 -0)
  47. changelog.d/4260.misc (+1 -0)
  48. changelog.d/4261.misc (+1 -0)
  49. changelog.d/4262.feature (+1 -0)
  50. changelog.d/4266.misc (+1 -0)
  51. contrib/docker/docker-compose.yml (+5 -3)
  52. contrib/purge_api/README.md (+16 -0)
  53. contrib/purge_api/purge_history.sh (+141 -0)
  54. contrib/purge_api/purge_remote_media.sh (+54 -0)
  55. docker/conf/homeserver.yaml (+6 -6)
  56. docs/admin_api/purge_history_api.rst (+8 -0)
  57. docs/consent_tracking.md (+38 -1)
  58. docs/log_contexts.rst (+57 -1)
  59. docs/privacy_policy_templates/en/1.0.html (+9 -6)
  60. jenkins/prepare_synapse.sh (+0 -19)
  61. scripts-dev/check_auth.py (+15 -21)
  62. scripts-dev/check_event_hash.py (+18 -14)
  63. scripts-dev/check_signature.py (+19 -17)
  64. scripts-dev/convert_server_keys.py (+22 -18)
  65. scripts-dev/definitions.py (+34 -20)
  66. scripts-dev/dump_macaroon.py (+8 -5)
  67. scripts-dev/federation_client.py (+54 -52)
  68. scripts-dev/hash_history.py (+38 -24)
  69. scripts-dev/list_url_patterns.py (+8 -8)
  70. scripts-dev/make_identicons.pl (+0 -39)
  71. scripts-dev/tail-synapse.py (+13 -13)
  72. scripts/hash_password (+34 -10)
  73. scripts/move_remote_media_to_new_store.py (+12 -24)
  74. scripts/register_new_matrix_user (+3 -191)
  75. scripts/synapse_port_db (+124 -151)
  76. setup.cfg (+8 -9)
  77. setup.py (+4 -2)
  78. synapse/__init__.py (+1 -1)
  79. synapse/_scripts/__init__.py (+0 -0)
  80. synapse/_scripts/register_new_matrix_user.py (+215 -0)
  81. synapse/api/auth.py (+20 -9)
  82. synapse/api/constants.py (+8 -1)
  83. synapse/api/filtering.py (+4 -1)
  84. synapse/api/urls.py (+0 -1)
  85. synapse/app/homeserver.py (+24 -24)
  86. synapse/app/pusher.py (+3 -3)
  87. synapse/app/synchrotron.py (+14 -0)
  88. synapse/appservice/scheduler.py (+24 -12)
  89. synapse/config/__main__.py (+1 -1)
  90. synapse/config/_base.py (+59 -62)
  91. synapse/config/appservice.py (+5 -0)
  92. synapse/config/consent_config.py (+18 -0)
  93. synapse/config/emailconfig.py (+18 -22)
  94. synapse/config/homeserver.py (+2 -1)
  95. synapse/config/logger.py (+1 -0)
  96. synapse/config/registration.py (+25 -3)
  97. synapse/config/repository.py (+1 -1)
  98. synapse/config/room_directory.py (+102 -0)
  99. synapse/config/server.py (+17 -1)
  100. synapse/crypto/keyclient.py (+6 -4)

+ 52 - 49
.circleci/config.yml

@@ -4,8 +4,8 @@ jobs:
     machine: true
     steps:
       - checkout
-      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG} .
-      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 .
+      - run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG} .
+      - run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 .
       - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
       - run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
       - run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
@@ -13,109 +13,112 @@ jobs:
     machine: true
     steps:
       - checkout
-      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1} .
-      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1}-py3 --build-arg PYTHON_VERSION=3.6 .
+      - run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_SHA1} .
+      - run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_SHA1}-py3 --build-arg PYTHON_VERSION=3.6 .
       - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
-      - run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1} matrixdotorg/synapse:latest
-      - run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1}-py3 matrixdotorg/synapse:latest-py3
-      - run: docker push matrixdotorg/synapse:${CIRCLE_SHA1}
-      - run: docker push matrixdotorg/synapse:${CIRCLE_SHA1}-py3
       - run: docker push matrixdotorg/synapse:latest
       - run: docker push matrixdotorg/synapse:latest-py3
   sytestpy2:
-    machine: true
+    docker:
+      - image: matrixdotorg/sytest-synapsepy2
+    working_directory: /src
     steps:
       - checkout
-      - run: docker pull matrixdotorg/sytest-synapsepy2
-      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy2
+      - run: /synapse_sytest.sh
       - store_artifacts:
-          path: ~/project/logs
+          path: /logs
           destination: logs
       - store_test_results:
-          path: logs
+          path: /logs
   sytestpy2postgres:
-    machine: true
+    docker:
+      - image: matrixdotorg/sytest-synapsepy2
+    working_directory: /src
     steps:
       - checkout
-      - run: docker pull matrixdotorg/sytest-synapsepy2
-      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy2
+      - run: POSTGRES=1 /synapse_sytest.sh
       - store_artifacts:
-          path: ~/project/logs
+          path: /logs
           destination: logs
       - store_test_results:
-          path: logs
+          path: /logs
   sytestpy2merged:
-    machine: true
+    docker:
+      - image: matrixdotorg/sytest-synapsepy2
+    working_directory: /src
     steps:
       - checkout
       - run: bash .circleci/merge_base_branch.sh
-      - run: docker pull matrixdotorg/sytest-synapsepy2
-      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy2
+      - run: /synapse_sytest.sh
       - store_artifacts:
-          path: ~/project/logs
+          path: /logs
           destination: logs
       - store_test_results:
-          path: logs
-
+          path: /logs
   sytestpy2postgresmerged:
-    machine: true
+    docker:
+      - image: matrixdotorg/sytest-synapsepy2
+    working_directory: /src
     steps:
       - checkout
       - run: bash .circleci/merge_base_branch.sh
-      - run: docker pull matrixdotorg/sytest-synapsepy2
-      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy2
+      - run: POSTGRES=1 /synapse_sytest.sh
       - store_artifacts:
-          path: ~/project/logs
+          path: /logs
           destination: logs
       - store_test_results:
-          path: logs
+          path: /logs
 
   sytestpy3:
-    machine: true
+    docker:
+      - image: matrixdotorg/sytest-synapsepy3
+    working_directory: /src
     steps:
       - checkout
-      - run: docker pull matrixdotorg/sytest-synapsepy3
-      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy3
+      - run: /synapse_sytest.sh
       - store_artifacts:
-          path: ~/project/logs
+          path: /logs
           destination: logs
       - store_test_results:
-          path: logs
+          path: /logs
   sytestpy3postgres:
-    machine: true
+    docker:
+      - image: matrixdotorg/sytest-synapsepy3
+    working_directory: /src
     steps:
       - checkout
-      - run: docker pull matrixdotorg/sytest-synapsepy3
-      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy3
+      - run: POSTGRES=1 /synapse_sytest.sh
       - store_artifacts:
-          path: ~/project/logs
+          path: /logs
           destination: logs
       - store_test_results:
-          path: logs
+          path: /logs
   sytestpy3merged:
-    machine: true
+    docker:
+      - image: matrixdotorg/sytest-synapsepy3
+    working_directory: /src
     steps:
       - checkout
       - run: bash .circleci/merge_base_branch.sh
-      - run: docker pull matrixdotorg/sytest-synapsepy3
-      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy3
+      - run: /synapse_sytest.sh
       - store_artifacts:
-          path: ~/project/logs
+          path: /logs
           destination: logs
       - store_test_results:
-          path: logs
+          path: /logs
   sytestpy3postgresmerged:
-    machine: true
+    docker:
+      - image: matrixdotorg/sytest-synapsepy3
+    working_directory: /src
     steps:
       - checkout
       - run: bash .circleci/merge_base_branch.sh
-      - run: docker pull matrixdotorg/sytest-synapsepy3
-      - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy3
+      - run: POSTGRES=1 /synapse_sytest.sh
       - store_artifacts:
-          path: ~/project/logs
+          path: /logs
           destination: logs
       - store_test_results:
-          path: logs
+          path: /logs
 
 workflows:
   version: 2
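
The first hunk adds a `gitsha1` label to the published images. A quick way to check it on a built image (a hedged sketch; the tag is one of those pushed above):

```bash
# print the gitsha1 label that the new --label flag bakes into the image
docker inspect --format '{{ index .Config.Labels "gitsha1" }}' matrixdotorg/synapse:latest
```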

+ 2 - 2
.circleci/merge_base_branch.sh

@@ -16,7 +16,7 @@ then
     GITBASE="develop"
 else
     # Get the reference, using the GitHub API
-    GITBASE=`curl -q https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
+    GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
 fi
 
 # Show what we are before
@@ -31,4 +31,4 @@ git fetch -u origin $GITBASE
 git merge --no-edit origin/$GITBASE
 
 # Show what we are after.
-git show -s
+git show -s
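
The script now fetches the PR's base branch with `wget` instead of `curl -q`; the lookup can be reproduced on its own like this (a sketch assuming `jq` is installed and `CIRCLE_PR_NUMBER` is set in the environment):

```bash
# resolve the base branch of a PR via the GitHub API, as the updated script does
GITBASE=$(wget -O- "https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER}" | jq -r '.base.ref')
echo "will merge against origin/${GITBASE}"
```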

+ 12 - 0
.coveragerc

@@ -0,0 +1,12 @@
+[run]
+branch = True
+parallel = True
+source = synapse
+
+[paths]
+source=
+   coverage
+
+[report]
+precision = 2
+ignore_errors = True
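
Since `parallel = True` makes coverage.py write one data file per process, the files need combining before a report is generated. A minimal local sketch, using the `py36-coverage` tox target referenced in changelog.d/4180 below:

```bash
tox -e py36-coverage   # run the test suite under coverage
coverage combine       # merge the per-process .coverage.* data files
coverage report        # honours the [report] settings above (precision, ignore_errors)
```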

+ 9 - 0
.editorconfig

@@ -0,0 +1,9 @@
+# EditorConfig https://EditorConfig.org
+
+# top-most EditorConfig file
+root = true
+
+# 4 space indentation
+[*.py]
+indent_style = space
+indent_size = 4

+ 28 - 10
.github/ISSUE_TEMPLATE.md → .github/ISSUE_TEMPLATE/BUG_REPORT.md

@@ -1,3 +1,9 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
 <!-- 
 
 **IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**: 
@@ -11,38 +17,50 @@ the necessary data to fix your issue.
 You can also preview your report before submitting it. You may remove sections
 that aren't relevant to your particular case.
 
-Text between <!-- and --> marks will be invisible in the report.
+Text between <!-- and --> marks will be invisible in the report.
 
 -->
 
 ### Description
 
-Describe here the problem that you are experiencing, or the feature you are requesting.
+<!-- Describe here the problem that you are experiencing -->
 
 ### Steps to reproduce
 
-- For bugs, list the steps
+- list the steps
 - that reproduce the bug
 - using hyphens as bullet points
 
+<!-- 
 Describe how what happens differs from what you expected.
 
-<!-- If you can identify any relevant log snippets from _homeserver.log_, please include
+If you can identify any relevant log snippets from _homeserver.log_, please include
 those (please be careful to remove any personal or private data). Please surround them with
-``` (three backticks, on a line on their own), so that they are formatted legibly. -->
+``` (three backticks, on a line on their own), so that they are formatted legibly.
+-->
 
 ### Version information
 
 <!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
 
-- **Homeserver**: Was this issue identified on matrix.org or another homeserver?
+<!-- Was this issue identified on matrix.org or another homeserver? -->
+- **Homeserver**: 
 
 If not matrix.org:
-- **Version**:        What version of Synapse is running? <!-- 
+
+<!-- 
+What version of Synapse is running? 
 You can find the Synapse version by inspecting the server headers (replace matrix.org with
 your own homeserver domain):
 $ curl -v https://matrix.org/_matrix/client/versions 2>&1 | grep "Server:"
 -->
-- **Install method**: package manager/git clone/pip      
-- **Platform**:       Tell us about the environment in which your homeserver is operating
-                      - distro, hardware, if it's running in a vm/container, etc.
+- **Version**: 
+
+- **Install method**: 
+<!-- examples: package manager/git clone/pip  -->
+
+- **Platform**: 
+<!--
+Tell us about the environment in which your homeserver is operating
+distro, hardware, if it's running in a vm/container, etc.
+-->

+ 9 - 0
.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md

@@ -0,0 +1,9 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+
+---
+
+**Description:**
+
+<!-- Describe here the feature you are requesting. -->

+ 9 - 0
.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md

@@ -0,0 +1,9 @@
+---
+name: Support request
+about: I need support for Synapse
+
+---
+
+# Please ask for support in [**#matrix:matrix.org**](https://matrix.to/#/#matrix:matrix.org)
+
+## Don't file an issue as a support request.

+ 7 - 0
.github/PULL_REQUEST_TEMPLATE.md

@@ -0,0 +1,7 @@
+### Pull Request Checklist
+
+<!-- Please read CONTRIBUTING.rst before submitting your pull request -->
+
+* [ ] Pull request is based on the develop branch
+* [ ] Pull request includes a [changelog file](CONTRIBUTING.rst#changelog)
+* [ ] Pull request includes a [sign off](CONTRIBUTING.rst#sign-off)

+ 3 - 0
.github/SUPPORT.md

@@ -0,0 +1,3 @@
+[**#matrix:matrix.org**](https://matrix.to/#/#matrix:matrix.org) is the official support room for Matrix, and can be accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html 
+
+It can also be accessed via the IRC bridge at irc://irc.freenode.net/matrix or on the web at https://webchat.freenode.net/?channels=matrix

+ 38 - 17
.travis.yml

@@ -1,49 +1,70 @@
 sudo: false
 language: python
 
-# tell travis to cache ~/.cache/pip
-cache: pip
+cache:
+  directories:
+    # we only bother to cache the wheels; parts of the http cache get
+    # invalidated every build (because they get served with a max-age of 600
+    # seconds), which means that we end up re-uploading the whole cache for
+    # every build, which is time-consuming. In any case, it's not obvious that
+    # downloading the cache from S3 would be much faster than downloading the
+    # originals from pypi.
+    #
+    - $HOME/.cache/pip/wheels
 
-before_script:
-  - git remote set-branches --add origin develop
-  - git fetch origin develop
+# don't clone the whole repo history, one commit will do
+git:
+  depth: 1
 
+# only build branches we care about (PRs are built separately)
+branches:
+  only:
+    - master
+    - develop
+    - /^release-v/
+
+# When running the tox environments that call Twisted Trial, we can pass the -j
+# flag to run the tests concurrently. We set this to 2 for CPU bound tests
+# (SQLite) and 4 for I/O bound tests (PostgreSQL).
 matrix:
   fast_finish: true
   include:
   - python: 2.7
     env: TOX_ENV=packaging
 
-  - python: 2.7
-    env: TOX_ENV=pep8
+  - python: 3.6
+    env: TOX_ENV="pep8,check_isort"
 
   - python: 2.7
-    env: TOX_ENV=py27
+    env: TOX_ENV=py27,codecov TRIAL_FLAGS="-j 2"
 
   - python: 2.7
-    env: TOX_ENV=py27-old
+    env: TOX_ENV=py27-old TRIAL_FLAGS="-j 2"
 
   - python: 2.7
-    env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4"
+    env: TOX_ENV=py27-postgres,codecov TRIAL_FLAGS="-j 4"
     services:
       - postgresql
 
   - python: 3.5
-    env: TOX_ENV=py35
+    env: TOX_ENV=py35,codecov TRIAL_FLAGS="-j 2"
 
   - python: 3.6
-    env: TOX_ENV=py36
+    env: TOX_ENV=py36,codecov TRIAL_FLAGS="-j 2"
 
   - python: 3.6
-    env: TOX_ENV=py36-postgres TRIAL_FLAGS="-j 4"
+    env: TOX_ENV=py36-postgres,codecov TRIAL_FLAGS="-j 4"
     services:
       - postgresql
 
-  - python: 3.6
-    env: TOX_ENV=check_isort
-
-  - python: 3.6
+  - # we only need to check for the newsfragment if it's a PR build
+    if: type = pull_request
+    python: 3.6
     env: TOX_ENV=check-newsfragment
+    script:
+      - git remote set-branches --add origin develop
+      - git fetch origin develop
+      - tox -e $TOX_ENV
 
 install:
   - pip install tox
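
The `TRIAL_FLAGS` values in the matrix can be reproduced locally, assuming the tox environments pass the variable through to Trial as the comment above implies:

```bash
# SQLite environments are CPU-bound, so CI runs them with two Trial workers
TRIAL_FLAGS="-j 2" tox -e py36

# PostgreSQL environments are I/O-bound and get four workers
TRIAL_FLAGS="-j 4" tox -e py36-postgres
```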

+ 124 - 4
CHANGES.md

@@ -1,3 +1,127 @@
+Synapse 0.33.9 (2018-11-19)
+===========================
+
+No significant changes.
+
+
+Synapse 0.33.9rc1 (2018-11-14)
+==============================
+
+Features
+--------
+
+- Include flags to optionally add `m.login.terms` to the registration flow when consent tracking is enabled. ([\#4004](https://github.com/matrix-org/synapse/issues/4004), [\#4133](https://github.com/matrix-org/synapse/issues/4133), [\#4142](https://github.com/matrix-org/synapse/issues/4142), [\#4184](https://github.com/matrix-org/synapse/issues/4184))
+- Support for replacing rooms with new ones ([\#4091](https://github.com/matrix-org/synapse/issues/4091), [\#4099](https://github.com/matrix-org/synapse/issues/4099), [\#4100](https://github.com/matrix-org/synapse/issues/4100), [\#4101](https://github.com/matrix-org/synapse/issues/4101))
+
+
+Bugfixes
+--------
+
+- Fix exceptions when using the email mailer on Python 3. ([\#4095](https://github.com/matrix-org/synapse/issues/4095))
+- Fix e2e key backup with more than 9 backup versions ([\#4113](https://github.com/matrix-org/synapse/issues/4113))
+- Searches that request profile info now no longer fail with a 500. ([\#4122](https://github.com/matrix-org/synapse/issues/4122))
+- fix return code of empty key backups ([\#4123](https://github.com/matrix-org/synapse/issues/4123))
+- If the typing stream ID goes backwards (as on a worker when the master restarts), the worker's typing handler will no longer erroneously report rooms containing new typing events. ([\#4127](https://github.com/matrix-org/synapse/issues/4127))
+- Fix table lock of device_lists_remote_cache which could freeze the application ([\#4132](https://github.com/matrix-org/synapse/issues/4132))
+- Fix exception when using state res v2 algorithm ([\#4135](https://github.com/matrix-org/synapse/issues/4135))
+- Generating the user consent URI no longer fails on Python 3. ([\#4140](https://github.com/matrix-org/synapse/issues/4140), [\#4163](https://github.com/matrix-org/synapse/issues/4163))
+- Loading URL previews from the DB cache on Postgres will no longer cause Unicode type errors when responding to the request, and URL previews will no longer fail if the remote server returns a Content-Type header with the charset in quotes. ([\#4157](https://github.com/matrix-org/synapse/issues/4157))
+- The hash_password script now works on Python 3. ([\#4161](https://github.com/matrix-org/synapse/issues/4161))
+- Fix noop checks when updating device keys, reducing spurious device list update notifications. ([\#4164](https://github.com/matrix-org/synapse/issues/4164))
+
+
+Deprecations and Removals
+-------------------------
+
+- The disused and un-specced identicon generator has been removed. ([\#4106](https://github.com/matrix-org/synapse/issues/4106))
+- The obsolete and non-functional /pull federation endpoint has been removed. ([\#4118](https://github.com/matrix-org/synapse/issues/4118))
+- The deprecated v1 key exchange endpoints have been removed. ([\#4119](https://github.com/matrix-org/synapse/issues/4119))
+- Synapse will no longer fetch keys using the fallback deprecated v1 key exchange method and will now always use v2. ([\#4120](https://github.com/matrix-org/synapse/issues/4120))
+
+
+Internal Changes
+----------------
+
+- Fix build of Docker image with docker-compose ([\#3778](https://github.com/matrix-org/synapse/issues/3778))
+- Delete unreferenced state groups during history purge ([\#4006](https://github.com/matrix-org/synapse/issues/4006))
+- The "Received rdata" log messages on workers are now logged at DEBUG, not INFO. ([\#4108](https://github.com/matrix-org/synapse/issues/4108))
+- Reduce replication traffic for device lists ([\#4109](https://github.com/matrix-org/synapse/issues/4109))
+- Fix `synapse_replication_tcp_protocol_*_commands` metric label to be full command name, rather than just the first character ([\#4110](https://github.com/matrix-org/synapse/issues/4110))
+- Log some bits about room creation ([\#4121](https://github.com/matrix-org/synapse/issues/4121))
+- Fix `tox` failure on old systems ([\#4124](https://github.com/matrix-org/synapse/issues/4124))
+- Add STATE_V2_TEST room version ([\#4128](https://github.com/matrix-org/synapse/issues/4128))
+- Clean up event accesses and tests ([\#4137](https://github.com/matrix-org/synapse/issues/4137))
+- The default logging config will now set an explicit log file encoding of UTF-8. ([\#4138](https://github.com/matrix-org/synapse/issues/4138))
+- Add helpers functions for getting prev and auth events of an event ([\#4139](https://github.com/matrix-org/synapse/issues/4139))
+- Add some tests for the HTTP pusher. ([\#4149](https://github.com/matrix-org/synapse/issues/4149))
+- add purge_history.sh and purge_remote_media.sh scripts to contrib/ ([\#4155](https://github.com/matrix-org/synapse/issues/4155))
+- HTTP tests have been refactored to contain less boilerplate. ([\#4156](https://github.com/matrix-org/synapse/issues/4156))
+- Drop incoming events from federation for unknown rooms ([\#4165](https://github.com/matrix-org/synapse/issues/4165))
+
+
+Synapse 0.33.8 (2018-11-01)
+===========================
+
+No significant changes.
+
+
+Synapse 0.33.8rc2 (2018-10-31)
+==============================
+
+Bugfixes
+--------
+
+- Searches that request profile info now no longer fail with a 500. Fixes 
+  a regression in 0.33.8rc1. ([\#4122](https://github.com/matrix-org/synapse/issues/4122))
+
+
+Synapse 0.33.8rc1 (2018-10-29)
+==============================
+
+Features
+--------
+
+- Servers with auto-join rooms will now automatically create those rooms when the first user registers ([\#3975](https://github.com/matrix-org/synapse/issues/3975))
+- Add config option to control alias creation ([\#4051](https://github.com/matrix-org/synapse/issues/4051))
+- The register_new_matrix_user script is now ported to Python 3. ([\#4085](https://github.com/matrix-org/synapse/issues/4085))
+- Configure Docker image to listen on both ipv4 and ipv6. ([\#4089](https://github.com/matrix-org/synapse/issues/4089))
+
+
+Bugfixes
+--------
+
+- Fix HTTP error response codes for federated group requests. ([\#3969](https://github.com/matrix-org/synapse/issues/3969))
+- Fix issue where Python 3 users couldn't paginate /publicRooms ([\#4046](https://github.com/matrix-org/synapse/issues/4046))
+- Fix URL previewing to work in Python 3.7 ([\#4050](https://github.com/matrix-org/synapse/issues/4050))
+- synctl will use the right python executable to run worker processes ([\#4057](https://github.com/matrix-org/synapse/issues/4057))
+- Manhole now works again on Python 3, instead of failing with a "couldn't match all kex parts" when connecting. ([\#4060](https://github.com/matrix-org/synapse/issues/4060), [\#4067](https://github.com/matrix-org/synapse/issues/4067))
+- Fix some metrics being racy and causing exceptions when polled by Prometheus. ([\#4061](https://github.com/matrix-org/synapse/issues/4061))
+- Fix bug which prevented email notifications from being sent unless an absolute path was given for `email_templates`. ([\#4068](https://github.com/matrix-org/synapse/issues/4068))
+- Correctly account for cpu usage by background threads ([\#4074](https://github.com/matrix-org/synapse/issues/4074))
+- Fix race condition where config defined reserved users were not being added to
+  the monthly active user list prior to the homeserver reactor firing up ([\#4081](https://github.com/matrix-org/synapse/issues/4081))
+- Fix bug which prevented backslashes being used in event field filters ([\#4083](https://github.com/matrix-org/synapse/issues/4083))
+
+
+Internal Changes
+----------------
+
+- Add information about the [matrix-docker-ansible-deploy](https://github.com/spantaleev/matrix-docker-ansible-deploy) playbook ([\#3698](https://github.com/matrix-org/synapse/issues/3698))
+- Add initial implementation of new state resolution algorithm ([\#3786](https://github.com/matrix-org/synapse/issues/3786))
+- Reduce database load when fetching state groups ([\#4011](https://github.com/matrix-org/synapse/issues/4011))
+- Various cleanups in the federation client code ([\#4031](https://github.com/matrix-org/synapse/issues/4031))
+- Run the CircleCI builds in docker containers ([\#4041](https://github.com/matrix-org/synapse/issues/4041))
+- Only colourise synctl output when attached to tty ([\#4049](https://github.com/matrix-org/synapse/issues/4049))
+- Refactor room alias creation code ([\#4063](https://github.com/matrix-org/synapse/issues/4063))
+- Make the Python scripts in the top-level scripts folders meet pep8 and pass flake8. ([\#4068](https://github.com/matrix-org/synapse/issues/4068))
+- The README now contains example for the Caddy web server. Contributed by steamp0rt. ([\#4072](https://github.com/matrix-org/synapse/issues/4072))
+- Add psutil as an explicit dependency ([\#4073](https://github.com/matrix-org/synapse/issues/4073))
+- Clean up threading and logcontexts in pushers ([\#4075](https://github.com/matrix-org/synapse/issues/4075))
+- Correctly manage logcontexts during startup to fix some "Unexpected logging context" warnings ([\#4076](https://github.com/matrix-org/synapse/issues/4076))
+- Give some more things logcontexts ([\#4077](https://github.com/matrix-org/synapse/issues/4077))
+- Clean up some bits of code which were flagged by the linter ([\#4082](https://github.com/matrix-org/synapse/issues/4082))
+
+
 Synapse 0.33.7 (2018-10-18)
 ===========================
 
@@ -11,10 +135,6 @@ If you have email notifications enabled, you should ensure that
 have installed customised templates, or leave it unset to use the default
 templates.
 
-The configuration parser will try to detect the situation where
-`email.template_dir` is incorrectly set to `res/templates` and do the right
-thing, but will warn about this.
-
 Synapse 0.33.7rc2 (2018-10-17)
 ==============================
 

+ 2 - 0
MANIFEST.in

@@ -26,6 +26,7 @@ recursive-include synapse/static *.js
 exclude Dockerfile
 exclude .dockerignore
 exclude test_postgresql.sh
+exclude .editorconfig
 
 include pyproject.toml
 recursive-include changelog.d *
@@ -34,6 +35,7 @@ prune .github
 prune demo/etc
 prune docker
 prune .circleci
+prune .coveragerc
 
 exclude jenkins*
 recursive-exclude jenkins *.sh

+ 41 - 56
README.rst

@@ -86,7 +86,7 @@ Synapse is the reference Python/Twisted Matrix homeserver implementation.
 System requirements:
 
 - POSIX-compliant system (tested on Linux & OS X)
-- Python 2.7
+- Python 3.5, 3.6, or 2.7
 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
 
 Installing from source
@@ -101,13 +101,13 @@ header files for Python C extensions.
 
 Installing prerequisites on Ubuntu or Debian::
 
-    sudo apt-get install build-essential python2.7-dev libffi-dev \
+    sudo apt-get install build-essential python3-dev libffi-dev \
                          python-pip python-setuptools sqlite3 \
                          libssl-dev python-virtualenv libjpeg-dev libxslt1-dev
 
 Installing prerequisites on ArchLinux::
 
-    sudo pacman -S base-devel python2 python-pip \
+    sudo pacman -S base-devel python python-pip \
                    python-setuptools python-virtualenv sqlite3
 
 Installing prerequisites on CentOS 7 or Fedora 25::
@@ -126,12 +126,9 @@ Installing prerequisites on Mac OS X::
 
 Installing prerequisites on Raspbian::
 
-    sudo apt-get install build-essential python2.7-dev libffi-dev \
+    sudo apt-get install build-essential python3-dev libffi-dev \
                          python-pip python-setuptools sqlite3 \
                          libssl-dev python-virtualenv libjpeg-dev
-    sudo pip install --upgrade pip
-    sudo pip install --upgrade ndg-httpsclient
-    sudo pip install --upgrade virtualenv
 
 Installing prerequisites on openSUSE::
 
@@ -142,24 +139,25 @@ Installing prerequisites on openSUSE::
 Installing prerequisites on OpenBSD::
 
     doas pkg_add python libffi py-pip py-setuptools sqlite3 py-virtualenv \
-                 libxslt
+                 libxslt jpeg
 
 To install the Synapse homeserver run::
 
-    virtualenv -p python2.7 ~/.synapse
-    source ~/.synapse/bin/activate
+    mkdir -p ~/synapse
+    virtualenv -p python3 ~/synapse/env
+    source ~/synapse/env/bin/activate
     pip install --upgrade pip
     pip install --upgrade setuptools
     pip install matrix-synapse
 
 This installs Synapse, along with the libraries it uses, into a virtual
-environment under ``~/.synapse``.  Feel free to pick a different directory
+environment under ``~/synapse/env``.  Feel free to pick a different directory
 if you prefer.
 
 This Synapse installation can then be later upgraded by using pip again with the
 update flag::
 
-    source ~/.synapse/bin/activate
+    source ~/synapse/env/bin/activate
     pip install -U matrix-synapse
 
 In case of problems, please see the _`Troubleshooting` section below.
@@ -174,6 +172,12 @@ Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
 Dockerfile to automate a synapse server in a single Docker image, at
 https://hub.docker.com/r/avhost/docker-matrix/tags/
 
+Slavi Pantaleev has created an Ansible playbook,
+which installs the offical Docker image of Matrix Synapse
+along with many other Matrix-related services (Postgres database, riot-web, coturn, mxisd, SSL support, etc.).
+For more details, see
+https://github.com/spantaleev/matrix-docker-ansible-deploy
+
 Configuring Synapse
 -------------------
 
@@ -234,7 +238,7 @@ commandline script.
 
 To get started, it is easiest to use the command line to register new users::
 
-    $ source ~/.synapse/bin/activate
+    $ source ~/synapse/env/bin/activate
     $ synctl start # if not already running
     $ register_new_matrix_user -c homeserver.yaml https://localhost:8448
     New user localpart: erikj
@@ -260,13 +264,12 @@ Running Synapse
 ===============
 
 To actually run your new homeserver, pick a working directory for Synapse to
-run (e.g. ``~/.synapse``), and::
+run (e.g. ``~/synapse``), and::
 
-    cd ~/.synapse
-    source ./bin/activate
+    cd ~/synapse
+    source env/bin/activate
     synctl start
 
-
 Connecting to Synapse from a client
 ===================================
 
@@ -327,7 +330,7 @@ content served to web browsers a matrix API from being able to attack webapps ho
 on the same domain.  This is particularly true of sharing a matrix webclient and
 server on the same domain.
 
-See https://github.com/vector-im/vector-web/issues/1977 and
+See https://github.com/vector-im/riot-web/issues/1977 and
 https://developer.github.com/changes/2014-04-25-user-content-security for more details.
 
 
@@ -374,35 +377,17 @@ the generated config),
 https://www.archlinux.org/packages/community/any/python2-matrix-angular-sdk/ will also need to
 be installed.
 
-Alternatively, to install using pip a few changes may be needed as ArchLinux
-defaults to python 3, but synapse currently assumes python 2.7 by default:
-
 pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::
 
-    sudo pip2.7 install --upgrade pip
-
-You also may need to explicitly specify python 2.7 again during the install
-request::
-
-    pip2.7 install https://github.com/matrix-org/synapse/tarball/master
+    sudo pip install --upgrade pip
 
 If you encounter an error with lib bcrypt causing an Wrong ELF Class:
 ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
 compile it under the right architecture. (This should not be needed if
 installing under virtualenv)::
 
-    sudo pip2.7 uninstall py-bcrypt
-    sudo pip2.7 install py-bcrypt
-
-During setup of Synapse you need to call python2.7 directly again::
-
-    cd ~/.synapse
-    python2.7 -m synapse.app.homeserver \
-      --server-name machine.my.domain.name \
-      --config-path homeserver.yaml \
-      --generate-config
-
-...substituting your host and domain name as appropriate.
+    sudo pip uninstall py-bcrypt
+    sudo pip install py-bcrypt
 
 FreeBSD
 -------
@@ -469,7 +454,7 @@ You can fix this by manually upgrading pip and virtualenv::
 
     sudo pip install --upgrade virtualenv
 
-You can next rerun ``virtualenv -p python2.7 synapse`` to update the virtual env.
+You can next rerun ``virtualenv -p python3 synapse`` to update the virtual env.
 
 Installing may fail during installing virtualenv with ``InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.``
 You can fix this  by manually installing ndg-httpsclient::
@@ -518,16 +503,6 @@ log lines and looking for any 'Processed request' lines which take more than
 a few seconds to execute.  Please let us know at #matrix-dev:matrix.org if
 you see this failure mode so we can help debug it, however.
 
-ArchLinux
-~~~~~~~~~
-
-If running `$ synctl start` fails with 'returned non-zero exit status 1',
-you will need to explicitly call Python2.7 - either running as::
-
-    python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml
-
-...or by editing synctl with the correct python executable.
-
 
 Upgrading an existing Synapse
 =============================
@@ -651,7 +626,8 @@ Using a reverse proxy with Synapse
 
 It is recommended to put a reverse proxy such as
 `nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
-`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_ or
+`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
+`Caddy <https://caddyserver.com/docs/proxy>`_ or
 `HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
 doing so is that it means that you can expose the default https port (443) to
 Matrix clients without needing to run Synapse with root privileges.
@@ -682,7 +658,15 @@ so an example nginx configuration might look like::
       }
   }
 
-and an example apache configuration may look like::
+an example Caddy configuration might look like::
+
+    matrix.example.com {
+      proxy /_matrix http://localhost:8008 {
+        transparent
+      }
+    }
+
+and an example Apache configuration might look like::
 
     <VirtualHost *:443>
         SSLEngine on
@@ -714,9 +698,10 @@ port:
 
   .. __: `key_management`_
 
-* Synapse does not currently support SNI on the federation protocol
-  (`bug #1491 <https://github.com/matrix-org/synapse/issues/1491>`_), which
-  means that using name-based virtual hosting is unreliable.
+* Until v0.33.3, Synapse did not support SNI on the federation port
+  (`bug #1491 <https://github.com/matrix-org/synapse/issues/1491>`_). This bug
+  is now fixed, but means that federating with older servers can be unreliable
+  when using name-based virtual hosting.
 
 Furthermore, a number of the normal reasons for using a reverse-proxy do not
 apply:
@@ -812,7 +797,7 @@ Password reset
 ==============
 
 If a user has registered an email address to their account using an identity
-server, they can request a password-reset token via clients such as Vector.
+server, they can request a password-reset token via clients such as Riot.
 
 A manual password reset can be done via direct database access as follows.
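
The hunk ends before the README's actual reset steps; a hedged sketch of the usual approach, using the repository's `scripts/hash_password` (listed in the changed files above) plus direct database access:

```bash
# generate a bcrypt hash for the new password...
./scripts/hash_password -p "new_password"
# ...then update the users table, e.g. in psql:
#   UPDATE users SET password_hash='<hash from above>' WHERE name='@user:yourserver.tld';
```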
 

+ 68 - 4
UPGRADE.rst

@@ -48,6 +48,74 @@ returned by the Client-Server API:
     # configured on port 443.
     curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
 
+Upgrading to v0.34.0
+====================
+
+1. This release is the first to fully support Python 3. We recommend switching
+   to Python 3, as it has been shown to give performance improvements.
+
+   For users who have installed Synapse into a virtualenv, we recommend doing
+   this by creating a new virtualenv. For example::
+
+       virtualenv -p python3 ~/synapse/env3
+       source ~/synapse/env3/bin/activate
+       pip install matrix-synapse
+
+   You can then start synapse as normal, having activated the new virtualenv::
+
+       cd ~/synapse
+       source env3/bin/activate
+       synctl start
+
+   Users who have installed from distribution packages should see the relevant
+   package documentation.
+
+   * When upgrading to Python 3, you **must** make sure that your log files are
+     configured as UTF-8, by adding ``encoding: utf8`` to the
+     ``RotatingFileHandler`` configuration (if you have one) in your
+     ``<server>.log.config`` file. For example, if your ``log.config`` file
+     contains::
+
+       handlers:
+         file:
+           class: logging.handlers.RotatingFileHandler
+           formatter: precise
+           filename: homeserver.log
+           maxBytes: 104857600
+           backupCount: 10
+           filters: [context]
+         console:
+           class: logging.StreamHandler
+           formatter: precise
+           filters: [context]
+
+     Then you should update this to be::
+
+       handlers:
+         file:
+           class: logging.handlers.RotatingFileHandler
+           formatter: precise
+           filename: homeserver.log
+           maxBytes: 104857600
+           backupCount: 10
+           filters: [context]
+           encoding: utf8
+         console:
+           class: logging.StreamHandler
+           formatter: precise
+           filters: [context]
+
+     There is no need to revert this change if downgrading to Python 2.
+
+2. This release removes ``riot.im`` from the default list of trusted
+   identity servers.
+
+   If ``riot.im`` is in your homeserver's list of
+   ``trusted_third_party_id_servers``, you should remove it. It was added in
+   case a hypothetical future identity server was put there. If you don't
+   remove it, users may be unable to deactivate their accounts.
+
+
 Upgrading to v0.33.7
 ====================
 
@@ -61,10 +129,6 @@ If you have email notifications enabled, you should ensure that
 have installed customised templates, or leave it unset to use the default
 templates.
 
-The configuration parser will try to detect the situation where
-``email.template_dir`` is incorrectly set to ``res/templates`` and do the right
-thing, but will warn about this.
-
 Upgrading to v0.27.3
 ====================
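
Returning to the v0.34.0 steps above: before restarting under the new virtualenv it is worth confirming that it really is Python 3 (a hedged check; paths follow the example in the upgrade notes):

```bash
source ~/synapse/env3/bin/activate
python --version         # expect Python 3.5 or 3.6
pip show matrix-synapse  # confirm synapse is installed in this environment
```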
 

+ 1 - 0
changelog.d/3830.feature

@@ -0,0 +1 @@
+Add option to track MAU stats (but not limit people)

+ 1 - 0
changelog.d/3831.feature

@@ -0,0 +1 @@
+Add an option to enable recording IPs for appservice users

+ 1 - 0
changelog.d/4165.bugfix

@@ -0,0 +1 @@
+Pushrules can now again be made with non-ASCII rule IDs.

+ 1 - 0
changelog.d/4176.bugfix

@@ -0,0 +1 @@
+The media repository now no longer fails to decode UTF-8 filenames when downloading remote media.

+ 1 - 0
changelog.d/4180.misc

@@ -0,0 +1 @@
+A coveragerc file, as well as the py36-coverage tox target, has been added.

+ 1 - 0
changelog.d/4182.misc

@@ -0,0 +1 @@
+Add a GitHub pull request template and add multiple issue templates

+ 1 - 0
changelog.d/4183.bugfix

@@ -0,0 +1 @@
+URL previews now correctly decode non-UTF-8 text if the header contains a `<meta http-equiv="Content-Type"` header.

+ 1 - 0
changelog.d/4188.misc

@@ -0,0 +1 @@
+Update README to reflect the fact that #1491 is fixed

+ 2 - 0
changelog.d/4189.misc

@@ -0,0 +1,2 @@
+Run the AS senders as background processes to fix warnings
+

+ 1 - 0
changelog.d/4190.misc

@@ -0,0 +1 @@
+Add some diagnostics to the tests to detect logcontext problems

+ 1 - 0
changelog.d/4192.bugfix

@@ -0,0 +1 @@
+Fix an issue where public consent URLs had two slashes.

+ 1 - 0
changelog.d/4193.misc

@@ -0,0 +1 @@
+Add missing `jpeg` package prerequisite for OpenBSD in README.

+ 1 - 0
changelog.d/4197.bugfix

@@ -0,0 +1 @@
+Fallback auth now accepts the session parameter on Python 3.

+ 1 - 0
changelog.d/4200.misc

@@ -0,0 +1 @@
+Add a note saying you need to manually reclaim disk space after using the Purge History API

+ 1 - 0
changelog.d/4204.misc

@@ -0,0 +1 @@
+Fix logcontext leaks in EmailPusher and in tests

+ 1 - 0
changelog.d/4205.misc

@@ -0,0 +1 @@
+More logcontext checking in unittests

+ 1 - 0
changelog.d/4207.bugfix

@@ -0,0 +1 @@
+Remove riot.im from the list of trusted Identity Servers in the default configuration

+ 1 - 0
changelog.d/4209.misc

@@ -0,0 +1 @@
+Fix logcontext leaks in EmailPusher and in tests

+ 2 - 0
changelog.d/4211.bugfix

@@ -0,0 +1,2 @@
+Fix startup failure when mau_limit_reserved_threepids is set and the db is postgres
+

+ 1 - 0
changelog.d/4214.misc

@@ -0,0 +1 @@
+Ignore __pycache__ directories in the database schema folder

+ 1 - 0
changelog.d/4220.feature

@@ -0,0 +1 @@
+Rename login type m.login.cas to m.login.sso

+ 1 - 0
changelog.d/4223.bugfix

@@ -0,0 +1 @@
+Fix auto join failures for servers that require user consent

+ 1 - 0
changelog.d/4224.misc

@@ -0,0 +1 @@
+Add note to UPGRADE.rst about removing riot.im from list of trusted identity servers

+ 1 - 0
changelog.d/4225.misc

@@ -0,0 +1 @@
+Added automated coverage reporting to CI.

+ 1 - 0
changelog.d/4227.misc

@@ -0,0 +1 @@
+Garbage-collect after each unit test to fix logcontext leaks

+ 1 - 0
changelog.d/4230.feature

@@ -0,0 +1 @@
+Add an option to disable search for homeservers that may not be interested in it.

+ 1 - 0
changelog.d/4234.misc

@@ -0,0 +1 @@
+add more detail to logging regarding "More than one row matched" error

+ 1 - 0
changelog.d/4241.bugfix

@@ -0,0 +1 @@
+Fix exception caused by non-ascii event IDs

+ 1 - 0
changelog.d/4244.misc

@@ -0,0 +1 @@
+Drop sent_transactions table

+ 1 - 0
changelog.d/4250.bugfix

@@ -0,0 +1 @@
+Pushers can now be unsubscribed from on Python 3.

+ 1 - 0
changelog.d/4253.bugfix

@@ -0,0 +1 @@
+Fix UnicodeDecodeError when postgres is configured to give non-English errors

+ 1 - 0
changelog.d/4257.misc

@@ -0,0 +1 @@
+Add a basic .editorconfig

+ 1 - 0
changelog.d/4260.misc

@@ -0,0 +1 @@
+Update README.rst and UPGRADE.rst for Python 3.

+ 1 - 0
changelog.d/4261.misc

@@ -0,0 +1 @@
+Remove obsolete `verbose` and `log_file` settings from `homeserver.yaml` for Docker image.

+ 1 - 0
changelog.d/4262.feature

@@ -0,0 +1 @@
+Support for serving .well-known files

+ 1 - 0
changelog.d/4266.misc

@@ -0,0 +1 @@
+drop undocumented dependency on dateutil

+ 5 - 3
contrib/docker/docker-compose.yml

@@ -6,9 +6,11 @@ version: '3'
 services:
 
   synapse:
-    build: ../..
+    build:
+        context: ../..
+        dockerfile: docker/Dockerfile
     image: docker.io/matrixdotorg/synapse:latest
-    # Since snyapse does not retry to connect to the database, restart upon
+    # Since synapse does not retry to connect to the database, restart upon
     # failure
     restart: unless-stopped
     # See the readme for a full documentation of the environment settings
@@ -47,4 +49,4 @@ services:
       # You may store the database tables in a local folder..
       - ./schemas:/var/lib/postgresql/data
       # .. or store them on some high performance storage for better results
-      # - /path/to/ssd/storage:/var/lib/postfesql/data
+      # - /path/to/ssd/storage:/var/lib/postgresql/data
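
The new `build` stanza points Compose at the repository root with the Dockerfile under `docker/`; the equivalent manual build (a sketch, run from the top of the tree since that is the build context) is:

```bash
# what `docker-compose build synapse` now does under the hood
docker build -f docker/Dockerfile -t docker.io/matrixdotorg/synapse:latest .
```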

+ 16 - 0
contrib/purge_api/README.md

@@ -0,0 +1,16 @@
+Purge history API examples
+==========================
+
+# `purge_history.sh`
+
+A bash script that uses the [purge history API](/docs/admin_api/README.rst) to
+purge all messages in a list of rooms up to a certain event. You can select a
+timeframe or a number of messages that you want to keep in the room.
+
+Just configure the variables DOMAIN, ADMIN, ROOMS_ARRAY and TIME at the top of
+the script.
+
+# `purge_remote_media.sh`
+
+A bash script that uses the [admin API](/docs/admin_api/README.rst) to
+purge all old cached remote media.

+ 141 - 0
contrib/purge_api/purge_history.sh

@@ -0,0 +1,141 @@
+#!/bin/bash
+
+# this script will use the api:
+#    https://github.com/matrix-org/synapse/blob/master/docs/admin_api/purge_history_api.rst
+# 
+# It will purge all messages in a list of rooms up to a certain event
+
+###################################################################################################
+# define your domain and admin user
+###################################################################################################
+# your homeserver's domain:
+DOMAIN=yourserver.tld
+# add this user as admin in your home server:
+ADMIN="@you_admin_username:$DOMAIN"
+
+API_URL="$DOMAIN:8008/_matrix/client/r0"
+
+###################################################################################################
+# choose the rooms to prune old messages from (a comment may be appended to each entry)
+###################################################################################################
+# the room_id's you can get e.g. from your Riot clients "View Source" button on each message
+ROOMS_ARRAY=(
+'!DgvjtOljKujDBrxyHk:matrix.org#riot:matrix.org'
+'!QtykxKocfZaZOUrTwp:matrix.org#Matrix HQ'
+)
+
+# ALTERNATIVELY:
+# you can select all the rooms that are not encrypted and loop over the result:
+# SELECT room_id FROM rooms WHERE room_id NOT IN (SELECT DISTINCT room_id FROM events WHERE type ='m.room.encrypted')
+# or
+# select all rooms with at least 100 members:
+# SELECT q.room_id FROM (select count(*) as numberofusers, room_id FROM current_state_events WHERE type ='m.room.member'
+#   GROUP BY room_id) AS q LEFT JOIN room_aliases a ON q.room_id=a.room_id WHERE q.numberofusers > 100 ORDER BY numberofusers desc
+
+###################################################################################################
+# work out the EVENT_ID before which messages should be pruned
+###################################################################################################
+# choose a time before which the messages should be pruned:
+TIME='12 months ago'
+# ALTERNATIVELY:
+# a certain time:
+# TIME='2016-08-31 23:59:59'
+
+# creates a timestamp from the given time string:
+UNIX_TIMESTAMP=$(date +%s%3N --date='TZ="UTC+2" '"$TIME")
+
+# ALTERNATIVELY:
+# prune all messages that are older than 1000 messages ago:
+# LAST_MESSAGES=1000
+# SQL_GET_EVENT="SELECT event_id from events WHERE type='m.room.message' AND room_id ='$ROOM' ORDER BY received_ts DESC LIMIT 1 offset $(($LAST_MESSAGES - 1))"
+
+# ALTERNATIVELY:
+# select the EVENT_ID manually:
+#EVENT_ID='$1471814088343495zpPNI:matrix.org' # an example event from 21st of Aug 2016 by Matthew
+
+###################################################################################################
+# make the admin user a server admin in the database with
+###################################################################################################
+# psql -A -t --dbname=synapse -c "UPDATE users SET admin=1 WHERE name LIKE '$ADMIN'"
+
+###################################################################################################
+# database function
+###################################################################################################
+sql (){
+  # for sqlite3:
+  #sqlite3 homeserver.db "pragma busy_timeout=20000;$1" | awk '{print $2}'
+  # for postgres:
+  psql -A -t --dbname=synapse -c "$1" | grep -v 'Pager'
+}
+
+###################################################################################################
+# get an access token
+###################################################################################################
+# for example externally by watching Riot in your browser's network inspector
+# or internally on the server locally, use this:
+TOKEN=$(sql "SELECT token FROM access_tokens WHERE user_id='$ADMIN' ORDER BY id DESC LIMIT 1")
+AUTH="Authorization: Bearer $TOKEN"
+
+###################################################################################################
+# check if your TOKEN works. For example, this works:
+###################################################################################################
+# $ curl --header "$AUTH" "$API_URL/rooms/$ROOM/state/m.room.power_levels" 
+
+###################################################################################################
+# finally start pruning the room:
+###################################################################################################
+POSTDATA='{"delete_local_events":true}' # this will really delete local events, so the messages in the room really disappear unless they are restored by remote federation
+
+for ROOM in "${ROOMS_ARRAY[@]}"; do
+    echo "########################################### $(date) ################# "
+    echo "pruning room: $ROOM ..."
+    ROOM=${ROOM%#*}
+    #set -x
+    echo "check for alias in db..."
+    # for postgres:
+    sql "SELECT * FROM room_aliases WHERE room_id='$ROOM'"
+    echo "get event..."
+    # for postgres:
+    EVENT_ID=$(sql "SELECT event_id FROM events WHERE type='m.room.message' AND received_ts<'$UNIX_TIMESTAMP' AND room_id='$ROOM' ORDER BY received_ts DESC LIMIT 1;")
+    if [ "$EVENT_ID" == "" ]; then
+      echo "no event $TIME"
+    else
+      echo "event: $EVENT_ID"
+      SLEEP=2
+      set -x
+      # call purge
+      OUT=$(curl -X POST --header "$AUTH" -s -d "$POSTDATA" "$API_URL/admin/purge_history/$ROOM/$EVENT_ID")
+      PURGE_ID=$(echo "$OUT" |grep purge_id|cut -d'"' -f4 )
+      if [ "$PURGE_ID" == "" ]; then
+        # probably the history purge is already in progress for $ROOM
+        : "continuing with next room"
+      else
+        while : ; do
+          # get status of purge and sleep longer each time if still active
+          sleep $SLEEP
+          STATUS=$(curl --header "$AUTH" -s "$API_URL/admin/purge_history_status/$PURGE_ID" |grep status|cut -d'"' -f4)
+          : "$ROOM --> Status: $STATUS"
+          [[ "$STATUS" == "active" ]] || break 
+          SLEEP=$((SLEEP + 1))
+        done 
+      fi
+      set +x
+      sleep 1
+    fi  
+done
+
+
+###################################################################################################
+# additionally
+###################################################################################################
+# to benefit from pruning large amounts of data, you need to call VACUUM to free the unused space.
+# This can take a very long time (hours) and Synapse has to be stopped while you do so:
+# $ synctl stop
+# $ sqlite3 -line homeserver.db "vacuum;"
+# $ synctl start
+
+# This could be set so that you don't need to vacuum manually after deleting some rows:
+# $ sqlite3 homeserver.db "PRAGMA auto_vacuum = FULL;"
+# be cautious, it could make the database somewhat slow if there are a lot of deletions
+
+exit
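
A hedged usage sketch, per the README above (edit DOMAIN, ADMIN, ROOMS_ARRAY and TIME at the top of the script first; the log filename is illustrative):

```bash
chmod +x contrib/purge_api/purge_history.sh
./contrib/purge_api/purge_history.sh 2>&1 | tee purge_history.log
```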

+ 54 - 0
contrib/purge_api/purge_remote_media.sh

@@ -0,0 +1,54 @@
+#!/bin/bash
+
+DOMAIN=yourserver.tld
+# add this user as admin in your home server:
+ADMIN="@you_admin_username:$DOMAIN"
+
+API_URL="$DOMAIN:8008/_matrix/client/r0"
+
+# choose a time before which the messages should be pruned:
+# TIME='2016-08-31 23:59:59'
+TIME='12 months ago'
+
+# creates a timestamp from the given time string:
+UNIX_TIMESTAMP=$(date +%s%3N --date='TZ="UTC+2" '"$TIME")
+
+
+###################################################################################################
+# database function
+###################################################################################################
+sql (){
+  # for sqlite3:
+  #sqlite3 homeserver.db "pragma busy_timeout=20000;$1" | awk '{print $2}'
+  # for postgres:
+  psql -A -t --dbname=synapse -c "$1" | grep -v 'Pager'
+}
+
+###############################################################################
+# make the admin user a server admin in the database with
+###############################################################################
+# sql "UPDATE users SET admin=1 WHERE name LIKE '$ADMIN'"
+
+###############################################################################
+# get an access token
+###############################################################################
+# for example externally by watching Riot in your browser's network inspector
+# or internally on the server locally, use this:
+TOKEN=$(sql "SELECT token FROM access_tokens WHERE user_id='$ADMIN' ORDER BY id DESC LIMIT 1")
+
+###############################################################################
+# check if your TOKEN works. For example, this works:
+###############################################################################
+# curl --header "Authorization: Bearer $TOKEN" "$API_URL/rooms/$ROOM/state/m.room.power_levels"
+
+###############################################################################
+# optional check size before
+###############################################################################
+# echo calculate used storage before ...
+# du -shc ../.synapse/media_store/*
+
+###############################################################################
+# finally start pruning media:
+###############################################################################
+set -x # for debugging the generated string
+curl -X POST --header "Authorization: Bearer $TOKEN" -v "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"

+ 6 - 6
docker/conf/homeserver.yaml

@@ -14,6 +14,7 @@ server_name: "{{ SYNAPSE_SERVER_NAME }}"
 pid_file: /homeserver.pid
 web_client: False
 soft_file_limit: 0
+log_config: "/compiled/log.config"
 
 ## Ports ##
 
@@ -21,7 +22,7 @@ listeners:
   {% if not SYNAPSE_NO_TLS %}
   -
     port: 8448
-    bind_addresses: ['0.0.0.0']
+    bind_addresses: ['::']
     type: http
     tls: true
     x_forwarded: false
@@ -34,7 +35,7 @@ listeners:
 
   - port: 8008
     tls: false
-    bind_addresses: ['0.0.0.0']
+    bind_addresses: ['::']
     type: http
     x_forwarded: false
 
@@ -67,9 +68,6 @@ database:
 ## Performance ##
 
 event_cache_size: "{{ SYNAPSE_EVENT_CACHE_SIZE or "10K" }}"
-verbose: 0
-log_file: "/data/homeserver.log"
-log_config: "/compiled/log.config"
 
 ## Ratelimiting ##
 
@@ -150,10 +148,12 @@ enable_group_creation: true
 
 # The list of identity servers trusted to verify third party
 # identifiers by this server.
+#
+# Also defines the ID server which will be called when an account is
+# deactivated (one will be picked arbitrarily).
 trusted_third_party_id_servers:
     - matrix.org
     - vector.im
-    - riot.im
 
 ## Metrics ###
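
With `bind_addresses` switched to `'::'` the listeners accept IPv6 and, on a dual-stack kernel, v4-mapped IPv4 connections as well. A quick smoke test from the host, assuming the container's port 8008 is published:

```bash
# both should return the versions JSON once the container is up
curl -s http://127.0.0.1:8008/_matrix/client/versions
curl -s 'http://[::1]:8008/_matrix/client/versions'
```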
 

+ 8 - 0
docs/admin_api/purge_history_api.rst

@@ -61,3 +61,11 @@ the following:
     }
 
 The status will be one of ``active``, ``complete``, or ``failed``.
+
+Reclaim disk space (Postgres)
+-----------------------------
+
+To reclaim the disk space and return it to the operating system, you need to run
+`VACUUM FULL;` on the database.
+
+https://www.postgresql.org/docs/current/sql-vacuum.html
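
A hedged example of that vacuum step, assuming the database is named `synapse` as in the contrib scripts above; `VACUUM FULL` takes an exclusive lock, so plan for downtime:

```bash
synctl stop
psql --dbname=synapse -c "VACUUM FULL;"
synctl start
```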

+ 38 - 1
docs/consent_tracking.md

@@ -31,7 +31,7 @@ Note that the templates must be stored under a name giving the language of the
 template - currently this must always be `en` (for "English");
 internationalisation support is intended for the future.
 
-The template for the policy itself should be versioned and named according to 
+The template for the policy itself should be versioned and named according to
 the version: for example `1.0.html`. The version of the policy which the user
 has agreed to is stored in the database.
 
@@ -85,6 +85,37 @@ Once this is complete, and the server has been restarted, try visiting
 an error "Missing string query parameter 'u'". It is now possible to manually
 construct URIs where users can give their consent.
 
+### Enabling consent tracking at registration
+
+1. Add the following to your configuration:
+
+   ```yaml
+   user_consent:
+     require_at_registration: true
+     policy_name: "Privacy Policy" # or whatever you'd like to call the policy
+   ```
+
+2. In your consent templates, make use of the `public_version` variable to
+   see if an unauthenticated user is viewing the page. This is typically
+   wrapped around the form that would be used to actually agree to the document:
+
+   ```
+   {% if not public_version %}
+     <!-- The variables used here are only provided when the 'u' param is given to the homeserver -->
+     <form method="post" action="consent">
+       <input type="hidden" name="v" value="{{version}}"/>
+       <input type="hidden" name="u" value="{{user}}"/>
+       <input type="hidden" name="h" value="{{userhmac}}"/>
+       <input type="submit" value="Sure thing!"/>
+     </form>
+   {% endif %}
+   ```
+
+3. Restart Synapse to apply the changes.
+
+Visiting `https://<server>/_matrix/consent` should now give you a view of the privacy
+document. This is what users will be able to see when registering for accounts.
+
 ### Constructing the consent URI
 
 It may be useful to manually construct the "consent URI" for a given user - for
@@ -106,6 +137,12 @@ query parameters:
    `https://<server>/_matrix/consent?u=<user>&h=68a152465a4d...`.
 
 
+Note that not providing a `u` parameter will be interpreted as wanting to view
+the document from an unauthenticated perspective, such as prior to registration.
+Therefore, the `h` parameter is not required in this scenario. To enable this
+behaviour, set `require_at_registration` to `true` in your `user_consent` config.
+
+
 Sending users a server notice asking them to agree to the policy
 ----------------------------------------------------------------
 

+ 57 - 1
docs/log_contexts.rst

@@ -163,7 +163,7 @@ the logcontext was set, this will make things work out ok: provided
 It's all too easy to forget to ``yield``: for instance if we forgot that
 ``do_some_stuff`` returned a deferred, we might plough on regardless. This
 leads to a mess; it will probably work itself out eventually, but not before
-a load of stuff has been logged against the wrong content. (Normally, other
+a load of stuff has been logged against the wrong context. (Normally, other
 things will break, more obviously, if you forget to ``yield``, so this tends
 not to be a major problem in practice.)
 
@@ -440,3 +440,59 @@ To conclude: I think this scheme would have worked equally well, with less
 danger of messing it up, and probably made some more esoteric code easier to
 write. But again — changing the conventions of the entire Synapse codebase is
 not a sensible option for the marginal improvement offered.
+
+
+A note on garbage-collection of Deferred chains
+-----------------------------------------------
+
+It turns out that our logcontext rules do not play nicely with Deferred
+chains which get orphaned and garbage-collected.
+
+Imagine we have some code that looks like this:
+
+.. code:: python
+
+    listener_queue = []
+
+    def on_something_interesting():
+        for d in listener_queue:
+            d.callback("foo")
+
+    @defer.inlineCallbacks
+    def await_something_interesting():
+        new_deferred = defer.Deferred()
+        listener_queue.append(new_deferred)
+
+        with PreserveLoggingContext():
+            yield new_deferred
+
+Obviously, the idea here is that we have a bunch of things which are waiting
+for an event. (It's just an example of the problem here, but a relatively
+common one.)
+
+Now let's imagine two further things happen. First of all, whatever was
+waiting for the interesting thing goes away. (Perhaps the request times out,
+or something *even more* interesting happens.)
+
+Secondly, let's suppose that we decide that the interesting thing is never
+going to happen, and we reset the listener queue:
+
+.. code:: python
+
+    def reset_listener_queue():
+        listener_queue.clear()
+
+So, both ends of the deferred chain have now dropped their references, and the
+deferred chain is now orphaned, and will be garbage-collected at some point.
+Note that ``await_something_interesting`` is a generator function, and when
+Python garbage-collects generator functions, it gives them a chance to clean
+up by making the ``yield`` raise a ``GeneratorExit`` exception. In our case,
+that means that the ``__exit__`` handler of ``PreserveLoggingContext`` will
+carefully restore the request context, but there is now nothing waiting for
+its return, so the request context is never cleared.
+
+To reiterate, this problem only arises when *both* ends of a deferred chain
+are dropped. Dropping the reference to a deferred you're supposed to be
+calling is probably bad practice, so this doesn't actually happen too much.
+Unfortunately, when it does happen, it will lead to leaked logcontexts which
+are incredibly hard to track down.
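+
+As an illustrative sketch of one possible mitigation (assuming the
+``listener_queue`` example above, and not necessarily what Synapse does):
+rather than silently dropping the waiting deferreds, cancel them, so that
+each generator is resumed while something is still driving the chain, and the
+``PreserveLoggingContext`` block unwinds normally:
+
+.. code:: python
+
+    def reset_listener_queue():
+        for d in listener_queue:
+            # fires a CancelledError into the chain, resuming the generator
+            d.cancel()
+        listener_queue.clear()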

+ 9 - 6
docs/privacy_policy_templates/en/1.0.html

@@ -12,12 +12,15 @@
     <p>
       All your base are belong to us.
     </p>
-    <form method="post" action="consent">
-      <input type="hidden" name="v" value="{{version}}"/>
-      <input type="hidden" name="u" value="{{user}}"/>
-      <input type="hidden" name="h" value="{{userhmac}}"/>
-      <input type="submit" value="Sure thing!"/>
-    </form>
+    {% if not public_version %}
+      <!-- The variables used here are only provided when the 'u' param is given to the homeserver -->
+      <form method="post" action="consent">
+        <input type="hidden" name="v" value="{{version}}"/>
+        <input type="hidden" name="u" value="{{user}}"/>
+        <input type="hidden" name="h" value="{{userhmac}}"/>
+        <input type="submit" value="Sure thing!"/>
+      </form>
+    {% endif %}
   {% endif %}
   </body>
 </html>

+ 0 - 19
jenkins/prepare_synapse.sh

@@ -14,22 +14,3 @@ fi
 
 # set up the virtualenv
 tox -e py27 --notest -v
-
-TOX_BIN=$TOX_DIR/py27/bin
-
-# cryptography 2.2 requires setuptools >= 18.5.
-#
-# older versions of virtualenv (?) give us a virtualenv with the same version
-# of setuptools as is installed on the system python (and tox runs virtualenv
-# under python3, so we get the version of setuptools that is installed on that).
-#
-# anyway, make sure that we have a recent enough setuptools.
-$TOX_BIN/pip install 'setuptools>=18.5'
-
-# we also need a semi-recent version of pip, because old ones fail to install
-# the "enum34" dependency of cryptography.
-$TOX_BIN/pip install 'pip>=10'
-
-{ python synapse/python_dependencies.py
-  echo lxml
-} | xargs $TOX_BIN/pip install

+ 15 - 21
scripts-dev/check_auth.py

@@ -1,21 +1,20 @@
-from synapse.events import FrozenEvent
-from synapse.api.auth import Auth
-
-from mock import Mock
+from __future__ import print_function
 
 import argparse
 import itertools
 import json
 import sys
 
+from mock import Mock
+
+from synapse.api.auth import Auth
+from synapse.events import FrozenEvent
+
 
 def check_auth(auth, auth_chain, events):
     auth_chain.sort(key=lambda e: e.depth)
 
-    auth_map = {
-        e.event_id: e
-        for e in auth_chain
-    }
+    auth_map = {e.event_id: e for e in auth_chain}
 
     create_events = {}
     for e in auth_chain:
@@ -25,31 +24,26 @@ def check_auth(auth, auth_chain, events):
     for e in itertools.chain(auth_chain, events):
         auth_events_list = [auth_map[i] for i, _ in e.auth_events]
 
-        auth_events = {
-            (e.type, e.state_key): e
-            for e in auth_events_list
-        }
+        auth_events = {(e.type, e.state_key): e for e in auth_events_list}
 
         auth_events[("m.room.create", "")] = create_events[e.room_id]
 
         try:
             auth.check(e, auth_events=auth_events)
         except Exception as ex:
-            print "Failed:", e.event_id, e.type, e.state_key
-            print "Auth_events:", auth_events
-            print ex
-            print json.dumps(e.get_dict(), sort_keys=True, indent=4)
+            print("Failed:", e.event_id, e.type, e.state_key)
+            print("Auth_events:", auth_events)
+            print(ex)
+            print(json.dumps(e.get_dict(), sort_keys=True, indent=4))
             # raise
-        print "Success:", e.event_id, e.type, e.state_key
+        print("Success:", e.event_id, e.type, e.state_key)
+
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
-        'json',
-        nargs='?',
-        type=argparse.FileType('r'),
-        default=sys.stdin,
+        'json', nargs='?', type=argparse.FileType('r'), default=sys.stdin
     )
 
     args = parser.parse_args()

+ 18 - 14
scripts-dev/check_event_hash.py

@@ -1,10 +1,15 @@
-from synapse.crypto.event_signing import *
-from unpaddedbase64 import encode_base64
-
 import argparse
 import hashlib
-import sys
 import json
+import logging
+import sys
+
+from unpaddedbase64 import encode_base64
+
+from synapse.crypto.event_signing import (
+    check_event_content_hash,
+    compute_event_reference_hash,
+)
 
 
 class dictobj(dict):
@@ -24,27 +29,26 @@ class dictobj(dict):
 
 def main():
     parser = argparse.ArgumentParser()
-    parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
-                        default=sys.stdin)
+    parser.add_argument(
+        "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
+    )
     args = parser.parse_args()
     logging.basicConfig()
 
     event_json = dictobj(json.load(args.input_json))
 
-    algorithms = {
-        "sha256": hashlib.sha256,
-    }
+    algorithms = {"sha256": hashlib.sha256}
 
     for alg_name in event_json.hashes:
         if check_event_content_hash(event_json, algorithms[alg_name]):
-            print "PASS content hash %s" % (alg_name,)
+            print("PASS content hash %s" % (alg_name,))
         else:
-            print "FAIL content hash %s" % (alg_name,)
+            print("FAIL content hash %s" % (alg_name,))
 
     for algorithm in algorithms.values():
         name, h_bytes = compute_event_reference_hash(event_json, algorithm)
-        print "Reference hash %s: %s" % (name, encode_base64(h_bytes))
+        print("Reference hash %s: %s" % (name, encode_base64(h_bytes)))
 
-if __name__=="__main__":
-    main()
 
+if __name__ == "__main__":
+    main()

+ 19 - 17
scripts-dev/check_signature.py

@@ -1,15 +1,15 @@
 
-from signedjson.sign import verify_signed_json
-from signedjson.key import decode_verify_key_bytes, write_signing_keys
-from unpaddedbase64 import decode_base64
-
-import urllib2
+import argparse
 import json
+import logging
 import sys
+import urllib2
+
 import dns.resolver
-import pprint
-import argparse
-import logging
+from signedjson.key import decode_verify_key_bytes, write_signing_keys
+from signedjson.sign import verify_signed_json
+from unpaddedbase64 import decode_base64
+
 
 def get_targets(server_name):
     if ":" in server_name:
@@ -23,6 +23,7 @@ def get_targets(server_name):
     except dns.resolver.NXDOMAIN:
         yield (server_name, 8448)
 
+
 def get_server_keys(server_name, target, port):
     url = "https://%s:%i/_matrix/key/v1" % (target, port)
     keys = json.load(urllib2.urlopen(url))
@@ -33,12 +34,14 @@ def get_server_keys(server_name, target, port):
         verify_keys[key_id] = verify_key
     return verify_keys
 
+
 def main():
 
     parser = argparse.ArgumentParser()
     parser.add_argument("signature_name")
-    parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
-                        default=sys.stdin)
+    parser.add_argument(
+        "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
+    )
 
     args = parser.parse_args()
     logging.basicConfig()
@@ -48,24 +51,23 @@ def main():
     for target, port in get_targets(server_name):
         try:
             keys = get_server_keys(server_name, target, port)
-            print "Using keys from https://%s:%s/_matrix/key/v1" % (target, port)
+            print("Using keys from https://%s:%s/_matrix/key/v1" % (target, port))
             write_signing_keys(sys.stdout, keys.values())
             break
-        except:
+        except Exception:
             logging.exception("Error talking to %s:%s", target, port)
 
     json_to_check = json.load(args.input_json)
-    print "Checking JSON:"
+    print("Checking JSON:")
     for key_id in json_to_check["signatures"][args.signature_name]:
         try:
             key = keys[key_id]
             verify_signed_json(json_to_check, args.signature_name, key)
-            print "PASS %s" % (key_id,)
-        except:
+            print("PASS %s" % (key_id,))
+        except Exception:
             logging.exception("Check for key %s failed" % (key_id,))
-            print "FAIL %s" % (key_id,)
+            print("FAIL %s" % (key_id,))
 
 
 if __name__ == '__main__':
     main()
-

+ 22 - 18
scripts-dev/convert_server_keys.py

@@ -1,13 +1,21 @@
-import psycopg2
-import yaml
-import sys
+import hashlib
 import json
+import sys
 import time
-import hashlib
-from unpaddedbase64 import encode_base64
+
+import six
+
+import psycopg2
+import yaml
+from canonicaljson import encode_canonical_json
 from signedjson.key import read_signing_keys
 from signedjson.sign import sign_json
-from canonicaljson import encode_canonical_json
+from unpaddedbase64 import encode_base64
+
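+# psycopg2 expects a buffer-like object when writing a bytea column:
+# "buffer" on Python 2, "memoryview" on Python 3.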
+if six.PY2:
+    db_type = six.moves.builtins.buffer
+else:
+    db_type = memoryview
 
 
 def select_v1_keys(connection):
@@ -39,7 +47,9 @@ def select_v2_json(connection):
     cursor.close()
     results = {}
     for server_name, key_id, key_json in rows:
-        results.setdefault(server_name, {})[key_id] = json.loads(str(key_json).decode("utf-8"))
+        results.setdefault(server_name, {})[key_id] = json.loads(
+            str(key_json).decode("utf-8")
+        )
     return results
 
 
@@ -47,10 +57,7 @@ def convert_v1_to_v2(server_name, valid_until, keys, certificate):
     return {
         "old_verify_keys": {},
         "server_name": server_name,
-        "verify_keys": {
-            key_id: {"key": key}
-            for key_id, key in keys.items()
-        },
+        "verify_keys": {key_id: {"key": key} for key_id, key in keys.items()},
         "valid_until_ts": valid_until,
         "tls_fingerprints": [fingerprint(certificate)],
     }
@@ -65,7 +72,7 @@ def rows_v2(server, json):
     valid_until = json["valid_until_ts"]
     key_json = encode_canonical_json(json)
     for key_id in json["verify_keys"]:
-        yield (server, key_id, "-", valid_until, valid_until, buffer(key_json))
+        yield (server, key_id, "-", valid_until, valid_until, db_type(key_json))
 
 
 def main():
@@ -87,7 +94,7 @@ def main():
 
     result = {}
     for server in keys:
-        if not server in json:
+        if server not in json:
             v2_json = convert_v1_to_v2(
                 server, valid_until, keys[server], certificates[server]
             )
@@ -96,10 +103,7 @@ def main():
 
     yaml.safe_dump(result, sys.stdout, default_flow_style=False)
 
-    rows = list(
-        row for server, json in result.items()
-        for row in rows_v2(server, json)
-    )
+    rows = list(row for server, json in result.items() for row in rows_v2(server, json))
 
     cursor = connection.cursor()
     cursor.executemany(
@@ -107,7 +111,7 @@ def main():
         " server_name, key_id, from_server,"
         " ts_added_ms, ts_valid_until_ms, key_json"
         ") VALUES (%s, %s, %s, %s, %s, %s)",
-        rows
+        rows,
     )
     connection.commit()
 

+ 34 - 20
scripts-dev/definitions.py

@@ -1,8 +1,16 @@
 #! /usr/bin/python
 
+from __future__ import print_function
+
+import argparse
 import ast
+import os
+import re
+import sys
+
 import yaml
 
+
 class DefinitionVisitor(ast.NodeVisitor):
     def __init__(self):
         super(DefinitionVisitor, self).__init__()
@@ -42,15 +50,18 @@ def non_empty(defs):
     functions = {name: non_empty(f) for name, f in defs['def'].items()}
     classes = {name: non_empty(f) for name, f in defs['class'].items()}
     result = {}
-    if functions: result['def'] = functions
-    if classes: result['class'] = classes
+    if functions:
+        result['def'] = functions
+    if classes:
+        result['class'] = classes
     names = defs['names']
     uses = []
     for name in names.get('Load', ()):
         if name not in names.get('Param', ()) and name not in names.get('Store', ()):
             uses.append(name)
     uses.extend(defs['attrs'])
-    if uses: result['uses'] = uses
+    if uses:
+        result['uses'] = uses
     result['names'] = names
     result['attrs'] = defs['attrs']
     return result
@@ -95,7 +106,6 @@ def used_names(prefix, item, defs, names):
 
 
 if __name__ == '__main__':
-    import sys, os, argparse, re
 
     parser = argparse.ArgumentParser(description='Find definitions.')
     parser.add_argument(
@@ -105,24 +115,28 @@ if __name__ == '__main__':
         "--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
     )
     parser.add_argument(
-        "--pattern", action="append", metavar="REGEXP",
-        help="Search for a pattern"
+        "--pattern", action="append", metavar="REGEXP", help="Search for a pattern"
     )
     parser.add_argument(
-        "directories", nargs='+', metavar="DIR",
-        help="Directories to search for definitions"
+        "directories",
+        nargs='+',
+        metavar="DIR",
+        help="Directories to search for definitions",
     )
     parser.add_argument(
-        "--referrers", default=0, type=int,
-        help="Include referrers up to the given depth"
+        "--referrers",
+        default=0,
+        type=int,
+        help="Include referrers up to the given depth",
     )
     parser.add_argument(
-        "--referred", default=0, type=int,
-        help="Include referred down to the given depth"
+        "--referred",
+        default=0,
+        type=int,
+        help="Include referred down to the given depth",
     )
     parser.add_argument(
-        "--format", default="yaml",
-        help="Output format, one of 'yaml' or 'dot'"
+        "--format", default="yaml", help="Output format, one of 'yaml' or 'dot'"
     )
     args = parser.parse_args()
 
@@ -162,7 +176,7 @@ if __name__ == '__main__':
             for used_by in entry.get("used", ()):
                 referrers.add(used_by)
         for name, definition in names.items():
-            if not name in referrers:
+            if name not in referrers:
                 continue
             if ignore and any(pattern.match(name) for pattern in ignore):
                 continue
@@ -176,7 +190,7 @@ if __name__ == '__main__':
             for uses in entry.get("uses", ()):
                 referred.add(uses)
         for name, definition in names.items():
-            if not name in referred:
+            if name not in referred:
                 continue
             if ignore and any(pattern.match(name) for pattern in ignore):
                 continue
@@ -185,12 +199,12 @@ if __name__ == '__main__':
     if args.format == 'yaml':
         yaml.dump(result, sys.stdout, default_flow_style=False)
     elif args.format == 'dot':
-        print "digraph {"
+        print("digraph {")
         for name, entry in result.items():
-            print name
+            print(name)
             for used_by in entry.get("used", ()):
                 if used_by in result:
-                    print used_by, "->", name
-        print "}"
+                    print(used_by, "->", name)
+        print("}")
     else:
         raise ValueError("Unknown format %r" % (args.format))

+ 8 - 5
scripts-dev/dump_macaroon.py

@@ -1,8 +1,11 @@
 #!/usr/bin/env python2
 
-import pymacaroons
+from __future__ import print_function
+
 import sys
 
+import pymacaroons
+
 if len(sys.argv) == 1:
     sys.stderr.write("usage: %s macaroon [key]\n" % (sys.argv[0],))
     sys.exit(1)
@@ -11,14 +14,14 @@ macaroon_string = sys.argv[1]
 key = sys.argv[2] if len(sys.argv) > 2 else None
 
 macaroon = pymacaroons.Macaroon.deserialize(macaroon_string)
-print macaroon.inspect()
+print(macaroon.inspect())
 
-print ""
+print("")
 
 verifier = pymacaroons.Verifier()
 verifier.satisfy_general(lambda c: True)
 try:
     verifier.verify(macaroon, key)
-    print "Signature is correct"
+    print("Signature is correct")
 except Exception as e:
-    print str(e)
+    print(str(e))

+ 54 - 52
scripts-dev/federation_client.py

@@ -18,21 +18,21 @@
 from __future__ import print_function
 
 import argparse
+import base64
+import json
+import sys
 from urlparse import urlparse, urlunparse
 
 import nacl.signing
-import json
-import base64
 import requests
-import sys
-
-from requests.adapters import HTTPAdapter
 import srvlookup
 import yaml
+from requests.adapters import HTTPAdapter
 
 # uncomment the following to enable debug logging of http requests
-#from httplib import HTTPConnection
-#HTTPConnection.debuglevel = 1
+# from httplib import HTTPConnection
+# HTTPConnection.debuglevel = 1
+
 
 def encode_base64(input_bytes):
     """Encode bytes as a base64 string without any padding."""
@@ -58,15 +58,15 @@ def decode_base64(input_string):
 
 def encode_canonical_json(value):
     return json.dumps(
-         value,
-         # Encode code-points outside of ASCII as UTF-8 rather than \u escapes
-         ensure_ascii=False,
-         # Remove unecessary white space.
-         separators=(',',':'),
-         # Sort the keys of dictionaries.
-         sort_keys=True,
-         # Encode the resulting unicode as UTF-8 bytes.
-     ).encode("UTF-8")
+        value,
+        # Encode code-points outside of ASCII as UTF-8 rather than \u escapes
+        ensure_ascii=False,
+        # Remove unnecessary white space.
+        separators=(',', ':'),
+        # Sort the keys of dictionaries.
+        sort_keys=True,
+        # Encode the resulting unicode as UTF-8 bytes.
+    ).encode("UTF-8")
 
 
 def sign_json(json_object, signing_key, signing_name):
@@ -88,6 +88,7 @@ def sign_json(json_object, signing_key, signing_name):
 
 NACL_ED25519 = "ed25519"
 
+
 def decode_signing_key_base64(algorithm, version, key_base64):
     """Decode a base64 encoded signing key
     Args:
@@ -143,25 +144,25 @@ def request_json(method, origin_name, origin_key, destination, path, content):
     authorization_headers = []
 
     for key, sig in signed_json["signatures"][origin_name].items():
-        header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
-            origin_name, key, sig,
-        )
+        header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (origin_name, key, sig)
         authorization_headers.append(bytes(header))
-        print ("Authorization: %s" % header, file=sys.stderr)
+        print("Authorization: %s" % header, file=sys.stderr)
 
     dest = "matrix://%s%s" % (destination, path)
-    print ("Requesting %s" % dest, file=sys.stderr)
+    print("Requesting %s" % dest, file=sys.stderr)
 
     s = requests.Session()
     s.mount("matrix://", MatrixConnectionAdapter())
 
+    headers = {"Host": destination, "Authorization": authorization_headers[0]}
+
+    if method == "POST":
+        headers["Content-Type"] = "application/json"
+
     result = s.request(
         method=method,
         url=dest,
-        headers={
-            "Host": destination,
-            "Authorization": authorization_headers[0]
-        },
+        headers=headers,
         verify=False,
         data=content,
     )
@@ -171,50 +172,50 @@ def request_json(method, origin_name, origin_key, destination, path, content):
 
 def main():
     parser = argparse.ArgumentParser(
-        description=
-            "Signs and sends a federation request to a matrix homeserver",
+        description="Signs and sends a federation request to a matrix homeserver"
     )
 
     parser.add_argument(
-        "-N", "--server-name",
+        "-N",
+        "--server-name",
         help="Name to give as the local homeserver. If unspecified, will be "
-             "read from the config file.",
+        "read from the config file.",
     )
 
     parser.add_argument(
-        "-k", "--signing-key-path",
+        "-k",
+        "--signing-key-path",
         help="Path to the file containing the private ed25519 key to sign the "
-             "request with.",
+        "request with.",
     )
 
     parser.add_argument(
-        "-c", "--config",
+        "-c",
+        "--config",
         default="homeserver.yaml",
         help="Path to server config file. Ignored if --server-name and "
-             "--signing-key-path are both given.",
+        "--signing-key-path are both given.",
     )
 
     parser.add_argument(
-        "-d", "--destination",
+        "-d",
+        "--destination",
         default="matrix.org",
         help="name of the remote homeserver. We will do SRV lookups and "
-             "connect appropriately.",
+        "connect appropriately.",
     )
 
     parser.add_argument(
-        "-X", "--method",
-        help="HTTP method to use for the request. Defaults to GET if --data is"
-             "unspecified, POST if it is."
+        "-X",
+        "--method",
+        help="HTTP method to use for the request. Defaults to GET if --body is"
+        "unspecified, POST if it is.",
     )
 
-    parser.add_argument(
-        "--body",
-        help="Data to send as the body of the HTTP request"
-    )
+    parser.add_argument("--body", help="Data to send as the body of the HTTP request")
 
     parser.add_argument(
-        "path",
-        help="request path. We will add '/_matrix/federation/v1/' to this."
+        "path", help="request path. We will add '/_matrix/federation/v1/' to this."
     )
 
     args = parser.parse_args()
@@ -227,13 +228,15 @@ def main():
 
     result = request_json(
         args.method,
-        args.server_name, key, args.destination,
+        args.server_name,
+        key,
+        args.destination,
         "/_matrix/federation/v1/" + args.path,
         content=args.body,
     )
 
     json.dump(result, sys.stdout)
-    print ("")
+    print("")
 
 
 def read_args_from_config(args):
@@ -253,7 +256,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
             return s, 8448
 
         if ":" in s:
-            out = s.rsplit(":",1)
+            out = s.rsplit(":", 1)
             try:
                 port = int(out[1])
             except ValueError:
@@ -263,7 +266,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
         try:
             srv = srvlookup.lookup("matrix", "tcp", s)[0]
             return srv.host, srv.port
-        except:
+        except Exception:
             return s, 8448
 
     def get_connection(self, url, proxies=None):
@@ -272,10 +275,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
         (host, port) = self.lookup(parsed.netloc)
         netloc = "%s:%d" % (host, port)
         print("Connecting to %s" % (netloc,), file=sys.stderr)
-        url = urlunparse((
-            "https", netloc, parsed.path, parsed.params, parsed.query,
-            parsed.fragment,
-        ))
+        url = urlunparse(
+            ("https", netloc, parsed.path, parsed.params, parsed.query, parsed.fragment)
+        )
         return super(MatrixConnectionAdapter, self).get_connection(url, proxies)
 
 

+ 38 - 24
scripts-dev/hash_history.py

@@ -1,23 +1,31 @@
-from synapse.storage.pdu import PduStore
-from synapse.storage.signatures import SignatureStore
-from synapse.storage._base import SQLBaseStore
-from synapse.federation.units import Pdu
-from synapse.crypto.event_signing import (
-    add_event_pdu_content_hash, compute_pdu_event_reference_hash
-)
-from synapse.api.events.utils import prune_pdu
-from unpaddedbase64 import encode_base64, decode_base64
-from canonicaljson import encode_canonical_json
+from __future__ import print_function
+
 import sqlite3
 import sys
 
+from unpaddedbase64 import decode_base64, encode_base64
+
+from synapse.crypto.event_signing import (
+    add_event_pdu_content_hash,
+    compute_pdu_event_reference_hash,
+)
+from synapse.federation.units import Pdu
+from synapse.storage._base import SQLBaseStore
+from synapse.storage.pdu import PduStore
+from synapse.storage.signatures import SignatureStore
+
+
 class Store(object):
     _get_pdu_tuples = PduStore.__dict__["_get_pdu_tuples"]
     _get_pdu_content_hashes_txn = SignatureStore.__dict__["_get_pdu_content_hashes_txn"]
     _get_prev_pdu_hashes_txn = SignatureStore.__dict__["_get_prev_pdu_hashes_txn"]
-    _get_pdu_origin_signatures_txn = SignatureStore.__dict__["_get_pdu_origin_signatures_txn"]
+    _get_pdu_origin_signatures_txn = SignatureStore.__dict__[
+        "_get_pdu_origin_signatures_txn"
+    ]
     _store_pdu_content_hash_txn = SignatureStore.__dict__["_store_pdu_content_hash_txn"]
-    _store_pdu_reference_hash_txn = SignatureStore.__dict__["_store_pdu_reference_hash_txn"]
+    _store_pdu_reference_hash_txn = SignatureStore.__dict__[
+        "_store_pdu_reference_hash_txn"
+    ]
     _store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
     _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
 
@@ -26,9 +34,7 @@ store = Store()
 
 
 def select_pdus(cursor):
-    cursor.execute(
-        "SELECT pdu_id, origin FROM pdus ORDER BY depth ASC"
-    )
+    cursor.execute("SELECT pdu_id, origin FROM pdus ORDER BY depth ASC")
 
     ids = cursor.fetchall()
 
@@ -41,23 +47,30 @@ def select_pdus(cursor):
     for pdu in pdus:
         try:
             if pdu.prev_pdus:
-                print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
+                print("PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
                 for pdu_id, origin, hashes in pdu.prev_pdus:
                     ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
                     hashes[ref_alg] = encode_base64(ref_hsh)
-                    store._store_prev_pdu_hash_txn(cursor,  pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh)
-                print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
+                    store._store_prev_pdu_hash_txn(
+                        cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh
+                    )
+                print("SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
             pdu = add_event_pdu_content_hash(pdu)
             ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
             reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
-            store._store_pdu_reference_hash_txn(cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh)
+            store._store_pdu_reference_hash_txn(
+                cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh
+            )
 
             for alg, hsh_base64 in pdu.hashes.items():
-                print alg, hsh_base64
-                store._store_pdu_content_hash_txn(cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64))
+                print(alg, hsh_base64)
+                store._store_pdu_content_hash_txn(
+                    cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64)
+                )
+
+        except Exception:
+            print("FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
 
-        except:
-            print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus
 
 def main():
     conn = sqlite3.connect(sys.argv[1])
@@ -65,5 +78,6 @@ def main():
     select_pdus(cursor)
     conn.commit()
 
-if __name__=='__main__':
+
+if __name__ == '__main__':
     main()

+ 8 - 8
scripts-dev/list_url_patterns.py

@@ -1,18 +1,17 @@
 #! /usr/bin/python
 
-import ast
 import argparse
+import ast
 import os
 import sys
+
 import yaml
 
 PATTERNS_V1 = []
 PATTERNS_V2 = []
 
-RESULT = {
-    "v1": PATTERNS_V1,
-    "v2": PATTERNS_V2,
-}
+RESULT = {"v1": PATTERNS_V1, "v2": PATTERNS_V2}
+
 
 class CallVisitor(ast.NodeVisitor):
     def visit_Call(self, node):
@@ -21,7 +20,6 @@ class CallVisitor(ast.NodeVisitor):
         else:
             return
 
-
         if name == "client_path_patterns":
             PATTERNS_V1.append(node.args[0].s)
         elif name == "client_v2_patterns":
@@ -42,8 +40,10 @@ def find_patterns_in_file(filepath):
 parser = argparse.ArgumentParser(description='Find url patterns.')
 
 parser.add_argument(
-    "directories", nargs='+', metavar="DIR",
-    help="Directories to search for definitions"
+    "directories",
+    nargs='+',
+    metavar="DIR",
+    help="Directories to search for definitions",
 )
 
 args = parser.parse_args()

+ 0 - 39
scripts-dev/make_identicons.pl

@@ -1,39 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-use warnings;
-
-use DBI;
-use DBD::SQLite;
-use JSON;
-use Getopt::Long;
-
-my $db; # = "homeserver.db";
-my $server = "http://localhost:8008";
-my $size = 320;
-
-GetOptions("db|d=s",     \$db,
-           "server|s=s", \$server,
-           "width|w=i",  \$size) or usage();
-
-usage() unless $db;
-
-my $dbh = DBI->connect("dbi:SQLite:dbname=$db","","") || die $DBI::errstr;
-
-my $res = $dbh->selectall_arrayref("select token, name from access_tokens, users where access_tokens.user_id = users.id group by user_id") || die $DBI::errstr;
-
-foreach (@$res) {
-    my ($token, $mxid) = ($_->[0], $_->[1]);
-    my ($user_id) = ($mxid =~ m/@(.*):/);
-    my ($url) = $dbh->selectrow_array("select avatar_url from profiles where user_id=?", undef, $user_id);
-    if (!$url || $url =~ /#auto$/) {
-        `curl -s -o tmp.png "$server/_matrix/media/v1/identicon?name=${mxid}&width=$size&height=$size"`;
-        my $json = `curl -s -X POST -H "Content-Type: image/png" -T "tmp.png" $server/_matrix/media/v1/upload?access_token=$token`;
-        my $content_uri = from_json($json)->{content_uri};
-        `curl -X PUT -H "Content-Type: application/json" --data '{ "avatar_url": "${content_uri}#auto"}' $server/_matrix/client/api/v1/profile/${mxid}/avatar_url?access_token=$token`;
-    }
-}
-
-sub usage {
-    die "usage: ./make-identicons.pl\n\t-d database [e.g. homeserver.db]\n\t-s homeserver (default: http://localhost:8008)\n\t-w identicon size in pixels (default 320)";
-}

+ 13 - 13
scripts-dev/tail-synapse.py

@@ -1,8 +1,9 @@
-import requests
 import collections
+import json
 import sys
 import time
-import json
+
+import requests
 
 Entry = collections.namedtuple("Entry", "name position rows")
 
@@ -30,11 +31,11 @@ def parse_response(content):
 
 
 def replicate(server, streams):
-    return parse_response(requests.get(
-        server + "/_synapse/replication",
-        verify=False,
-        params=streams
-    ).content)
+    return parse_response(
+        requests.get(
+            server + "/_synapse/replication", verify=False, params=streams
+        ).content
+    )
 
 
 def main():
@@ -45,16 +46,16 @@ def main():
         try:
             streams = {
                 row.name: row.position
-                for row in replicate(server, {"streams":"-1"})["streams"].rows
+                for row in replicate(server, {"streams": "-1"})["streams"].rows
             }
-        except requests.exceptions.ConnectionError as e:
+        except requests.exceptions.ConnectionError:
             time.sleep(0.1)
 
     while True:
         try:
             results = replicate(server, streams)
-        except:
-            sys.stdout.write("connection_lost("+ repr(streams) + ")\n")
+        except Exception:
+            sys.stdout.write("connection_lost(" + repr(streams) + ")\n")
             break
         for update in results.values():
             for row in update.rows:
@@ -62,6 +63,5 @@ def main():
             streams[update.name] = update.position
 
 
-
-if __name__=='__main__':
+if __name__ == '__main__':
     main()

+ 34 - 10
scripts/hash_password

@@ -1,17 +1,17 @@
 #!/usr/bin/env python
 
 import argparse
-
+import getpass
 import sys
+import unicodedata
 
 import bcrypt
-import getpass
-
 import yaml
 
-bcrypt_rounds=12
+bcrypt_rounds = 12
 password_pepper = ""
 
+
 def prompt_for_pass():
     password = getpass.getpass("Password: ")
 
@@ -25,19 +25,27 @@ def prompt_for_pass():
 
     return password
 
+
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        description="Calculate the hash of a new password, so that passwords"
-                    " can be reset")
+        description=(
+            "Calculate the hash of a new password, so that passwords can be reset"
+        )
+    )
     parser.add_argument(
-        "-p", "--password",
+        "-p",
+        "--password",
         default=None,
         help="New password for user. Will prompt if omitted.",
     )
     parser.add_argument(
-        "-c", "--config",
+        "-c",
+        "--config",
         type=argparse.FileType('r'),
-        help="Path to server config file. Used to read in bcrypt_rounds and password_pepper.",
+        help=(
+            "Path to server config file. "
+            "Used to read in bcrypt_rounds and password_pepper."
+        ),
     )
 
     args = parser.parse_args()
@@ -51,5 +59,21 @@ if __name__ == "__main__":
     if not password:
         password = prompt_for_pass()
 
-    print bcrypt.hashpw(password + password_pepper, bcrypt.gensalt(bcrypt_rounds))
+    # On Python 2, make sure we decode it to Unicode before we normalise it
+    if isinstance(password, bytes):
+        try:
+            password = password.decode(sys.stdin.encoding)
+        except UnicodeDecodeError:
+            print(
+                "ERROR! Your password is not decodable using your terminal encoding (%s)."
+                % (sys.stdin.encoding,)
+            )
+
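+    # NFKC-normalise first, so that canonically-equivalent Unicode passwords
+    # produce the same hash however the terminal happened to compose them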
+    pw = unicodedata.normalize("NFKC", password)
+
+    hashed = bcrypt.hashpw(
+        pw.encode('utf8') + password_pepper.encode("utf8"),
+        bcrypt.gensalt(bcrypt_rounds),
+    ).decode('ascii')
 
+    print(hashed)
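+
+    # e.g. `hash_password -p secret` now prints a bcrypt string of the form
+    # `$2b$12$...`, suitable for Synapse's `password_hash` column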

+ 12 - 24
scripts/move_remote_media_to_new_store.py

@@ -36,12 +36,9 @@ from __future__ import print_function
 
 import argparse
 import logging
-
-import sys
-
 import os
-
 import shutil
+import sys
 
 from synapse.rest.media.v1.filepath import MediaFilePaths
 
@@ -77,24 +74,23 @@ def move_media(origin_server, file_id, src_paths, dest_paths):
     if not os.path.exists(original_file):
         logger.warn(
             "Original for %s/%s (%s) does not exist",
-            origin_server, file_id, original_file,
+            origin_server,
+            file_id,
+            original_file,
         )
     else:
         mkdir_and_move(
-            original_file,
-            dest_paths.remote_media_filepath(origin_server, file_id),
+            original_file, dest_paths.remote_media_filepath(origin_server, file_id)
         )
 
     # now look for thumbnails
-    original_thumb_dir = src_paths.remote_media_thumbnail_dir(
-        origin_server, file_id,
-    )
+    original_thumb_dir = src_paths.remote_media_thumbnail_dir(origin_server, file_id)
     if not os.path.exists(original_thumb_dir):
         return
 
     mkdir_and_move(
         original_thumb_dir,
-        dest_paths.remote_media_thumbnail_dir(origin_server, file_id)
+        dest_paths.remote_media_thumbnail_dir(origin_server, file_id),
     )
 
 
@@ -109,24 +105,16 @@ def mkdir_and_move(original_file, dest_file):
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        description=__doc__,
-        formatter_class = argparse.RawDescriptionHelpFormatter,
-    )
-    parser.add_argument(
-        "-v", action='store_true', help='enable debug logging')
-    parser.add_argument(
-        "src_repo",
-        help="Path to source content repo",
-    )
-    parser.add_argument(
-        "dest_repo",
-        help="Path to source content repo",
+        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
     )
+    parser.add_argument("-v", action='store_true', help='enable debug logging')
+    parser.add_argument("src_repo", help="Path to source content repo")
+    parser.add_argument("dest_repo", help="Path to source content repo")
     args = parser.parse_args()
 
     logging_config = {
         "level": logging.DEBUG if args.v else logging.INFO,
-        "format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
+        "format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
     }
     logging.basicConfig(**logging_config)
 

+ 3 - 191
scripts/register_new_matrix_user

@@ -14,197 +14,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 
-import argparse
-import getpass
-import hashlib
-import hmac
-import json
-import sys
-import urllib2
-import yaml
-
-
-def request_registration(user, password, server_location, shared_secret, admin=False):
-    req = urllib2.Request(
-        "%s/_matrix/client/r0/admin/register" % (server_location,),
-        headers={'Content-Type': 'application/json'}
-    )
-
-    try:
-        if sys.version_info[:3] >= (2, 7, 9):
-            # As of version 2.7.9, urllib2 now checks SSL certs
-            import ssl
-            f = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_SSLv23))
-        else:
-            f = urllib2.urlopen(req)
-        body = f.read()
-        f.close()
-        nonce = json.loads(body)["nonce"]
-    except urllib2.HTTPError as e:
-        print "ERROR! Received %d %s" % (e.code, e.reason,)
-        if 400 <= e.code < 500:
-            if e.info().type == "application/json":
-                resp = json.load(e)
-                if "error" in resp:
-                    print resp["error"]
-        sys.exit(1)
-
-    mac = hmac.new(
-        key=shared_secret,
-        digestmod=hashlib.sha1,
-    )
-
-    mac.update(nonce)
-    mac.update("\x00")
-    mac.update(user)
-    mac.update("\x00")
-    mac.update(password)
-    mac.update("\x00")
-    mac.update("admin" if admin else "notadmin")
-
-    mac = mac.hexdigest()
-
-    data = {
-        "nonce": nonce,
-        "username": user,
-        "password": password,
-        "mac": mac,
-        "admin": admin,
-    }
-
-    server_location = server_location.rstrip("/")
-
-    print "Sending registration request..."
-
-    req = urllib2.Request(
-        "%s/_matrix/client/r0/admin/register" % (server_location,),
-        data=json.dumps(data),
-        headers={'Content-Type': 'application/json'}
-    )
-    try:
-        if sys.version_info[:3] >= (2, 7, 9):
-            # As of version 2.7.9, urllib2 now checks SSL certs
-            import ssl
-            f = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_SSLv23))
-        else:
-            f = urllib2.urlopen(req)
-        f.read()
-        f.close()
-        print "Success."
-    except urllib2.HTTPError as e:
-        print "ERROR! Received %d %s" % (e.code, e.reason,)
-        if 400 <= e.code < 500:
-            if e.info().type == "application/json":
-                resp = json.load(e)
-                if "error" in resp:
-                    print resp["error"]
-        sys.exit(1)
-
-
-def register_new_user(user, password, server_location, shared_secret, admin):
-    if not user:
-        try:
-            default_user = getpass.getuser()
-        except:
-            default_user = None
-
-        if default_user:
-            user = raw_input("New user localpart [%s]: " % (default_user,))
-            if not user:
-                user = default_user
-        else:
-            user = raw_input("New user localpart: ")
-
-    if not user:
-        print "Invalid user name"
-        sys.exit(1)
-
-    if not password:
-        password = getpass.getpass("Password: ")
-
-        if not password:
-            print "Password cannot be blank."
-            sys.exit(1)
-
-        confirm_password = getpass.getpass("Confirm password: ")
-
-        if password != confirm_password:
-            print "Passwords do not match"
-            sys.exit(1)
-
-    if admin is None:
-        admin = raw_input("Make admin [no]: ")
-        if admin in ("y", "yes", "true"):
-            admin = True
-        else:
-            admin = False
-
-    request_registration(user, password, server_location, shared_secret, bool(admin))
-
+from synapse._scripts.register_new_matrix_user import main
 
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(
-        description="Used to register new users with a given home server when"
-                    " registration has been disabled. The home server must be"
-                    " configured with the 'registration_shared_secret' option"
-                    " set.",
-    )
-    parser.add_argument(
-        "-u", "--user",
-        default=None,
-        help="Local part of the new user. Will prompt if omitted.",
-    )
-    parser.add_argument(
-        "-p", "--password",
-        default=None,
-        help="New password for user. Will prompt if omitted.",
-    )
-    admin_group = parser.add_mutually_exclusive_group()
-    admin_group.add_argument(
-        "-a", "--admin",
-        action="store_true",
-        help="Register new user as an admin. Will prompt if --no-admin is not set either.",
-    )
-    admin_group.add_argument(
-        "--no-admin",
-        action="store_true",
-        help="Register new user as a regular user. Will prompt if --admin is not set either.",
-    )
-
-    group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument(
-        "-c", "--config",
-        type=argparse.FileType('r'),
-        help="Path to server config file. Used to read in shared secret.",
-    )
-
-    group.add_argument(
-        "-k", "--shared-secret",
-        help="Shared secret as defined in server config file.",
-    )
-
-    parser.add_argument(
-        "server_url",
-        default="https://localhost:8448",
-        nargs='?',
-        help="URL to use to talk to the home server. Defaults to "
-             " 'https://localhost:8448'.",
-    )
-
-    args = parser.parse_args()
-
-    if "config" in args and args.config:
-        config = yaml.safe_load(args.config)
-        secret = config.get("registration_shared_secret", None)
-        if not secret:
-            print "No 'registration_shared_secret' defined in config."
-            sys.exit(1)
-    else:
-        secret = args.shared_secret
-
-    admin = None
-    if args.admin or args.no_admin:
-        admin = args.admin
-
-    register_new_user(args.user, args.password, args.server_url, secret, admin)
+    main()

+ 124 - 151
scripts/synapse_port_db

@@ -15,23 +15,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from twisted.internet import defer, reactor
-from twisted.enterprise import adbapi
-
-from synapse.storage._base import LoggingTransaction, SQLBaseStore
-from synapse.storage.engines import create_engine
-from synapse.storage.prepare_database import prepare_database
-
 import argparse
 import curses
 import logging
 import sys
 import time
 import traceback
-import yaml
 
 from six import string_types
 
+import yaml
+
+from twisted.enterprise import adbapi
+from twisted.internet import defer, reactor
+
+from synapse.storage._base import LoggingTransaction, SQLBaseStore
+from synapse.storage.engines import create_engine
+from synapse.storage.prepare_database import prepare_database
 
 logger = logging.getLogger("synapse_port_db")
 
@@ -105,6 +105,7 @@ class Store(object):
 
     *All* database interactions should go through this object.
     """
+
     def __init__(self, db_pool, engine):
         self.db_pool = db_pool
         self.database_engine = engine
@@ -135,7 +136,8 @@ class Store(object):
                         txn = conn.cursor()
                         return func(
                             LoggingTransaction(txn, desc, self.database_engine, [], []),
-                            *args, **kwargs
+                            *args,
+                            **kwargs
                         )
                     except self.database_engine.module.DatabaseError as e:
                         if self.database_engine.is_deadlock(e):
@@ -158,22 +160,20 @@ class Store(object):
         def r(txn):
             txn.execute(sql, args)
             return txn.fetchall()
+
         return self.runInteraction("execute_sql", r)
 
     def insert_many_txn(self, txn, table, headers, rows):
         sql = "INSERT INTO %s (%s) VALUES (%s)" % (
             table,
             ", ".join(k for k in headers),
-            ", ".join("%s" for _ in headers)
+            ", ".join("%s" for _ in headers),
         )
 
         try:
             txn.executemany(sql, rows)
-        except:
-            logger.exception(
-                "Failed to insert: %s",
-                table,
-            )
+        except Exception:
+            logger.exception("Failed to insert: %s", table)
             raise
 
 
@@ -206,7 +206,7 @@ class Porter(object):
                             "table_name": table,
                             "forward_rowid": 1,
                             "backward_rowid": 0,
-                        }
+                        },
                     )
 
                     forward_chunk = 1
@@ -221,10 +221,10 @@ class Porter(object):
                     table, forward_chunk, backward_chunk
                 )
         else:
+
             def delete_all(txn):
                 txn.execute(
-                    "DELETE FROM port_from_sqlite3 WHERE table_name = %s",
-                    (table,)
+                    "DELETE FROM port_from_sqlite3 WHERE table_name = %s", (table,)
                 )
                 txn.execute("TRUNCATE %s CASCADE" % (table,))
 
@@ -232,11 +232,7 @@ class Porter(object):
 
             yield self.postgres_store._simple_insert(
                 table="port_from_sqlite3",
-                values={
-                    "table_name": table,
-                    "forward_rowid": 1,
-                    "backward_rowid": 0,
-                }
+                values={"table_name": table, "forward_rowid": 1, "backward_rowid": 0},
             )
 
             forward_chunk = 1
@@ -251,12 +247,16 @@ class Porter(object):
         )
 
     @defer.inlineCallbacks
-    def handle_table(self, table, postgres_size, table_size, forward_chunk,
-                     backward_chunk):
+    def handle_table(
+        self, table, postgres_size, table_size, forward_chunk, backward_chunk
+    ):
         logger.info(
             "Table %s: %i/%i (rows %i-%i) already ported",
-            table, postgres_size, table_size,
-            backward_chunk+1, forward_chunk-1,
+            table,
+            postgres_size,
+            table_size,
+            backward_chunk + 1,
+            forward_chunk - 1,
         )
 
         if not table_size:
@@ -271,7 +271,9 @@ class Porter(object):
             return
 
         if table in (
-            "user_directory", "user_directory_search", "users_who_share_rooms",
+            "user_directory",
+            "user_directory_search",
+            "users_who_share_rooms",
             "users_in_pubic_room",
         ):
             # We don't port these tables, as they're a faff and we can regenerate
@@ -283,37 +285,35 @@ class Porter(object):
             # We need to make sure there is a single row, `(X, null), as that is
             # what synapse expects to be there.
             yield self.postgres_store._simple_insert(
-                table=table,
-                values={"stream_id": None},
+                table=table, values={"stream_id": None}
             )
             self.progress.update(table, table_size)  # Mark table as done
             return
 
         forward_select = (
-            "SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?"
-            % (table,)
+            "SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?" % (table,)
         )
 
         backward_select = (
-            "SELECT rowid, * FROM %s WHERE rowid <= ? ORDER BY rowid LIMIT ?"
-            % (table,)
+            "SELECT rowid, * FROM %s WHERE rowid <= ? ORDER BY rowid LIMIT ?" % (table,)
         )
 
         do_forward = [True]
         do_backward = [True]
 
         while True:
+
             def r(txn):
                 forward_rows = []
                 backward_rows = []
                 if do_forward[0]:
-                    txn.execute(forward_select, (forward_chunk, self.batch_size,))
+                    txn.execute(forward_select, (forward_chunk, self.batch_size))
                     forward_rows = txn.fetchall()
                     if not forward_rows:
                         do_forward[0] = False
 
                 if do_backward[0]:
-                    txn.execute(backward_select, (backward_chunk, self.batch_size,))
+                    txn.execute(backward_select, (backward_chunk, self.batch_size))
                     backward_rows = txn.fetchall()
                     if not backward_rows:
                         do_backward[0] = False
@@ -325,9 +325,7 @@ class Porter(object):
 
                 return headers, forward_rows, backward_rows
 
-            headers, frows, brows = yield self.sqlite_store.runInteraction(
-                "select", r
-            )
+            headers, frows, brows = yield self.sqlite_store.runInteraction("select", r)
 
             if frows or brows:
                 if frows:
@@ -339,9 +337,7 @@ class Porter(object):
                 rows = self._convert_rows(table, headers, rows)
 
                 def insert(txn):
-                    self.postgres_store.insert_many_txn(
-                        txn, table, headers[1:], rows
-                    )
+                    self.postgres_store.insert_many_txn(txn, table, headers[1:], rows)
 
                     self.postgres_store._simple_update_one_txn(
                         txn,
@@ -362,8 +358,9 @@ class Porter(object):
                 return
 
     @defer.inlineCallbacks
-    def handle_search_table(self, postgres_size, table_size, forward_chunk,
-                            backward_chunk):
+    def handle_search_table(
+        self, postgres_size, table_size, forward_chunk, backward_chunk
+    ):
         select = (
             "SELECT es.rowid, es.*, e.origin_server_ts, e.stream_ordering"
             " FROM event_search as es"
@@ -373,8 +370,9 @@ class Porter(object):
         )
 
         while True:
+
             def r(txn):
-                txn.execute(select, (forward_chunk, self.batch_size,))
+                txn.execute(select, (forward_chunk, self.batch_size))
                 rows = txn.fetchall()
                 headers = [column[0] for column in txn.description]
 
@@ -402,18 +400,21 @@ class Porter(object):
                         else:
                             rows_dict.append(d)
 
-                    txn.executemany(sql, [
-                        (
-                            row["event_id"],
-                            row["room_id"],
-                            row["key"],
-                            row["sender"],
-                            row["value"],
-                            row["origin_server_ts"],
-                            row["stream_ordering"],
-                        )
-                        for row in rows_dict
-                    ])
+                    txn.executemany(
+                        sql,
+                        [
+                            (
+                                row["event_id"],
+                                row["room_id"],
+                                row["key"],
+                                row["sender"],
+                                row["value"],
+                                row["origin_server_ts"],
+                                row["stream_ordering"],
+                            )
+                            for row in rows_dict
+                        ],
+                    )
 
                     self.postgres_store._simple_update_one_txn(
                         txn,
@@ -437,7 +438,8 @@ class Porter(object):
     def setup_db(self, db_config, database_engine):
         db_conn = database_engine.module.connect(
             **{
-                k: v for k, v in db_config.get("args", {}).items()
+                k: v
+                for k, v in db_config.get("args", {}).items()
                 if not k.startswith("cp_")
             }
         )
@@ -450,13 +452,11 @@ class Porter(object):
     def run(self):
         try:
             sqlite_db_pool = adbapi.ConnectionPool(
-                self.sqlite_config["name"],
-                **self.sqlite_config["args"]
+                self.sqlite_config["name"], **self.sqlite_config["args"]
             )
 
             postgres_db_pool = adbapi.ConnectionPool(
-                self.postgres_config["name"],
-                **self.postgres_config["args"]
+                self.postgres_config["name"], **self.postgres_config["args"]
             )
 
             sqlite_engine = create_engine(sqlite_config)
@@ -465,9 +465,7 @@ class Porter(object):
             self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
             self.postgres_store = Store(postgres_db_pool, postgres_engine)
 
-            yield self.postgres_store.execute(
-                postgres_engine.check_database
-            )
+            yield self.postgres_store.execute(postgres_engine.check_database)
 
             # Step 1. Set up databases.
             self.progress.set_state("Preparing SQLite3")
@@ -477,6 +475,7 @@ class Porter(object):
             self.setup_db(postgres_config, postgres_engine)
 
             self.progress.set_state("Creating port tables")
+
             def create_port_table(txn):
                 txn.execute(
                     "CREATE TABLE IF NOT EXISTS port_from_sqlite3 ("
@@ -501,10 +500,9 @@ class Porter(object):
                 )
 
             try:
-                yield self.postgres_store.runInteraction(
-                    "alter_table", alter_table
-                )
-            except Exception as e:
+                yield self.postgres_store.runInteraction("alter_table", alter_table)
+            except Exception:
+                # On Error Resume Next
                 pass
 
             yield self.postgres_store.runInteraction(
@@ -514,11 +512,7 @@ class Porter(object):
             # Step 2. Get tables.
             self.progress.set_state("Fetching tables")
             sqlite_tables = yield self.sqlite_store._simple_select_onecol(
-                table="sqlite_master",
-                keyvalues={
-                    "type": "table",
-                },
-                retcol="name",
+                table="sqlite_master", keyvalues={"type": "table"}, retcol="name"
             )
 
             postgres_tables = yield self.postgres_store._simple_select_onecol(
@@ -545,18 +539,14 @@ class Porter(object):
             # Step 4. Do the copying.
             self.progress.set_state("Copying to postgres")
             yield defer.gatherResults(
-                [
-                    self.handle_table(*res)
-                    for res in setup_res
-                ],
-                consumeErrors=True,
+                [self.handle_table(*res) for res in setup_res], consumeErrors=True
             )
 
             # Step 5. Do final post-processing
             yield self._setup_state_group_id_seq()
 
             self.progress.done()
-        except:
+        except Exception:
             global end_error_exec_info
             end_error_exec_info = sys.exc_info()
             logger.exception("")
@@ -566,9 +556,7 @@ class Porter(object):
     def _convert_rows(self, table, headers, rows):
         bool_col_names = BOOLEAN_COLUMNS.get(table, [])
 
-        bool_cols = [
-            i for i, h in enumerate(headers) if h in bool_col_names
-        ]
+        bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]
 
         class BadValueException(Exception):
             pass
@@ -577,18 +565,21 @@ class Porter(object):
             if j in bool_cols:
                 return bool(col)
             elif isinstance(col, string_types) and "\0" in col:
-                logger.warn("DROPPING ROW: NUL value in table %s col %s: %r", table, headers[j], col)
-                raise BadValueException();
+                logger.warn(
+                    "DROPPING ROW: NUL value in table %s col %s: %r",
+                    table,
+                    headers[j],
+                    col,
+                )
+                raise BadValueException()
             return col
 
         outrows = []
         for i, row in enumerate(rows):
             try:
-                outrows.append(tuple(
-                    conv(j, col)
-                    for j, col in enumerate(row)
-                    if j > 0
-                ))
+                outrows.append(
+                    tuple(conv(j, col) for j, col in enumerate(row) if j > 0)
+                )
             except BadValueException:
                 pass
 
@@ -616,9 +607,7 @@ class Porter(object):
 
             return headers, [r for r in rows if r[ts_ind] < yesterday]
 
-        headers, rows = yield self.sqlite_store.runInteraction(
-            "select", r,
-        )
+        headers, rows = yield self.sqlite_store.runInteraction("select", r)
 
         rows = self._convert_rows("sent_transactions", headers, rows)
 
@@ -639,7 +628,7 @@ class Porter(object):
             txn.execute(
                 "SELECT rowid FROM sent_transactions WHERE ts >= ?"
                 " ORDER BY rowid ASC LIMIT 1",
-                (yesterday,)
+                (yesterday,),
             )
 
             rows = txn.fetchall()
@@ -657,21 +646,17 @@ class Porter(object):
                 "table_name": "sent_transactions",
                 "forward_rowid": next_chunk,
                 "backward_rowid": 0,
-            }
+            },
         )
 
         def get_sent_table_size(txn):
             txn.execute(
-                "SELECT count(*) FROM sent_transactions"
-                " WHERE ts >= ?",
-                (yesterday,)
+                "SELECT count(*) FROM sent_transactions WHERE ts >= ?", (yesterday,)
             )
             size, = txn.fetchone()
             return int(size)
 
-        remaining_count = yield self.sqlite_store.execute(
-            get_sent_table_size
-        )
+        remaining_count = yield self.sqlite_store.execute(get_sent_table_size)
 
         total_count = remaining_count + inserted_rows
 
@@ -680,13 +665,11 @@ class Porter(object):
     @defer.inlineCallbacks
     def _get_remaining_count_to_port(self, table, forward_chunk, backward_chunk):
         frows = yield self.sqlite_store.execute_sql(
-            "SELECT count(*) FROM %s WHERE rowid >= ?" % (table,),
-            forward_chunk,
+            "SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
         )
 
         brows = yield self.sqlite_store.execute_sql(
-            "SELECT count(*) FROM %s WHERE rowid <= ?" % (table,),
-            backward_chunk,
+            "SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
         )
 
         defer.returnValue(frows[0][0] + brows[0][0])
@@ -694,7 +677,7 @@ class Porter(object):
     @defer.inlineCallbacks
     def _get_already_ported_count(self, table):
         rows = yield self.postgres_store.execute_sql(
-            "SELECT count(*) FROM %s" % (table,),
+            "SELECT count(*) FROM %s" % (table,)
         )
 
         defer.returnValue(rows[0][0])
@@ -717,22 +700,21 @@ class Porter(object):
     def _setup_state_group_id_seq(self):
         def r(txn):
             txn.execute("SELECT MAX(id) FROM state_groups")
-            next_id = txn.fetchone()[0]+1
-            txn.execute(
-                "ALTER SEQUENCE state_group_id_seq RESTART WITH %s",
-                (next_id,),
-            )
+            next_id = txn.fetchone()[0] + 1
+            txn.execute("ALTER SEQUENCE state_group_id_seq RESTART WITH %s", (next_id,))
+
         return self.postgres_store.runInteraction("setup_state_group_id_seq", r)
 
 
 ##############################################
-###### The following is simply UI stuff ######
+# The following is simply UI stuff
 ##############################################
 
 
 class Progress(object):
     """Used to report progress of the port
     """
+
     def __init__(self):
         self.tables = {}
 
@@ -758,6 +740,7 @@ class Progress(object):
 class CursesProgress(Progress):
     """Reports progress to a curses window
     """
+
     def __init__(self, stdscr):
         self.stdscr = stdscr
 
@@ -801,7 +784,7 @@ class CursesProgress(Progress):
         duration = int(now) - int(self.start_time)
 
         minutes, seconds = divmod(duration, 60)
-        duration_str = '%02dm %02ds' % (minutes, seconds,)
+        duration_str = '%02dm %02ds' % (minutes, seconds)
 
         if self.finished:
             status = "Time spent: %s (Done!)" % (duration_str,)
@@ -814,16 +797,12 @@ class CursesProgress(Progress):
                 est_remaining_str = '%02dm %02ds remaining' % divmod(est_remaining, 60)
             else:
                 est_remaining_str = "Unknown"
-            status = (
-                "Time spent: %s (est. remaining: %s)"
-                % (duration_str, est_remaining_str,)
+            status = "Time spent: %s (est. remaining: %s)" % (
+                duration_str,
+                est_remaining_str,
             )
 
-        self.stdscr.addstr(
-            0, 0,
-            status,
-            curses.A_BOLD,
-        )
+        self.stdscr.addstr(0, 0, status, curses.A_BOLD)
 
         max_len = max([len(t) for t in self.tables.keys()])
 
@@ -831,9 +810,7 @@ class CursesProgress(Progress):
         middle_space = 1
 
         items = self.tables.items()
-        items.sort(
-            key=lambda i: (i[1]["perc"], i[0]),
-        )
+        items.sort(key=lambda i: (i[1]["perc"], i[0]))
 
         for i, (table, data) in enumerate(items):
             if i + 2 >= rows:
@@ -844,9 +821,7 @@ class CursesProgress(Progress):
             color = curses.color_pair(2) if perc == 100 else curses.color_pair(1)
 
             self.stdscr.addstr(
-                i + 2, left_margin + max_len - len(table),
-                table,
-                curses.A_BOLD | color,
+                i + 2, left_margin + max_len - len(table), table, curses.A_BOLD | color
             )
 
             size = 20
@@ -857,15 +832,13 @@ class CursesProgress(Progress):
             )
 
             self.stdscr.addstr(
-                i + 2, left_margin + max_len + middle_space,
+                i + 2,
+                left_margin + max_len + middle_space,
                 "%s %3d%% (%d/%d)" % (progress, perc, data["num_done"], data["total"]),
             )
 
         if self.finished:
-            self.stdscr.addstr(
-                rows - 1, 0,
-                "Press any key to exit...",
-            )
+            self.stdscr.addstr(rows - 1, 0, "Press any key to exit...")
 
         self.stdscr.refresh()
         self.last_update = time.time()
@@ -877,29 +850,25 @@ class CursesProgress(Progress):
 
     def set_state(self, state):
         self.stdscr.clear()
-        self.stdscr.addstr(
-            0, 0,
-            state + "...",
-            curses.A_BOLD,
-        )
+        self.stdscr.addstr(0, 0, state + "...", curses.A_BOLD)
         self.stdscr.refresh()
 
 
 class TerminalProgress(Progress):
     """Just prints progress to the terminal
     """
+
     def update(self, table, num_done):
         super(TerminalProgress, self).update(table, num_done)
 
         data = self.tables[table]
 
-        print "%s: %d%% (%d/%d)" % (
-            table, data["perc"],
-            data["num_done"], data["total"],
+        print(
+            "%s: %d%% (%d/%d)" % (table, data["perc"], data["num_done"], data["total"])
         )
 
     def set_state(self, state):
-        print state + "..."
+        print(state + "...")
 
 
 ##############################################
@@ -909,34 +878,38 @@ class TerminalProgress(Progress):
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="A script to port an existing synapse SQLite database to"
-                    " a new PostgreSQL database."
+        " a new PostgreSQL database."
     )
     parser.add_argument("-v", action='store_true')
     parser.add_argument(
-        "--sqlite-database", required=True,
+        "--sqlite-database",
+        required=True,
         help="The snapshot of the SQLite database file. This must not be"
-             " currently used by a running synapse server"
+        " currently used by a running synapse server",
     )
     parser.add_argument(
-        "--postgres-config", type=argparse.FileType('r'), required=True,
-        help="The database config file for the PostgreSQL database"
+        "--postgres-config",
+        type=argparse.FileType('r'),
+        required=True,
+        help="The database config file for the PostgreSQL database",
     )
     parser.add_argument(
-        "--curses", action='store_true',
-        help="display a curses based progress UI"
+        "--curses", action='store_true', help="display a curses based progress UI"
     )
 
     parser.add_argument(
-        "--batch-size", type=int, default=1000,
+        "--batch-size",
+        type=int,
+        default=1000,
         help="The number of rows to select from the SQLite table each"
-             " iteration [default=1000]",
+        " iteration [default=1000]",
     )
 
     args = parser.parse_args()
 
     logging_config = {
         "level": logging.DEBUG if args.v else logging.INFO,
-        "format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
+        "format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
     }
 
     if args.curses:

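The reformatted `_convert_rows` above encodes the two porting rules worth remembering: SQLite's integer booleans must become real booleans, and any row containing a NUL byte is dropped because PostgreSQL rejects NULs in text columns. A minimal, self-contained sketch of that conversion (the BOOLEAN_COLUMNS entry and the sample rows are made up for illustration; the real script also strips the leading rowid column from each row):

# Minimal sketch of the conversion rules in _convert_rows above. The
# BOOLEAN_COLUMNS entry and the sample rows are made up for illustration.
BOOLEAN_COLUMNS = {"events": ["processed", "outlier"]}

def convert_rows(table, headers, rows):
    bool_col_names = BOOLEAN_COLUMNS.get(table, [])
    bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]

    outrows = []
    for row in rows:
        try:
            converted = []
            for j, col in enumerate(row):
                if j in bool_cols:
                    # SQLite stores booleans as 0/1; PostgreSQL wants real booleans
                    converted.append(bool(col))
                elif isinstance(col, str) and "\0" in col:
                    # PostgreSQL rejects NUL bytes in text, so drop the whole row
                    raise ValueError("NUL in %s.%s" % (table, headers[j]))
                else:
                    converted.append(col)
            outrows.append(tuple(converted))
        except ValueError:
            pass
    return outrows

print(convert_rows(
    "events",
    ["event_id", "processed", "body"],
    [("$a", 1, "hello"), ("$b", 0, "bad\0value")],
))
# [('$a', True, 'hello')] -- the row containing a NUL byte was dropped
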
+ 8 - 9
setup.cfg

@@ -14,17 +14,16 @@ ignore =
     pylint.cfg
     tox.ini
 
-[pep8]
-max-line-length = 90
-#  W503 requires that binary operators be at the end, not start, of lines. Erik
-#  doesn't like it.  E203 is contrary to PEP8.  E731 is silly.
-ignore = W503,E203,E731
-
 [flake8]
-# note that flake8 inherits the "ignore" settings from "pep8" (because it uses
-# pep8 to do those checks), but not the "max-line-length" setting
 max-line-length = 90
-ignore=W503,E203,E731
+
+# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
+# for error codes. The ones we ignore are:
+#  W503: line break before binary operator
+#  W504: line break after binary operator
+#  E203: whitespace before ':' (which is contrary to pep8?)
+#  E731: do not assign a lambda expression, use a def
+ignore=W503,W504,E203,E731
 
 [isort]
 line_length = 89

+ 4 - 2
setup.py

@@ -1,6 +1,8 @@
 #!/usr/bin/env python
 
-# Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2014-2017 OpenMarket Ltd
+# Copyright 2017 Vector Creations Ltd
+# Copyright 2017-2018 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -86,7 +88,7 @@ setup(
     name="matrix-synapse",
     version=version,
     packages=find_packages(exclude=["tests", "tests.*"]),
-    description="Reference Synapse Home Server",
+    description="Reference homeserver for the Matrix decentralised comms protocol",
     install_requires=dependencies['requirements'](include_conditional=True).keys(),
     dependency_links=dependencies["DEPENDENCY_LINKS"].values(),
     include_package_data=True,

+ 1 - 1
synapse/__init__.py

@@ -27,4 +27,4 @@ try:
 except ImportError:
     pass
 
-__version__ = "0.33.7"
+__version__ = "0.33.9"

+ 0 - 0
synapse/_scripts/__init__.py


+ 215 - 0
synapse/_scripts/register_new_matrix_user.py

@@ -0,0 +1,215 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015, 2016 OpenMarket Ltd
+# Copyright 2018 New Vector
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import argparse
+import getpass
+import hashlib
+import hmac
+import logging
+import sys
+
+from six.moves import input
+
+import requests as _requests
+import yaml
+
+
+def request_registration(
+    user,
+    password,
+    server_location,
+    shared_secret,
+    admin=False,
+    requests=_requests,
+    _print=print,
+    exit=sys.exit,
+):
+
+    url = "%s/_matrix/client/r0/admin/register" % (server_location,)
+
+    # Get the nonce
+    r = requests.get(url, verify=False)
+
+    if r.status_code != 200:
+        _print("ERROR! Received %d %s" % (r.status_code, r.reason))
+        if 400 <= r.status_code < 500:
+            try:
+                _print(r.json()["error"])
+            except Exception:
+                pass
+        return exit(1)
+
+    nonce = r.json()["nonce"]
+
+    mac = hmac.new(key=shared_secret.encode('utf8'), digestmod=hashlib.sha1)
+
+    mac.update(nonce.encode('utf8'))
+    mac.update(b"\x00")
+    mac.update(user.encode('utf8'))
+    mac.update(b"\x00")
+    mac.update(password.encode('utf8'))
+    mac.update(b"\x00")
+    mac.update(b"admin" if admin else b"notadmin")
+
+    mac = mac.hexdigest()
+
+    data = {
+        "nonce": nonce,
+        "username": user,
+        "password": password,
+        "mac": mac,
+        "admin": admin,
+    }
+
+    _print("Sending registration request...")
+    r = requests.post(url, json=data, verify=False)
+
+    if r.status_code != 200:
+        _print("ERROR! Received %d %s" % (r.status_code, r.reason))
+        if 400 <= r.status_code < 500:
+            try:
+                _print(r.json()["error"])
+            except Exception:
+                pass
+        return exit(1)
+
+    _print("Success!")
+
+
+def register_new_user(user, password, server_location, shared_secret, admin):
+    if not user:
+        try:
+            default_user = getpass.getuser()
+        except Exception:
+            default_user = None
+
+        if default_user:
+            user = input("New user localpart [%s]: " % (default_user,))
+            if not user:
+                user = default_user
+        else:
+            user = input("New user localpart: ")
+
+    if not user:
+        print("Invalid user name")
+        sys.exit(1)
+
+    if not password:
+        password = getpass.getpass("Password: ")
+
+        if not password:
+            print("Password cannot be blank.")
+            sys.exit(1)
+
+        confirm_password = getpass.getpass("Confirm password: ")
+
+        if password != confirm_password:
+            print("Passwords do not match")
+            sys.exit(1)
+
+    if admin is None:
+        admin = input("Make admin [no]: ")
+        if admin in ("y", "yes", "true"):
+            admin = True
+        else:
+            admin = False
+
+    request_registration(user, password, server_location, shared_secret, bool(admin))
+
+
+def main():
+
+    logging.captureWarnings(True)
+
+    parser = argparse.ArgumentParser(
+        description="Used to register new users with a given home server when"
+        " registration has been disabled. The home server must be"
+        " configured with the 'registration_shared_secret' option"
+        " set."
+    )
+    parser.add_argument(
+        "-u",
+        "--user",
+        default=None,
+        help="Local part of the new user. Will prompt if omitted.",
+    )
+    parser.add_argument(
+        "-p",
+        "--password",
+        default=None,
+        help="New password for user. Will prompt if omitted.",
+    )
+    admin_group = parser.add_mutually_exclusive_group()
+    admin_group.add_argument(
+        "-a",
+        "--admin",
+        action="store_true",
+        help=(
+            "Register new user as an admin. "
+            "Will prompt if --no-admin is not set either."
+        ),
+    )
+    admin_group.add_argument(
+        "--no-admin",
+        action="store_true",
+        help=(
+            "Register new user as a regular user. "
+            "Will prompt if --admin is not set either."
+        ),
+    )
+
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument(
+        "-c",
+        "--config",
+        type=argparse.FileType('r'),
+        help="Path to server config file. Used to read in shared secret.",
+    )
+
+    group.add_argument(
+        "-k", "--shared-secret", help="Shared secret as defined in server config file."
+    )
+
+    parser.add_argument(
+        "server_url",
+        default="https://localhost:8448",
+        nargs='?',
+        help="URL to use to talk to the home server. Defaults to "
+        "'https://localhost:8448'.",
+    )
+
+    args = parser.parse_args()
+
+    if "config" in args and args.config:
+        config = yaml.safe_load(args.config)
+        secret = config.get("registration_shared_secret", None)
+        if not secret:
+            print("No 'registration_shared_secret' defined in config.")
+            sys.exit(1)
+    else:
+        secret = args.shared_secret
+
+    admin = None
+    if args.admin or args.no_admin:
+        admin = args.admin
+
+    register_new_user(args.user, args.password, args.server_url, secret, admin)
+
+
+if __name__ == "__main__":
+    main()

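The new script's MAC is an HMAC-SHA1, keyed with the homeserver's `registration_shared_secret`, over the nonce, localpart, password and admin flag joined by NUL bytes. A standalone sketch of the same computation (the secret, nonce and credentials below are made-up example values):

# Standalone recomputation of the registration MAC built above. The secret,
# nonce and credentials are made-up example values.
import hashlib
import hmac

shared_secret = "example-shared-secret"  # registration_shared_secret from the config
nonce = "abcdef0123456789"               # returned by the initial GET to the endpoint

msg = b"\x00".join([
    nonce.encode("utf8"),
    b"alice",       # localpart
    b"wonderland",  # password
    b"notadmin",    # or b"admin" for an admin account
])
mac = hmac.new(shared_secret.encode("utf8"), msg, hashlib.sha1).hexdigest()
print(mac)  # goes into the "mac" field of the registration POST body
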
+ 20 - 9
synapse/api/auth.py

@@ -188,17 +188,33 @@ class Auth(object):
         """
         # Can optionally look elsewhere in the request (e.g. headers)
         try:
+            ip_addr = self.hs.get_ip_from_request(request)
+            user_agent = request.requestHeaders.getRawHeaders(
+                b"User-Agent",
+                default=[b""]
+            )[0].decode('ascii', 'surrogateescape')
+
+            access_token = self.get_access_token_from_request(
+                request, self.TOKEN_NOT_FOUND_HTTP_STATUS
+            )
+
             user_id, app_service = yield self._get_appservice_user_id(request)
             if user_id:
                 request.authenticated_entity = user_id
+
+                if ip_addr and self.hs.config.track_appservice_user_ips:
+                    yield self.store.insert_client_ip(
+                        user_id=user_id,
+                        access_token=access_token,
+                        ip=ip_addr,
+                        user_agent=user_agent,
+                        device_id="dummy-device",  # stubbed
+                    )
+
                 defer.returnValue(
                     synapse.types.create_requester(user_id, app_service=app_service)
                 )
 
-            access_token = self.get_access_token_from_request(
-                request, self.TOKEN_NOT_FOUND_HTTP_STATUS
-            )
-
             user_info = yield self.get_user_by_access_token(access_token, rights)
             user = user_info["user"]
             token_id = user_info["token_id"]
@@ -208,11 +224,6 @@ class Auth(object):
             # stubbed out.
             device_id = user_info.get("device_id")
 
-            ip_addr = self.hs.get_ip_from_request(request)
-            user_agent = request.requestHeaders.getRawHeaders(
-                b"User-Agent",
-                default=[b""]
-            )[0].decode('ascii', 'surrogateescape')
             if user and access_token and ip_addr:
                 yield self.store.insert_client_ip(
                     user_id=user.to_string(),

+ 8 - 1
synapse/api/constants.py

@@ -51,6 +51,7 @@ class LoginType(object):
     EMAIL_IDENTITY = u"m.login.email.identity"
     MSISDN = u"m.login.msisdn"
     RECAPTCHA = u"m.login.recaptcha"
+    TERMS = u"m.login.terms"
     DUMMY = u"m.login.dummy"
 
     # Only for C/S API v1
@@ -61,6 +62,7 @@ class LoginType(object):
 class EventTypes(object):
     Member = "m.room.member"
     Create = "m.room.create"
+    Tombstone = "m.room.tombstone"
     JoinRules = "m.room.join_rules"
     PowerLevels = "m.room.power_levels"
     Aliases = "m.room.aliases"
@@ -101,6 +103,7 @@ class ThirdPartyEntityKind(object):
 class RoomVersions(object):
     V1 = "1"
     VDH_TEST = "vdh-test-version"
+    STATE_V2_TEST = "state-v2-test"
 
 
 # the version we will give rooms which are created on this server
@@ -108,7 +111,11 @@ DEFAULT_ROOM_VERSION = RoomVersions.V1
 
 # vdh-test-version is a placeholder to get room versioning support working and tested
 # until we have a working v2.
-KNOWN_ROOM_VERSIONS = {RoomVersions.V1, RoomVersions.VDH_TEST}
+KNOWN_ROOM_VERSIONS = {
+    RoomVersions.V1,
+    RoomVersions.VDH_TEST,
+    RoomVersions.STATE_V2_TEST,
+}
 
 ServerNoticeMsgType = "m.server_notice"
 ServerNoticeLimitReached = "m.server_notice.usage_limit_reached"

+ 4 - 1
synapse/api/filtering.py

@@ -172,7 +172,10 @@ USER_FILTER_SCHEMA = {
                 # events a lot easier as we can then use a negative lookbehind
                 # assertion to split '\.' If we allowed \\ then it would
                 # incorrectly split '\\.' See synapse.events.utils.serialize_event
-                "pattern": "^((?!\\\).)*$"
+                #
+                # Note that because this is a regular expression, we have to escape
+                # each backslash in the pattern.
+                "pattern": r"^((?!\\\\).)*$"
             }
         }
     },

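The escaping rule above exists because event field paths are later split on unescaped dots: a negative lookbehind keeps an escaped `\.` together while a bare `.` separates path components. A quick illustration, assuming the same split regex as `synapse.events.utils.serialize_event`:

# Splitting event field paths on unescaped dots only, as the comment above
# describes. Assumes the same regex as synapse.events.utils.serialize_event.
import re

for field in ("content.body", r"content.m\.relates_to"):
    print(re.split(r"(?<!\\)\.", field))
# ['content', 'body']
# ['content', 'm\\.relates_to']
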
+ 0 - 1
synapse/api/urls.py

@@ -28,7 +28,6 @@ FEDERATION_PREFIX = "/_matrix/federation/v1"
 STATIC_PREFIX = "/_matrix/static"
 WEB_CLIENT_PREFIX = "/_matrix/client"
 CONTENT_REPO_PREFIX = "/_matrix/content"
-SERVER_KEY_PREFIX = "/_matrix/key/v1"
 SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
 MEDIA_PREFIX = "/_matrix/media/r0"
 LEGACY_MEDIA_PREFIX = "/_matrix/media/v1"

+ 24 - 24
synapse/app/homeserver.py

@@ -20,6 +20,7 @@ import sys
 
 from six import iteritems
 
+import psutil
 from prometheus_client import Gauge
 
 from twisted.application import service
@@ -36,7 +37,6 @@ from synapse.api.urls import (
     FEDERATION_PREFIX,
     LEGACY_MEDIA_PREFIX,
     MEDIA_PREFIX,
-    SERVER_KEY_PREFIX,
     SERVER_KEY_V2_PREFIX,
     STATIC_PREFIX,
     WEB_CLIENT_PREFIX,
@@ -58,9 +58,9 @@ from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, check_requirem
 from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
 from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
 from synapse.rest import ClientRestResource
-from synapse.rest.key.v1.server_key_resource import LocalKey
 from synapse.rest.key.v2 import KeyApiV2Resource
 from synapse.rest.media.v0.content_repository import ContentRepoResource
+from synapse.rest.well_known import WellKnownResource
 from synapse.server import HomeServer
 from synapse.storage import DataStore, are_all_users_on_domain
 from synapse.storage.engines import IncorrectDatabaseSetup, create_engine
@@ -196,6 +196,7 @@ class SynapseHomeServer(HomeServer):
                 "/_matrix/client/unstable": client_resource,
                 "/_matrix/client/v2_alpha": client_resource,
                 "/_matrix/client/versions": client_resource,
+                "/.well-known/matrix/client": WellKnownResource(self),
             })
 
         if name == "consent":
@@ -235,10 +236,7 @@ class SynapseHomeServer(HomeServer):
                 )
 
         if name in ["keys", "federation"]:
-            resources.update({
-                SERVER_KEY_PREFIX: LocalKey(self),
-                SERVER_KEY_V2_PREFIX: KeyApiV2Resource(self),
-            })
+            resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
 
         if name == "webclient":
             resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self)
@@ -502,7 +500,6 @@ def run(hs):
 
     def performance_stats_init():
         try:
-            import psutil
             process = psutil.Process()
             # Ensure we can fetch both, and make the initial request for cpu_percent
             # so the next request will use this as the initial point.
@@ -510,12 +507,9 @@ def run(hs):
             process.cpu_percent(interval=None)
             logger.info("report_stats can use psutil")
             stats_process.append(process)
-        except (ImportError, AttributeError):
-            logger.warn(
-                "report_stats enabled but psutil is not installed or incorrect version."
-                " Disabling reporting of memory/cpu stats."
-                " Ensuring psutil is available will help matrix.org track performance"
-                " changes across releases."
+        except AttributeError:
+            logger.warning(
+                "Unable to read memory/cpu stats. Disabling reporting."
             )
 
     def generate_user_daily_visit_stats():
@@ -530,29 +524,35 @@ def run(hs):
     clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000)
 
     # monthly active user limiting functionality
-    clock.looping_call(
-        hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60
-    )
-    hs.get_datastore().reap_monthly_active_users()
+    def reap_monthly_active_users():
+        return run_as_background_process(
+            "reap_monthly_active_users",
+            hs.get_datastore().reap_monthly_active_users,
+        )
+    clock.looping_call(reap_monthly_active_users, 1000 * 60 * 60)
+    reap_monthly_active_users()
 
     @defer.inlineCallbacks
     def generate_monthly_active_users():
         current_mau_count = 0
         reserved_count = 0
         store = hs.get_datastore()
-        if hs.config.limit_usage_by_mau:
+        if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
             current_mau_count = yield store.get_monthly_active_count()
             reserved_count = yield store.get_registered_reserved_users_count()
         current_mau_gauge.set(float(current_mau_count))
         registered_reserved_users_mau_gauge.set(float(reserved_count))
         max_mau_gauge.set(float(hs.config.max_mau_value))
 
-    hs.get_datastore().initialise_reserved_users(
-        hs.config.mau_limits_reserved_threepids
-    )
-    generate_monthly_active_users()
+    def start_generate_monthly_active_users():
+        return run_as_background_process(
+            "generate_monthly_active_users",
+            generate_monthly_active_users,
+        )
+
+    start_generate_monthly_active_users()
     if hs.config.limit_usage_by_mau:
-        clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000)
+        clock.looping_call(start_generate_monthly_active_users, 5 * 60 * 1000)
     # End of monthly active user settings
 
     if hs.config.report_stats:
@@ -568,7 +568,7 @@ def run(hs):
         clock.call_later(5 * 60, start_phone_stats_home)
 
     if hs.config.daemonize and hs.config.print_pidfile:
-        print (hs.config.pid_file)
+        print(hs.config.pid_file)
 
     _base.start_reactor(
         "synapse-homeserver",

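The pattern running through the homeserver.py hunks above is worth spelling out: periodic storage tasks are no longer handed straight to `clock.looping_call`, but wrapped in `run_as_background_process`, so each run gets its own log context and metrics and a failure cannot kill the loop. A toy stand-in for the wrapper (the real one lives in synapse.metrics.background_process_metrics; the logging here is illustrative):

# Toy stand-in for run_as_background_process, reduced to "run the task under
# a label and never let an exception escape into the scheduling loop".
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("background")

def run_as_background_process(desc, func, *args):
    try:
        return func(*args)
    except Exception:
        logger.exception("Background process '%s' failed", desc)

def reap_monthly_active_users():
    logger.info("reaping monthly active users")

def reap_wrapper():
    return run_as_background_process(
        "reap_monthly_active_users", reap_monthly_active_users
    )

# The real server schedules the wrapper, not the raw storage coroutine:
#   clock.looping_call(reap_wrapper, 1000 * 60 * 60)
reap_wrapper()
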
+ 3 - 3
synapse/app/pusher.py

@@ -161,11 +161,11 @@ class PusherReplicationHandler(ReplicationClientHandler):
                     else:
                         yield self.start_pusher(row.user_id, row.app_id, row.pushkey)
             elif stream_name == "events":
-                self.pusher_pool.on_new_notifications(
+                yield self.pusher_pool.on_new_notifications(
                     token, token,
                 )
             elif stream_name == "receipts":
-                self.pusher_pool.on_new_receipts(
+                yield self.pusher_pool.on_new_receipts(
                     token, token, set(row.room_id for row in rows)
                 )
         except Exception:
@@ -183,7 +183,7 @@ class PusherReplicationHandler(ReplicationClientHandler):
     def start_pusher(self, user_id, app_id, pushkey):
         key = "%s:%s" % (app_id, pushkey)
         logger.info("Starting pusher %r / %r", user_id, key)
-        return self.pusher_pool._refresh_pusher(app_id, pushkey, user_id)
+        return self.pusher_pool.start_pusher_by_id(app_id, pushkey, user_id)
 
 
 def start(config_options):

+ 14 - 0
synapse/app/synchrotron.py

@@ -226,7 +226,15 @@ class SynchrotronPresence(object):
 class SynchrotronTyping(object):
     def __init__(self, hs):
         self._latest_room_serial = 0
+        self._reset()
+
+    def _reset(self):
+        """
+        Reset the typing handler's data caches.
+        """
+        # map room IDs to serial numbers
         self._room_serials = {}
+        # map room IDs to sets of users currently typing
         self._room_typing = {}
 
     def stream_positions(self):
@@ -236,6 +244,12 @@ class SynchrotronTyping(object):
         return {"typing": self._latest_room_serial}
 
     def process_replication_rows(self, token, rows):
+        if self._latest_room_serial > token:
+            # The master has gone backwards. To prevent inconsistent data, just
+            # clear everything.
+            self._reset()
+
+        # Set the latest serial token to whatever the server gave us.
         self._latest_room_serial = token
 
         for row in rows:

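The synchrotron guard above handles the master's typing stream token moving backwards (for example after a master restart): rather than serve stale serials, the worker clears its caches and starts over. A toy model of that guard:

# Toy model of the reset-on-rewind guard added to SynchrotronTyping above.
class TypingCache:
    def __init__(self):
        self._latest_room_serial = 0
        self._reset()

    def _reset(self):
        self._room_serials = {}   # room ID -> serial
        self._room_typing = {}    # room ID -> set of typing users

    def process_replication(self, token, rows):
        if self._latest_room_serial > token:
            # The master has gone backwards; drop everything rather than
            # serve inconsistent data.
            self._reset()
        self._latest_room_serial = token
        for room_id, serial, typing in rows:
            self._room_serials[room_id] = serial
            self._room_typing[room_id] = set(typing)

cache = TypingCache()
cache.process_replication(10, [("!room:hs", 10, ["@a:hs"])])
cache.process_replication(3, [])   # master restarted: caches are cleared
print(cache._room_typing)          # {}
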
+ 24 - 12
synapse/appservice/scheduler.py

@@ -53,8 +53,8 @@ import logging
 from twisted.internet import defer
 
 from synapse.appservice import ApplicationServiceState
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util.logcontext import run_in_background
-from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
 
@@ -104,27 +104,34 @@ class _ServiceQueuer(object):
         self.clock = clock
 
     def enqueue(self, service, event):
-        # if this service isn't being sent something
         self.queued_events.setdefault(service.id, []).append(event)
-        run_in_background(self._send_request, service)
 
-    @defer.inlineCallbacks
-    def _send_request(self, service):
+        # start a sender for this appservice if we don't already have one
+
         if service.id in self.requests_in_flight:
             return
 
+        run_as_background_process(
+            "as-sender-%s" % (service.id, ),
+            self._send_request, service,
+        )
+
+    @defer.inlineCallbacks
+    def _send_request(self, service):
+        # sanity-check: we shouldn't get here if this service already has a sender
+        # running.
+        assert(service.id not in self.requests_in_flight)
+
         self.requests_in_flight.add(service.id)
         try:
             while True:
                 events = self.queued_events.pop(service.id, [])
                 if not events:
                     return
-
-                with Measure(self.clock, "servicequeuer.send"):
-                    try:
-                        yield self.txn_ctrl.send(service, events)
-                    except Exception:
-                        logger.exception("AS request failed")
+                try:
+                    yield self.txn_ctrl.send(service, events)
+                except Exception:
+                    logger.exception("AS request failed")
         finally:
             self.requests_in_flight.discard(service.id)
 
@@ -223,7 +230,12 @@ class _Recoverer(object):
         self.backoff_counter = 1
 
     def recover(self):
-        self.clock.call_later((2 ** self.backoff_counter), self.retry)
+        def _retry():
+            run_as_background_process(
+                "as-recoverer-%s" % (self.service.id,),
+                self.retry,
+            )
+        self.clock.call_later((2 ** self.backoff_counter), _retry)
 
     def _backoff(self):
         # cap the backoff to be around 8.5min => (2^9) = 512 secs

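The scheduler rework replaces one background send per enqueued event with a single long-lived sender per appservice: `enqueue` only spawns a sender when none is in flight, and the sender drains the queue until it is empty. A synchronous toy version of that loop (the real one is a Twisted `inlineCallbacks` coroutine running as a background process):

# Toy, synchronous version of the one-sender-per-appservice queue above.
queued_events = {}        # service id -> list of pending events
requests_in_flight = set()

def send(service_id, events):
    print("sending %d event(s) for %s" % (len(events), service_id))

def enqueue(service_id, event):
    queued_events.setdefault(service_id, []).append(event)
    if service_id in requests_in_flight:
        return  # an existing sender will pick the event up
    _send_request(service_id)

def _send_request(service_id):
    # sanity-check: only one sender per service may run at a time
    assert service_id not in requests_in_flight
    requests_in_flight.add(service_id)
    try:
        while True:
            events = queued_events.pop(service_id, [])
            if not events:
                return
            try:
                send(service_id, events)
            except Exception:
                pass  # the real code logs "AS request failed" and keeps going
    finally:
        requests_in_flight.discard(service_id)

enqueue("as1", {"type": "m.room.message"})
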
+ 1 - 1
synapse/config/__main__.py

@@ -28,7 +28,7 @@ if __name__ == "__main__":
             sys.stderr.write("\n" + str(e) + "\n")
             sys.exit(1)
 
-        print (getattr(config, key))
+        print(getattr(config, key))
         sys.exit(0)
     else:
         sys.stderr.write("Unknown command %r\n" % (action,))

+ 59 - 62
synapse/config/_base.py

@@ -106,10 +106,7 @@ class Config(object):
     @classmethod
     def check_file(cls, file_path, config_name):
         if file_path is None:
-            raise ConfigError(
-                "Missing config for %s."
-                % (config_name,)
-            )
+            raise ConfigError("Missing config for %s." % (config_name,))
         try:
             os.stat(file_path)
         except OSError as e:
@@ -128,9 +125,7 @@ class Config(object):
             if e.errno != errno.EEXIST:
                 raise
         if not os.path.isdir(dir_path):
-            raise ConfigError(
-                "%s is not a directory" % (dir_path,)
-            )
+            raise ConfigError("%s is not a directory" % (dir_path,))
         return dir_path
 
     @classmethod
@@ -156,21 +151,20 @@ class Config(object):
         return results
 
     def generate_config(
-            self,
-            config_dir_path,
-            server_name,
-            is_generating_file,
-            report_stats=None,
+        self, config_dir_path, server_name, is_generating_file, report_stats=None
     ):
         default_config = "# vim:ft=yaml\n"
 
-        default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
-            "default_config",
-            config_dir_path=config_dir_path,
-            server_name=server_name,
-            is_generating_file=is_generating_file,
-            report_stats=report_stats,
-        ))
+        default_config += "\n\n".join(
+            dedent(conf)
+            for conf in self.invoke_all(
+                "default_config",
+                config_dir_path=config_dir_path,
+                server_name=server_name,
+                is_generating_file=is_generating_file,
+                report_stats=report_stats,
+            )
+        )
 
         config = yaml.load(default_config)
 
@@ -178,23 +172,22 @@ class Config(object):
 
     @classmethod
     def load_config(cls, description, argv):
-        config_parser = argparse.ArgumentParser(
-            description=description,
-        )
+        config_parser = argparse.ArgumentParser(description=description)
         config_parser.add_argument(
-            "-c", "--config-path",
+            "-c",
+            "--config-path",
             action="append",
             metavar="CONFIG_FILE",
             help="Specify config file. Can be given multiple times and"
-                 " may specify directories containing *.yaml files."
+            " may specify directories containing *.yaml files.",
         )
 
         config_parser.add_argument(
             "--keys-directory",
             metavar="DIRECTORY",
             help="Where files such as certs and signing keys are stored when"
-                 " their location is given explicitly in the config."
-                 " Defaults to the directory containing the last config file",
+            " their location is given explicitly in the config."
+            " Defaults to the directory containing the last config file",
         )
 
         config_args = config_parser.parse_args(argv)
@@ -203,9 +196,7 @@ class Config(object):
 
         obj = cls()
         obj.read_config_files(
-            config_files,
-            keys_directory=config_args.keys_directory,
-            generate_keys=False,
+            config_files, keys_directory=config_args.keys_directory, generate_keys=False
         )
         return obj
 
@@ -213,38 +204,38 @@ class Config(object):
     def load_or_generate_config(cls, description, argv):
         config_parser = argparse.ArgumentParser(add_help=False)
         config_parser.add_argument(
-            "-c", "--config-path",
+            "-c",
+            "--config-path",
             action="append",
             metavar="CONFIG_FILE",
             help="Specify config file. Can be given multiple times and"
-                 " may specify directories containing *.yaml files."
+            " may specify directories containing *.yaml files.",
         )
         config_parser.add_argument(
             "--generate-config",
             action="store_true",
-            help="Generate a config file for the server name"
+            help="Generate a config file for the server name",
         )
         config_parser.add_argument(
             "--report-stats",
             action="store",
             help="Whether the generated config reports anonymized usage statistics",
-            choices=["yes", "no"]
+            choices=["yes", "no"],
         )
         config_parser.add_argument(
             "--generate-keys",
             action="store_true",
-            help="Generate any missing key files then exit"
+            help="Generate any missing key files then exit",
         )
         config_parser.add_argument(
             "--keys-directory",
             metavar="DIRECTORY",
             help="Used with 'generate-*' options to specify where files such as"
-                 " certs and signing keys should be stored in, unless explicitly"
-                 " specified in the config."
+            " certs and signing keys should be stored in, unless explicitly"
+            " specified in the config.",
         )
         config_parser.add_argument(
-            "-H", "--server-name",
-            help="The server name to generate a config file for"
+            "-H", "--server-name", help="The server name to generate a config file for"
         )
         config_args, remaining_args = config_parser.parse_known_args(argv)
 
@@ -257,8 +248,8 @@ class Config(object):
         if config_args.generate_config:
             if config_args.report_stats is None:
                 config_parser.error(
-                    "Please specify either --report-stats=yes or --report-stats=no\n\n" +
-                    MISSING_REPORT_STATS_SPIEL
+                    "Please specify either --report-stats=yes or --report-stats=no\n\n"
+                    + MISSING_REPORT_STATS_SPIEL
                 )
             if not config_files:
                 config_parser.error(
@@ -287,26 +278,32 @@ class Config(object):
                         config_dir_path=config_dir_path,
                         server_name=server_name,
                         report_stats=(config_args.report_stats == "yes"),
-                        is_generating_file=True
+                        is_generating_file=True,
                     )
                     obj.invoke_all("generate_files", config)
                     config_file.write(config_str)
-                print((
-                    "A config file has been generated in %r for server name"
-                    " %r with corresponding SSL keys and self-signed"
-                    " certificates. Please review this file and customise it"
-                    " to your needs."
-                ) % (config_path, server_name))
+                print(
+                    (
+                        "A config file has been generated in %r for server name"
+                        " %r with corresponding SSL keys and self-signed"
+                        " certificates. Please review this file and customise it"
+                        " to your needs."
+                    )
+                    % (config_path, server_name)
+                )
                 print(
                     "If this server name is incorrect, you will need to"
                     " regenerate the SSL certificates"
                 )
                 return
             else:
-                print((
-                    "Config file %r already exists. Generating any missing key"
-                    " files."
-                ) % (config_path,))
+                print(
+                    (
+                        "Config file %r already exists. Generating any missing key"
+                        " files."
+                    )
+                    % (config_path,)
+                )
                 generate_keys = True
 
         parser = argparse.ArgumentParser(
@@ -338,8 +335,7 @@ class Config(object):
 
         return obj
 
-    def read_config_files(self, config_files, keys_directory=None,
-                          generate_keys=False):
+    def read_config_files(self, config_files, keys_directory=None, generate_keys=False):
         if not keys_directory:
             keys_directory = os.path.dirname(config_files[-1])
 
@@ -364,8 +360,9 @@ class Config(object):
 
         if "report_stats" not in config:
             raise ConfigError(
-                MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" +
-                MISSING_REPORT_STATS_SPIEL
+                MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS
+                + "\n"
+                + MISSING_REPORT_STATS_SPIEL
             )
 
         if generate_keys:
@@ -399,16 +396,16 @@ def find_config_files(search_paths):
                 for entry in os.listdir(config_path):
                     entry_path = os.path.join(config_path, entry)
                     if not os.path.isfile(entry_path):
-                        print (
-                            "Found subdirectory in config directory: %r. IGNORING."
-                        ) % (entry_path, )
+                        err = "Found subdirectory in config directory: %r. IGNORING."
+                        print(err % (entry_path,))
                         continue
 
                     if not entry.endswith(".yaml"):
-                        print (
-                            "Found file in config directory that does not"
-                            " end in '.yaml': %r. IGNORING."
-                        ) % (entry_path, )
+                        err = (
+                            "Found file in config directory that does not end in "
+                            "'.yaml': %r. IGNORING."
+                        )
+                        print(err % (entry_path,))
                         continue
 
                     files.append(entry_path)

+ 5 - 0
synapse/config/appservice.py

@@ -33,11 +33,16 @@ class AppServiceConfig(Config):
     def read_config(self, config):
         self.app_service_config_files = config.get("app_service_config_files", [])
         self.notify_appservices = config.get("notify_appservices", True)
+        self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)
 
     def default_config(cls, **kwargs):
         return """\
         # A list of application service config file to use
         app_service_config_files: []
+
+        # Whether or not to track application service IP addresses. Implicitly
+        # enables MAU tracking for application service users.
+        track_appservice_user_ips: False
         """
 
 

+ 18 - 0
synapse/config/consent_config.py

@@ -42,6 +42,14 @@ DEFAULT_CONFIG = """\
 # until the user consents to the privacy policy. The value of the setting is
 # used as the text of the error.
 #
+# 'require_at_registration', if enabled, will add a step to the registration
+# process, similar to how captcha works. Users will be required to accept the
+# policy before their account is created.
+#
+# 'policy_name' is the display name of the policy users will see when registering
+# for an account. Has no effect unless `require_at_registration` is enabled.
+# Defaults to "Privacy Policy".
+#
 # user_consent:
 #   template_dir: res/templates/privacy
 #   version: 1.0
@@ -54,6 +62,8 @@ DEFAULT_CONFIG = """\
 #   block_events_error: >-
 #     To continue using this homeserver you must review and agree to the
 #     terms and conditions at %(consent_uri)s
+#   require_at_registration: False
+#   policy_name: Privacy Policy
 #
 """
 
@@ -67,6 +77,8 @@ class ConsentConfig(Config):
         self.user_consent_server_notice_content = None
         self.user_consent_server_notice_to_guests = False
         self.block_events_without_consent_error = None
+        self.user_consent_at_registration = False
+        self.user_consent_policy_name = "Privacy Policy"
 
     def read_config(self, config):
         consent_config = config.get("user_consent")
@@ -83,6 +95,12 @@ class ConsentConfig(Config):
         self.user_consent_server_notice_to_guests = bool(consent_config.get(
             "send_server_notice_to_guests", False,
         ))
+        self.user_consent_at_registration = bool(consent_config.get(
+            "require_at_registration", False,
+        ))
+        self.user_consent_policy_name = consent_config.get(
+            "policy_name", "Privacy Policy",
+        )
 
     def default_config(self, **kwargs):
         return DEFAULT_CONFIG

+ 18 - 22
synapse/config/emailconfig.py

@@ -19,18 +19,12 @@ from __future__ import print_function
 import email.utils
 import logging
 import os
-import sys
-import textwrap
 
-from ._base import Config
+import pkg_resources
 
-logger = logging.getLogger(__name__)
+from ._base import Config, ConfigError
 
-TEMPLATE_DIR_WARNING = """\
-WARNING: The email notifier is configured to look for templates in '%(template_dir)s',
-but no templates could be found there. We will fall back to using the example templates;
-to get rid of this warning, leave 'email.template_dir' unset.
-"""
+logger = logging.getLogger(__name__)
 
 
 class EmailConfig(Config):
@@ -78,20 +72,22 @@ class EmailConfig(Config):
             self.email_notif_template_html = email_config["notif_template_html"]
             self.email_notif_template_text = email_config["notif_template_text"]
 
-            self.email_template_dir = email_config.get("template_dir")
-
-            # backwards-compatibility hack
-            if (
-                self.email_template_dir == "res/templates"
-                and not os.path.isfile(
-                    os.path.join(self.email_template_dir, self.email_notif_template_text)
+            template_dir = email_config.get("template_dir")
+            # we need an absolute path, because we change directory after starting (and
+            # we don't yet know what auxiliary templates like mail.css we will need).
+            # (Note that loading as package_resources with jinja.PackageLoader doesn't
+            # work for the same reason.)
+            if not template_dir:
+                template_dir = pkg_resources.resource_filename(
+                    'synapse', 'res/templates'
                 )
-            ):
-                t = TEMPLATE_DIR_WARNING % {
-                    "template_dir": self.email_template_dir,
-                }
-                print(textwrap.fill(t, width=80) + "\n", file=sys.stderr)
-                self.email_template_dir = None
+            template_dir = os.path.abspath(template_dir)
+
+            for f in self.email_notif_template_text, self.email_notif_template_html:
+                p = os.path.join(template_dir, f)
+                if not os.path.isfile(p):
+                    raise ConfigError("Unable to find email template file %s" % (p, ))
+            self.email_template_dir = template_dir
 
             self.email_notif_for_new_users = email_config.get(
                 "notif_for_new_users", True

+ 2 - 1
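The email config now resolves the template directory to an absolute path at startup, defaulting to the templates shipped inside the package, and fails fast if a named template is missing. A sketch of that resolution (the function name and error type are illustrative; 'synapse'/'res/templates' mirror the defaults in the hunk above):

# Sketch of the template-directory resolution added to EmailConfig above.
import os
import pkg_resources

def resolve_template_dir(configured_dir, templates):
    template_dir = configured_dir
    if not template_dir:
        # fall back to the templates shipped inside the package
        template_dir = pkg_resources.resource_filename("synapse", "res/templates")
    # absolute, because the daemon changes directory after starting
    template_dir = os.path.abspath(template_dir)
    for f in templates:
        p = os.path.join(template_dir, f)
        if not os.path.isfile(p):
            raise ValueError("Unable to find email template file %s" % (p,))
    return template_dir
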
synapse/config/homeserver.py

@@ -31,6 +31,7 @@ from .push import PushConfig
 from .ratelimiting import RatelimitConfig
 from .registration import RegistrationConfig
 from .repository import ContentRepositoryConfig
+from .room_directory import RoomDirectoryConfig
 from .saml2 import SAML2Config
 from .server import ServerConfig
 from .server_notices_config import ServerNoticesConfig
@@ -49,7 +50,7 @@ class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
                        WorkerConfig, PasswordAuthProviderConfig, PushConfig,
                        SpamCheckerConfig, GroupsConfig, UserDirectoryConfig,
                        ConsentConfig,
-                       ServerNoticesConfig,
+                       ServerNoticesConfig, RoomDirectoryConfig,
                        ):
     pass
 

+ 1 - 0
synapse/config/logger.py

@@ -50,6 +50,7 @@ handlers:
         maxBytes: 104857600
         backupCount: 10
         filters: [context]
+        encoding: utf8
     console:
         class: logging.StreamHandler
         formatter: precise

+ 25 - 3
synapse/config/registration.py

@@ -15,10 +15,10 @@
 
 from distutils.util import strtobool
 
+from synapse.config._base import Config, ConfigError
+from synapse.types import RoomAlias
 from synapse.util.stringutils import random_string_with_symbols
 
-from ._base import Config
-
 
 class RegistrationConfig(Config):
 
@@ -37,6 +37,7 @@ class RegistrationConfig(Config):
 
         self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
         self.trusted_third_party_id_servers = config["trusted_third_party_id_servers"]
+        self.default_identity_server = config.get("default_identity_server")
         self.allow_guest_access = config.get("allow_guest_access", False)
 
         self.invite_3pid_guest = (
@@ -44,6 +45,10 @@ class RegistrationConfig(Config):
         )
 
         self.auto_join_rooms = config.get("auto_join_rooms", [])
+        for room_alias in self.auto_join_rooms:
+            if not RoomAlias.is_valid(room_alias):
+                raise ConfigError('Invalid auto_join_rooms entry %s' % (room_alias,))
+        self.autocreate_auto_join_rooms = config.get("autocreate_auto_join_rooms", True)
 
     def default_config(self, **kwargs):
         registration_shared_secret = random_string_with_symbols(50)
@@ -87,17 +92,34 @@ class RegistrationConfig(Config):
         # accessible to anonymous users.
         allow_guest_access: False
 
+        # The identity server which we suggest that clients should use when users log
+        # in on this server.
+        #
+        # (By default, no suggestion is made, so it is left up to the client.
+        # This setting is ignored unless public_baseurl is also set.)
+        #
+        # default_identity_server: https://matrix.org
+
         # The list of identity servers trusted to verify third party
         # identifiers by this server.
+        #
+        # Also defines the ID server which will be called when an account is
+        # deactivated (one will be picked arbitrarily).
         trusted_third_party_id_servers:
             - matrix.org
             - vector.im
-            - riot.im
 
         # Users who register on this homeserver will automatically be joined
         # to these rooms
         #auto_join_rooms:
         #    - "#example:example.com"
+
+        # Where auto_join_rooms are specified, setting this flag ensures that
+        # the rooms exist by creating them when the first user on the
+        # homeserver registers.
+        # Setting to false means that if the rooms are not manually created,
+        # users cannot be auto-joined since they do not exist.
+        autocreate_auto_join_rooms: true
         """ % locals()
 
     def add_arguments(self, parser):

+ 1 - 1
synapse/config/repository.py

@@ -178,7 +178,7 @@ class ContentRepositoryConfig(Config):
     def default_config(self, **kwargs):
         media_store = self.default_path("media_store")
         uploads_path = self.default_path("uploads")
-        return """
+        return r"""
         # Directory where uploaded images and attachments are stored.
         media_store_path: "%(media_store)s"
 

+ 102 - 0
synapse/config/room_directory.py

@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.util import glob_to_regex
+
+from ._base import Config, ConfigError
+
+
+class RoomDirectoryConfig(Config):
+    def read_config(self, config):
+        alias_creation_rules = config["alias_creation_rules"]
+
+        self._alias_creation_rules = [
+            _AliasRule(rule)
+            for rule in alias_creation_rules
+        ]
+
+    def default_config(self, config_dir_path, server_name, **kwargs):
+        return """
+        # The `alias_creation_rules` option controls who's allowed to create aliases
+        # on this server.
+        #
+        # The format of this option is a list of rules that contain globs that
+        # match against user_id and the new alias (fully qualified with server
+        # name). The action in the first rule that matches is taken, which can
+        # currently either be "allow" or "deny".
+        #
+        # If no rules match the request is denied.
+        alias_creation_rules:
+            - user_id: "*"
+              alias: "*"
+              action: allow
+        """
+
+    def is_alias_creation_allowed(self, user_id, alias):
+        """Checks if the given user is allowed to create the given alias
+
+        Args:
+            user_id (str)
+            alias (str)
+
+        Returns:
+            boolean: True if the user is allowed to create the alias
+        """
+        for rule in self._alias_creation_rules:
+            if rule.matches(user_id, alias):
+                return rule.action == "allow"
+
+        return False
+
+
+class _AliasRule(object):
+    def __init__(self, rule):
+        action = rule["action"]
+        user_id = rule["user_id"]
+        alias = rule["alias"]
+
+        if action in ("allow", "deny"):
+            self.action = action
+        else:
+            raise ConfigError(
+                "alias_creation_rules rules can only have action of 'allow'"
+                " or 'deny'"
+            )
+
+        try:
+            self._user_id_regex = glob_to_regex(user_id)
+            self._alias_regex = glob_to_regex(alias)
+        except Exception as e:
+            raise ConfigError("Failed to parse glob into regex: %s", e)
+
+    def matches(self, user_id, alias):
+        """Tests if this rule matches the given user_id and alias.
+
+        Args:
+            user_id (str)
+            alias (str)
+
+        Returns:
+            boolean
+        """
+
+        # Note: The regexes are anchored at both ends
+        if not self._user_id_regex.match(user_id):
+            return False
+
+        if not self._alias_regex.match(alias):
+            return False
+
+        return True

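For intuition, a first-match-wins evaluation like the one above can be exercised standalone. This sketch approximates synapse.util.glob_to_regex with the stdlib's fnmatch.translate (the real helper may differ in detail), and the rules are invented for illustration:

    import fnmatch
    import re

    def glob_to_regex(glob):
        # Stand-in for synapse.util.glob_to_regex; fnmatch.translate
        # produces a pattern anchored at both ends.
        return re.compile(fnmatch.translate(glob))

    rules = [
        {"user_id": "@admin:example.com", "alias": "*", "action": "allow"},
        {"user_id": "*", "alias": "#official-*:example.com", "action": "deny"},
        {"user_id": "*", "alias": "*", "action": "allow"},
    ]

    def is_alias_creation_allowed(user_id, alias):
        # The first rule whose globs both match decides; no match means deny.
        for rule in rules:
            if (glob_to_regex(rule["user_id"]).match(user_id)
                    and glob_to_regex(rule["alias"]).match(alias)):
                return rule["action"] == "allow"
        return False

    assert is_alias_creation_allowed("@admin:example.com", "#official-news:example.com")
    assert not is_alias_creation_allowed("@bob:example.com", "#official-news:example.com")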
+ 17 - 1
synapse/config/server.py

@@ -62,6 +62,11 @@ class ServerConfig(Config):
         # master, potentially causing inconsistency.
         self.enable_media_repo = config.get("enable_media_repo", True)
 
+        # Whether to enable search. If disabled, new entries will not be
+        # inserted into the search tables and will not be indexed. Users will
+        # receive errors when attempting to search for messages.
+        self.enable_search = config.get("enable_search", True)
+
         self.filter_timeline_limit = config.get("filter_timeline_limit", -1)
 
         # Whether we should block invites sent to users on this server
@@ -77,6 +82,7 @@ class ServerConfig(Config):
             self.max_mau_value = config.get(
                 "max_mau_value", 0,
             )
+        self.mau_stats_only = config.get("mau_stats_only", False)
 
         self.mau_limits_reserved_threepids = config.get(
             "mau_limit_reserved_threepids", []
@@ -372,13 +378,23 @@ class ServerConfig(Config):
           # max_mau_value: 50
           # mau_trial_days: 2
           #
+          # If enabled, the metrics for the number of monthly active users
+          # will be populated, but no users will be limited. If
+          # limit_usage_by_mau is true, this is implied to be true.
+          # mau_stats_only: false
+          #
           # Sometimes the server admin will want to ensure certain accounts are
           # never blocked by mau checking. These accounts are specified here.
           #
           # mau_limit_reserved_threepids:
           # - medium: 'email'
           #   address: 'reserved_user@example.com'
-
+          #
+          # Room searching
+          #
+          # If disabled, new messages will not be indexed for searching and users
+          # will receive errors when searching for messages. Defaults to enabled.
+          # enable_search: true
         """ % locals()
 
     def read_arguments(self, args):

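Both new flags are read with config.get and a permissive default, so an unmodified homeserver.yaml keeps the old behaviour. A tiny sketch of that defaulting, where the dict stands in for the parsed config:

    config = {}  # neither option present in the file

    enable_search = config.get("enable_search", True)
    mau_stats_only = config.get("mau_stats_only", False)

    assert enable_search is True      # search stays on unless explicitly disabled
    assert mau_stats_only is False    # stats-only MAU tracking is opt-in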
+ 6 - 4
synapse/crypto/keyclient.py

@@ -15,6 +15,8 @@
 
 import logging
 
+from six.moves import urllib
+
 from canonicaljson import json
 
 from twisted.internet import defer, reactor
@@ -28,15 +30,15 @@ from synapse.util import logcontext
 
 logger = logging.getLogger(__name__)
 
-KEY_API_V1 = b"/_matrix/key/v1/"
+KEY_API_V2 = "/_matrix/key/v2/server/%s"
 
 
 @defer.inlineCallbacks
-def fetch_server_key(server_name, tls_client_options_factory, path=KEY_API_V1):
+def fetch_server_key(server_name, tls_client_options_factory, key_id):
     """Fetch the keys for a remote server."""
 
     factory = SynapseKeyClientFactory()
-    factory.path = path
+    factory.path = KEY_API_V2 % (urllib.parse.quote(key_id), )
     factory.host = server_name
     endpoint = matrix_federation_endpoint(
         reactor, server_name, tls_client_options_factory, timeout=30
@@ -55,7 +57,7 @@ def fetch_server_key(server_name, tls_client_options_factory, path=KEY_API_V1):
                 raise IOError("Cannot get key for %r" % server_name)
         except (ConnectError, DomainError) as e:
             logger.warn("Error getting key for %r: %s", server_name, e)
-        except Exception as e:
+        except Exception:
             logger.exception("Error getting key for %r", server_name)
     raise IOError("Cannot get key for %r" % server_name)
 
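The new code formats the key ID into the v2 path only after percent-encoding it, which protects characters such as the ':' separating the algorithm from the key name. A standalone reproduction of that step (the key ID is illustrative; the diff uses six.moves.urllib for Python 2/3 compatibility, while plain urllib.parse is used here):

    from urllib.parse import quote

    KEY_API_V2 = "/_matrix/key/v2/server/%s"

    key_id = "ed25519:a_EqML"  # hypothetical key ID
    path = KEY_API_V2 % (quote(key_id),)
    assert path == "/_matrix/key/v2/server/ed25519%3Aa_EqML"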

Some files were not shown because too many files changed in this diff