# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import atexit
import os
from typing import Any, Callable, Dict, List, Tuple, Union, overload

import attr
from typing_extensions import Literal, ParamSpec

from synapse.api.constants import EventTypes
from synapse.api.room_versions import RoomVersions
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import DEFAULT_ROOM_VERSION
from synapse.logging.context import current_context, set_current_context
from synapse.server import HomeServer
from synapse.storage.database import LoggingDatabaseConnection
from synapse.storage.engines import create_engine
from synapse.storage.prepare_database import prepare_database

# set this to True to run the tests against postgres instead of sqlite.
#
# When running under postgres, we first create a base database with the name
# POSTGRES_BASE_DB and update it to the current schema. Then, for each test case, we
# create another unique database, using the base database as a template.
USE_POSTGRES_FOR_TESTS = os.environ.get("SYNAPSE_POSTGRES", False)
LEAVE_DB = os.environ.get("SYNAPSE_LEAVE_DB", False)
POSTGRES_USER = os.environ.get("SYNAPSE_POSTGRES_USER", None)
POSTGRES_HOST = os.environ.get("SYNAPSE_POSTGRES_HOST", None)
POSTGRES_PASSWORD = os.environ.get("SYNAPSE_POSTGRES_PASSWORD", None)
POSTGRES_PORT = (
    int(os.environ["SYNAPSE_POSTGRES_PORT"])
    if "SYNAPSE_POSTGRES_PORT" in os.environ
    else None
)
POSTGRES_BASE_DB = "_synapse_unit_tests_base_%s" % (os.getpid(),)

# When debugging a specific test, it's occasionally useful to write the
# DB to disk and query it with the sqlite CLI.
SQLITE_PERSIST_DB = os.environ.get("SYNAPSE_TEST_PERSIST_SQLITE_DB") is not None

# the dbname we will connect to in order to create the base database.
POSTGRES_DBNAME_FOR_INITIAL_CREATE = "postgres"
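
# The environment variables above are read at module import time, so a postgres
# run is configured entirely from the shell. An illustrative invocation (the
# user/password values are placeholders for a local postgres install, not
# anything this module requires):
#
#   SYNAPSE_POSTGRES=1 SYNAPSE_POSTGRES_USER=postgres \
#       SYNAPSE_POSTGRES_PASSWORD=secret trial tests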


def setupdb() -> None:
    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        # create a PostgresEngine
        db_engine = create_engine({"name": "psycopg2", "args": {}})

        # connect to postgres to create the base database.
        db_conn = db_engine.module.connect(
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            port=POSTGRES_PORT,
            password=POSTGRES_PASSWORD,
            dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
        )
        db_engine.attempt_to_set_autocommit(db_conn, autocommit=True)
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
        cur.execute(
            "CREATE DATABASE %s ENCODING 'UTF8' LC_COLLATE='C' LC_CTYPE='C' "
            "template=template0;" % (POSTGRES_BASE_DB,)
        )
        cur.close()
        db_conn.close()

        # Set up in the db
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            port=POSTGRES_PORT,
            password=POSTGRES_PASSWORD,
        )
        logging_conn = LoggingDatabaseConnection(db_conn, db_engine, "tests")
        prepare_database(logging_conn, db_engine, None)
        logging_conn.close()

        def _cleanup() -> None:
            db_conn = db_engine.module.connect(
                user=POSTGRES_USER,
                host=POSTGRES_HOST,
                port=POSTGRES_PORT,
                password=POSTGRES_PASSWORD,
                dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
            )
            db_engine.attempt_to_set_autocommit(db_conn, autocommit=True)
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)
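

# setupdb() is intended to run once per test process, before any test homeservers
# are created: it is effectively a no-op under sqlite, while under postgres it
# builds the base database that each per-test database is then created from (see
# the module comment above about POSTGRES_BASE_DB).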


@overload
def default_config(name: str, parse: Literal[False] = ...) -> Dict[str, object]:
    ...


@overload
def default_config(name: str, parse: Literal[True]) -> HomeServerConfig:
    ...


def default_config(
    name: str, parse: bool = False
) -> Union[Dict[str, object], HomeServerConfig]:
    """
    Create a reasonable test config.
    """
    config_dict = {
        "server_name": name,
        # Setting this to an empty list turns off federation sending.
        "federation_sender_instances": [],
        "media_store_path": "media",
        # the test signing key is just an arbitrary ed25519 key to keep the config
        # parser happy
        "signing_key": "ed25519 a_lPym qvioDNmfExFBRPgdTU+wtFYKq4JfwFRv7sYVgWvmgJg",
        "event_cache_size": 1,
        "enable_registration": True,
        "enable_registration_captcha": False,
        "macaroon_secret_key": "not even a little secret",
        "password_providers": [],
        "worker_app": None,
        "block_non_admin_invites": False,
        "federation_domain_whitelist": None,
        "filter_timeline_limit": 5000,
        "user_directory_search_all_users": False,
        "user_consent_server_notice_content": None,
        "block_events_without_consent_error": None,
        "user_consent_at_registration": False,
        "user_consent_policy_name": "Privacy Policy",
        "media_storage_providers": [],
        "autocreate_auto_join_rooms": True,
        "auto_join_rooms": [],
        "limit_usage_by_mau": False,
        "hs_disabled": False,
        "hs_disabled_message": "",
        "max_mau_value": 50,
        "mau_trial_days": 0,
        "mau_stats_only": False,
        "mau_limits_reserved_threepids": [],
        "admin_contact": None,
        "rc_message": {"per_second": 10000, "burst_count": 10000},
        "rc_registration": {"per_second": 10000, "burst_count": 10000},
        "rc_login": {
            "address": {"per_second": 10000, "burst_count": 10000},
            "account": {"per_second": 10000, "burst_count": 10000},
            "failed_attempts": {"per_second": 10000, "burst_count": 10000},
        },
        "rc_joins": {
            "local": {"per_second": 10000, "burst_count": 10000},
            "remote": {"per_second": 10000, "burst_count": 10000},
        },
        "rc_joins_per_room": {"per_second": 10000, "burst_count": 10000},
        "rc_invites": {
            "per_room": {"per_second": 10000, "burst_count": 10000},
            "per_user": {"per_second": 10000, "burst_count": 10000},
        },
        "rc_3pid_validation": {"per_second": 10000, "burst_count": 10000},
        "saml2_enabled": False,
        "public_baseurl": None,
        "default_identity_server": None,
        "key_refresh_interval": 24 * 60 * 60 * 1000,
        "old_signing_keys": {},
        "tls_fingerprints": [],
        "use_frozen_dicts": False,
        # We need a sane default_room_version, otherwise attempts to create
        # rooms will fail.
        "default_room_version": DEFAULT_ROOM_VERSION,
        # disable user directory updates, because they get done in the
        # background, which upsets the test runner. Setting this to an
        # (obviously) fake worker name disables updating the user directory.
        "update_user_directory_from_worker": "does_not_exist_worker_name",
        "caches": {"global_factor": 1, "sync_response_cache_duration": 0},
        "listeners": [{"port": 0, "type": "http"}],
    }

    if parse:
        config = HomeServerConfig()
        config.parse_config_dict(config_dict, "", "")
        return config

    return config_dict
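

# A minimal usage sketch for default_config(). The helper below is purely
# illustrative (it is not referenced elsewhere): tests typically start from the
# unparsed dict, override a few keys, and only parse into a HomeServerConfig
# when a full config object is needed.
def _example_default_config_usage() -> None:
    # Unparsed form: a plain dict that can be tweaked per test.
    config_dict = default_config("test")
    config_dict["enable_registration"] = False  # illustrative override

    # Parsed form: a ready-made HomeServerConfig.
    parsed = default_config("test", parse=True)
    assert isinstance(parsed, HomeServerConfig)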


def mock_getRawHeaders(headers=None):  # type: ignore[no-untyped-def]
    headers = headers if headers is not None else {}

    def getRawHeaders(name, default=None):  # type: ignore[no-untyped-def]
        # If the requested header is present, the real twisted function returns
        # List[str] if name is a str and List[bytes] if name is a bytes.
        # This mock doesn't support that behaviour.
        # Fortunately, none of the current callers of mock_getRawHeaders() provide a
        # headers dict, so we don't encounter this discrepancy in practice.
        return headers.get(name, default)

    return getRawHeaders
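

def _example_mock_getRawHeaders_usage() -> None:
    # Purely illustrative sketch (not used elsewhere): the mock stands in for
    # Twisted's Headers.getRawHeaders on a faked request object, which is how
    # handler tests typically consume it.
    from unittest.mock import Mock

    request = Mock()
    request.requestHeaders.getRawHeaders = mock_getRawHeaders()

    # With no headers supplied, every lookup falls through to the default.
    assert request.requestHeaders.getRawHeaders(b"User-Agent") is None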


P = ParamSpec("P")


@attr.s(slots=True, auto_attribs=True)
class Timer:
    absolute_time: float
    callback: Callable[[], None]
    expired: bool


# TODO: Make this generic over a ParamSpec?
@attr.s(slots=True, auto_attribs=True)
class Looper:
    func: Callable[..., Any]
    interval: float  # seconds
    last: float
    args: Tuple[object, ...]
    kwargs: Dict[str, object]


class MockClock:
    now = 1000.0

    def __init__(self) -> None:
        # Timers in no particular order
        self.timers: List[Timer] = []
        self.loopers: List[Looper] = []

    def time(self) -> float:
        return self.now

    def time_msec(self) -> int:
        return int(self.time() * 1000)

    def call_later(
        self,
        delay: float,
        callback: Callable[P, object],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> Timer:
        ctx = current_context()

        def wrapped_callback() -> None:
            set_current_context(ctx)
            callback(*args, **kwargs)

        t = Timer(self.now + delay, wrapped_callback, False)
        self.timers.append(t)

        return t

    def looping_call(
        self,
        function: Callable[P, object],
        interval: float,
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> None:
        self.loopers.append(Looper(function, interval / 1000.0, self.now, args, kwargs))

    def cancel_call_later(self, timer: Timer, ignore_errs: bool = False) -> None:
        if timer.expired:
            if not ignore_errs:
                raise Exception("Cannot cancel an expired timer")

        timer.expired = True
        self.timers = [t for t in self.timers if t != timer]

    # For unit testing
    def advance_time(self, secs: float) -> None:
        self.now += secs

        timers = self.timers
        self.timers = []

        for t in timers:
            if t.expired:
                raise Exception("Timer already expired")

            if self.now >= t.absolute_time:
                t.expired = True
                t.callback()
            else:
                self.timers.append(t)

        for looped in self.loopers:
            if looped.last + looped.interval < self.now:
                looped.func(*looped.args, **looped.kwargs)
                looped.last = self.now

    def advance_time_msec(self, ms: float) -> None:
        self.advance_time(ms / 1000.0)
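

def _example_mock_clock_usage() -> None:
    # Purely illustrative sketch (not used elsewhere): MockClock lets a test
    # drive time explicitly instead of sleeping. call_later() takes a delay in
    # seconds; advance_time() fires any timer whose deadline has now passed.
    clock = MockClock()
    fired: List[str] = []

    clock.call_later(10, fired.append, "timer")

    clock.advance_time(5)
    assert fired == []  # deadline (now + 10s) not reached yet

    clock.advance_time(6)
    assert fired == ["timer"]  # 11s have elapsed, so the timer has fired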


async def create_room(hs: HomeServer, room_id: str, creator_id: str) -> None:
    """Creates and persists a creation event for the given room"""

    persistence_store = hs.get_storage_controllers().persistence
    assert persistence_store is not None
    store = hs.get_datastores().main
    event_builder_factory = hs.get_event_builder_factory()
    event_creation_handler = hs.get_event_creation_handler()

    await store.store_room(
        room_id=room_id,
        room_creator_user_id=creator_id,
        is_public=False,
        room_version=RoomVersions.V1,
    )

    builder = event_builder_factory.for_room_version(
        RoomVersions.V1,
        {
            "type": EventTypes.Create,
            "state_key": "",
            "sender": creator_id,
            "room_id": room_id,
            "content": {},
        },
    )

    event, context = await event_creation_handler.create_new_client_event(builder)

    await persistence_store.persist_event(event, context)
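
# A hedged usage sketch: inside a HomeserverTestCase (defined in tests.unittest),
# create_room() is typically driven through the reactor with get_success(); the
# room and user IDs below are illustrative.
#
#   self.get_success(create_room(self.hs, "!room:test", "@creator:test"))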