unittest.py

# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector
# Copyright 2019 Matrix.org Federation C.I.C
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import gc
import hashlib
import hmac
import json
import logging
import secrets
import time
from typing import (
    Any,
    Awaitable,
    Callable,
    ClassVar,
    Dict,
    Generic,
    Iterable,
    List,
    NoReturn,
    Optional,
    Tuple,
    Type,
    TypeVar,
    Union,
)
from unittest.mock import Mock, patch

import canonicaljson
import signedjson.key
import unpaddedbase64
from typing_extensions import Concatenate, ParamSpec, Protocol

from twisted.internet.defer import Deferred, ensureDeferred
from twisted.python.failure import Failure
from twisted.python.threadpool import ThreadPool
from twisted.test.proto_helpers import MemoryReactor, MemoryReactorClock
from twisted.trial import unittest
from twisted.web.resource import Resource
from twisted.web.server import Request

from synapse import events
from synapse.api.constants import EventTypes
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.config._base import Config, RootConfig
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import DEFAULT_ROOM_VERSION
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.server import JsonResource
from synapse.http.site import SynapseRequest, SynapseSite
from synapse.logging.context import (
    SENTINEL_CONTEXT,
    LoggingContext,
    current_context,
    set_current_context,
)
from synapse.rest import RegisterServletsFunc
from synapse.server import HomeServer
from synapse.types import JsonDict, Requester, UserID, create_requester
from synapse.util import Clock
from synapse.util.httpresourcetree import create_resource_tree

from tests.server import (
    CustomHeaderType,
    FakeChannel,
    ThreadedMemoryReactorClock,
    get_clock,
    make_request,
    setup_test_homeserver,
)
from tests.test_utils import event_injection, setup_awaitable_errors
from tests.test_utils.logging_setup import setup_logging
from tests.utils import checked_cast, default_config, setupdb

setupdb()
setup_logging()

TV = TypeVar("TV")
_ExcType = TypeVar("_ExcType", bound=BaseException, covariant=True)

P = ParamSpec("P")
R = TypeVar("R")
S = TypeVar("S")


class _TypedFailure(Generic[_ExcType], Protocol):
    """Extension to twisted.Failure, where the 'value' has a certain type."""

    @property
    def value(self) -> _ExcType:
        ...


def around(target: TV) -> Callable[[Callable[Concatenate[S, P], R]], None]:
    """A CLOS-style 'around' modifier, which wraps the original method of the
    given instance with another piece of code.

    @around(self)
    def method_name(orig, *args, **kwargs):
        return orig(*args, **kwargs)
    """

    def _around(code: Callable[Concatenate[S, P], R]) -> None:
        name = code.__name__
        orig = getattr(target, name)

        def new(*args: P.args, **kwargs: P.kwargs) -> R:
            return code(orig, *args, **kwargs)

        setattr(target, name, new)

    return _around


_TConfig = TypeVar("_TConfig", Config, RootConfig)


def deepcopy_config(config: _TConfig) -> _TConfig:
    new_config: _TConfig

    if isinstance(config, RootConfig):
        new_config = config.__class__(config.config_files)  # type: ignore[arg-type]
    else:
        new_config = config.__class__(config.root)

    for attr_name in config.__dict__:
        if attr_name.startswith("__") or attr_name == "root":
            continue

        attr = getattr(config, attr_name)
        if isinstance(attr, Config):
            new_attr = deepcopy_config(attr)
        else:
            new_attr = attr

        setattr(new_config, attr_name, new_attr)

    return new_config


@functools.lru_cache(maxsize=8)
def _parse_config_dict(config: str) -> RootConfig:
    config_obj = HomeServerConfig()
    config_obj.parse_config_dict(json.loads(config), "", "")
    return config_obj


def make_homeserver_config_obj(config: Dict[str, Any]) -> RootConfig:
    """Creates a :class:`HomeServerConfig` instance with the given configuration dict.

    This is equivalent to::

        config_obj = HomeServerConfig()
        config_obj.parse_config_dict(config, "", "")

    but it keeps a cache of `HomeServerConfig` instances and deepcopies them as needed,
    to avoid validating the whole configuration every time.
    """
    config_obj = _parse_config_dict(json.dumps(config, sort_keys=True))
    return deepcopy_config(config_obj)
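

# A minimal sketch of typical use (the "server_name" override here is purely
# illustrative): identical config dicts hit the lru_cache in `_parse_config_dict`,
# so repeated test setup skips re-validating the whole configuration.
#
#     config = default_config("test")
#     config["server_name"] = "test"
#     config_obj = make_homeserver_config_obj(config)
#     assert config_obj.server.server_name == "test"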


class TestCase(unittest.TestCase):
    """A subclass of twisted.trial's TestCase which looks for 'loglevel'
    attributes on both itself and its individual test methods, to override the
    root logger's logging level while that test (case|method) runs."""

    def __init__(self, methodName: str):
        super().__init__(methodName)

        method = getattr(self, methodName)

        level = getattr(method, "loglevel", getattr(self, "loglevel", None))

        @around(self)
        def setUp(orig: Callable[[], R]) -> R:
            # if we're not starting in the sentinel logcontext, then to be honest
            # all future bets are off.
            if current_context():
                self.fail(
                    "Test starting with non-sentinel logging context %s"
                    % (current_context(),)
                )

            # Disable GC for duration of test. See below for why.
            gc.disable()

            old_level = logging.getLogger().level
            if level is not None and old_level != level:

                @around(self)
                def tearDown(orig: Callable[[], R]) -> R:
                    ret = orig()
                    logging.getLogger().setLevel(old_level)
                    return ret

                logging.getLogger().setLevel(level)

            # Trial messes with the warnings configuration, thus this has to be
            # done in the context of an individual TestCase.
            self.addCleanup(setup_awaitable_errors())

            return orig()

        # We want to force a GC to workaround problems with deferreds leaking
        # logcontexts when they are GCed (see the logcontext docs).
        #
        # The easiest way to do this would be to do a full GC after each test
        # run, but that is very expensive. Instead, we disable GC (above) for
        # the duration of the test and only run a gen-0 GC, which is a lot
        # quicker. This doesn't clean up everything, since the TestCase
        # instance still holds references to objects created during the test,
        # such as HomeServers, so we do a full GC every so often.

        @around(self)
        def tearDown(orig: Callable[[], R]) -> R:
            import sys
            import time
            import tracemalloc

            ret = orig()
            gc.collect(0)

            # Run a full GC every 50 gen-0 GCs.
            gen0_stats = gc.get_stats()[0]
            gen0_collections = gen0_stats["collections"]
            if gen0_collections % 50 == 0:
                if not getattr(tracemalloc, "aaa", None):
                    tracemalloc.aaa = True
                    tracemalloc.start()
                    tracemalloc.s0 = tracemalloc.take_snapshot()

                t0 = time.time()
                gc.collect()
                dt = time.time() - t0

                s1 = tracemalloc.take_snapshot()
                for line in s1.statistics("lineno")[:20]:
                    sys.stdout.write(f"  {line}\n")
                sys.stdout.write(f"full collection took {dt} s\n")

                dt = time.time() - t0
                sys.stdout.write(f"snapshot took {dt} s\n")

                if dt > 1.5:

                    def dump_paths(o: object, max_distance: int) -> None:
                        import pdb
                        from collections import deque

                        queue: deque[object] = deque()
                        seen: set[int] = set()
                        prevs: Dict[int, object] = {}
                        roots: List[object] = []
                        distances: Dict[int, int] = {}
                        whitelist = {id(queue), id(seen), id(prevs), id(roots)}
                        i = 0

                        seen.add(id(o))
                        queue.append(o)
                        distances[id(o)] = 0

                        while len(queue) > 0:
                            o = queue.popleft()

                            has_referrers = False
                            if (
                                not isinstance(o, pdb.Pdb)
                                and distances[id(o)] < max_distance
                            ):
                                referrers = gc.get_referrers(o)
                                for referrer in referrers:
                                    if id(referrer) in whitelist:
                                        continue
                                    has_referrers = True

                                    if id(referrer) in seen:
                                        continue

                                    prevs[id(referrer)] = o
                                    distances[id(referrer)] = distances[id(o)] + 1
                                    seen.add(id(referrer))
                                    queue.append(referrer)

                            if not has_referrers:
                                roots.append(o)

                            i += 1

                        print(f"{len(roots)} roots")
                        for root in roots:
                            o = root
                            while o is not None:
                                print(str(o)[:200])
                                o = prevs.get(id(o))
                            print("")
                        print(f"{len(roots)} roots")

                    import pdb

                    pdb.set_trace()

            gc.enable()
            set_current_context(SENTINEL_CONTEXT)

            return ret

    def assertObjectHasAttributes(self, attrs: Dict[str, object], obj: object) -> None:
        """Asserts that the given object has each of the attributes given, and
        that the value of each matches according to assertEqual."""
        for key in attrs.keys():
            if not hasattr(obj, key):
                raise AssertionError("Expected obj to have a '.%s'" % key)
            try:
                self.assertEqual(attrs[key], getattr(obj, key))
            except AssertionError as e:
                raise (type(e))(f"Assert error for '.{key}':") from e

    def assert_dict(self, required: dict, actual: dict) -> None:
        """Does a partial assert of a dict.

        Args:
            required: The keys and values which MUST be in 'actual'.
            actual: The test result. Extra keys will not be checked.
        """
        for key in required:
            self.assertEqual(
                required[key], actual[key], msg="%s mismatch. %s" % (key, actual)
            )


def DEBUG(target: TV) -> TV:
    """A decorator to set the .loglevel attribute to logging.DEBUG.
    Can apply to either a TestCase or an individual test method."""
    target.loglevel = logging.DEBUG  # type: ignore[attr-defined]
    return target


def INFO(target: TV) -> TV:
    """A decorator to set the .loglevel attribute to logging.INFO.
    Can apply to either a TestCase or an individual test method."""
    target.loglevel = logging.INFO  # type: ignore[attr-defined]
    return target


def logcontext_clean(target: TV) -> TV:
    """A decorator which marks the TestCase or method as 'logcontext_clean'
    ... ie, any logcontext errors should cause a test failure
    """

    def logcontext_error(msg: str) -> NoReturn:
        raise AssertionError("logcontext error: %s" % (msg,))

    patcher = patch("synapse.logging.context.logcontext_error", new=logcontext_error)
    return patcher(target)  # type: ignore[call-overload]
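

# Illustrative (hypothetical) use of the decorators above: `@DEBUG` / `@INFO`
# raise the log level for one test or a whole case, and `@logcontext_clean`
# turns logcontext violations into hard test failures.
#
#     @logcontext_clean
#     class MyStoreTestCase(HomeserverTestCase):
#         @DEBUG
#         def test_something(self) -> None:
#             ...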


class HomeserverTestCase(TestCase):
    """
    A base TestCase that reduces boilerplate for HomeServer-using test cases.

    Defines a setUp method which creates a mock reactor, and instantiates a homeserver
    running on that reactor.

    There are various hooks for modifying the way that the homeserver is instantiated:

    * override make_homeserver, for example by making it pass different parameters into
      setup_test_homeserver.

    * override default_config, to return a modified configuration dictionary for use
      by setup_test_homeserver.

    * On a per-test basis, you can use the @override_config decorator to give a
      dictionary containing additional configuration settings to be added to the basic
      config dict.

    Attributes:
        servlets: List of servlet registration functions.
        user_id (str): The user ID to assume if auth is hijacked.
        hijack_auth: Whether to hijack auth to return the user specified
            in user_id.
    """

    hijack_auth: ClassVar[bool] = True
    needs_threadpool: ClassVar[bool] = False
    servlets: ClassVar[List[RegisterServletsFunc]] = []

    def __init__(self, methodName: str):
        super().__init__(methodName)

        # see if we have any additional config for this test
        method = getattr(self, methodName)
        self._extra_config = getattr(method, "_extra_config", None)

    def setUp(self) -> None:
        """
        Set up the TestCase by calling the homeserver constructor, optionally
        hijacking the authentication system to return a fixed user, and then
        calling the prepare function.
        """
        self.reactor, self.clock = get_clock()
        self._hs_args = {"clock": self.clock, "reactor": self.reactor}
        self.hs = self.make_homeserver(self.reactor, self.clock)

        # Honour the `use_frozen_dicts` config option. We have to do this
        # manually because this is taken care of in the app `start` code, which
        # we don't run. Plus we want to reset it on tearDown.
        events.USE_FROZEN_DICTS = self.hs.config.server.use_frozen_dicts

        if self.hs is None:
            raise Exception("No homeserver returned from make_homeserver.")

        if not isinstance(self.hs, HomeServer):
            raise Exception("A homeserver wasn't returned, but %r" % (self.hs,))

        # create the root resource, and a site to wrap it.
        self.resource = self.create_test_resource()
        self.site = SynapseSite(
            logger_name="synapse.access.http.fake",
            site_tag=self.hs.config.server.server_name,
            config=self.hs.config.server.listeners[0],
            resource=self.resource,
            server_version_string="1",
            max_request_body_size=4096,
            reactor=self.reactor,
        )

        from tests.rest.client.utils import RestHelper

        self.helper = RestHelper(
            self.hs,
            checked_cast(MemoryReactorClock, self.hs.get_reactor()),
            self.site,
            getattr(self, "user_id", None),
        )

        if hasattr(self, "user_id"):
            if self.hijack_auth:
                assert self.helper.auth_user_id is not None
                token = "some_fake_token"

                # We need a valid token ID to satisfy foreign key constraints.
                token_id = self.get_success(
                    self.hs.get_datastores().main.add_access_token_to_user(
                        self.helper.auth_user_id,
                        token,
                        None,
                        None,
                    )
                )

                # This has to be a function and not just a Mock, because
                # `self.helper.auth_user_id` is temporarily reassigned in some tests
                async def get_requester(*args: Any, **kwargs: Any) -> Requester:
                    assert self.helper.auth_user_id is not None
                    return create_requester(
                        user_id=UserID.from_string(self.helper.auth_user_id),
                        access_token_id=token_id,
                    )

                # Type ignore: mypy doesn't like us assigning to methods.
                self.hs.get_auth().get_user_by_req = get_requester  # type: ignore[assignment]
                self.hs.get_auth().get_user_by_access_token = get_requester  # type: ignore[assignment]
                self.hs.get_auth().get_access_token_from_request = Mock(return_value=token)  # type: ignore[assignment]

        if self.needs_threadpool:
            self.reactor.threadpool = ThreadPool()  # type: ignore[assignment]
            self.addCleanup(self.reactor.threadpool.stop)
            self.reactor.threadpool.start()

        if hasattr(self, "prepare"):
            self.prepare(self.reactor, self.clock, self.hs)
    def tearDown(self) -> None:
        # Reset to not use frozen dicts.
        events.USE_FROZEN_DICTS = False

    def wait_on_thread(self, deferred: Deferred, timeout: int = 10) -> None:
        """
        Wait until a Deferred is done, where it's waiting on a real thread.
        """
        start_time = time.time()

        while not deferred.called:
            if start_time + timeout < time.time():
                raise ValueError("Timed out waiting for threadpool")
            self.reactor.advance(0.01)
            time.sleep(0.01)

    def wait_for_background_updates(self) -> None:
        """Block until all background database updates have completed."""
        store = self.hs.get_datastores().main
        while not self.get_success(
            store.db_pool.updates.has_completed_background_updates()
        ):
            self.get_success(
                store.db_pool.updates.do_next_background_update(False), by=0.1
            )

    def make_homeserver(
        self, reactor: ThreadedMemoryReactorClock, clock: Clock
    ) -> HomeServer:
        """
        Make and return a homeserver.

        Args:
            reactor: A Twisted Reactor, or something that pretends to be one.
            clock: The Clock, associated with the reactor.

        Returns:
            A homeserver suitable for testing.

        Function to be overridden in subclasses.
        """
        hs = self.setup_test_homeserver()
        return hs

    def create_test_resource(self) -> Resource:
        """
        Create the root resource for the test server.

        The default calls `self.create_resource_dict` and builds the resultant dict
        into a tree.
        """
        root_resource = Resource()
        create_resource_tree(self.create_resource_dict(), root_resource)
        return root_resource

    def create_resource_dict(self) -> Dict[str, Resource]:
        """Create a resource tree for the test server

        A resource tree is a mapping from path to twisted.web.resource.

        The default implementation creates a JsonResource and calls each function in
        `servlets` to register servlets against it.
        """
        servlet_resource = JsonResource(self.hs)
        for servlet in self.servlets:
            servlet(self.hs, servlet_resource)
        return {
            "/_matrix/client": servlet_resource,
            "/_synapse/admin": servlet_resource,
        }

    def default_config(self) -> JsonDict:
        """
        Get a default HomeServer config dict.
        """
        config = default_config("test")

        # apply any additional config which was specified via the override_config
        # decorator.
        if self._extra_config is not None:
            config.update(self._extra_config)

        return config
    def prepare(
        self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
    ) -> None:
        """
        Prepare for the test. This involves things like mocking out parts of
        the homeserver, or building test data common across the whole test
        suite.

        Args:
            reactor: A Twisted Reactor, or something that pretends to be one.
            clock: The Clock, associated with the reactor.
            homeserver: The HomeServer to test against.

        Function to optionally be overridden in subclasses.
        """

    def make_request(
        self,
        method: Union[bytes, str],
        path: Union[bytes, str],
        content: Union[bytes, str, JsonDict] = b"",
        access_token: Optional[str] = None,
        request: Type[Request] = SynapseRequest,
        shorthand: bool = True,
        federation_auth_origin: Optional[bytes] = None,
        content_is_form: bool = False,
        await_result: bool = True,
        custom_headers: Optional[Iterable[CustomHeaderType]] = None,
        client_ip: str = "127.0.0.1",
    ) -> FakeChannel:
        """
        Create a SynapseRequest at the path using the method and containing the
        given content.

        Args:
            method: The HTTP request method ("verb").
            path: The HTTP path, suitably URL encoded (e.g. escaped UTF-8 & spaces
                and such).
            content: The body of the request. JSON-encoded, if a dict.
            shorthand: Whether to try and be helpful and prefix the given URL
                with the usual REST API path, if it doesn't contain it.
            federation_auth_origin: if set to not-None, we will add a fake
                Authorization header pretending to be the given server name.
            content_is_form: Whether the content is URL encoded form data. Adds the
                'Content-Type': 'application/x-www-form-urlencoded' header.
            await_result: whether to wait for the request to complete rendering. If
                true (the default), will pump the test reactor until the renderer
                tells the channel the request is finished.
            custom_headers: (name, value) pairs to add as request headers
            client_ip: The IP to use as the requesting IP. Useful for testing
                ratelimiting.

        Returns:
            The FakeChannel object which stores the result of the request.
        """
        return make_request(
            self.reactor,
            self.site,
            method,
            path,
            content,
            access_token,
            request,
            shorthand,
            federation_auth_origin,
            content_is_form,
            await_result,
            custom_headers,
            client_ip,
        )
    def setup_test_homeserver(
        self, name: Optional[str] = None, **kwargs: Any
    ) -> HomeServer:
        """
        Set up the test homeserver, meant to be called by the overridable
        make_homeserver. It automatically passes through the test class's
        clock & reactor.

        Args:
            See tests.utils.setup_test_homeserver.

        Returns:
            synapse.server.HomeServer
        """
        kwargs = dict(kwargs)
        kwargs.update(self._hs_args)
        if "config" not in kwargs:
            config = self.default_config()
        else:
            config = kwargs["config"]

        # The server name can be specified using either the `name` argument or a config
        # override. The `name` argument takes precedence over any config overrides.
        if name is not None:
            config["server_name"] = name

        # Parse the config from a config dict into a HomeServerConfig
        config_obj = make_homeserver_config_obj(config)
        kwargs["config"] = config_obj

        # The server name in the config is now `name`, if provided, or the `server_name`
        # from a config override, or the default of "test". Whichever it is, we
        # construct a homeserver with a matching name.
        kwargs["name"] = config_obj.server.server_name

        async def run_bg_updates() -> None:
            with LoggingContext("run_bg_updates"):
                self.get_success(stor.db_pool.updates.run_background_updates(False))

        hs = setup_test_homeserver(self.addCleanup, **kwargs)
        stor = hs.get_datastores().main

        # Run the database background updates, when running against "master".
        if hs.__class__.__name__ == "TestHomeServer":
            self.get_success(run_bg_updates())

        return hs

    def pump(self, by: float = 0.0) -> None:
        """
        Pump the reactor enough that Deferreds will fire.
        """
        self.reactor.pump([by] * 100)

    def get_success(self, d: Awaitable[TV], by: float = 0.0) -> TV:
        deferred: Deferred[TV] = ensureDeferred(d)  # type: ignore[arg-type]
        self.pump(by=by)
        return self.successResultOf(deferred)

    def get_failure(
        self, d: Awaitable[Any], exc: Type[_ExcType]
    ) -> _TypedFailure[_ExcType]:
        """
        Run a Deferred and get a Failure from it. The failure must be of the type `exc`.
        """
        deferred: Deferred[Any] = ensureDeferred(d)  # type: ignore[arg-type]
        self.pump()
        return self.failureResultOf(deferred, exc)

    def get_success_or_raise(self, d: Awaitable[TV], by: float = 0.0) -> TV:
        """Drive deferred to completion and return result or raise exception
        on failure.
        """
        deferred: Deferred[TV] = ensureDeferred(d)  # type: ignore[arg-type]

        results: list = []
        deferred.addBoth(results.append)

        self.pump(by=by)

        if not results:
            self.fail(
                "Success result expected on {!r}, found no result instead".format(
                    deferred
                )
            )

        result = results[0]
        if isinstance(result, Failure):
            result.raiseException()

        return result
    def register_user(
        self,
        username: str,
        password: str,
        admin: Optional[bool] = False,
        displayname: Optional[str] = None,
    ) -> str:
        """
        Register a user. Requires the Admin API be registered.

        Args:
            username: The user part of the new user.
            password: The password of the new user.
            admin: Whether the user should be created as an admin or not.
            displayname: The displayname of the new user.

        Returns:
            The MXID of the new user.
        """
        self.hs.config.registration.registration_shared_secret = "shared"

        # Create the user
        channel = self.make_request("GET", "/_synapse/admin/v1/register")
        self.assertEqual(channel.code, 200, msg=channel.result)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        nonce_str = b"\x00".join([username.encode("utf8"), password.encode("utf8")])
        if admin:
            nonce_str += b"\x00admin"
        else:
            nonce_str += b"\x00notadmin"

        want_mac.update(nonce.encode("ascii") + b"\x00" + nonce_str)
        want_mac_digest = want_mac.hexdigest()

        body = {
            "nonce": nonce,
            "username": username,
            "displayname": displayname,
            "password": password,
            "admin": admin,
            "mac": want_mac_digest,
            "inhibit_login": True,
        }
        channel = self.make_request("POST", "/_synapse/admin/v1/register", body)
        self.assertEqual(channel.code, 200, channel.json_body)

        user_id = channel.json_body["user_id"]
        return user_id

    def register_appservice_user(
        self,
        username: str,
        appservice_token: str,
    ) -> Tuple[str, str]:
        """Register an appservice user as an application service.

        Requires the client-facing registration API be registered.

        Args:
            username: the user to be registered by an application service.
                Should NOT be a full username, i.e. just "localpart" as opposed to
                "@localpart:hostname"
            appservice_token: the access token for that application service.

        Raises:
            AssertionError: if the request to '/register' does not return 200 OK.

        Returns:
            The MXID of the new user, the device ID of the new user's first device.
        """
        channel = self.make_request(
            "POST",
            "/_matrix/client/r0/register",
            {
                "username": username,
                "type": "m.login.application_service",
            },
            access_token=appservice_token,
        )
        self.assertEqual(channel.code, 200, channel.json_body)
        return channel.json_body["user_id"], channel.json_body["device_id"]

    def login(
        self,
        username: str,
        password: str,
        device_id: Optional[str] = None,
        additional_request_fields: Optional[Dict[str, str]] = None,
        custom_headers: Optional[Iterable[CustomHeaderType]] = None,
    ) -> str:
        """
        Log in a user, and get an access token. Requires the Login API be registered.

        Args:
            username: The localpart of the user to log in as.
            password: The user's password.
            device_id: An optional device ID to assign to the new device created during
                login.
            additional_request_fields: A dictionary containing any additional /login
                request fields and their values.
            custom_headers: Custom HTTP headers and values to add to the /login request.

        Returns:
            The access token for the logged-in user.
        """
        body = {"type": "m.login.password", "user": username, "password": password}
        if device_id:
            body["device_id"] = device_id
        if additional_request_fields:
            body.update(additional_request_fields)

        channel = self.make_request(
            "POST",
            "/_matrix/client/r0/login",
            body,
            custom_headers=custom_headers,
        )
        self.assertEqual(channel.code, 200, channel.result)

        access_token = channel.json_body["access_token"]
        return access_token
    def create_and_send_event(
        self,
        room_id: str,
        user: UserID,
        soft_failed: bool = False,
        prev_event_ids: Optional[List[str]] = None,
    ) -> str:
        """
        Create and send an event.

        Args:
            soft_failed: Whether to create a soft failed event or not
            prev_event_ids: Explicitly set the prev events,
                or if None just use the default

        Returns:
            The new event's ID.
        """
        event_creator = self.hs.get_event_creation_handler()
        requester = create_requester(user)

        event, unpersisted_context = self.get_success(
            event_creator.create_event(
                requester,
                {
                    "type": EventTypes.Message,
                    "room_id": room_id,
                    "sender": user.to_string(),
                    "content": {"body": secrets.token_hex(), "msgtype": "m.text"},
                },
                prev_event_ids=prev_event_ids,
            )
        )
        context = self.get_success(unpersisted_context.persist(event))
        if soft_failed:
            event.internal_metadata.soft_failed = True

        self.get_success(
            event_creator.handle_new_client_event(
                requester, events_and_context=[(event, context)]
            )
        )

        return event.event_id

    def inject_room_member(self, room: str, user: str, membership: str) -> None:
        """
        Inject a membership event into a room.

        Deprecated: use event_injection.inject_room_member directly

        Args:
            room: Room ID to inject the event into.
            user: MXID of the user to inject the membership for.
            membership: The membership type.
        """
        self.get_success(
            event_injection.inject_member_event(self.hs, room, user, membership)
        )
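

# A minimal sketch of how this base class is typically wired up in a test module
# (the servlet modules, localpart and endpoint used here are illustrative, not
# prescriptive): register the servlets the test needs, create users in `prepare`,
# then drive the HTTP API with `make_request`.
#
#     from synapse.rest import admin
#     from synapse.rest.client import login, room
#
#     class MyRoomTestCase(HomeserverTestCase):
#         servlets = [
#             admin.register_servlets,
#             login.register_servlets,
#             room.register_servlets,
#         ]
#
#         def prepare(self, reactor, clock, hs):
#             self.user_id = self.register_user("alice", "password")
#             self.token = self.login("alice", "password")
#
#         def test_create_room(self) -> None:
#             channel = self.make_request(
#                 "POST", "/createRoom", {}, access_token=self.token
#             )
#             self.assertEqual(channel.code, 200, channel.json_body)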


class FederatingHomeserverTestCase(HomeserverTestCase):
    """
    A federating homeserver, set up to validate incoming federation requests
    """

    OTHER_SERVER_NAME = "other.example.com"
    OTHER_SERVER_SIGNATURE_KEY = signedjson.key.generate_signing_key("test")

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        super().prepare(reactor, clock, hs)

        # poke the other server's signing key into the key store, so that we don't
        # make requests for it
        verify_key = signedjson.key.get_verify_key(self.OTHER_SERVER_SIGNATURE_KEY)
        verify_key_id = "%s:%s" % (verify_key.alg, verify_key.version)

        self.get_success(
            hs.get_datastores().main.store_server_keys_json(
                self.OTHER_SERVER_NAME,
                verify_key_id,
                from_server=self.OTHER_SERVER_NAME,
                ts_now_ms=clock.time_msec(),
                ts_expires_ms=clock.time_msec() + 10000,
                key_json_bytes=canonicaljson.encode_canonical_json(
                    {
                        "verify_keys": {
                            verify_key_id: {
                                "key": signedjson.key.encode_verify_key_base64(
                                    verify_key
                                )
                            }
                        }
                    }
                ),
            )
        )

    def create_resource_dict(self) -> Dict[str, Resource]:
        d = super().create_resource_dict()
        d["/_matrix/federation"] = TransportLayerServer(self.hs)
        return d

    def make_signed_federation_request(
        self,
        method: str,
        path: str,
        content: Optional[JsonDict] = None,
        await_result: bool = True,
        custom_headers: Optional[Iterable[CustomHeaderType]] = None,
        client_ip: str = "127.0.0.1",
    ) -> FakeChannel:
        """Make an inbound signed federation request to this server

        The request is signed as if it came from "other.example.com", which our HS
        already has the keys for.
        """
        if custom_headers is None:
            custom_headers = []
        else:
            custom_headers = list(custom_headers)

        custom_headers.append(
            (
                "Authorization",
                _auth_header_for_request(
                    origin=self.OTHER_SERVER_NAME,
                    destination=self.hs.hostname,
                    signing_key=self.OTHER_SERVER_SIGNATURE_KEY,
                    method=method,
                    path=path,
                    content=content,
                ),
            )
        )

        return make_request(
            self.reactor,
            self.site,
            method=method,
            path=path,
            content=content if content is not None else "",
            shorthand=False,
            await_result=await_result,
            custom_headers=custom_headers,
            client_ip=client_ip,
        )

    def add_hashes_and_signatures_from_other_server(
        self,
        event_dict: JsonDict,
        room_version: RoomVersion = KNOWN_ROOM_VERSIONS[DEFAULT_ROOM_VERSION],
    ) -> JsonDict:
        """Adds hashes and signatures to the given event dict

        Returns:
            The modified event dict, for convenience
        """
        add_hashes_and_signatures(
            room_version,
            event_dict,
            signature_name=self.OTHER_SERVER_NAME,
            signing_key=self.OTHER_SERVER_SIGNATURE_KEY,
        )
        return event_dict
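

# Illustrative (hypothetical endpoint and assertion) use of the helper above:
# the request is signed as OTHER_SERVER_NAME, whose verify key was poked into
# the key store in `prepare`, so the server's signature checks pass without
# any outbound key fetches.
#
#     class MyFederationTestCase(FederatingHomeserverTestCase):
#         def test_version(self) -> None:
#             channel = self.make_signed_federation_request(
#                 "GET", "/_matrix/federation/v1/version"
#             )
#             self.assertEqual(channel.code, 200, channel.json_body)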


def _auth_header_for_request(
    origin: str,
    destination: str,
    signing_key: signedjson.key.SigningKey,
    method: str,
    path: str,
    content: Optional[JsonDict],
) -> str:
    """Build a suitable Authorization header for an outgoing federation request"""
    request_description: JsonDict = {
        "method": method,
        "uri": path,
        "destination": destination,
        "origin": origin,
    }
    if content is not None:
        request_description["content"] = content
    signature_base64 = unpaddedbase64.encode_base64(
        signing_key.sign(
            canonicaljson.encode_canonical_json(request_description)
        ).signature
    )
    return (
        f"X-Matrix origin={origin},"
        f"key={signing_key.alg}:{signing_key.version},"
        f"sig={signature_base64}"
    )


def override_config(extra_config: JsonDict) -> Callable[[TV], TV]:
    """A decorator which can be applied to test functions to give additional HS config

    For example:

        class MyTestCase(HomeserverTestCase):
            @override_config({"enable_registration": False, ...})
            def test_foo(self):
                ...

    Args:
        extra_config: Additional config settings to be merged into the default
            config dict before instantiating the test homeserver.
    """

    def decorator(func: TV) -> TV:
        # This attribute is being defined.
        func._extra_config = extra_config  # type: ignore[attr-defined]
        return func

    return decorator


def skip_unless(condition: bool, reason: str) -> Callable[[TV], TV]:
    """A test decorator which will skip the decorated test unless a condition is set

    For example:

        class MyTestCase(TestCase):
            @skip_unless(HAS_FOO, "Cannot test without foo")
            def test_foo(self):
                ...

    Args:
        condition: If false, the test will be skipped
        reason: the reason to give for skipping the test
    """

    def decorator(f: TV) -> TV:
        if not condition:
            f.skip = reason  # type: ignore
        return f

    return decorator