utils.py

# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import atexit
import hashlib
import os
import time
import uuid
import warnings
from inspect import getcallargs

from mock import Mock, patch
from six.moves.urllib import parse as urlparse

from twisted.internet import defer, reactor

from synapse.api.constants import EventTypes
from synapse.api.errors import CodeMessageException, cs_error
from synapse.api.room_versions import RoomVersions
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import DEFAULT_ROOM_VERSION
from synapse.federation.transport import server as federation_server
from synapse.http.server import HttpServer
from synapse.logging.context import LoggingContext
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.storage.engines import PostgresEngine, create_engine
from synapse.storage.prepare_database import prepare_database
from synapse.util.ratelimitutils import FederationRateLimiter

# set this to True to run the tests against postgres instead of sqlite.
#
# When running under postgres, we first create a base database with the name
# POSTGRES_BASE_DB and update it to the current schema. Then, for each test case, we
# create another unique database, using the base database as a template.
USE_POSTGRES_FOR_TESTS = os.environ.get("SYNAPSE_POSTGRES", False)
LEAVE_DB = os.environ.get("SYNAPSE_LEAVE_DB", False)
POSTGRES_USER = os.environ.get("SYNAPSE_POSTGRES_USER", None)
POSTGRES_HOST = os.environ.get("SYNAPSE_POSTGRES_HOST", None)
POSTGRES_PASSWORD = os.environ.get("SYNAPSE_POSTGRES_PASSWORD", None)
POSTGRES_BASE_DB = "_synapse_unit_tests_base_%s" % (os.getpid(),)

# the dbname we will connect to in order to create the base database.
POSTGRES_DBNAME_FOR_INITIAL_CREATE = "postgres"


def setupdb():
    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        # create a PostgresEngine
        db_engine = create_engine({"name": "psycopg2", "args": {}})

        # connect to postgres to create the base database.
        db_conn = db_engine.module.connect(
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
            dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
        cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB,))
        cur.close()
        db_conn.close()

        # Set up the schema in the base db
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        prepare_database(db_conn, db_engine, None)
        db_conn.close()

        def _cleanup():
            db_conn = db_engine.module.connect(
                user=POSTGRES_USER,
                host=POSTGRES_HOST,
                password=POSTGRES_PASSWORD,
                dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
            )
            db_conn.autocommit = True
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)
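

# Illustrative usage (not part of the original module): a test harness would
# typically call setupdb() once per process before any test cases run, e.g.
# from the test package's __init__ or a runner hook:
#
#     setupdb()   # a no-op unless SYNAPSE_POSTGRES is set in the environment
#
# Each test then clones the template database via setup_test_homeserver(), and
# the template itself is dropped at interpreter exit by the atexit hook above.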


def default_config(name, parse=False):
    """
    Create a reasonable test config.
    """
    config_dict = {
        "server_name": name,
        "media_store_path": "media",
        "uploads_path": "uploads",
        # the test signing key is just an arbitrary ed25519 key to keep the config
        # parser happy
        "signing_key": "ed25519 a_lPym qvioDNmfExFBRPgdTU+wtFYKq4JfwFRv7sYVgWvmgJg",
        "event_cache_size": 1,
        "enable_registration": True,
        "enable_registration_captcha": False,
        "macaroon_secret_key": "not even a little secret",
        "trusted_third_party_id_servers": [],
        "room_invite_state_types": [],
        "password_providers": [],
        "worker_replication_url": "",
        "worker_app": None,
        "block_non_admin_invites": False,
        "federation_domain_whitelist": None,
        "filter_timeline_limit": 5000,
        "user_directory_search_all_users": False,
        "user_consent_server_notice_content": None,
        "block_events_without_consent_error": None,
        "user_consent_at_registration": False,
        "user_consent_policy_name": "Privacy Policy",
        "media_storage_providers": [],
        "autocreate_auto_join_rooms": True,
        "auto_join_rooms": [],
        "limit_usage_by_mau": False,
        "hs_disabled": False,
        "hs_disabled_message": "",
        "hs_disabled_limit_type": "",
        "max_mau_value": 50,
        "mau_trial_days": 0,
        "mau_stats_only": False,
        "mau_limits_reserved_threepids": [],
        "admin_contact": None,
        "rc_message": {"per_second": 10000, "burst_count": 10000},
        "rc_registration": {"per_second": 10000, "burst_count": 10000},
        "rc_login": {
            "address": {"per_second": 10000, "burst_count": 10000},
            "account": {"per_second": 10000, "burst_count": 10000},
            "failed_attempts": {"per_second": 10000, "burst_count": 10000},
        },
        "saml2_enabled": False,
        "public_baseurl": None,
        "default_identity_server": None,
        "key_refresh_interval": 24 * 60 * 60 * 1000,
        "old_signing_keys": {},
        "tls_fingerprints": [],
        "use_frozen_dicts": False,
        # We need a sane default_room_version, otherwise attempts to create
        # rooms will fail.
        "default_room_version": DEFAULT_ROOM_VERSION,
        # disable user directory updates, because they get done in the
        # background, which upsets the test runner.
        "update_user_directory": False,
    }

    if parse:
        config = HomeServerConfig()
        config.parse_config_dict(config_dict, "", "")
        return config

    return config_dict
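

# Illustrative example (not part of the original module): how a test might
# tweak the default config dict before parsing it. The overridden keys below
# are chosen arbitrarily for demonstration.
def _example_default_config_override():
    config_dict = default_config("test")
    config_dict["enable_registration"] = False
    config_dict["max_mau_value"] = 10

    config = HomeServerConfig()
    config.parse_config_dict(config_dict, "", "")
    return config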


class TestHomeServer(HomeServer):
    DATASTORE_CLASS = DataStore


@defer.inlineCallbacks
def setup_test_homeserver(
    cleanup_func,
    name="test",
    datastore=None,
    config=None,
    reactor=None,
    homeserverToUse=TestHomeServer,
    **kargs
):
    """
    Set up a homeserver suitable for running tests against. Keyword arguments
    are passed to the HomeServer constructor.

    If no datastore is supplied, one is created and given to the homeserver.

    Args:
        cleanup_func : The function used to register a cleanup routine for
                       after the test.

    Calling this method directly is deprecated: you should instead derive from
    HomeserverTestCase.
    """
    if reactor is None:
        from twisted.internet import reactor

    if config is None:
        config = default_config(name, parse=True)

    config.ldap_enabled = False

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if USE_POSTGRES_FOR_TESTS:
        test_db = "synapse_test_%s" % uuid.uuid4().hex

        config.database_config = {
            "name": "psycopg2",
            "args": {
                "database": test_db,
                "host": POSTGRES_HOST,
                "password": POSTGRES_PASSWORD,
                "user": POSTGRES_USER,
                "cp_min": 1,
                "cp_max": 5,
            },
        }
    else:
        config.database_config = {
            "name": "sqlite3",
            "args": {"database": ":memory:", "cp_min": 1, "cp_max": 1},
        }

    db_engine = create_engine(config.database_config)

    # Create the database before we actually try and connect to it, based off
    # the template database we generate in setupdb()
    if datastore is None and isinstance(db_engine, PostgresEngine):
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
        cur.execute(
            "CREATE DATABASE %s WITH TEMPLATE %s;" % (test_db, POSTGRES_BASE_DB)
        )
        cur.close()
        db_conn.close()

    # we need to configure the connection pool to run the on_new_connection
    # function, so that we can test code that uses custom sqlite functions
    # (like rank).
    config.database_config["args"]["cp_openfun"] = db_engine.on_new_connection

    if datastore is None:
        hs = homeserverToUse(
            name,
            config=config,
            db_config=config.database_config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

        # Prepare the DB on SQLite -- PostgreSQL is a copy of an already up to
        # date db
        if not isinstance(db_engine, PostgresEngine):
            db_conn = hs.get_db_conn()
            yield prepare_database(db_conn, db_engine, config)
            db_conn.commit()
            db_conn.close()
        else:
            # We need to do cleanup on PostgreSQL
            def cleanup():
                import psycopg2

                # Close all the db pools
                hs.get_db_pool().close()

                dropped = False

                # Drop the test database
                db_conn = db_engine.module.connect(
                    database=POSTGRES_BASE_DB,
                    user=POSTGRES_USER,
                    host=POSTGRES_HOST,
                    password=POSTGRES_PASSWORD,
                )
                db_conn.autocommit = True
                cur = db_conn.cursor()

                # Try a few times to drop the DB. Some things may hold on to the
                # database for a few more seconds due to flakiness, preventing
                # us from dropping it when the test is over. If we can't drop
                # it, warn and move on.
                for x in range(5):
                    try:
                        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
                        db_conn.commit()
                        dropped = True
                    except psycopg2.OperationalError as e:
                        warnings.warn(
                            "Couldn't drop old db: " + str(e), category=UserWarning
                        )
                        time.sleep(0.5)

                cur.close()
                db_conn.close()

                if not dropped:
                    warnings.warn("Failed to drop old DB.", category=UserWarning)

            if not LEAVE_DB:
                # Register the cleanup hook
                cleanup_func(cleanup)

        hs.setup()
        if homeserverToUse.__name__ == "TestHomeServer":
            hs.setup_master()
    else:
        hs = homeserverToUse(
            name,
            db_pool=None,
            datastore=datastore,
            config=config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

    # bcrypt is far too slow to be doing in unit tests
    # Need to let the HS build an auth handler and then mess with it
    # because AuthHandler's constructor requires the HS, so we can't make one
    # beforehand and pass it in to the HS's constructor (chicken / egg)
    hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode("utf8")).hexdigest()
    hs.get_auth_handler().validate_hash = (
        lambda p, h: hashlib.md5(p.encode("utf8")).hexdigest() == h
    )

    fed = kargs.get("resource_for_federation", None)
    if fed:
        register_federation_servlets(hs, fed)

    return hs
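

# Illustrative usage (not part of the original module): direct use is
# deprecated in favour of HomeserverTestCase, but older tests call it roughly
# like this from an inlineCallbacks test method, where self.addCleanup is the
# standard unittest.TestCase cleanup hook:
#
#     hs = yield setup_test_homeserver(self.addCleanup, "red")
#     store = hs.get_datastore()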


def register_federation_servlets(hs, resource):
    federation_server.register_servlets(
        hs,
        resource=resource,
        authenticator=federation_server.Authenticator(hs),
        ratelimiter=FederationRateLimiter(
            hs.get_clock(), config=hs.config.rc_federation
        ),
    )


def get_mock_call_args(pattern_func, mock_func):
    """Return the arguments the mock function was called with, interpreted
    by the pattern function's argument list.
    """
    invoked_args, invoked_kargs = mock_func.call_args
    return getcallargs(pattern_func, *invoked_args, **invoked_kargs)
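

# Illustrative example (not part of the original module): get_mock_call_args
# maps a Mock's positional call back onto the named parameters of the real
# function it stands in for. The function and argument values below are made
# up for demonstration.
def _example_get_mock_call_args():
    def pattern_func(user_id, room_id, limit=10):
        pass

    mock_func = Mock()
    mock_func("@alice:test", "!room:test")

    args = get_mock_call_args(pattern_func, mock_func)
    assert args == {"user_id": "@alice:test", "room_id": "!room:test", "limit": 10}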


def mock_getRawHeaders(headers=None):
    headers = headers if headers is not None else {}

    def getRawHeaders(name, default=None):
        return headers.get(name, default)

    return getRawHeaders
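

# Illustrative example (not part of the original module): mock_getRawHeaders
# produces a stand-in for a request's getRawHeaders, returning the supplied
# dict's values and the given default for anything missing.
def _example_mock_getRawHeaders():
    getRawHeaders = mock_getRawHeaders({b"Authorization": [b"X-Matrix ..."]})
    assert getRawHeaders(b"Authorization") == [b"X-Matrix ..."]
    assert getRawHeaders(b"Content-Type") is None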


# This is a mock /resource/, not an entire server
class MockHttpResource(HttpServer):
    def __init__(self, prefix=""):
        self.callbacks = []  # 3-tuple of method/pattern/function
        self.prefix = prefix

    def trigger_get(self, path):
        return self.trigger(b"GET", path, None)

    @patch("twisted.web.http.Request")
    @defer.inlineCallbacks
    def trigger(
        self, http_method, path, content, mock_request, federation_auth_origin=None
    ):
        """Fire an HTTP event.

        Args:
            http_method : The HTTP method
            path : The HTTP path
            content : The HTTP body
            mock_request : Mocked request to pass to the event so it can get
                           content.
            federation_auth_origin (bytes|None): domain to authenticate as, for
                federation

        Returns:
            A tuple of (code, response)

        Raises:
            KeyError: If no event is found which will handle the path.
        """
        path = self.prefix + path

        # annoyingly we return a twisted http request which has chained calls
        # to get at the http content, hence mock it here.
        mock_content = Mock()
        config = {"read.return_value": content}
        mock_content.configure_mock(**config)
        mock_request.content = mock_content
        mock_request.method = http_method.encode("ascii")
        mock_request.uri = path.encode("ascii")
        mock_request.getClientIP.return_value = "-"

        headers = {}
        if federation_auth_origin is not None:
            headers[b"Authorization"] = [
                b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin,)
            ]
        mock_request.requestHeaders.getRawHeaders = mock_getRawHeaders(headers)

        # return the right path if the event requires it
        mock_request.path = path

        # add in query params to the right place
        try:
            mock_request.args = urlparse.parse_qs(path.split("?")[1])
            mock_request.path = path.split("?")[0]
            path = mock_request.path
        except Exception:
            pass

        if isinstance(path, bytes):
            path = path.decode("utf8")

        for (method, pattern, func) in self.callbacks:
            if http_method != method:
                continue

            matcher = pattern.match(path)
            if matcher:
                try:
                    args = [urlparse.unquote(u) for u in matcher.groups()]

                    (code, response) = yield func(mock_request, *args)
                    return code, response
                except CodeMessageException as e:
                    return (e.code, cs_error(e.msg, code=e.errcode))

        raise KeyError("No event can handle %s" % path)

    def register_paths(self, method, path_patterns, callback, servlet_name):
        for path_pattern in path_patterns:
            self.callbacks.append((method, path_pattern, callback))
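

# Illustrative usage (not part of the original module): a test would register
# real servlets against this resource and then fire requests at it, roughly:
#
#     res = MockHttpResource(prefix="/_matrix/client/api/v1")
#     synapse.rest.client.v1.room.register_servlets(hs, res)
#     (code, response) = yield res.trigger("GET", "/rooms/%s/state" % (room_id,), None)
#
# trigger() returns a Deferred of (code, response), so it must be yielded from
# an inlineCallbacks test (or driven by the test reactor).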


class MockKey(object):
    alg = "mock_alg"
    version = "mock_version"
    signature = b"\x9a\x87$"

    @property
    def verify_key(self):
        return self

    def sign(self, message):
        return self

    def verify(self, message, sig):
        assert sig == b"\x9a\x87$"

    def encode(self):
        return b"<fake_encoded_key>"


class MockClock(object):
    now = 1000

    def __init__(self):
        # list of lists of [absolute_time, callback, expired] in no particular
        # order
        self.timers = []
        self.loopers = []

    def time(self):
        return self.now

    def time_msec(self):
        return self.time() * 1000

    def call_later(self, delay, callback, *args, **kwargs):
        current_context = LoggingContext.current_context()

        def wrapped_callback():
            LoggingContext.thread_local.current_context = current_context
            callback(*args, **kwargs)

        t = [self.now + delay, wrapped_callback, False]
        self.timers.append(t)

        return t

    def looping_call(self, function, interval):
        self.loopers.append([function, interval / 1000.0, self.now])

    def cancel_call_later(self, timer, ignore_errs=False):
        if timer[2]:
            if not ignore_errs:
                raise Exception("Cannot cancel an expired timer")

        timer[2] = True
        self.timers = [t for t in self.timers if t != timer]

    # For unit testing
    def advance_time(self, secs):
        self.now += secs

        timers = self.timers
        self.timers = []

        for t in timers:
            time, callback, expired = t

            if expired:
                raise Exception("Timer already expired")

            if self.now >= time:
                t[2] = True
                callback()
            else:
                self.timers.append(t)

        for looped in self.loopers:
            func, interval, last = looped
            if last + interval < self.now:
                func()
                looped[2] = self.now

    def advance_time_msec(self, ms):
        self.advance_time(ms / 1000.0)

    def time_bound_deferred(self, d, *args, **kwargs):
        # We don't bother timing things out for now.
        return d
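

# Illustrative example (not part of the original module): MockClock lets a test
# drive time forward deterministically instead of sleeping for real.
def _example_mock_clock():
    clock = MockClock()
    fired = []

    clock.call_later(10, lambda: fired.append("done"))

    clock.advance_time(5)
    assert fired == []  # timer not yet due

    clock.advance_time(6)
    assert fired == ["done"]  # fires once the simulated clock passes the deadline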


def _format_call(args, kwargs):
    return ", ".join(
        ["%r" % (a) for a in args] + ["%s=%r" % (k, v) for k, v in kwargs.items()]
    )


class DeferredMockCallable(object):
    """A callable instance that stores a set of pending call expectations and
    return values for them. It allows a unit test to assert that the given set
    of function calls are eventually made, by awaiting on them to be called.
    """

    def __init__(self):
        self.expectations = []
        self.calls = []

    def __call__(self, *args, **kwargs):
        self.calls.append((args, kwargs))

        if not self.expectations:
            raise ValueError(
                "%r has no pending calls to handle call(%s)"
                % (self, _format_call(args, kwargs))
            )

        for (call, result, d) in self.expectations:
            if args == call[1] and kwargs == call[2]:
                d.callback(None)
                return result

        failure = AssertionError(
            "Was not expecting call(%s)" % (_format_call(args, kwargs))
        )

        for _, _, d in self.expectations:
            try:
                d.errback(failure)
            except Exception:
                pass

        raise failure

    def expect_call_and_return(self, call, result):
        self.expectations.append((call, result, defer.Deferred()))

    @defer.inlineCallbacks
    def await_calls(self, timeout=1000):
        deferred = defer.DeferredList(
            [d for _, _, d in self.expectations], fireOnOneErrback=True
        )

        timer = reactor.callLater(
            timeout / 1000,
            deferred.errback,
            AssertionError(
                "%d pending calls left: %s"
                % (
                    len([e for e in self.expectations if not e[2].called]),
                    [e for e in self.expectations if not e[2].called],
                )
            ),
        )

        yield deferred

        timer.cancel()

        self.calls = []

    def assert_had_no_calls(self):
        if self.calls:
            calls = self.calls
            self.calls = []

            raise AssertionError(
                "Expected not to receive any calls, got:\n"
                + "\n".join(["call(%s)" % _format_call(c[0], c[1]) for c in calls])
            )
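

# Illustrative example (not part of the original module): a minimal sketch of
# pairing DeferredMockCallable with mock.call to stub out an outbound request.
# The hostname, path and return value below are made up for demonstration.
def _example_deferred_mock_callable():
    from mock import call

    mock_put_json = DeferredMockCallable()
    mock_put_json.expect_call_and_return(
        call("remote.example.com", path="/_matrix/foo"), defer.succeed({})
    )

    # The code under test would make this call and get the canned result back.
    result = mock_put_json("remote.example.com", path="/_matrix/foo")

    # await_calls() (an inlineCallbacks method) can then be yielded to assert
    # that every expected call was eventually made.
    return result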


@defer.inlineCallbacks
def create_room(hs, room_id, creator_id):
    """Creates and persists a creation event for the given room.

    Args:
        hs
        room_id (str)
        creator_id (str)
    """
    store = hs.get_datastore()
    event_builder_factory = hs.get_event_builder_factory()
    event_creation_handler = hs.get_event_creation_handler()

    builder = event_builder_factory.for_room_version(
        RoomVersions.V1,
        {
            "type": EventTypes.Create,
            "state_key": "",
            "sender": creator_id,
            "room_id": room_id,
            "content": {},
        },
    )

    event, context = yield event_creation_handler.create_new_client_event(builder)

    yield store.persist_event(event, context)
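

# Illustrative usage (not part of the original module): a storage test that
# needs an existing room might set one up roughly like this from an
# inlineCallbacks test method:
#
#     hs = yield setup_test_homeserver(self.addCleanup)
#     yield create_room(hs, "!abc123:test", "@creator:test")
#
# after which further events for that room can be created and persisted.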