utils.py

# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import atexit
import hashlib
import os
import time
import uuid
import warnings
from inspect import getcallargs

from mock import Mock, patch
from six.moves.urllib import parse as urlparse

from twisted.internet import defer, reactor

from synapse.api.constants import EventTypes
from synapse.api.errors import CodeMessageException, cs_error
from synapse.api.room_versions import RoomVersions
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import DEFAULT_ROOM_VERSION
from synapse.federation.transport import server as federation_server
from synapse.http.server import HttpServer
from synapse.logging.context import LoggingContext
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.storage.engines import PostgresEngine, create_engine
from synapse.storage.prepare_database import prepare_database
from synapse.util.ratelimitutils import FederationRateLimiter

# set this to True to run the tests against postgres instead of sqlite.
#
# When running under postgres, we first create a base database with the name
# POSTGRES_BASE_DB and update it to the current schema. Then, for each test case, we
# create another unique database, using the base database as a template.
USE_POSTGRES_FOR_TESTS = os.environ.get("SYNAPSE_POSTGRES", False)
LEAVE_DB = os.environ.get("SYNAPSE_LEAVE_DB", False)
POSTGRES_USER = os.environ.get("SYNAPSE_POSTGRES_USER", None)
POSTGRES_HOST = os.environ.get("SYNAPSE_POSTGRES_HOST", None)
POSTGRES_PASSWORD = os.environ.get("SYNAPSE_POSTGRES_PASSWORD", None)
POSTGRES_BASE_DB = "_synapse_unit_tests_base_%s" % (os.getpid(),)

# the dbname we will connect to in order to create the base database.
POSTGRES_DBNAME_FOR_INITIAL_CREATE = "postgres"
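
# Illustrative only (kept as a comment so the module stays import-safe): the
# PostgreSQL code path is driven entirely by the environment variables read above.
# An invocation might look like the following; `trial tests` as the runner is an
# assumption, any runner that imports this test suite behaves the same way.
#
#   SYNAPSE_POSTGRES=1 \
#   SYNAPSE_POSTGRES_USER=postgres \
#   SYNAPSE_POSTGRES_HOST=localhost \
#   SYNAPSE_POSTGRES_PASSWORD=secret \
#   trial tests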


def setupdb():
    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        # create a PostgresEngine
        db_engine = create_engine({"name": "psycopg2", "args": {}})

        # connect to postgres to create the base database.
        db_conn = db_engine.module.connect(
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
            dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
        cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB,))
        cur.close()
        db_conn.close()

        # Set up in the db
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        prepare_database(db_conn, db_engine, None)
        db_conn.close()

        def _cleanup():
            db_conn = db_engine.module.connect(
                user=POSTGRES_USER,
                host=POSTGRES_HOST,
                password=POSTGRES_PASSWORD,
                dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
            )
            db_conn.autocommit = True
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)


def default_config(name, parse=False):
    """
    Create a reasonable test config.
    """
    config_dict = {
        "server_name": name,
        "media_store_path": "media",
        "uploads_path": "uploads",
        # the test signing key is just an arbitrary ed25519 key to keep the config
        # parser happy
        "signing_key": "ed25519 a_lPym qvioDNmfExFBRPgdTU+wtFYKq4JfwFRv7sYVgWvmgJg",
        "event_cache_size": 1,
        "enable_registration": True,
        "enable_registration_captcha": False,
        "macaroon_secret_key": "not even a little secret",
        "trusted_third_party_id_servers": [],
        "room_invite_state_types": [],
        "password_providers": [],
        "worker_replication_url": "",
        "worker_app": None,
        "block_non_admin_invites": False,
        "federation_domain_whitelist": None,
        "filter_timeline_limit": 5000,
        "user_directory_search_all_users": False,
        "user_consent_server_notice_content": None,
        "block_events_without_consent_error": None,
        "user_consent_at_registration": False,
        "user_consent_policy_name": "Privacy Policy",
        "media_storage_providers": [],
        "autocreate_auto_join_rooms": True,
        "auto_join_rooms": [],
        "limit_usage_by_mau": False,
        "hs_disabled": False,
        "hs_disabled_message": "",
        "max_mau_value": 50,
        "mau_trial_days": 0,
        "mau_stats_only": False,
        "mau_limits_reserved_threepids": [],
        "admin_contact": None,
        "rc_message": {"per_second": 10000, "burst_count": 10000},
        "rc_registration": {"per_second": 10000, "burst_count": 10000},
        "rc_login": {
            "address": {"per_second": 10000, "burst_count": 10000},
            "account": {"per_second": 10000, "burst_count": 10000},
            "failed_attempts": {"per_second": 10000, "burst_count": 10000},
        },
        "saml2_enabled": False,
        "public_baseurl": None,
        "default_identity_server": None,
        "key_refresh_interval": 24 * 60 * 60 * 1000,
        "old_signing_keys": {},
        "tls_fingerprints": [],
        "use_frozen_dicts": False,
        # We need a sane default_room_version, otherwise attempts to create
        # rooms will fail.
        "default_room_version": DEFAULT_ROOM_VERSION,
        # disable user directory updates, because they get done in the
        # background, which upsets the test runner.
        "update_user_directory": False,
    }

    if parse:
        config = HomeServerConfig()
        config.parse_config_dict(config_dict, "", "")
        return config

    return config_dict
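

# Illustrative only: tests usually start from default_config() and override just the
# settings they care about. The overrides below are arbitrary examples; the helper
# itself is hypothetical and is not used elsewhere in the test suite.
def _example_config_dict(name="test"):
    config_dict = default_config(name)
    config_dict["enable_registration_captcha"] = True
    config_dict["max_mau_value"] = 2
    return config_dict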


class TestHomeServer(HomeServer):
    DATASTORE_CLASS = DataStore


@defer.inlineCallbacks
def setup_test_homeserver(
    cleanup_func,
    name="test",
    datastore=None,
    config=None,
    reactor=None,
    homeserverToUse=TestHomeServer,
    **kargs
):
    """
    Set up a homeserver suitable for running tests against. Keyword arguments
    are passed to the Homeserver constructor.

    If no datastore is supplied, one is created and given to the homeserver.

    Args:
        cleanup_func : The function used to register a cleanup routine for
                       after the test.

    Calling this method directly is deprecated: you should instead derive from
    HomeserverTestCase.
    """
    if reactor is None:
        from twisted.internet import reactor

    if config is None:
        config = default_config(name, parse=True)

    config.ldap_enabled = False

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if USE_POSTGRES_FOR_TESTS:
        test_db = "synapse_test_%s" % uuid.uuid4().hex

        config.database_config = {
            "name": "psycopg2",
            "args": {
                "database": test_db,
                "host": POSTGRES_HOST,
                "password": POSTGRES_PASSWORD,
                "user": POSTGRES_USER,
                "cp_min": 1,
                "cp_max": 5,
            },
        }
    else:
        config.database_config = {
            "name": "sqlite3",
            "args": {"database": ":memory:", "cp_min": 1, "cp_max": 1},
        }

    db_engine = create_engine(config.database_config)

    # Create the database before we actually try and connect to it, based off
    # the template database we generate in setupdb()
    if datastore is None and isinstance(db_engine, PostgresEngine):
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
        cur.execute(
            "CREATE DATABASE %s WITH TEMPLATE %s;" % (test_db, POSTGRES_BASE_DB)
        )
        cur.close()
        db_conn.close()

    # we need to configure the connection pool to run the on_new_connection
    # function, so that we can test code that uses custom sqlite functions
    # (like rank).
    config.database_config["args"]["cp_openfun"] = db_engine.on_new_connection

    if datastore is None:
        hs = homeserverToUse(
            name,
            config=config,
            db_config=config.database_config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

        # Prepare the DB on SQLite -- PostgreSQL is a copy of an already up to
        # date db
        if not isinstance(db_engine, PostgresEngine):
            db_conn = hs.get_db_conn()
            yield prepare_database(db_conn, db_engine, config)
            db_conn.commit()
            db_conn.close()
        else:
            # We need to do cleanup on PostgreSQL
            def cleanup():
                import psycopg2

                # Close all the db pools
                hs.get_db_pool().close()

                dropped = False

                # Drop the test database
                db_conn = db_engine.module.connect(
                    database=POSTGRES_BASE_DB,
                    user=POSTGRES_USER,
                    host=POSTGRES_HOST,
                    password=POSTGRES_PASSWORD,
                )
                db_conn.autocommit = True
                cur = db_conn.cursor()

                # Try a few times to drop the DB. Some things may hold on to the
                # database for a few more seconds due to flakiness, preventing
                # us from dropping it when the test is over. If we can't drop
                # it, warn and move on.
                for x in range(5):
                    try:
                        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
                        db_conn.commit()
                        dropped = True
                    except psycopg2.OperationalError as e:
                        warnings.warn(
                            "Couldn't drop old db: " + str(e), category=UserWarning
                        )
                        time.sleep(0.5)

                cur.close()
                db_conn.close()

                if not dropped:
                    warnings.warn("Failed to drop old DB.", category=UserWarning)

            if not LEAVE_DB:
                # Register the cleanup hook
                cleanup_func(cleanup)

        hs.setup()
        if homeserverToUse.__name__ == "TestHomeServer":
            hs.setup_master()
    else:
        # If we have been given an explicit datastore we probably want to mock
        # out the DataStores somehow too. This all feels a bit wrong, but then
        # mocking the stores feels wrong too.
        datastores = Mock(datastore=datastore)

        hs = homeserverToUse(
            name,
            db_pool=None,
            datastore=datastore,
            datastores=datastores,
            config=config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

    # bcrypt is far too slow to be doing in unit tests
    # Need to let the HS build an auth handler and then mess with it
    # because AuthHandler's constructor requires the HS, so we can't make one
    # beforehand and pass it in to the HS's constructor (chicken / egg)
    hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode("utf8")).hexdigest()
    hs.get_auth_handler().validate_hash = (
        lambda p, h: hashlib.md5(p.encode("utf8")).hexdigest() == h
    )

    fed = kargs.get("resource_for_federation", None)
    if fed:
        register_federation_servlets(hs, fed)

    return hs
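

# Illustrative only: the (deprecated) direct-call pattern, roughly as a legacy test's
# setUp would use it; new tests should derive from HomeserverTestCase instead. This
# helper is hypothetical, and `cleanup_func` is typically a TestCase's addCleanup.
@defer.inlineCallbacks
def _example_make_homeserver(cleanup_func):
    hs = yield setup_test_homeserver(cleanup_func, name="test")
    return hs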


def register_federation_servlets(hs, resource):
    federation_server.register_servlets(
        hs,
        resource=resource,
        authenticator=federation_server.Authenticator(hs),
        ratelimiter=FederationRateLimiter(
            hs.get_clock(), config=hs.config.rc_federation
        ),
    )


def get_mock_call_args(pattern_func, mock_func):
    """Return the arguments the mock function was called with, interpreted
    by the pattern function's argument list.
    """
    invoked_args, invoked_kargs = mock_func.call_args
    return getcallargs(pattern_func, *invoked_args, **invoked_kargs)


def mock_getRawHeaders(headers=None):
    headers = headers if headers is not None else {}

    def getRawHeaders(name, default=None):
        return headers.get(name, default)

    return getRawHeaders
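

# Illustrative only: a hypothetical helper showing how a test might stub out header
# access on a mocked request, mirroring what MockHttpResource.trigger() does below.
def _example_mock_request_with_auth(origin=b"remote"):
    request = Mock()
    request.requestHeaders.getRawHeaders = mock_getRawHeaders(
        {b"Authorization": [b"X-Matrix origin=%s,key=,sig=" % (origin,)]}
    )
    return request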


# This is a mock /resource/ not an entire server
class MockHttpResource(HttpServer):
    def __init__(self, prefix=""):
        self.callbacks = []  # 3-tuple of method/pattern/function
        self.prefix = prefix

    def trigger_get(self, path):
        return self.trigger(b"GET", path, None)

    @patch("twisted.web.http.Request")
    @defer.inlineCallbacks
    def trigger(
        self, http_method, path, content, mock_request, federation_auth_origin=None
    ):
        """Fire an HTTP event.

        Args:
            http_method : The HTTP method
            path : The HTTP path
            content : The HTTP body
            mock_request : Mocked request to pass to the event so it can get
                content.
            federation_auth_origin (bytes|None): domain to authenticate as, for
                federation

        Returns:
            A tuple of (code, response)

        Raises:
            KeyError If no event is found which will handle the path.
        """
        path = self.prefix + path

        # annoyingly we return a twisted http request which has chained calls
        # to get at the http content, hence mock it here.
        mock_content = Mock()
        config = {"read.return_value": content}
        mock_content.configure_mock(**config)
        mock_request.content = mock_content

        mock_request.method = http_method.encode("ascii")
        mock_request.uri = path.encode("ascii")

        mock_request.getClientIP.return_value = "-"

        headers = {}
        if federation_auth_origin is not None:
            headers[b"Authorization"] = [
                b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin,)
            ]
        mock_request.requestHeaders.getRawHeaders = mock_getRawHeaders(headers)

        # return the right path if the event requires it
        mock_request.path = path

        # add in query params to the right place
        try:
            mock_request.args = urlparse.parse_qs(path.split("?")[1])
            mock_request.path = path.split("?")[0]
            path = mock_request.path
        except Exception:
            pass

        if isinstance(path, bytes):
            path = path.decode("utf8")

        for (method, pattern, func) in self.callbacks:
            if http_method != method:
                continue

            matcher = pattern.match(path)
            if matcher:
                try:
                    args = [urlparse.unquote(u) for u in matcher.groups()]

                    (code, response) = yield func(mock_request, *args)
                    return code, response
                except CodeMessageException as e:
                    return (e.code, cs_error(e.msg, code=e.errcode))

        raise KeyError("No event can handle %s" % path)

    def register_paths(self, method, path_patterns, callback, servlet_name):
        for path_pattern in path_patterns:
            self.callbacks.append((method, path_pattern, callback))
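

# Illustrative only: a MockHttpResource stands in for the HTTP layer, so a test can
# register REST servlets against it and then fire requests through trigger(). The
# module name and registration call below are placeholders for whichever servlets a
# test is exercising.
#
#     resource = MockHttpResource(prefix="/_matrix/client/r0")
#     some_rest_module.register_servlets(hs, resource)
#     code, response = yield resource.trigger(b"GET", "/some/path", None)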


class MockKey(object):
    alg = "mock_alg"
    version = "mock_version"
    signature = b"\x9a\x87$"

    @property
    def verify_key(self):
        return self

    def sign(self, message):
        return self

    def verify(self, message, sig):
        assert sig == b"\x9a\x87$"

    def encode(self):
        return b"<fake_encoded_key>"


class MockClock(object):
    now = 1000

    def __init__(self):
        # list of lists of [absolute_time, callback, expired] in no particular
        # order
        self.timers = []
        self.loopers = []

    def time(self):
        return self.now

    def time_msec(self):
        return self.time() * 1000

    def call_later(self, delay, callback, *args, **kwargs):
        current_context = LoggingContext.current_context()

        def wrapped_callback():
            LoggingContext.thread_local.current_context = current_context
            callback(*args, **kwargs)

        t = [self.now + delay, wrapped_callback, False]
        self.timers.append(t)

        return t

    def looping_call(self, function, interval):
        self.loopers.append([function, interval / 1000.0, self.now])

    def cancel_call_later(self, timer, ignore_errs=False):
        if timer[2]:
            if not ignore_errs:
                raise Exception("Cannot cancel an expired timer")

        timer[2] = True
        self.timers = [t for t in self.timers if t != timer]

    # For unit testing
    def advance_time(self, secs):
        self.now += secs

        timers = self.timers
        self.timers = []

        for t in timers:
            time, callback, expired = t

            if expired:
                raise Exception("Timer already expired")

            if self.now >= time:
                t[2] = True
                callback()
            else:
                self.timers.append(t)

        for looped in self.loopers:
            func, interval, last = looped
            if last + interval < self.now:
                func()
                looped[2] = self.now

    def advance_time_msec(self, ms):
        self.advance_time(ms / 1000.0)

    def time_bound_deferred(self, d, *args, **kwargs):
        # We don't bother timing things out for now.
        return d
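

# Illustrative only: MockClock lets a test schedule work and then step virtual time
# forward deterministically instead of sleeping. This helper is hypothetical and is
# never called; it just demonstrates call_later() and advance_time().
def _example_mock_clock_usage():
    clock = MockClock()
    fired = []
    clock.call_later(10, fired.append, "ping")
    clock.advance_time(5)  # only 5s have passed; the timer is due at +10s
    clock.advance_time(6)  # 11s in total, so the callback fires
    assert fired == ["ping"]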


def _format_call(args, kwargs):
    return ", ".join(
        ["%r" % (a) for a in args] + ["%s=%r" % (k, v) for k, v in kwargs.items()]
    )


class DeferredMockCallable(object):
    """A callable instance that stores a set of pending call expectations and
    return values for them. It allows a unit test to assert that the given set
    of function calls are eventually made, by awaiting on them to be called.
    """

    def __init__(self):
        self.expectations = []
        self.calls = []

    def __call__(self, *args, **kwargs):
        self.calls.append((args, kwargs))

        if not self.expectations:
            raise ValueError(
                "%r has no pending calls to handle call(%s)"
                % (self, _format_call(args, kwargs))
            )

        for (call, result, d) in self.expectations:
            if args == call[1] and kwargs == call[2]:
                d.callback(None)
                return result

        failure = AssertionError(
            "Was not expecting call(%s)" % (_format_call(args, kwargs))
        )

        for _, _, d in self.expectations:
            try:
                d.errback(failure)
            except Exception:
                pass

        raise failure

    def expect_call_and_return(self, call, result):
        self.expectations.append((call, result, defer.Deferred()))

    @defer.inlineCallbacks
    def await_calls(self, timeout=1000):
        deferred = defer.DeferredList(
            [d for _, _, d in self.expectations], fireOnOneErrback=True
        )

        timer = reactor.callLater(
            timeout / 1000,
            deferred.errback,
            AssertionError(
                "%d pending calls left: %s"
                % (
                    len([e for e in self.expectations if not e[2].called]),
                    [e for e in self.expectations if not e[2].called],
                )
            ),
        )

        yield deferred

        timer.cancel()

        self.calls = []

    def assert_had_no_calls(self):
        if self.calls:
            calls = self.calls
            self.calls = []

            raise AssertionError(
                "Expected not to receive any calls, got:\n"
                + "\n".join(["call(%s)" % _format_call(c[0], c[1]) for c in calls])
            )
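

# Illustrative only: a hypothetical sketch of the usual DeferredMockCallable pattern:
# declare the calls a test expects up front, hand back canned Deferreds, and await
# the expectations at the end. `call` comes from mock and is imported locally because
# this module only imports Mock and patch.
@defer.inlineCallbacks
def _example_deferred_mock_usage():
    from mock import call

    mock_fn = DeferredMockCallable()
    mock_fn.expect_call_and_return(call("foo", bar=1), defer.succeed("result"))

    # the code under test would normally make this call
    result = yield mock_fn("foo", bar=1)
    assert result == "result"

    # resolves once every expected call has been made (or errbacks after `timeout` ms)
    yield mock_fn.await_calls()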


@defer.inlineCallbacks
def create_room(hs, room_id, creator_id):
    """Creates and persists a creation event for the given room.

    Args:
        hs
        room_id (str)
        creator_id (str)
    """
    persistence_store = hs.get_storage().persistence
    event_builder_factory = hs.get_event_builder_factory()
    event_creation_handler = hs.get_event_creation_handler()

    builder = event_builder_factory.for_room_version(
        RoomVersions.V1,
        {
            "type": EventTypes.Create,
            "state_key": "",
            "sender": creator_id,
            "room_id": room_id,
            "content": {},
        },
    )

    event, context = yield event_creation_handler.create_new_client_event(builder)

    yield persistence_store.persist_event(event, context)
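

# Illustrative only: a storage- or handler-level test would typically seed a room
# before exercising the code under test, e.g. (the room and user ids below are
# placeholders):
#
#     @defer.inlineCallbacks
#     def test_something(self):
#         yield create_room(self.hs, "!room:test", "@creator:test")
#         ...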