utils.py
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import atexit
import hashlib
import os
import time
import uuid
import warnings
from inspect import getcallargs

from mock import Mock, patch
from six.moves.urllib import parse as urlparse

from twisted.internet import defer, reactor

from synapse.api.constants import EventTypes
from synapse.api.errors import CodeMessageException, cs_error
from synapse.api.room_versions import RoomVersions
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import DEFAULT_ROOM_VERSION
from synapse.federation.transport import server as federation_server
from synapse.http.server import HttpServer
from synapse.logging.context import LoggingContext
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.storage.engines import PostgresEngine, create_engine
from synapse.storage.prepare_database import prepare_database
from synapse.util.ratelimitutils import FederationRateLimiter

# set this to True to run the tests against postgres instead of sqlite.
#
# When running under postgres, we first create a base database with the name
# POSTGRES_BASE_DB and update it to the current schema. Then, for each test case, we
# create another unique database, using the base database as a template.
USE_POSTGRES_FOR_TESTS = os.environ.get("SYNAPSE_POSTGRES", False)
LEAVE_DB = os.environ.get("SYNAPSE_LEAVE_DB", False)
POSTGRES_USER = os.environ.get("SYNAPSE_POSTGRES_USER", None)
POSTGRES_HOST = os.environ.get("SYNAPSE_POSTGRES_HOST", None)
POSTGRES_PASSWORD = os.environ.get("SYNAPSE_POSTGRES_PASSWORD", None)
POSTGRES_BASE_DB = "_synapse_unit_tests_base_%s" % (os.getpid(),)

# the dbname we will connect to in order to create the base database.
POSTGRES_DBNAME_FOR_INITIAL_CREATE = "postgres"
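
# Illustrative sketch: the PostgreSQL mode above is driven entirely by the
# environment variables read at import time, so an invocation from a shell with
# a local postgres instance might look something like:
#
#   SYNAPSE_POSTGRES=1 SYNAPSE_POSTGRES_USER=postgres trial tests
#
# With SYNAPSE_POSTGRES unset, the tests fall back to an in-memory SQLite
# database (see setup_test_homeserver below).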


def setupdb():
    # If we're using PostgreSQL, set up the db once
    if USE_POSTGRES_FOR_TESTS:
        # create a PostgresEngine
        db_engine = create_engine({"name": "psycopg2", "args": {}})

        # connect to postgres to create the base database.
        db_conn = db_engine.module.connect(
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
            dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
        cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB,))
        cur.close()
        db_conn.close()

        # Set up in the db
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        prepare_database(db_conn, db_engine, None)
        db_conn.close()

        def _cleanup():
            db_conn = db_engine.module.connect(
                user=POSTGRES_USER,
                host=POSTGRES_HOST,
                password=POSTGRES_PASSWORD,
                dbname=POSTGRES_DBNAME_FOR_INITIAL_CREATE,
            )
            db_conn.autocommit = True
            cur = db_conn.cursor()
            cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,))
            cur.close()
            db_conn.close()

        atexit.register(_cleanup)


def default_config(name, parse=False):
    """
    Create a reasonable test config.
    """
    config_dict = {
        "server_name": name,
        "send_federation": False,
        "media_store_path": "media",
        "uploads_path": "uploads",
        # the test signing key is just an arbitrary ed25519 key to keep the config
        # parser happy
        "signing_key": "ed25519 a_lPym qvioDNmfExFBRPgdTU+wtFYKq4JfwFRv7sYVgWvmgJg",
        "event_cache_size": 1,
        "enable_registration": True,
        "enable_registration_captcha": False,
        "macaroon_secret_key": "not even a little secret",
        "trusted_third_party_id_servers": [],
        "room_invite_state_types": [],
        "password_providers": [],
        "worker_replication_url": "",
        "worker_app": None,
        "block_non_admin_invites": False,
        "federation_domain_whitelist": None,
        "filter_timeline_limit": 5000,
        "user_directory_search_all_users": False,
        "user_consent_server_notice_content": None,
        "block_events_without_consent_error": None,
        "user_consent_at_registration": False,
        "user_consent_policy_name": "Privacy Policy",
        "media_storage_providers": [],
        "autocreate_auto_join_rooms": True,
        "auto_join_rooms": [],
        "limit_usage_by_mau": False,
        "hs_disabled": False,
        "hs_disabled_message": "",
        "max_mau_value": 50,
        "mau_trial_days": 0,
        "mau_stats_only": False,
        "mau_limits_reserved_threepids": [],
        "admin_contact": None,
        "rc_message": {"per_second": 10000, "burst_count": 10000},
        "rc_registration": {"per_second": 10000, "burst_count": 10000},
        "rc_login": {
            "address": {"per_second": 10000, "burst_count": 10000},
            "account": {"per_second": 10000, "burst_count": 10000},
            "failed_attempts": {"per_second": 10000, "burst_count": 10000},
        },
        "saml2_enabled": False,
        "public_baseurl": None,
        "default_identity_server": None,
        "key_refresh_interval": 24 * 60 * 60 * 1000,
        "old_signing_keys": {},
        "tls_fingerprints": [],
        "use_frozen_dicts": False,
        # We need a sane default_room_version, otherwise attempts to create
        # rooms will fail.
        "default_room_version": DEFAULT_ROOM_VERSION,
        # disable user directory updates, because they get done in the
        # background, which upsets the test runner.
        "update_user_directory": False,
    }

    if parse:
        config = HomeServerConfig()
        config.parse_config_dict(config_dict, "", "")
        return config

    return config_dict
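
# Example (a sketch of how this helper is typically consumed): callers that only
# need a plain dict of settings can take the default return value, while code
# that needs a fully parsed config object passes parse=True:
#
#   config_dict = default_config("test")          # plain dict
#   config = default_config("test", parse=True)   # HomeServerConfig instance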


class TestHomeServer(HomeServer):
    DATASTORE_CLASS = DataStore


@defer.inlineCallbacks
def setup_test_homeserver(
    cleanup_func,
    name="test",
    datastore=None,
    config=None,
    reactor=None,
    homeserverToUse=TestHomeServer,
    **kargs
):
    """
    Set up a homeserver suitable for running tests against. Keyword arguments
    are passed to the HomeServer constructor.

    If no datastore is supplied, one is created and given to the homeserver.

    Args:
        cleanup_func : The function used to register a cleanup routine for
                       after the test.

    Calling this method directly is deprecated: you should instead derive from
    HomeserverTestCase.
    """
    if reactor is None:
        from twisted.internet import reactor

    if config is None:
        config = default_config(name, parse=True)

    config.ldap_enabled = False

    if "clock" not in kargs:
        kargs["clock"] = MockClock()

    if USE_POSTGRES_FOR_TESTS:
        test_db = "synapse_test_%s" % uuid.uuid4().hex

        config.database_config = {
            "name": "psycopg2",
            "args": {
                "database": test_db,
                "host": POSTGRES_HOST,
                "password": POSTGRES_PASSWORD,
                "user": POSTGRES_USER,
                "cp_min": 1,
                "cp_max": 5,
            },
        }
    else:
        config.database_config = {
            "name": "sqlite3",
            "args": {"database": ":memory:", "cp_min": 1, "cp_max": 1},
        }

    db_engine = create_engine(config.database_config)

    # Create the database before we actually try and connect to it, based off
    # the template database we generate in setupdb()
    if datastore is None and isinstance(db_engine, PostgresEngine):
        db_conn = db_engine.module.connect(
            database=POSTGRES_BASE_DB,
            user=POSTGRES_USER,
            host=POSTGRES_HOST,
            password=POSTGRES_PASSWORD,
        )
        db_conn.autocommit = True
        cur = db_conn.cursor()
        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
        cur.execute(
            "CREATE DATABASE %s WITH TEMPLATE %s;" % (test_db, POSTGRES_BASE_DB)
        )
        cur.close()
        db_conn.close()

    # we need to configure the connection pool to run the on_new_connection
    # function, so that we can test code that uses custom sqlite functions
    # (like rank).
    config.database_config["args"]["cp_openfun"] = db_engine.on_new_connection

    if datastore is None:
        hs = homeserverToUse(
            name,
            config=config,
            db_config=config.database_config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

        # Prepare the DB on SQLite -- PostgreSQL is a copy of an already up to
        # date db
        if not isinstance(db_engine, PostgresEngine):
            db_conn = hs.get_db_conn()
            yield prepare_database(db_conn, db_engine, config)
            db_conn.commit()
            db_conn.close()

        else:
            # We need to do cleanup on PostgreSQL
            def cleanup():
                import psycopg2

                # Close all the db pools
                hs.get_db_pool().close()

                dropped = False

                # Drop the test database
                db_conn = db_engine.module.connect(
                    database=POSTGRES_BASE_DB,
                    user=POSTGRES_USER,
                    host=POSTGRES_HOST,
                    password=POSTGRES_PASSWORD,
                )
                db_conn.autocommit = True
                cur = db_conn.cursor()

                # Try a few times to drop the DB. Some things may hold on to the
                # database for a few more seconds due to flakiness, preventing
                # us from dropping it when the test is over. If we can't drop
                # it, warn and move on.
                for x in range(5):
                    try:
                        cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,))
                        db_conn.commit()
                        dropped = True
                    except psycopg2.OperationalError as e:
                        warnings.warn(
                            "Couldn't drop old db: " + str(e), category=UserWarning
                        )
                        time.sleep(0.5)

                cur.close()
                db_conn.close()

                if not dropped:
                    warnings.warn("Failed to drop old DB.", category=UserWarning)

            if not LEAVE_DB:
                # Register the cleanup hook
                cleanup_func(cleanup)

        hs.setup()
        if homeserverToUse.__name__ == "TestHomeServer":
            hs.setup_master()
    else:
        # If we have been given an explicit datastore we probably want to mock
        # out the DataStores somehow too. This all feels a bit wrong, but then
        # mocking the stores feels wrong too.
        datastores = Mock(datastore=datastore)

        hs = homeserverToUse(
            name,
            db_pool=None,
            datastore=datastore,
            datastores=datastores,
            config=config,
            version_string="Synapse/tests",
            database_engine=db_engine,
            tls_server_context_factory=Mock(),
            tls_client_options_factory=Mock(),
            reactor=reactor,
            **kargs
        )

    # bcrypt is far too slow to be doing in unit tests
    # Need to let the HS build an auth handler and then mess with it
    # because AuthHandler's constructor requires the HS, so we can't make one
    # beforehand and pass it in to the HS's constructor (chicken / egg)
    hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode("utf8")).hexdigest()
    hs.get_auth_handler().validate_hash = (
        lambda p, h: hashlib.md5(p.encode("utf8")).hexdigest() == h
    )

    fed = kargs.get("resource_for_federation", None)
    if fed:
        register_federation_servlets(hs, fed)

    return hs
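
# Usage sketch (as the docstring says, deriving from HomeserverTestCase is
# preferred over calling this directly); inside an inlineCallbacks test one
# might write:
#
#   @defer.inlineCallbacks
#   def setUp(self):
#       self.hs = yield setup_test_homeserver(self.addCleanup)
#       self.store = self.hs.get_datastore()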


def register_federation_servlets(hs, resource):
    federation_server.register_servlets(
        hs,
        resource=resource,
        authenticator=federation_server.Authenticator(hs),
        ratelimiter=FederationRateLimiter(
            hs.get_clock(), config=hs.config.rc_federation
        ),
    )


def get_mock_call_args(pattern_func, mock_func):
    """ Return the arguments the mock function was called with, interpreted
    by the pattern function's argument list.
    """
    invoked_args, invoked_kargs = mock_func.call_args
    return getcallargs(pattern_func, *invoked_args, **invoked_kargs)
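
# Example (illustrative): get_mock_call_args binds a Mock's recorded call onto
# the pattern function's signature, so both positional and keyword invocations
# can be inspected by parameter name:
#
#   def pattern(room_id, user_id=None):
#       pass
#
#   m = Mock()
#   m("!room:test", user_id="@alice:test")
#   get_mock_call_args(pattern, m)
#   # -> {"room_id": "!room:test", "user_id": "@alice:test"}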


def mock_getRawHeaders(headers=None):
    headers = headers if headers is not None else {}

    def getRawHeaders(name, default=None):
        return headers.get(name, default)

    return getRawHeaders


# This is a mock /resource/, not an entire server
class MockHttpResource(HttpServer):
    def __init__(self, prefix=""):
        self.callbacks = []  # 3-tuple of method/pattern/function
        self.prefix = prefix

    def trigger_get(self, path):
        return self.trigger(b"GET", path, None)

    @patch("twisted.web.http.Request")
    @defer.inlineCallbacks
    def trigger(
        self, http_method, path, content, mock_request, federation_auth_origin=None
    ):
        """ Fire an HTTP event.

        Args:
            http_method : The HTTP method
            path : The HTTP path
            content : The HTTP body
            mock_request : Mocked request to pass to the event so it can get
                content.
            federation_auth_origin (bytes|None): domain to authenticate as, for federation

        Returns:
            A tuple of (code, response)

        Raises:
            KeyError: If no event is found which will handle the path.
        """
        path = self.prefix + path

        # annoyingly we return a twisted http request which has chained calls
        # to get at the http content, hence mock it here.
        mock_content = Mock()
        config = {"read.return_value": content}
        mock_content.configure_mock(**config)
        mock_request.content = mock_content

        mock_request.method = http_method.encode("ascii")
        mock_request.uri = path.encode("ascii")

        mock_request.getClientIP.return_value = "-"

        headers = {}
        if federation_auth_origin is not None:
            headers[b"Authorization"] = [
                b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin,)
            ]
        mock_request.requestHeaders.getRawHeaders = mock_getRawHeaders(headers)

        # return the right path if the event requires it
        mock_request.path = path

        # add in query params to the right place
        try:
            mock_request.args = urlparse.parse_qs(path.split("?")[1])
            mock_request.path = path.split("?")[0]
            path = mock_request.path
        except Exception:
            pass

        if isinstance(path, bytes):
            path = path.decode("utf8")

        for (method, pattern, func) in self.callbacks:
            if http_method != method:
                continue

            matcher = pattern.match(path)
            if matcher:
                try:
                    args = [urlparse.unquote(u) for u in matcher.groups()]

                    (code, response) = yield func(mock_request, *args)
                    return code, response
                except CodeMessageException as e:
                    return (e.code, cs_error(e.msg, code=e.errcode))

        raise KeyError("No event can handle %s" % path)

    def register_paths(self, method, path_patterns, callback, servlet_name):
        for path_pattern in path_patterns:
            self.callbacks.append((method, path_pattern, callback))
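
# Usage sketch (illustrative; the prefix and path are arbitrary): servlets
# register their paths against the resource via register_paths(), after which a
# test can fire a request at it and inspect the (code, response) tuple that
# trigger() returns:
#
#   resource = MockHttpResource(prefix="/_matrix/client/r0")
#   # ... register servlets against `resource` ...
#   code, response = yield resource.trigger_get("/some/path")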


class MockKey(object):
    alg = "mock_alg"
    version = "mock_version"
    signature = b"\x9a\x87$"

    @property
    def verify_key(self):
        return self

    def sign(self, message):
        return self

    def verify(self, message, sig):
        assert sig == b"\x9a\x87$"

    def encode(self):
        return b"<fake_encoded_key>"


class MockClock(object):
    now = 1000

    def __init__(self):
        # list of lists of [absolute_time, callback, expired] in no particular
        # order
        self.timers = []
        self.loopers = []

    def time(self):
        return self.now

    def time_msec(self):
        return self.time() * 1000

    def call_later(self, delay, callback, *args, **kwargs):
        current_context = LoggingContext.current_context()

        def wrapped_callback():
            LoggingContext.thread_local.current_context = current_context
            callback(*args, **kwargs)

        t = [self.now + delay, wrapped_callback, False]
        self.timers.append(t)

        return t

    def looping_call(self, function, interval):
        self.loopers.append([function, interval / 1000.0, self.now])

    def cancel_call_later(self, timer, ignore_errs=False):
        if timer[2]:
            if not ignore_errs:
                raise Exception("Cannot cancel an expired timer")

        timer[2] = True
        self.timers = [t for t in self.timers if t != timer]

    # For unit testing
    def advance_time(self, secs):
        self.now += secs

        timers = self.timers
        self.timers = []

        for t in timers:
            time, callback, expired = t

            if expired:
                raise Exception("Timer already expired")

            if self.now >= time:
                t[2] = True
                callback()
            else:
                self.timers.append(t)

        for looped in self.loopers:
            func, interval, last = looped
            if last + interval < self.now:
                func()
                looped[2] = self.now

    def advance_time_msec(self, ms):
        self.advance_time(ms / 1000.0)

    def time_bound_deferred(self, d, *args, **kwargs):
        # We don't bother timing things out for now.
        return d
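
# Example (a minimal sketch): tests drive time forward explicitly instead of
# waiting on a real reactor, so work scheduled via call_later() only runs once
# advance_time() crosses its deadline:
#
#   clock = MockClock()
#   fired = []
#   clock.call_later(10, lambda: fired.append(True))
#   clock.advance_time(5)   # not yet due, timer is retained
#   clock.advance_time(10)  # deadline passed, callback fires
#   assert fired == [True]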


def _format_call(args, kwargs):
    return ", ".join(
        ["%r" % (a) for a in args] + ["%s=%r" % (k, v) for k, v in kwargs.items()]
    )


class DeferredMockCallable(object):
    """A callable instance that stores a set of pending call expectations and
    return values for them. It allows a unit test to assert that the given set
    of function calls are eventually made, by awaiting on them to be called.
    """

    def __init__(self):
        self.expectations = []
        self.calls = []

    def __call__(self, *args, **kwargs):
        self.calls.append((args, kwargs))

        if not self.expectations:
            raise ValueError(
                "%r has no pending calls to handle call(%s)"
                % (self, _format_call(args, kwargs))
            )

        for (call, result, d) in self.expectations:
            if args == call[1] and kwargs == call[2]:
                d.callback(None)
                return result

        failure = AssertionError(
            "Was not expecting call(%s)" % (_format_call(args, kwargs))
        )

        for _, _, d in self.expectations:
            try:
                d.errback(failure)
            except Exception:
                pass

        raise failure

    def expect_call_and_return(self, call, result):
        self.expectations.append((call, result, defer.Deferred()))

    @defer.inlineCallbacks
    def await_calls(self, timeout=1000):
        deferred = defer.DeferredList(
            [d for _, _, d in self.expectations], fireOnOneErrback=True
        )

        timer = reactor.callLater(
            timeout / 1000,
            deferred.errback,
            AssertionError(
                "%d pending calls left: %s"
                % (
                    len([e for e in self.expectations if not e[2].called]),
                    [e for e in self.expectations if not e[2].called],
                )
            ),
        )

        yield deferred

        timer.cancel()

        self.calls = []

    def assert_had_no_calls(self):
        if self.calls:
            calls = self.calls
            self.calls = []

            raise AssertionError(
                "Expected not to receive any calls, got:\n"
                + "\n".join(["call(%s)" % _format_call(c[0], c[1]) for c in calls])
            )
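
# Usage sketch (illustrative, using mock.call to build the expected-call spec):
# an expectation pairs a call spec with a return value, the callable is handed
# to the code under test, and the test then waits for the expected calls:
#
#   from mock import call
#
#   mock_fn = DeferredMockCallable()
#   mock_fn.expect_call_and_return(call("some_arg"), defer.succeed("result"))
#   # ... exercise code that ends up invoking mock_fn("some_arg") ...
#   yield mock_fn.await_calls()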


@defer.inlineCallbacks
def create_room(hs, room_id, creator_id):
    """Creates and persists a creation event for the given room

    Args:
        hs
        room_id (str)
        creator_id (str)
    """
    persistence_store = hs.get_storage().persistence
    event_builder_factory = hs.get_event_builder_factory()
    event_creation_handler = hs.get_event_creation_handler()

    builder = event_builder_factory.for_room_version(
        RoomVersions.V1,
        {
            "type": EventTypes.Create,
            "state_key": "",
            "sender": creator_id,
            "room_id": room_id,
            "content": {},
        },
    )

    event, context = yield event_creation_handler.create_new_client_event(builder)

    yield persistence_store.persist_event(event, context)
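
# Example (sketch): typically called from an inlineCallbacks test body to seed
# the datastore with a room before persisting further events, e.g.
#
#   yield create_room(hs, "!room:test", "@creator:test")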