user_dir.py
  1. #!/usr/bin/env python
  2. # -*- coding: utf-8 -*-
  3. # Copyright 2017 Vector Creations Ltd
  4. #
  5. # Licensed under the Apache License, Version 2.0 (the "License");
  6. # you may not use this file except in compliance with the License.
  7. # You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. import logging
  17. import sys
  18. import synapse
  19. from synapse import events
  20. from synapse.app import _base
  21. from synapse.config._base import ConfigError
  22. from synapse.config.homeserver import HomeServerConfig
  23. from synapse.config.logger import setup_logging
  24. from synapse.crypto import context_factory
  25. from synapse.http.server import JsonResource
  26. from synapse.http.site import SynapseSite
  27. from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
  28. from synapse.replication.slave.storage._base import BaseSlavedStore
  29. from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
  30. from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
  31. from synapse.replication.slave.storage.events import SlavedEventStore
  32. from synapse.replication.slave.storage.registration import SlavedRegistrationStore
  33. from synapse.replication.tcp.client import ReplicationClientHandler
  34. from synapse.rest.client.v2_alpha import user_directory
  35. from synapse.server import HomeServer
  36. from synapse.storage.engines import create_engine
  37. from synapse.storage.user_directory import UserDirectoryStore
  38. from synapse.util.caches.stream_change_cache import StreamChangeCache
  39. from synapse.util.httpresourcetree import create_resource_tree
  40. from synapse.util.logcontext import LoggingContext, preserve_fn
  41. from synapse.util.manhole import manhole
  42. from synapse.util.versionstring import get_version_string
  43. from twisted.internet import reactor
  44. from twisted.web.resource import NoResource
  45. logger = logging.getLogger("synapse.app.user_dir")
class UserDirectorySlaveStore(
    SlavedEventStore,
    SlavedApplicationServiceStore,
    SlavedRegistrationStore,
    SlavedClientIpStore,
    UserDirectoryStore,
    BaseSlavedStore,
):
    """Slaved datastore for the user directory worker.

    Combines the slaved stores this worker needs with the
    ``UserDirectoryStore`` mixin, and additionally tracks the worker's
    position in the ``current_state_deltas`` replication stream so the
    user directory can be kept up to date with room state changes.
    """

    def __init__(self, db_conn, hs):
        super(UserDirectorySlaveStore, self).__init__(db_conn, hs)

        # current_state_deltas shares its stream id with the events
        # stream, so the current events token is also the maximum
        # current_state_deltas token.
        events_max = self._stream_id_gen.get_current_token()
        # Prefill the stream change cache from the database so that we
        # don't treat every room as "changed" on startup.
        curr_state_delta_prefill, min_curr_state_delta_id = self._get_cache_dict(
            db_conn, "current_state_delta_stream",
            entity_column="room_id",
            stream_column="stream_id",
            max_value=events_max,  # As we share the stream id with events token
            limit=1000,
        )
        self._curr_state_delta_stream_cache = StreamChangeCache(
            "_curr_state_delta_stream_cache", min_curr_state_delta_id,
            prefilled_cache=curr_state_delta_prefill,
        )

        # Our current position in the current_state_deltas stream,
        # advanced by process_replication_rows below.
        self._current_state_delta_pos = events_max

    def stream_positions(self):
        """Return the replication stream positions this worker wants,
        extended with our current_state_deltas position.
        """
        result = super(UserDirectorySlaveStore, self).stream_positions()
        result["current_state_deltas"] = self._current_state_delta_pos
        return result

    def process_replication_rows(self, stream_name, token, rows):
        """Handle incoming replication rows.

        For the current_state_deltas stream we advance our stored
        position and mark each affected room as changed in the stream
        change cache; everything else is delegated to the parent stores.
        """
        if stream_name == "current_state_deltas":
            self._current_state_delta_pos = token
            for row in rows:
                self._curr_state_delta_stream_cache.entity_has_changed(
                    row.room_id, token
                )
        return super(UserDirectorySlaveStore, self).process_replication_rows(
            stream_name, token, rows
        )
  83. class UserDirectoryServer(HomeServer):
  84. def setup(self):
  85. logger.info("Setting up.")
  86. self.datastore = UserDirectorySlaveStore(self.get_db_conn(), self)
  87. logger.info("Finished setting up.")
  88. def _listen_http(self, listener_config):
  89. port = listener_config["port"]
  90. bind_addresses = listener_config["bind_addresses"]
  91. site_tag = listener_config.get("tag", port)
  92. resources = {}
  93. for res in listener_config["resources"]:
  94. for name in res["names"]:
  95. if name == "metrics":
  96. resources[METRICS_PREFIX] = MetricsResource(self)
  97. elif name == "client":
  98. resource = JsonResource(self, canonical_json=False)
  99. user_directory.register_servlets(self, resource)
  100. resources.update({
  101. "/_matrix/client/r0": resource,
  102. "/_matrix/client/unstable": resource,
  103. "/_matrix/client/v2_alpha": resource,
  104. "/_matrix/client/api/v1": resource,
  105. })
  106. root_resource = create_resource_tree(resources, NoResource())
  107. _base.listen_tcp(
  108. bind_addresses,
  109. port,
  110. SynapseSite(
  111. "synapse.access.http.%s" % (site_tag,),
  112. site_tag,
  113. listener_config,
  114. root_resource,
  115. )
  116. )
  117. logger.info("Synapse user_dir now listening on port %d", port)
  118. def start_listening(self, listeners):
  119. for listener in listeners:
  120. if listener["type"] == "http":
  121. self._listen_http(listener)
  122. elif listener["type"] == "manhole":
  123. _base.listen_tcp(
  124. listener["bind_addresses"],
  125. listener["port"],
  126. manhole(
  127. username="matrix",
  128. password="rabbithole",
  129. globals={"hs": self},
  130. )
  131. )
  132. else:
  133. logger.warn("Unrecognized listener type: %s", listener["type"])
  134. self.get_tcp_replication().start_replication(self)
  135. def build_tcp_replication(self):
  136. return UserDirectoryReplicationHandler(self)
  137. class UserDirectoryReplicationHandler(ReplicationClientHandler):
  138. def __init__(self, hs):
  139. super(UserDirectoryReplicationHandler, self).__init__(hs.get_datastore())
  140. self.user_directory = hs.get_user_directory_handler()
  141. def on_rdata(self, stream_name, token, rows):
  142. super(UserDirectoryReplicationHandler, self).on_rdata(
  143. stream_name, token, rows
  144. )
  145. if stream_name == "current_state_deltas":
  146. preserve_fn(self.user_directory.notify_new_event)()
  147. def start(config_options):
  148. try:
  149. config = HomeServerConfig.load_config(
  150. "Synapse user directory", config_options
  151. )
  152. except ConfigError as e:
  153. sys.stderr.write("\n" + e.message + "\n")
  154. sys.exit(1)
  155. assert config.worker_app == "synapse.app.user_dir"
  156. setup_logging(config, use_worker_options=True)
  157. events.USE_FROZEN_DICTS = config.use_frozen_dicts
  158. database_engine = create_engine(config.database_config)
  159. if config.update_user_directory:
  160. sys.stderr.write(
  161. "\nThe update_user_directory must be disabled in the main synapse process"
  162. "\nbefore they can be run in a separate worker."
  163. "\nPlease add ``update_user_directory: false`` to the main config"
  164. "\n"
  165. )
  166. sys.exit(1)
  167. # Force the pushers to start since they will be disabled in the main config
  168. config.update_user_directory = True
  169. tls_server_context_factory = context_factory.ServerContextFactory(config)
  170. ps = UserDirectoryServer(
  171. config.server_name,
  172. db_config=config.database_config,
  173. tls_server_context_factory=tls_server_context_factory,
  174. config=config,
  175. version_string="Synapse/" + get_version_string(synapse),
  176. database_engine=database_engine,
  177. )
  178. ps.setup()
  179. ps.start_listening(config.worker_listeners)
  180. def start():
  181. ps.get_datastore().start_profiling()
  182. ps.get_state_handler().start_caching()
  183. reactor.callWhenRunning(start)
  184. _base.start_worker_reactor("synapse-user-dir", config)
if __name__ == '__main__':
    # Run everything under a named logcontext so log lines from startup
    # are attributed correctly.
    with LoggingContext("main"):
        start(sys.argv[1:])