#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import sys

from twisted.internet import defer, reactor
from twisted.web.resource import NoResource

import synapse
from synapse import events
from synapse.app import _base
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
from synapse.config.logger import setup_logging
from synapse.crypto import context_factory
from synapse.http.server import JsonResource
from synapse.http.site import SynapseSite
from synapse.metrics import RegistryProxy
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
from synapse.replication.slave.storage._base import BaseSlavedStore
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
from synapse.replication.slave.storage.events import SlavedEventStore
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
from synapse.replication.tcp.client import ReplicationClientHandler
from synapse.rest.client.v2_alpha import user_directory
from synapse.server import HomeServer
from synapse.storage.engines import create_engine
from synapse.storage.user_directory import UserDirectoryStore
from synapse.util.caches.stream_change_cache import StreamChangeCache
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.logcontext import LoggingContext, run_in_background
from synapse.util.manhole import manhole
from synapse.util.versionstring import get_version_string

logger = logging.getLogger("synapse.app.user_dir")


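# Worker datastore: mixes the slaved stores this worker needs with
# UserDirectoryStore, and tracks its position on the current_state_deltas
# replication stream so directory updates can follow room state changes.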
class UserDirectorySlaveStore(
    SlavedEventStore,
    SlavedApplicationServiceStore,
    SlavedRegistrationStore,
    SlavedClientIpStore,
    UserDirectoryStore,
    BaseSlavedStore,
):
    def __init__(self, db_conn, hs):
        super(UserDirectorySlaveStore, self).__init__(db_conn, hs)

        events_max = self._stream_id_gen.get_current_token()
        curr_state_delta_prefill, min_curr_state_delta_id = self._get_cache_dict(
            db_conn, "current_state_delta_stream",
            entity_column="room_id",
            stream_column="stream_id",
            max_value=events_max,  # As we share the stream id with events token
            limit=1000,
        )
        self._curr_state_delta_stream_cache = StreamChangeCache(
            "_curr_state_delta_stream_cache", min_curr_state_delta_id,
            prefilled_cache=curr_state_delta_prefill,
        )

        self._current_state_delta_pos = events_max

    def stream_positions(self):
        result = super(UserDirectorySlaveStore, self).stream_positions()
        result["current_state_deltas"] = self._current_state_delta_pos
        return result

    def process_replication_rows(self, stream_name, token, rows):
        if stream_name == "current_state_deltas":
            self._current_state_delta_pos = token
            for row in rows:
                self._curr_state_delta_stream_cache.entity_has_changed(
                    row.room_id, token
                )
        return super(UserDirectorySlaveStore, self).process_replication_rows(
            stream_name, token, rows
        )


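# HomeServer subclass for the user directory worker: builds the slaved
# datastore, serves the user_directory client servlets over HTTP, and
# connects to the main process via TCP replication.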
class UserDirectoryServer(HomeServer):
    def setup(self):
        logger.info("Setting up.")
        self.datastore = UserDirectorySlaveStore(self.get_db_conn(), self)
        logger.info("Finished setting up.")

    def _listen_http(self, listener_config):
        port = listener_config["port"]
        bind_addresses = listener_config["bind_addresses"]
        site_tag = listener_config.get("tag", port)
        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                if name == "metrics":
                    resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
                elif name == "client":
                    resource = JsonResource(self, canonical_json=False)
                    user_directory.register_servlets(self, resource)
                    resources.update({
                        "/_matrix/client/r0": resource,
                        "/_matrix/client/unstable": resource,
                        "/_matrix/client/v2_alpha": resource,
                        "/_matrix/client/api/v1": resource,
                    })

        root_resource = create_resource_tree(resources, NoResource())

        _base.listen_tcp(
            bind_addresses,
            port,
            SynapseSite(
                "synapse.access.http.%s" % (site_tag,),
                site_tag,
                listener_config,
                root_resource,
                self.version_string,
            )
        )

        logger.info("Synapse user_dir now listening on port %d", port)

    def start_listening(self, listeners):
        for listener in listeners:
            if listener["type"] == "http":
                self._listen_http(listener)
            elif listener["type"] == "manhole":
                _base.listen_tcp(
                    listener["bind_addresses"],
                    listener["port"],
                    manhole(
                        username="matrix",
                        password="rabbithole",
                        globals={"hs": self},
                    )
                )
            elif listener["type"] == "metrics":
                if not self.get_config().enable_metrics:
                    logger.warn(("Metrics listener configured, but "
                                 "enable_metrics is not True!"))
                else:
                    _base.listen_metrics(listener["bind_addresses"],
                                         listener["port"])
            else:
                logger.warn("Unrecognized listener type: %s", listener["type"])

        self.get_tcp_replication().start_replication(self)

    def build_tcp_replication(self):
        return UserDirectoryReplicationHandler(self)


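# Replication client handler: when rows arrive on the current_state_deltas
# stream, poke the user directory handler in the background so it can pick
# up the new state.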
class UserDirectoryReplicationHandler(ReplicationClientHandler):
    def __init__(self, hs):
        super(UserDirectoryReplicationHandler, self).__init__(hs.get_datastore())
        self.user_directory = hs.get_user_directory_handler()

    def on_rdata(self, stream_name, token, rows):
        super(UserDirectoryReplicationHandler, self).on_rdata(
            stream_name, token, rows
        )
        if stream_name == "current_state_deltas":
            run_in_background(self._notify_directory)

    @defer.inlineCallbacks
    def _notify_directory(self):
        try:
            yield self.user_directory.notify_new_event()
        except Exception:
            logger.exception("Error notifying user directory of state update")


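# Entry point: load and validate the worker config, refuse to start if the
# main process is still updating the user directory, then build the worker
# and hand control to the reactor.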
def start(config_options):
    try:
        config = HomeServerConfig.load_config(
            "Synapse user directory", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.user_dir"

    setup_logging(config, use_worker_options=True)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)

    if config.update_user_directory:
        sys.stderr.write(
            "\nupdate_user_directory must be disabled in the main synapse process"
            "\nbefore user directory updates can be run in a separate worker."
            "\nPlease add ``update_user_directory: false`` to the main config"
            "\n"
        )
        sys.exit(1)

    # Force the user directory updater to start, since it will be disabled in
    # the main config.
    config.update_user_directory = True

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    ps = UserDirectoryServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    ps.setup()
    ps.start_listening(config.worker_listeners)

    def start():
        ps.get_datastore().start_profiling()
        ps.get_state_handler().start_caching()

    reactor.callWhenRunning(start)

    _base.start_worker_reactor("synapse-user-dir", config)


if __name__ == '__main__':
    with LoggingContext("main"):
        start(sys.argv[1:])
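

# Illustrative sketch only (not part of this module): a minimal worker config
# that could be passed to this script with `-c`. Option names and defaults
# vary between Synapse versions, so check the workers documentation for your
# release; the host, ports, and paths below are assumptions.
#
#   worker_app: synapse.app.user_dir
#
#   # Connection back to the main process's replication listener.
#   worker_replication_host: 127.0.0.1
#   worker_replication_port: 9092
#
#   worker_listeners:
#     - type: http
#       port: 8091
#       resources:
#         - names: [client]
#
#   worker_log_config: /etc/matrix-synapse/user-dir-log.yaml
#
# The main homeserver config must also set `update_user_directory: false`,
# as enforced in start() above.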