# user_dir.py (8.5 KB)
  1. #!/usr/bin/env python
  2. # -*- coding: utf-8 -*-
  3. # Copyright 2017 Vector Creations Ltd
  4. #
  5. # Licensed under the Apache License, Version 2.0 (the "License");
  6. # you may not use this file except in compliance with the License.
  7. # You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. import logging
  17. import sys
  18. from twisted.internet import defer, reactor
  19. from twisted.web.resource import NoResource
  20. import synapse
  21. from synapse import events
  22. from synapse.app import _base
  23. from synapse.config._base import ConfigError
  24. from synapse.config.homeserver import HomeServerConfig
  25. from synapse.config.logger import setup_logging
  26. from synapse.http.server import JsonResource
  27. from synapse.http.site import SynapseSite
  28. from synapse.logging.context import LoggingContext, run_in_background
  29. from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
  30. from synapse.replication.slave.storage._base import BaseSlavedStore
  31. from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
  32. from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
  33. from synapse.replication.slave.storage.events import SlavedEventStore
  34. from synapse.replication.slave.storage.registration import SlavedRegistrationStore
  35. from synapse.replication.tcp.client import ReplicationClientHandler
  36. from synapse.replication.tcp.streams.events import (
  37. EventsStream,
  38. EventsStreamCurrentStateRow,
  39. )
  40. from synapse.rest.client.v2_alpha import user_directory
  41. from synapse.server import HomeServer
  42. from synapse.storage.engines import create_engine
  43. from synapse.storage.user_directory import UserDirectoryStore
  44. from synapse.util.caches.stream_change_cache import StreamChangeCache
  45. from synapse.util.httpresourcetree import create_resource_tree
  46. from synapse.util.manhole import manhole
  47. from synapse.util.versionstring import get_version_string
# Module-level logger, named after this worker app ("synapse.app.user_dir").
logger = logging.getLogger("synapse.app.user_dir")
  49. class UserDirectorySlaveStore(
  50. SlavedEventStore,
  51. SlavedApplicationServiceStore,
  52. SlavedRegistrationStore,
  53. SlavedClientIpStore,
  54. UserDirectoryStore,
  55. BaseSlavedStore,
  56. ):
  57. def __init__(self, db_conn, hs):
  58. super(UserDirectorySlaveStore, self).__init__(db_conn, hs)
  59. events_max = self._stream_id_gen.get_current_token()
  60. curr_state_delta_prefill, min_curr_state_delta_id = self._get_cache_dict(
  61. db_conn,
  62. "current_state_delta_stream",
  63. entity_column="room_id",
  64. stream_column="stream_id",
  65. max_value=events_max, # As we share the stream id with events token
  66. limit=1000,
  67. )
  68. self._curr_state_delta_stream_cache = StreamChangeCache(
  69. "_curr_state_delta_stream_cache",
  70. min_curr_state_delta_id,
  71. prefilled_cache=curr_state_delta_prefill,
  72. )
  73. def stream_positions(self):
  74. result = super(UserDirectorySlaveStore, self).stream_positions()
  75. return result
  76. def process_replication_rows(self, stream_name, token, rows):
  77. if stream_name == EventsStream.NAME:
  78. self._stream_id_gen.advance(token)
  79. for row in rows:
  80. if row.type != EventsStreamCurrentStateRow.TypeId:
  81. continue
  82. self._curr_state_delta_stream_cache.entity_has_changed(
  83. row.data.room_id, token
  84. )
  85. return super(UserDirectorySlaveStore, self).process_replication_rows(
  86. stream_name, token, rows
  87. )
  88. class UserDirectoryServer(HomeServer):
  89. DATASTORE_CLASS = UserDirectorySlaveStore
  90. def _listen_http(self, listener_config):
  91. port = listener_config["port"]
  92. bind_addresses = listener_config["bind_addresses"]
  93. site_tag = listener_config.get("tag", port)
  94. resources = {}
  95. for res in listener_config["resources"]:
  96. for name in res["names"]:
  97. if name == "metrics":
  98. resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
  99. elif name == "client":
  100. resource = JsonResource(self, canonical_json=False)
  101. user_directory.register_servlets(self, resource)
  102. resources.update(
  103. {
  104. "/_matrix/client/r0": resource,
  105. "/_matrix/client/unstable": resource,
  106. "/_matrix/client/v2_alpha": resource,
  107. "/_matrix/client/api/v1": resource,
  108. }
  109. )
  110. root_resource = create_resource_tree(resources, NoResource())
  111. _base.listen_tcp(
  112. bind_addresses,
  113. port,
  114. SynapseSite(
  115. "synapse.access.http.%s" % (site_tag,),
  116. site_tag,
  117. listener_config,
  118. root_resource,
  119. self.version_string,
  120. ),
  121. )
  122. logger.info("Synapse user_dir now listening on port %d", port)
  123. def start_listening(self, listeners):
  124. for listener in listeners:
  125. if listener["type"] == "http":
  126. self._listen_http(listener)
  127. elif listener["type"] == "manhole":
  128. _base.listen_tcp(
  129. listener["bind_addresses"],
  130. listener["port"],
  131. manhole(
  132. username="matrix", password="rabbithole", globals={"hs": self}
  133. ),
  134. )
  135. elif listener["type"] == "metrics":
  136. if not self.get_config().enable_metrics:
  137. logger.warn(
  138. (
  139. "Metrics listener configured, but "
  140. "enable_metrics is not True!"
  141. )
  142. )
  143. else:
  144. _base.listen_metrics(listener["bind_addresses"], listener["port"])
  145. else:
  146. logger.warn("Unrecognized listener type: %s", listener["type"])
  147. self.get_tcp_replication().start_replication(self)
  148. def build_tcp_replication(self):
  149. return UserDirectoryReplicationHandler(self)
  150. class UserDirectoryReplicationHandler(ReplicationClientHandler):
  151. def __init__(self, hs):
  152. super(UserDirectoryReplicationHandler, self).__init__(hs.get_datastore())
  153. self.user_directory = hs.get_user_directory_handler()
  154. @defer.inlineCallbacks
  155. def on_rdata(self, stream_name, token, rows):
  156. yield super(UserDirectoryReplicationHandler, self).on_rdata(
  157. stream_name, token, rows
  158. )
  159. if stream_name == EventsStream.NAME:
  160. run_in_background(self._notify_directory)
  161. @defer.inlineCallbacks
  162. def _notify_directory(self):
  163. try:
  164. yield self.user_directory.notify_new_event()
  165. except Exception:
  166. logger.exception("Error notifiying user directory of state update")
  167. def start(config_options):
  168. try:
  169. config = HomeServerConfig.load_config("Synapse user directory", config_options)
  170. except ConfigError as e:
  171. sys.stderr.write("\n" + str(e) + "\n")
  172. sys.exit(1)
  173. assert config.worker_app == "synapse.app.user_dir"
  174. events.USE_FROZEN_DICTS = config.use_frozen_dicts
  175. database_engine = create_engine(config.database_config)
  176. if config.update_user_directory:
  177. sys.stderr.write(
  178. "\nThe update_user_directory must be disabled in the main synapse process"
  179. "\nbefore they can be run in a separate worker."
  180. "\nPlease add ``update_user_directory: false`` to the main config"
  181. "\n"
  182. )
  183. sys.exit(1)
  184. # Force the pushers to start since they will be disabled in the main config
  185. config.update_user_directory = True
  186. ss = UserDirectoryServer(
  187. config.server_name,
  188. db_config=config.database_config,
  189. config=config,
  190. version_string="Synapse/" + get_version_string(synapse),
  191. database_engine=database_engine,
  192. )
  193. setup_logging(ss, config, use_worker_options=True)
  194. ss.setup()
  195. reactor.addSystemEventTrigger(
  196. "before", "startup", _base.start, ss, config.worker_listeners
  197. )
  198. _base.start_worker_reactor("synapse-user-dir", config)
if __name__ == "__main__":
    # Run startup inside a logging context so log lines are attributed
    # to "main" rather than the sentinel context.
    with LoggingContext("main"):
        start(sys.argv[1:])