# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import abc
import logging
import re
import urllib
from inspect import signature
from typing import TYPE_CHECKING, Dict, List, Tuple

from prometheus_client import Counter, Gauge

from synapse.api.errors import HttpResponseException, SynapseError
from synapse.http import RequestTimedOutError
from synapse.logging import opentracing
from synapse.logging.opentracing import trace
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.stringutils import random_string

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)

_pending_outgoing_requests = Gauge(
    "synapse_pending_outgoing_replication_requests",
    "Number of active outgoing replication requests, by replication method name",
    ["name"],
)

_outgoing_request_counter = Counter(
    "synapse_outgoing_replication_requests",
    "Number of outgoing replication requests, by replication method name and result",
    ["name", "code"],
)


class ReplicationEndpoint(metaclass=abc.ABCMeta):
    """Helper base class for defining new replication HTTP endpoints.

    This creates an endpoint under `/_synapse/replication/:NAME/:PATH_ARGS..`
    (with a `/:txn_id` suffix for cached requests), where NAME is a name,
    PATH_ARGS are a tuple of parameters to be encoded in the URL.

    For example, if `NAME` is "send_event" and `PATH_ARGS` is `("event_id",)`,
    with `CACHE` set to true then this generates an endpoint:

        /_synapse/replication/send_event/:event_id/:txn_id

    For POST/PUT requests the payload is serialized to json and sent as the
    body, while for GET requests the payload is added as query parameters. See
    `_serialize_payload` for details.

    Incoming requests are handled by overriding `_handle_request`. Servers
    must call `register` to register the path with the HTTP server.

    Requests can be sent by calling the client returned by `make_client`.
    Requests are sent to the master process by default, but can be sent to
    other named processes by specifying an `instance_name` keyword argument.

    Attributes:
        NAME (str): A name for the endpoint, added to the path as well as used
            in logging and metrics.
        PATH_ARGS (tuple[str]): A list of parameters to be added to the path.
            Adding parameters to the path (rather than the payload) can make it
            easier to follow along in the log files.
        METHOD (str): The method of the HTTP request, defaults to POST. Can be
            one of POST, PUT or GET. If GET then the payload is sent as query
            parameters rather than a JSON body.
        CACHE (bool): Whether the server should cache the result of the request.
            If true then a txn_id is transparently added to all requests, and
            `_handle_request` must return a Deferred.
        RETRY_ON_TIMEOUT (bool): Whether or not to retry the request when a 504
            is received.
    """
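
    # Illustrative sketch of a subclass (hypothetical names, not part of this
    # module). With NAME = "example" and PATH_ARGS = ("room_id",), and CACHE
    # left at its default of True, this would register
    # /_synapse/replication/example/:room_id/:txn_id and send the remaining
    # arguments as the JSON body (METHOD defaults to POST):
    #
    #     class ReplicationExampleRestServlet(ReplicationEndpoint):
    #         NAME = "example"
    #         PATH_ARGS = ("room_id",)
    #
    #         @staticmethod
    #         async def _serialize_payload(room_id, reason):
    #             # `room_id` is carried in the URL; only `reason` ends up in
    #             # the request body.
    #             return {"reason": reason}
    #
    #         async def _handle_request(self, request, room_id):
    #             return 200, {"room_id": room_id}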

    NAME: str = abc.abstractproperty()  # type: ignore
    PATH_ARGS: Tuple[str, ...] = abc.abstractproperty()  # type: ignore
    METHOD = "POST"
    CACHE = True
    RETRY_ON_TIMEOUT = True

    def __init__(self, hs: "HomeServer"):
        if self.CACHE:
            self.response_cache: ResponseCache[str] = ResponseCache(
                hs.get_clock(), "repl." + self.NAME, timeout_ms=30 * 60 * 1000
            )

        # We reserve `instance_name` as a parameter to sending requests, so we
        # assert here that subclasses don't try to use the name.
        assert (
            "instance_name" not in self.PATH_ARGS
        ), "`instance_name` is a reserved parameter name"
        assert (
            "instance_name"
            not in signature(self.__class__._serialize_payload).parameters
        ), "`instance_name` is a reserved parameter name"

        assert self.METHOD in ("PUT", "POST", "GET")

        self._replication_secret = None
        if hs.config.worker.worker_replication_secret:
            self._replication_secret = hs.config.worker.worker_replication_secret

    def _check_auth(self, request) -> None:
        # Get the authorization header.
        auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
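        # If a secret is configured, the header is expected to look like
        # (value illustrative):
        #
        #     Authorization: Bearer <worker_replication_secret>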

        # Guard against a missing header: getRawHeaders returns None in that
        # case, so checking the length directly would raise a TypeError.
        if not auth_headers:
            raise RuntimeError("Missing Authorization header.")
        if len(auth_headers) > 1:
            raise RuntimeError("Too many Authorization headers.")

        parts = auth_headers[0].split(b" ")
        if parts[0] == b"Bearer" and len(parts) == 2:
            received_secret = parts[1].decode("ascii")
            if self._replication_secret == received_secret:
                # Success!
                return

        raise RuntimeError("Invalid Authorization header.")

    @abc.abstractmethod
    async def _serialize_payload(**kwargs):
        """Static method that is called when creating a request.

        Concrete implementations should have explicit parameters (rather than
        kwargs) so that an appropriate exception is raised if the client is
        called with unexpected parameters. All PATH_ARGS must appear in the
        argument list.

        Returns:
            dict: If POST/PUT request then dictionary must be JSON serialisable,
            otherwise must be appropriate for adding as query args.
        """
        return {}
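
    # Illustrative sketch of a concrete implementation (hypothetical names).
    # Explicit parameters mean a call with an unexpected keyword fails loudly,
    # and every PATH_ARGS entry must be accepted even though it travels in the
    # URL rather than in the returned payload:
    #
    #     @staticmethod
    #     async def _serialize_payload(room_id, limit):
    #         # Becomes the JSON body for POST/PUT, or query args for GET.
    #         return {"limit": limit}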

    @abc.abstractmethod
    async def _handle_request(self, request, **kwargs):
        """Handle incoming request.

        This is called with the request object and PATH_ARGS.

        Returns:
            tuple[int, dict]: HTTP status code and a JSON serialisable dict
            to be used as response body of request.
        """
        pass

    @classmethod
    def make_client(cls, hs):
        """Create a client that makes requests.

        Returns a callable that accepts the same parameters as
        `_serialize_payload`, and also accepts an optional `instance_name`
        parameter to specify which instance to hit (the instance must be in
        the `instance_map` config).
        """
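        # Illustrative usage sketch (the servlet and its arguments are
        # hypothetical, mirroring the sketch under the class docstring):
        #
        #     client = ReplicationExampleRestServlet.make_client(hs)
        #     # Sent to the master process by default...
        #     await client(room_id="!abc:example.com", reason="spam")
        #     # ...or to a named worker from the `instance_map` config.
        #     await client(instance_name="worker1", room_id="!abc:example.com", reason="spam")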
        clock = hs.get_clock()
        client = hs.get_simple_http_client()
        local_instance_name = hs.get_instance_name()

        master_host = hs.config.worker.worker_replication_host
        master_port = hs.config.worker.worker_replication_http_port

        instance_map = hs.config.worker.instance_map

        outgoing_gauge = _pending_outgoing_requests.labels(cls.NAME)

        replication_secret = None
        if hs.config.worker.worker_replication_secret:
            replication_secret = hs.config.worker.worker_replication_secret.encode(
                "ascii"
            )

        @trace(opname="outgoing_replication_request")
        async def send_request(*, instance_name="master", **kwargs):
            with outgoing_gauge.track_inprogress():
                if instance_name == local_instance_name:
                    raise Exception("Trying to send HTTP request to self")
                if instance_name == "master":
                    host = master_host
                    port = master_port
                elif instance_name in instance_map:
                    host = instance_map[instance_name].host
                    port = instance_map[instance_name].port
                else:
                    raise Exception(
                        "Instance %r not in 'instance_map' config" % (instance_name,)
                    )

                data = await cls._serialize_payload(**kwargs)

                url_args = [
                    urllib.parse.quote(kwargs[name], safe="") for name in cls.PATH_ARGS
                ]

                if cls.CACHE:
                    txn_id = random_string(10)
                    url_args.append(txn_id)

                if cls.METHOD == "POST":
                    request_func = client.post_json_get_json
                elif cls.METHOD == "PUT":
                    request_func = client.put_json
                elif cls.METHOD == "GET":
                    request_func = client.get_json
                else:
                    # We have already asserted in the constructor that a
                    # compatible method was picked, but let's be paranoid.
                    raise Exception(
                        "Unknown METHOD on %s replication endpoint" % (cls.NAME,)
                    )

                uri = "http://%s:%s/_synapse/replication/%s/%s" % (
                    host,
                    port,
                    cls.NAME,
                    "/".join(url_args),
                )
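
                # With the docstring's "send_event" example this produces a URI
                # of the form (host/port illustrative; path args are URL-quoted):
                #
                #     http://<host>:<port>/_synapse/replication/send_event/<event_id>/<txn_id>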

                try:
                    # We keep retrying the same request for timeouts. This is so that we
                    # have a good idea that the request has either succeeded or failed
                    # on the master, and so whether we should clean up or not.
                    while True:
                        headers: Dict[bytes, List[bytes]] = {}
                        # Add an authorization header, if configured.
                        if replication_secret:
                            headers[b"Authorization"] = [
                                b"Bearer " + replication_secret
                            ]
                        opentracing.inject_header_dict(headers, check_destination=False)
                        try:
                            result = await request_func(uri, data, headers=headers)
                            break
                        except RequestTimedOutError:
                            if not cls.RETRY_ON_TIMEOUT:
                                raise

                        logger.warning("%s request timed out; retrying", cls.NAME)

                        # If we timed out we probably don't need to worry about backing
                        # off too much, but let's just wait a little anyway.
                        await clock.sleep(1)
                except HttpResponseException as e:
                    # We convert to SynapseError as we know that it was a SynapseError
                    # on the main process that we should send to the client. (And
                    # importantly, not stack traces everywhere)
                    _outgoing_request_counter.labels(cls.NAME, e.code).inc()
                    raise e.to_synapse_error()
                except Exception as e:
                    _outgoing_request_counter.labels(cls.NAME, "ERR").inc()
                    raise SynapseError(502, "Failed to talk to main process") from e

                _outgoing_request_counter.labels(cls.NAME, 200).inc()
                return result

        return send_request

    def register(self, http_server):
        """Called by the server to register this as a handler to the
        appropriate path.
        """

        url_args = list(self.PATH_ARGS)
        method = self.METHOD

        if self.CACHE:
            url_args.append("txn_id")

        args = "/".join("(?P<%s>[^/]+)" % (arg,) for arg in url_args)
        pattern = re.compile("^/_synapse/replication/%s/%s$" % (self.NAME, args))
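
        # For the docstring's "send_event" example (with CACHE enabled) the
        # compiled pattern is:
        #
        #     ^/_synapse/replication/send_event/(?P<event_id>[^/]+)/(?P<txn_id>[^/]+)$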

        http_server.register_paths(
            method,
            [pattern],
            self._check_auth_and_handle,
            self.__class__.__name__,
        )

    async def _check_auth_and_handle(self, request, **kwargs):
        """Called on new incoming requests. Checks the replication secret (if
        one is configured) and then, if caching is enabled, checks whether
        there is a cached response for the request and returns that, otherwise
        calls `_handle_request` and caches its response.
        """
        # We just use the txn_id here, but we probably also want to use the
        # other PATH_ARGS as well.

        # Check the authorization headers before handling the request.
        if self._replication_secret:
            self._check_auth(request)

        if self.CACHE:
            txn_id = kwargs.pop("txn_id")
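
            # The cache is keyed on txn_id, so (illustratively) a retried
            # delivery of the same request reuses the first response instead
            # of invoking `_handle_request` a second time.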
            return await self.response_cache.wrap(
                txn_id, self._handle_request, request, **kwargs
            )

        return await self._handle_request(request, **kwargs)