# httppusher.py
  1. # -*- coding: utf-8 -*-
  2. # Copyright 2015, 2016 OpenMarket Ltd
  3. # Copyright 2017 New Vector Ltd
  4. #
  5. # Licensed under the Apache License, Version 2.0 (the "License");
  6. # you may not use this file except in compliance with the License.
  7. # You may obtain a copy of the License at
  8. #
  9. # http://www.apache.org/licenses/LICENSE-2.0
  10. #
  11. # Unless required by applicable law or agreed to in writing, software
  12. # distributed under the License is distributed on an "AS IS" BASIS,
  13. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. # See the License for the specific language governing permissions and
  15. # limitations under the License.
  16. import logging
  17. from prometheus_client import Counter
  18. from twisted.internet import defer
  19. from twisted.internet.error import AlreadyCalled, AlreadyCancelled
  20. from synapse.push import PusherConfigException
  21. from synapse.util.logcontext import LoggingContext
  22. from synapse.util.metrics import Measure
  23. from . import push_rule_evaluator, push_tools
# Module-level logger for this pusher implementation.
logger = logging.getLogger(__name__)

# Prometheus counters: pushes that were processed successfully vs. pushes
# whose HTTP delivery failed (and will be retried with backoff).
http_push_processed_counter = Counter("synapse_http_httppusher_http_pushes_processed", "")
http_push_failed_counter = Counter("synapse_http_httppusher_http_pushes_failed", "")
  27. class HttpPusher(object):
  28. INITIAL_BACKOFF_SEC = 1 # in seconds because that's what Twisted takes
  29. MAX_BACKOFF_SEC = 60 * 60
  30. # This one's in ms because we compare it against the clock
  31. GIVE_UP_AFTER_MS = 24 * 60 * 60 * 1000
  32. def __init__(self, hs, pusherdict):
  33. self.hs = hs
  34. self.store = self.hs.get_datastore()
  35. self.clock = self.hs.get_clock()
  36. self.state_handler = self.hs.get_state_handler()
  37. self.user_id = pusherdict['user_name']
  38. self.app_id = pusherdict['app_id']
  39. self.app_display_name = pusherdict['app_display_name']
  40. self.device_display_name = pusherdict['device_display_name']
  41. self.pushkey = pusherdict['pushkey']
  42. self.pushkey_ts = pusherdict['ts']
  43. self.data = pusherdict['data']
  44. self.last_stream_ordering = pusherdict['last_stream_ordering']
  45. self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
  46. self.failing_since = pusherdict['failing_since']
  47. self.timed_call = None
  48. self.processing = False
  49. # This is the highest stream ordering we know it's safe to process.
  50. # When new events arrive, we'll be given a window of new events: we
  51. # should honour this rather than just looking for anything higher
  52. # because of potential out-of-order event serialisation. This starts
  53. # off as None though as we don't know any better.
  54. self.max_stream_ordering = None
  55. if 'data' not in pusherdict:
  56. raise PusherConfigException(
  57. "No 'data' key for HTTP pusher"
  58. )
  59. self.data = pusherdict['data']
  60. self.name = "%s/%s/%s" % (
  61. pusherdict['user_name'],
  62. pusherdict['app_id'],
  63. pusherdict['pushkey'],
  64. )
  65. if 'url' not in self.data:
  66. raise PusherConfigException(
  67. "'url' required in data for HTTP pusher"
  68. )
  69. self.url = self.data['url']
  70. self.http_client = hs.get_simple_http_client()
  71. self.data_minus_url = {}
  72. self.data_minus_url.update(self.data)
  73. del self.data_minus_url['url']
  74. @defer.inlineCallbacks
  75. def on_started(self):
  76. try:
  77. yield self._process()
  78. except Exception:
  79. logger.exception("Error starting http pusher")
  80. @defer.inlineCallbacks
  81. def on_new_notifications(self, min_stream_ordering, max_stream_ordering):
  82. self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering)
  83. yield self._process()
  84. @defer.inlineCallbacks
  85. def on_new_receipts(self, min_stream_id, max_stream_id):
  86. # Note that the min here shouldn't be relied upon to be accurate.
  87. # We could check the receipts are actually m.read receipts here,
  88. # but currently that's the only type of receipt anyway...
  89. with LoggingContext("push.on_new_receipts"):
  90. with Measure(self.clock, "push.on_new_receipts"):
  91. badge = yield push_tools.get_badge_count(
  92. self.hs.get_datastore(), self.user_id
  93. )
  94. yield self._send_badge(badge)
  95. @defer.inlineCallbacks
  96. def on_timer(self):
  97. yield self._process()
  98. def on_stop(self):
  99. if self.timed_call:
  100. try:
  101. self.timed_call.cancel()
  102. except (AlreadyCalled, AlreadyCancelled):
  103. pass
  104. self.timed_call = None
  105. @defer.inlineCallbacks
  106. def _process(self):
  107. if self.processing:
  108. return
  109. with LoggingContext("push._process"):
  110. with Measure(self.clock, "push._process"):
  111. try:
  112. self.processing = True
  113. # if the max ordering changes while we're running _unsafe_process,
  114. # call it again, and so on until we've caught up.
  115. while True:
  116. starting_max_ordering = self.max_stream_ordering
  117. try:
  118. yield self._unsafe_process()
  119. except Exception:
  120. logger.exception("Exception processing notifs")
  121. if self.max_stream_ordering == starting_max_ordering:
  122. break
  123. finally:
  124. self.processing = False
    @defer.inlineCallbacks
    def _unsafe_process(self):
        """
        Looks for unset notifications and dispatch them, in order
        Never call this directly: use _process which will only allow this to
        run once per pusher.
        """
        # Fetch all push actions for this user between the last stream
        # ordering we successfully handled and the current safe maximum.
        fn = self.store.get_unread_push_actions_for_user_in_range_for_http
        unprocessed = yield fn(
            self.user_id, self.last_stream_ordering, self.max_stream_ordering
        )
        logger.info(
            "Processing %i unprocessed push actions for %s starting at "
            "stream_ordering %s",
            len(unprocessed), self.name, self.last_stream_ordering,
        )
        for push_action in unprocessed:
            processed = yield self._process_one(push_action)
            if processed:
                # Delivered (or nothing to deliver): reset the backoff,
                # advance our position and persist it with a success time.
                http_push_processed_counter.inc()
                self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
                self.last_stream_ordering = push_action['stream_ordering']
                yield self.store.update_pusher_last_stream_ordering_and_success(
                    self.app_id, self.pushkey, self.user_id,
                    self.last_stream_ordering,
                    self.clock.time_msec()
                )
                # We had been failing but have now recovered: clear the
                # failing_since marker in the DB as well.
                if self.failing_since:
                    self.failing_since = None
                    yield self.store.update_pusher_failing_since(
                        self.app_id, self.pushkey, self.user_id,
                        self.failing_since
                    )
            else:
                http_push_failed_counter.inc()
                # First failure of a failing streak: record when it started.
                if not self.failing_since:
                    self.failing_since = self.clock.time_msec()
                    yield self.store.update_pusher_failing_since(
                        self.app_id, self.pushkey, self.user_id,
                        self.failing_since
                    )
                # Failing for longer than GIVE_UP_AFTER_MS: drop this
                # notification and move on.
                if (
                    self.failing_since and
                    self.failing_since <
                    self.clock.time_msec() - HttpPusher.GIVE_UP_AFTER_MS
                ):
                    # we really only give up so that if the URL gets
                    # fixed, we don't suddenly deliver a load
                    # of old notifications.
                    logger.warn("Giving up on a notification to user %s, "
                                "pushkey %s",
                                self.user_id, self.pushkey)
                    # Skip past this action as if it had been delivered
                    # (advance position, reset backoff, clear failing state).
                    self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
                    self.last_stream_ordering = push_action['stream_ordering']
                    yield self.store.update_pusher_last_stream_ordering(
                        self.app_id,
                        self.pushkey,
                        self.user_id,
                        self.last_stream_ordering
                    )
                    self.failing_since = None
                    yield self.store.update_pusher_failing_since(
                        self.app_id,
                        self.pushkey,
                        self.user_id,
                        self.failing_since
                    )
                else:
                    # Still within the give-up window: schedule a retry with
                    # exponential backoff and stop processing for now.
                    logger.info("Push failed: delaying for %ds", self.backoff_delay)
                    self.timed_call = self.hs.get_reactor().callLater(
                        self.backoff_delay, self.on_timer
                    )
                    self.backoff_delay = min(self.backoff_delay * 2, self.MAX_BACKOFF_SEC)
                    break
  199. @defer.inlineCallbacks
  200. def _process_one(self, push_action):
  201. if 'notify' not in push_action['actions']:
  202. defer.returnValue(True)
  203. tweaks = push_rule_evaluator.tweaks_for_actions(push_action['actions'])
  204. badge = yield push_tools.get_badge_count(self.hs.get_datastore(), self.user_id)
  205. event = yield self.store.get_event(push_action['event_id'], allow_none=True)
  206. if event is None:
  207. defer.returnValue(True) # It's been redacted
  208. rejected = yield self.dispatch_push(event, tweaks, badge)
  209. if rejected is False:
  210. defer.returnValue(False)
  211. if isinstance(rejected, list) or isinstance(rejected, tuple):
  212. for pk in rejected:
  213. if pk != self.pushkey:
  214. # for sanity, we only remove the pushkey if it
  215. # was the one we actually sent...
  216. logger.warn(
  217. ("Ignoring rejected pushkey %s because we"
  218. " didn't send it"), pk
  219. )
  220. else:
  221. logger.info(
  222. "Pushkey %s was rejected: removing",
  223. pk
  224. )
  225. yield self.hs.remove_pusher(
  226. self.app_id, pk, self.user_id
  227. )
  228. defer.returnValue(True)
  229. @defer.inlineCallbacks
  230. def _build_notification_dict(self, event, tweaks, badge):
  231. if self.data.get('format') == 'event_id_only':
  232. d = {
  233. 'notification': {
  234. 'event_id': event.event_id,
  235. 'room_id': event.room_id,
  236. 'counts': {
  237. 'unread': badge,
  238. },
  239. 'devices': [
  240. {
  241. 'app_id': self.app_id,
  242. 'pushkey': self.pushkey,
  243. 'pushkey_ts': long(self.pushkey_ts / 1000),
  244. 'data': self.data_minus_url,
  245. }
  246. ]
  247. }
  248. }
  249. defer.returnValue(d)
  250. ctx = yield push_tools.get_context_for_event(
  251. self.store, self.state_handler, event, self.user_id
  252. )
  253. d = {
  254. 'notification': {
  255. 'id': event.event_id, # deprecated: remove soon
  256. 'event_id': event.event_id,
  257. 'room_id': event.room_id,
  258. 'type': event.type,
  259. 'sender': event.user_id,
  260. 'counts': { # -- we don't mark messages as read yet so
  261. # we have no way of knowing
  262. # Just set the badge to 1 until we have read receipts
  263. 'unread': badge,
  264. # 'missed_calls': 2
  265. },
  266. 'devices': [
  267. {
  268. 'app_id': self.app_id,
  269. 'pushkey': self.pushkey,
  270. 'pushkey_ts': long(self.pushkey_ts / 1000),
  271. 'data': self.data_minus_url,
  272. 'tweaks': tweaks
  273. }
  274. ]
  275. }
  276. }
  277. if event.type == 'm.room.member':
  278. d['notification']['membership'] = event.content['membership']
  279. d['notification']['user_is_target'] = event.state_key == self.user_id
  280. if self.hs.config.push_include_content and 'content' in event:
  281. d['notification']['content'] = event.content
  282. # We no longer send aliases separately, instead, we send the human
  283. # readable name of the room, which may be an alias.
  284. if 'sender_display_name' in ctx and len(ctx['sender_display_name']) > 0:
  285. d['notification']['sender_display_name'] = ctx['sender_display_name']
  286. if 'name' in ctx and len(ctx['name']) > 0:
  287. d['notification']['room_name'] = ctx['name']
  288. defer.returnValue(d)
  289. @defer.inlineCallbacks
  290. def dispatch_push(self, event, tweaks, badge):
  291. notification_dict = yield self._build_notification_dict(event, tweaks, badge)
  292. if not notification_dict:
  293. defer.returnValue([])
  294. try:
  295. resp = yield self.http_client.post_json_get_json(self.url, notification_dict)
  296. except Exception:
  297. logger.warn(
  298. "Failed to push event %s to %s",
  299. event.event_id, self.name, exc_info=True,
  300. )
  301. defer.returnValue(False)
  302. rejected = []
  303. if 'rejected' in resp:
  304. rejected = resp['rejected']
  305. defer.returnValue(rejected)
  306. @defer.inlineCallbacks
  307. def _send_badge(self, badge):
  308. logger.info("Sending updated badge count %d to %s", badge, self.name)
  309. d = {
  310. 'notification': {
  311. 'id': '',
  312. 'type': None,
  313. 'sender': '',
  314. 'counts': {
  315. 'unread': badge
  316. },
  317. 'devices': [
  318. {
  319. 'app_id': self.app_id,
  320. 'pushkey': self.pushkey,
  321. 'pushkey_ts': long(self.pushkey_ts / 1000),
  322. 'data': self.data_minus_url,
  323. }
  324. ]
  325. }
  326. }
  327. try:
  328. resp = yield self.http_client.post_json_get_json(self.url, d)
  329. except Exception:
  330. logger.warn(
  331. "Failed to send badge count to %s",
  332. self.name, exc_info=True,
  333. )
  334. defer.returnValue(False)
  335. rejected = []
  336. if 'rejected' in resp:
  337. rejected = resp['rejected']
  338. defer.returnValue(rejected)