httppusher.py 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325
  1. # -*- coding: utf-8 -*-
  2. # Copyright 2015, 2016 OpenMarket Ltd
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. from synapse.push import PusherConfigException
  16. from twisted.internet import defer, reactor
  17. import logging
  18. import push_rule_evaluator
  19. import push_tools
  20. from synapse.util.logcontext import LoggingContext
  21. from synapse.util.metrics import Measure
  22. logger = logging.getLogger(__name__)
  23. class HttpPusher(object):
  24. INITIAL_BACKOFF_SEC = 1 # in seconds because that's what Twisted takes
  25. MAX_BACKOFF_SEC = 60 * 60
  26. # This one's in ms because we compare it against the clock
  27. GIVE_UP_AFTER_MS = 24 * 60 * 60 * 1000
  28. def __init__(self, hs, pusherdict):
  29. self.hs = hs
  30. self.store = self.hs.get_datastore()
  31. self.clock = self.hs.get_clock()
  32. self.user_id = pusherdict['user_name']
  33. self.app_id = pusherdict['app_id']
  34. self.app_display_name = pusherdict['app_display_name']
  35. self.device_display_name = pusherdict['device_display_name']
  36. self.pushkey = pusherdict['pushkey']
  37. self.pushkey_ts = pusherdict['ts']
  38. self.data = pusherdict['data']
  39. self.last_stream_ordering = pusherdict['last_stream_ordering']
  40. self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
  41. self.failing_since = pusherdict['failing_since']
  42. self.timed_call = None
  43. self.processing = False
  44. # This is the highest stream ordering we know it's safe to process.
  45. # When new events arrive, we'll be given a window of new events: we
  46. # should honour this rather than just looking for anything higher
  47. # because of potential out-of-order event serialisation. This starts
  48. # off as None though as we don't know any better.
  49. self.max_stream_ordering = None
  50. if 'data' not in pusherdict:
  51. raise PusherConfigException(
  52. "No 'data' key for HTTP pusher"
  53. )
  54. self.data = pusherdict['data']
  55. self.name = "%s/%s/%s" % (
  56. pusherdict['user_name'],
  57. pusherdict['app_id'],
  58. pusherdict['pushkey'],
  59. )
  60. if 'url' not in self.data:
  61. raise PusherConfigException(
  62. "'url' required in data for HTTP pusher"
  63. )
  64. self.url = self.data['url']
  65. self.http_client = hs.get_simple_http_client()
  66. self.data_minus_url = {}
  67. self.data_minus_url.update(self.data)
  68. del self.data_minus_url['url']
  69. @defer.inlineCallbacks
  70. def on_started(self):
  71. yield self._process()
  72. @defer.inlineCallbacks
  73. def on_new_notifications(self, min_stream_ordering, max_stream_ordering):
  74. self.max_stream_ordering = max(max_stream_ordering, self.max_stream_ordering)
  75. yield self._process()
  76. @defer.inlineCallbacks
  77. def on_new_receipts(self, min_stream_id, max_stream_id):
  78. # Note that the min here shouldn't be relied upon to be accurate.
  79. # We could check the receipts are actually m.read receipts here,
  80. # but currently that's the only type of receipt anyway...
  81. with LoggingContext("push.on_new_receipts"):
  82. with Measure(self.clock, "push.on_new_receipts"):
  83. badge = yield push_tools.get_badge_count(
  84. self.hs.get_datastore(), self.user_id
  85. )
  86. yield self._send_badge(badge)
  87. @defer.inlineCallbacks
  88. def on_timer(self):
  89. yield self._process()
  90. def on_stop(self):
  91. if self.timed_call:
  92. self.timed_call.cancel()
  93. @defer.inlineCallbacks
  94. def _process(self):
  95. if self.processing:
  96. return
  97. with LoggingContext("push._process"):
  98. with Measure(self.clock, "push._process"):
  99. try:
  100. self.processing = True
  101. # if the max ordering changes while we're running _unsafe_process,
  102. # call it again, and so on until we've caught up.
  103. while True:
  104. starting_max_ordering = self.max_stream_ordering
  105. try:
  106. yield self._unsafe_process()
  107. except:
  108. logger.exception("Exception processing notifs")
  109. if self.max_stream_ordering == starting_max_ordering:
  110. break
  111. finally:
  112. self.processing = False
    @defer.inlineCallbacks
    def _unsafe_process(self):
        """
        Looks for unsent notifications and dispatches them, in order.
        Never call this directly: use _process which will only allow this to
        run once per pusher.
        """
        # Fetch everything between our persisted high-water mark and the
        # highest ordering we currently know to be safe.
        unprocessed = yield self.store.get_unread_push_actions_for_user_in_range(
            self.user_id, self.last_stream_ordering, self.max_stream_ordering
        )
        for push_action in unprocessed:
            processed = yield self._process_one(push_action)
            if processed:
                # Delivered (or legitimately skipped): reset backoff, advance
                # the high-water mark and persist it so we don't resend after
                # a restart.
                self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
                self.last_stream_ordering = push_action['stream_ordering']
                yield self.store.update_pusher_last_stream_ordering_and_success(
                    self.app_id, self.pushkey, self.user_id,
                    self.last_stream_ordering,
                    self.clock.time_msec()
                )
                # Clear any recorded failure state now we've succeeded.
                if self.failing_since:
                    self.failing_since = None
                    yield self.store.update_pusher_failing_since(
                        self.app_id, self.pushkey, self.user_id,
                        self.failing_since
                    )
            else:
                # Delivery failed: record when the failures started (once).
                if not self.failing_since:
                    self.failing_since = self.clock.time_msec()
                    yield self.store.update_pusher_failing_since(
                        self.app_id, self.pushkey, self.user_id,
                        self.failing_since
                    )
                if (
                    self.failing_since and
                    self.failing_since <
                    self.clock.time_msec() - HttpPusher.GIVE_UP_AFTER_MS
                ):
                    # we really only give up so that if the URL gets
                    # fixed, we don't suddenly deliver a load
                    # of old notifications.
                    logger.warn("Giving up on a notification to user %s, "
                                "pushkey %s",
                                self.user_id, self.pushkey)
                    # Skip this action: advance past it and clear failure
                    # state, then carry on with the next one.
                    self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
                    self.last_stream_ordering = push_action['stream_ordering']
                    yield self.store.update_pusher_last_stream_ordering(
                        self.app_id,
                        self.pushkey,
                        self.user_id,
                        self.last_stream_ordering
                    )
                    self.failing_since = None
                    yield self.store.update_pusher_failing_since(
                        self.app_id,
                        self.pushkey,
                        self.user_id,
                        self.failing_since
                    )
                else:
                    # Not yet time to give up: schedule a retry with
                    # exponential backoff and stop processing for now.
                    logger.info("Push failed: delaying for %ds", self.backoff_delay)
                    self.timed_call = reactor.callLater(self.backoff_delay, self.on_timer)
                    self.backoff_delay = min(self.backoff_delay * 2, self.MAX_BACKOFF_SEC)
                    break
  177. @defer.inlineCallbacks
  178. def _process_one(self, push_action):
  179. if 'notify' not in push_action['actions']:
  180. defer.returnValue(True)
  181. tweaks = push_rule_evaluator.tweaks_for_actions(push_action['actions'])
  182. badge = yield push_tools.get_badge_count(self.hs.get_datastore(), self.user_id)
  183. event = yield self.store.get_event(push_action['event_id'], allow_none=True)
  184. if event is None:
  185. defer.returnValue(True) # It's been redacted
  186. rejected = yield self.dispatch_push(event, tweaks, badge)
  187. if rejected is False:
  188. defer.returnValue(False)
  189. if isinstance(rejected, list) or isinstance(rejected, tuple):
  190. for pk in rejected:
  191. if pk != self.pushkey:
  192. # for sanity, we only remove the pushkey if it
  193. # was the one we actually sent...
  194. logger.warn(
  195. ("Ignoring rejected pushkey %s because we"
  196. " didn't send it"), pk
  197. )
  198. else:
  199. logger.info(
  200. "Pushkey %s was rejected: removing",
  201. pk
  202. )
  203. yield self.hs.remove_pusher(
  204. self.app_id, pk, self.user_id
  205. )
  206. defer.returnValue(True)
    @defer.inlineCallbacks
    def _build_notification_dict(self, event, tweaks, badge):
        """Build the JSON body POSTed to the push gateway for this event.

        Args:
            event: the event to notify about (an object with event_id,
                room_id, type, user_id etc. attributes).
            tweaks (dict): tweaks derived from the push rule actions.
            badge (int): current unread count for the user.

        Returns (via Deferred): the notification dict.
        """
        ctx = yield push_tools.get_context_for_event(self.hs.get_datastore(), event)

        d = {
            'notification': {
                'id': event.event_id,  # deprecated: remove soon
                'event_id': event.event_id,
                'room_id': event.room_id,
                'type': event.type,
                'sender': event.user_id,
                'counts': {  # -- we don't mark messages as read yet so
                             # we have no way of knowing
                    # Just set the badge to 1 until we have read receipts
                    'unread': badge,
                    # 'missed_calls': 2
                },
                'devices': [
                    {
                        'app_id': self.app_id,
                        'pushkey': self.pushkey,
                        # ts is stored in ms; the gateway wants seconds
                        'pushkey_ts': long(self.pushkey_ts / 1000),
                        'data': self.data_minus_url,
                        'tweaks': tweaks
                    }
                ]
            }
        }
        if event.type == 'm.room.member':
            d['notification']['membership'] = event.content['membership']
            d['notification']['user_is_target'] = event.state_key == self.user_id
        if 'content' in event:
            d['notification']['content'] = event.content

        # Optional context: only include room alias / display names when we
        # actually have non-empty values for them.
        if len(ctx['aliases']):
            d['notification']['room_alias'] = ctx['aliases'][0]
        if 'sender_display_name' in ctx and len(ctx['sender_display_name']) > 0:
            d['notification']['sender_display_name'] = ctx['sender_display_name']
        if 'name' in ctx and len(ctx['name']) > 0:
            d['notification']['room_name'] = ctx['name']

        defer.returnValue(d)
  246. @defer.inlineCallbacks
  247. def dispatch_push(self, event, tweaks, badge):
  248. notification_dict = yield self._build_notification_dict(event, tweaks, badge)
  249. if not notification_dict:
  250. defer.returnValue([])
  251. try:
  252. resp = yield self.http_client.post_json_get_json(self.url, notification_dict)
  253. except:
  254. logger.warn("Failed to push %s ", self.url)
  255. defer.returnValue(False)
  256. rejected = []
  257. if 'rejected' in resp:
  258. rejected = resp['rejected']
  259. defer.returnValue(rejected)
  260. @defer.inlineCallbacks
  261. def _send_badge(self, badge):
  262. logger.info("Sending updated badge count %d to %r", badge, self.user_id)
  263. d = {
  264. 'notification': {
  265. 'id': '',
  266. 'type': None,
  267. 'sender': '',
  268. 'counts': {
  269. 'unread': badge
  270. },
  271. 'devices': [
  272. {
  273. 'app_id': self.app_id,
  274. 'pushkey': self.pushkey,
  275. 'pushkey_ts': long(self.pushkey_ts / 1000),
  276. 'data': self.data_minus_url,
  277. }
  278. ]
  279. }
  280. }
  281. try:
  282. resp = yield self.http_client.post_json_get_json(self.url, d)
  283. except:
  284. logger.exception("Failed to push %s ", self.url)
  285. defer.returnValue(False)
  286. rejected = []
  287. if 'rejected' in resp:
  288. rejected = resp['rejected']
  289. defer.returnValue(rejected)