# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re

import ujson as json
from twisted.internet import defer

from synapse.api.errors import SynapseError
from synapse.storage.engines import PostgresEngine, Sqlite3Engine

from .background_updates import BackgroundUpdateStore

logger = logging.getLogger(__name__)
  23. class SearchStore(BackgroundUpdateStore):
  24. EVENT_SEARCH_UPDATE_NAME = "event_search"
  25. EVENT_SEARCH_ORDER_UPDATE_NAME = "event_search_order"
  26. def __init__(self, hs):
  27. super(SearchStore, self).__init__(hs)
  28. self.register_background_update_handler(
  29. self.EVENT_SEARCH_UPDATE_NAME, self._background_reindex_search
  30. )
  31. self.register_background_update_handler(
  32. self.EVENT_SEARCH_ORDER_UPDATE_NAME,
  33. self._background_reindex_search_order
  34. )
  35. @defer.inlineCallbacks
  36. def _background_reindex_search(self, progress, batch_size):
  37. target_min_stream_id = progress["target_min_stream_id_inclusive"]
  38. max_stream_id = progress["max_stream_id_exclusive"]
  39. rows_inserted = progress.get("rows_inserted", 0)
  40. INSERT_CLUMP_SIZE = 1000
  41. TYPES = ["m.room.name", "m.room.message", "m.room.topic"]
  42. def reindex_search_txn(txn):
  43. sql = (
  44. "SELECT stream_ordering, event_id, room_id, type, content FROM events"
  45. " WHERE ? <= stream_ordering AND stream_ordering < ?"
  46. " AND (%s)"
  47. " ORDER BY stream_ordering DESC"
  48. " LIMIT ?"
  49. ) % (" OR ".join("type = '%s'" % (t,) for t in TYPES),)
  50. txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
  51. rows = self.cursor_to_dict(txn)
  52. if not rows:
  53. return 0
  54. min_stream_id = rows[-1]["stream_ordering"]
  55. event_search_rows = []
  56. for row in rows:
  57. try:
  58. event_id = row["event_id"]
  59. room_id = row["room_id"]
  60. etype = row["type"]
  61. try:
  62. content = json.loads(row["content"])
  63. except:
  64. continue
  65. if etype == "m.room.message":
  66. key = "content.body"
  67. value = content["body"]
  68. elif etype == "m.room.topic":
  69. key = "content.topic"
  70. value = content["topic"]
  71. elif etype == "m.room.name":
  72. key = "content.name"
  73. value = content["name"]
  74. except (KeyError, AttributeError):
  75. # If the event is missing a necessary field then
  76. # skip over it.
  77. continue
  78. if not isinstance(value, basestring):
  79. # If the event body, name or topic isn't a string
  80. # then skip over it
  81. continue
  82. event_search_rows.append((event_id, room_id, key, value))
  83. if isinstance(self.database_engine, PostgresEngine):
  84. sql = (
  85. "INSERT INTO event_search (event_id, room_id, key, vector)"
  86. " VALUES (?,?,?,to_tsvector('english', ?))"
  87. )
  88. elif isinstance(self.database_engine, Sqlite3Engine):
  89. sql = (
  90. "INSERT INTO event_search (event_id, room_id, key, value)"
  91. " VALUES (?,?,?,?)"
  92. )
  93. else:
  94. # This should be unreachable.
  95. raise Exception("Unrecognized database engine")
  96. for index in range(0, len(event_search_rows), INSERT_CLUMP_SIZE):
  97. clump = event_search_rows[index:index + INSERT_CLUMP_SIZE]
  98. txn.executemany(sql, clump)
  99. progress = {
  100. "target_min_stream_id_inclusive": target_min_stream_id,
  101. "max_stream_id_exclusive": min_stream_id,
  102. "rows_inserted": rows_inserted + len(event_search_rows)
  103. }
  104. self._background_update_progress_txn(
  105. txn, self.EVENT_SEARCH_UPDATE_NAME, progress
  106. )
  107. return len(event_search_rows)
  108. result = yield self.runInteraction(
  109. self.EVENT_SEARCH_UPDATE_NAME, reindex_search_txn
  110. )
  111. if not result:
  112. yield self._end_background_update(self.EVENT_SEARCH_UPDATE_NAME)
  113. defer.returnValue(result)
  114. @defer.inlineCallbacks
  115. def _background_reindex_search_order(self, progress, batch_size):
  116. target_min_stream_id = progress["target_min_stream_id_inclusive"]
  117. max_stream_id = progress["max_stream_id_exclusive"]
  118. rows_inserted = progress.get("rows_inserted", 0)
  119. have_added_index = progress['have_added_indexes']
  120. if not have_added_index:
  121. def create_index(conn):
  122. conn.rollback()
  123. conn.set_session(autocommit=True)
  124. c = conn.cursor()
  125. # We create with NULLS FIRST so that when we search *backwards*
  126. # we get the ones with non null origin_server_ts *first*
  127. c.execute(
  128. "CREATE INDEX CONCURRENTLY event_search_room_order ON event_search("
  129. "room_id, origin_server_ts NULLS FIRST, stream_ordering NULLS FIRST)"
  130. )
  131. c.execute(
  132. "CREATE INDEX CONCURRENTLY event_search_order ON event_search("
  133. "origin_server_ts NULLS FIRST, stream_ordering NULLS FIRST)"
  134. )
  135. conn.set_session(autocommit=False)
  136. yield self.runWithConnection(create_index)
  137. pg = dict(progress)
  138. pg["have_added_indexes"] = True
  139. yield self.runInteraction(
  140. self.EVENT_SEARCH_ORDER_UPDATE_NAME,
  141. self._background_update_progress_txn,
  142. self.EVENT_SEARCH_ORDER_UPDATE_NAME, pg,
  143. )
  144. def reindex_search_txn(txn):
  145. sql = (
  146. "UPDATE event_search AS es SET stream_ordering = e.stream_ordering,"
  147. " origin_server_ts = e.origin_server_ts"
  148. " FROM events AS e"
  149. " WHERE e.event_id = es.event_id"
  150. " AND ? <= e.stream_ordering AND e.stream_ordering < ?"
  151. " RETURNING es.stream_ordering"
  152. )
  153. min_stream_id = max_stream_id - batch_size
  154. txn.execute(sql, (min_stream_id, max_stream_id))
  155. rows = txn.fetchall()
  156. if min_stream_id < target_min_stream_id:
  157. # We've recached the end.
  158. return len(rows), False
  159. progress = {
  160. "target_min_stream_id_inclusive": target_min_stream_id,
  161. "max_stream_id_exclusive": min_stream_id,
  162. "rows_inserted": rows_inserted + len(rows),
  163. "have_added_indexes": True,
  164. }
  165. self._background_update_progress_txn(
  166. txn, self.EVENT_SEARCH_ORDER_UPDATE_NAME, progress
  167. )
  168. return len(rows), True
  169. num_rows, finished = yield self.runInteraction(
  170. self.EVENT_SEARCH_ORDER_UPDATE_NAME, reindex_search_txn
  171. )
  172. if not finished:
  173. yield self._end_background_update(self.EVENT_SEARCH_ORDER_UPDATE_NAME)
  174. defer.returnValue(num_rows)
  175. @defer.inlineCallbacks
  176. def search_msgs(self, room_ids, search_term, keys):
  177. """Performs a full text search over events with given keys.
  178. Args:
  179. room_ids (list): List of room ids to search in
  180. search_term (str): Search term to search for
  181. keys (list): List of keys to search in, currently supports
  182. "content.body", "content.name", "content.topic"
  183. Returns:
  184. list of dicts
  185. """
  186. clauses = []
  187. search_query = search_query = _parse_query(self.database_engine, search_term)
  188. args = []
  189. # Make sure we don't explode because the person is in too many rooms.
  190. # We filter the results below regardless.
  191. if len(room_ids) < 500:
  192. clauses.append(
  193. "room_id IN (%s)" % (",".join(["?"] * len(room_ids)),)
  194. )
  195. args.extend(room_ids)
  196. local_clauses = []
  197. for key in keys:
  198. local_clauses.append("key = ?")
  199. args.append(key)
  200. clauses.append(
  201. "(%s)" % (" OR ".join(local_clauses),)
  202. )
  203. count_args = args
  204. count_clauses = clauses
  205. if isinstance(self.database_engine, PostgresEngine):
  206. sql = (
  207. "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) AS rank,"
  208. " room_id, event_id"
  209. " FROM event_search"
  210. " WHERE vector @@ to_tsquery('english', ?)"
  211. )
  212. args = [search_query, search_query] + args
  213. count_sql = (
  214. "SELECT room_id, count(*) as count FROM event_search"
  215. " WHERE vector @@ to_tsquery('english', ?)"
  216. )
  217. count_args = [search_query] + count_args
  218. elif isinstance(self.database_engine, Sqlite3Engine):
  219. sql = (
  220. "SELECT rank(matchinfo(event_search)) as rank, room_id, event_id"
  221. " FROM event_search"
  222. " WHERE value MATCH ?"
  223. )
  224. args = [search_query] + args
  225. count_sql = (
  226. "SELECT room_id, count(*) as count FROM event_search"
  227. " WHERE value MATCH ?"
  228. )
  229. count_args = [search_term] + count_args
  230. else:
  231. # This should be unreachable.
  232. raise Exception("Unrecognized database engine")
  233. for clause in clauses:
  234. sql += " AND " + clause
  235. for clause in count_clauses:
  236. count_sql += " AND " + clause
  237. # We add an arbitrary limit here to ensure we don't try to pull the
  238. # entire table from the database.
  239. sql += " ORDER BY rank DESC LIMIT 500"
  240. results = yield self._execute(
  241. "search_msgs", self.cursor_to_dict, sql, *args
  242. )
  243. results = filter(lambda row: row["room_id"] in room_ids, results)
  244. events = yield self._get_events([r["event_id"] for r in results])
  245. event_map = {
  246. ev.event_id: ev
  247. for ev in events
  248. }
  249. highlights = None
  250. if isinstance(self.database_engine, PostgresEngine):
  251. highlights = yield self._find_highlights_in_postgres(search_query, events)
  252. count_sql += " GROUP BY room_id"
  253. count_results = yield self._execute(
  254. "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
  255. )
  256. count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
  257. defer.returnValue({
  258. "results": [
  259. {
  260. "event": event_map[r["event_id"]],
  261. "rank": r["rank"],
  262. }
  263. for r in results
  264. if r["event_id"] in event_map
  265. ],
  266. "highlights": highlights,
  267. "count": count,
  268. })
  269. @defer.inlineCallbacks
  270. def search_rooms(self, room_ids, search_term, keys, limit, pagination_token=None):
  271. """Performs a full text search over events with given keys.
  272. Args:
  273. room_id (list): The room_ids to search in
  274. search_term (str): Search term to search for
  275. keys (list): List of keys to search in, currently supports
  276. "content.body", "content.name", "content.topic"
  277. pagination_token (str): A pagination token previously returned
  278. Returns:
  279. list of dicts
  280. """
  281. clauses = []
  282. search_query = search_query = _parse_query(self.database_engine, search_term)
  283. args = []
  284. # Make sure we don't explode because the person is in too many rooms.
  285. # We filter the results below regardless.
  286. if len(room_ids) < 500:
  287. clauses.append(
  288. "room_id IN (%s)" % (",".join(["?"] * len(room_ids)),)
  289. )
  290. args.extend(room_ids)
  291. local_clauses = []
  292. for key in keys:
  293. local_clauses.append("key = ?")
  294. args.append(key)
  295. clauses.append(
  296. "(%s)" % (" OR ".join(local_clauses),)
  297. )
  298. # take copies of the current args and clauses lists, before adding
  299. # pagination clauses to main query.
  300. count_args = list(args)
  301. count_clauses = list(clauses)
  302. if pagination_token:
  303. try:
  304. origin_server_ts, stream = pagination_token.split(",")
  305. origin_server_ts = int(origin_server_ts)
  306. stream = int(stream)
  307. except:
  308. raise SynapseError(400, "Invalid pagination token")
  309. clauses.append(
  310. "(origin_server_ts < ?"
  311. " OR (origin_server_ts = ? AND stream_ordering < ?))"
  312. )
  313. args.extend([origin_server_ts, origin_server_ts, stream])
  314. if isinstance(self.database_engine, PostgresEngine):
  315. sql = (
  316. "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) as rank,"
  317. " origin_server_ts, stream_ordering, room_id, event_id"
  318. " FROM event_search"
  319. " WHERE vector @@ to_tsquery('english', ?) AND "
  320. )
  321. args = [search_query, search_query] + args
  322. count_sql = (
  323. "SELECT room_id, count(*) as count FROM event_search"
  324. " WHERE vector @@ to_tsquery('english', ?) AND "
  325. )
  326. count_args = [search_query] + count_args
  327. elif isinstance(self.database_engine, Sqlite3Engine):
  328. # We use CROSS JOIN here to ensure we use the right indexes.
  329. # https://sqlite.org/optoverview.html#crossjoin
  330. #
  331. # We want to use the full text search index on event_search to
  332. # extract all possible matches first, then lookup those matches
  333. # in the events table to get the topological ordering. We need
  334. # to use the indexes in this order because sqlite refuses to
  335. # MATCH unless it uses the full text search index
  336. sql = (
  337. "SELECT rank(matchinfo) as rank, room_id, event_id,"
  338. " origin_server_ts, stream_ordering"
  339. " FROM (SELECT key, event_id, matchinfo(event_search) as matchinfo"
  340. " FROM event_search"
  341. " WHERE value MATCH ?"
  342. " )"
  343. " CROSS JOIN events USING (event_id)"
  344. " WHERE "
  345. )
  346. args = [search_query] + args
  347. count_sql = (
  348. "SELECT room_id, count(*) as count FROM event_search"
  349. " WHERE value MATCH ? AND "
  350. )
  351. count_args = [search_term] + count_args
  352. else:
  353. # This should be unreachable.
  354. raise Exception("Unrecognized database engine")
  355. sql += " AND ".join(clauses)
  356. count_sql += " AND ".join(count_clauses)
  357. # We add an arbitrary limit here to ensure we don't try to pull the
  358. # entire table from the database.
  359. if isinstance(self.database_engine, PostgresEngine):
  360. sql += (
  361. " ORDER BY origin_server_ts DESC NULLS LAST,"
  362. " stream_ordering DESC NULLS LAST LIMIT ?"
  363. )
  364. elif isinstance(self.database_engine, Sqlite3Engine):
  365. sql += " ORDER BY origin_server_ts DESC, stream_ordering DESC LIMIT ?"
  366. else:
  367. raise Exception("Unrecognized database engine")
  368. args.append(limit)
  369. results = yield self._execute(
  370. "search_rooms", self.cursor_to_dict, sql, *args
  371. )
  372. results = filter(lambda row: row["room_id"] in room_ids, results)
  373. events = yield self._get_events([r["event_id"] for r in results])
  374. event_map = {
  375. ev.event_id: ev
  376. for ev in events
  377. }
  378. highlights = None
  379. if isinstance(self.database_engine, PostgresEngine):
  380. highlights = yield self._find_highlights_in_postgres(search_query, events)
  381. count_sql += " GROUP BY room_id"
  382. count_results = yield self._execute(
  383. "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
  384. )
  385. count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
  386. defer.returnValue({
  387. "results": [
  388. {
  389. "event": event_map[r["event_id"]],
  390. "rank": r["rank"],
  391. "pagination_token": "%s,%s" % (
  392. r["origin_server_ts"], r["stream_ordering"]
  393. ),
  394. }
  395. for r in results
  396. if r["event_id"] in event_map
  397. ],
  398. "highlights": highlights,
  399. "count": count,
  400. })
  401. def _find_highlights_in_postgres(self, search_query, events):
  402. """Given a list of events and a search term, return a list of words
  403. that match from the content of the event.
  404. This is used to give a list of words that clients can match against to
  405. highlight the matching parts.
  406. Args:
  407. search_query (str)
  408. events (list): A list of events
  409. Returns:
  410. deferred : A set of strings.
  411. """
  412. def f(txn):
  413. highlight_words = set()
  414. for event in events:
  415. # As a hack we simply join values of all possible keys. This is
  416. # fine since we're only using them to find possible highlights.
  417. values = []
  418. for key in ("body", "name", "topic"):
  419. v = event.content.get(key, None)
  420. if v:
  421. values.append(v)
  422. if not values:
  423. continue
  424. value = " ".join(values)
  425. # We need to find some values for StartSel and StopSel that
  426. # aren't in the value so that we can pick results out.
  427. start_sel = "<"
  428. stop_sel = ">"
  429. while start_sel in value:
  430. start_sel += "<"
  431. while stop_sel in value:
  432. stop_sel += ">"
  433. query = "SELECT ts_headline(?, to_tsquery('english', ?), %s)" % (
  434. _to_postgres_options({
  435. "StartSel": start_sel,
  436. "StopSel": stop_sel,
  437. "MaxFragments": "50",
  438. })
  439. )
  440. txn.execute(query, (value, search_query,))
  441. headline, = txn.fetchall()[0]
  442. # Now we need to pick the possible highlights out of the haedline
  443. # result.
  444. matcher_regex = "%s(.*?)%s" % (
  445. re.escape(start_sel),
  446. re.escape(stop_sel),
  447. )
  448. res = re.findall(matcher_regex, headline)
  449. highlight_words.update([r.lower() for r in res])
  450. return highlight_words
  451. return self.runInteraction("_find_highlights", f)
  452. def _to_postgres_options(options_dict):
  453. return "'%s'" % (
  454. ",".join("%s=%s" % (k, v) for k, v in options_dict.items()),
  455. )
  456. def _parse_query(database_engine, search_term):
  457. """Takes a plain unicode string from the user and converts it into a form
  458. that can be passed to database.
  459. We use this so that we can add prefix matching, which isn't something
  460. that is supported by default.
  461. """
  462. # Pull out the individual words, discarding any non-word characters.
  463. results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)
  464. if isinstance(database_engine, PostgresEngine):
  465. return " & ".join(result + ":*" for result in results)
  466. elif isinstance(database_engine, Sqlite3Engine):
  467. return " & ".join(result + "*" for result in results)
  468. else:
  469. # This should be unreachable.
  470. raise Exception("Unrecognized database engine")