You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 
 
 

879 lines
30 KiB

  1. # Copyright 2014-2016 OpenMarket Ltd
  2. # Copyright 2018 New Vector Ltd
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. import logging
  16. from typing import (
  17. TYPE_CHECKING,
  18. Any,
  19. Collection,
  20. Dict,
  21. Iterable,
  22. List,
  23. Mapping,
  24. Optional,
  25. Sequence,
  26. Tuple,
  27. Union,
  28. cast,
  29. )
  30. from synapse.api.errors import StoreError
  31. from synapse.config.homeserver import ExperimentalConfig
  32. from synapse.replication.tcp.streams import PushRulesStream
  33. from synapse.storage._base import SQLBaseStore
  34. from synapse.storage.database import (
  35. DatabasePool,
  36. LoggingDatabaseConnection,
  37. LoggingTransaction,
  38. )
  39. from synapse.storage.databases.main.appservice import ApplicationServiceWorkerStore
  40. from synapse.storage.databases.main.events_worker import EventsWorkerStore
  41. from synapse.storage.databases.main.pusher import PusherWorkerStore
  42. from synapse.storage.databases.main.receipts import ReceiptsWorkerStore
  43. from synapse.storage.databases.main.roommember import RoomMemberWorkerStore
  44. from synapse.storage.engines import PostgresEngine, Sqlite3Engine
  45. from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundException
  46. from synapse.storage.util.id_generators import (
  47. AbstractStreamIdGenerator,
  48. AbstractStreamIdTracker,
  49. IdGenerator,
  50. StreamIdGenerator,
  51. )
  52. from synapse.synapse_rust.push import FilteredPushRules, PushRule, PushRules
  53. from synapse.types import JsonDict
  54. from synapse.util import json_encoder
  55. from synapse.util.caches.descriptors import cached, cachedList
  56. from synapse.util.caches.stream_change_cache import StreamChangeCache
if TYPE_CHECKING:
    # Imported only for type checking to avoid a circular import at runtime.
    from synapse.server import HomeServer

# Module-level logger, named after this module per project convention.
logger = logging.getLogger(__name__)
  60. def _load_rules(
  61. rawrules: List[JsonDict],
  62. enabled_map: Dict[str, bool],
  63. experimental_config: ExperimentalConfig,
  64. ) -> FilteredPushRules:
  65. """Take the DB rows returned from the DB and convert them into a full
  66. `FilteredPushRules` object.
  67. """
  68. ruleslist = [
  69. PushRule.from_db(
  70. rule_id=rawrule["rule_id"],
  71. priority_class=rawrule["priority_class"],
  72. conditions=rawrule["conditions"],
  73. actions=rawrule["actions"],
  74. )
  75. for rawrule in rawrules
  76. ]
  77. push_rules = PushRules(ruleslist)
  78. filtered_rules = FilteredPushRules(
  79. push_rules,
  80. enabled_map,
  81. msc1767_enabled=experimental_config.msc1767_enabled,
  82. msc3664_enabled=experimental_config.msc3664_enabled,
  83. msc3381_polls_enabled=experimental_config.msc3381_polls_enabled,
  84. msc3952_intentional_mentions=experimental_config.msc3952_intentional_mentions,
  85. )
  86. return filtered_rules
class PushRulesWorkerStore(
    ApplicationServiceWorkerStore,
    PusherWorkerStore,
    RoomMemberWorkerStore,
    ReceiptsWorkerStore,
    EventsWorkerStore,
    SQLBaseStore,
):
    """This is an abstract base class where subclasses must implement
    `get_max_push_rules_stream_id` which can be called in the initializer.
    """

    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)

        # In the worker store this is an ID tracker which we overwrite in the non-worker
        # class below that is used on the main process.
        self._push_rules_stream_id_gen: AbstractStreamIdTracker = StreamIdGenerator(
            db_conn,
            hs.get_replication_notifier(),
            "push_rules_stream",
            "stream_id",
            is_writer=hs.config.worker.worker_app is None,
        )

        # Pre-fill a cache of recent changes from push_rules_stream so that
        # `have_push_rules_changed_for_user` can usually answer without a
        # database query.
        push_rules_prefill, push_rules_id = self.db_pool.get_cache_dict(
            db_conn,
            "push_rules_stream",
            entity_column="user_id",
            stream_column="stream_id",
            max_value=self.get_max_push_rules_stream_id(),
        )

        self.push_rules_stream_cache = StreamChangeCache(
            "PushRulesStreamChangeCache",
            push_rules_id,
            prefilled_cache=push_rules_prefill,
        )

    def get_max_push_rules_stream_id(self) -> int:
        """Get the position of the push rules stream.

        Returns:
            int
        """
        return self._push_rules_stream_id_gen.get_current_token()

    def process_replication_rows(
        self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
    ) -> None:
        # Invalidate cached rules (and mark the stream-change cache) for each
        # user whose push rules changed, then hand off to the superclasses.
        if stream_name == PushRulesStream.NAME:
            self._push_rules_stream_id_gen.advance(instance_name, token)
            for row in rows:
                self.get_push_rules_for_user.invalidate((row.user_id,))
                self.push_rules_stream_cache.entity_has_changed(row.user_id, token)
        return super().process_replication_rows(stream_name, instance_name, token, rows)

    def process_replication_position(
        self, stream_name: str, instance_name: str, token: int
    ) -> None:
        # Advance the stream ID tracker even when no rows accompanied the
        # position update.
        if stream_name == PushRulesStream.NAME:
            self._push_rules_stream_id_gen.advance(instance_name, token)
        super().process_replication_position(stream_name, instance_name, token)

    @cached(max_entries=5000)
    async def get_push_rules_for_user(self, user_id: str) -> FilteredPushRules:
        """Fetch all push rules for the given user from the database.

        Args:
            user_id: The Matrix ID of the user.

        Returns:
            The user's rules (with their enabled state), sorted so that
            higher-priority rules come first.
        """
        rows = await self.db_pool.simple_select_list(
            table="push_rules",
            keyvalues={"user_name": user_id},
            retcols=(
                "user_name",
                "rule_id",
                "priority_class",
                "priority",
                "conditions",
                "actions",
            ),
            desc="get_push_rules_for_user",
        )

        # Sort by descending priority_class, then descending priority within
        # each class (higher-priority rules are evaluated first).
        rows.sort(key=lambda row: (-int(row["priority_class"]), -int(row["priority"])))

        enabled_map = await self.get_push_rules_enabled_for_user(user_id)

        return _load_rules(rows, enabled_map, self.hs.config.experimental)

    async def get_push_rules_enabled_for_user(self, user_id: str) -> Dict[str, bool]:
        """Fetch the explicit enabled/disabled flags for a user's push rules.

        Args:
            user_id: The Matrix ID of the user.

        Returns:
            A map from rule_id to enabled state. Rules with no row in
            `push_rules_enable` are simply absent from the map.
        """
        results = await self.db_pool.simple_select_list(
            table="push_rules_enable",
            keyvalues={"user_name": user_id},
            retcols=("rule_id", "enabled"),
            desc="get_push_rules_enabled_for_user",
        )
        return {r["rule_id"]: bool(r["enabled"]) for r in results}

    async def have_push_rules_changed_for_user(
        self, user_id: str, last_id: int
    ) -> bool:
        """Check whether the user's push rules have changed since `last_id`.

        Args:
            user_id: The Matrix ID of the user.
            last_id: The stream token to check from (exclusive).

        Returns:
            True if any change with a stream_id greater than `last_id` exists.
        """
        if not self.push_rules_stream_cache.has_entity_changed(user_id, last_id):
            # The stream-change cache can definitively say "no change", in
            # which case we avoid the database round trip entirely.
            return False
        else:

            def have_push_rules_changed_txn(txn: LoggingTransaction) -> bool:
                sql = (
                    "SELECT COUNT(stream_id) FROM push_rules_stream"
                    " WHERE user_id = ? AND ? < stream_id"
                )
                txn.execute(sql, (user_id, last_id))
                (count,) = cast(Tuple[int], txn.fetchone())
                return bool(count)

            return await self.db_pool.runInteraction(
                "have_push_rules_changed", have_push_rules_changed_txn
            )

    @cachedList(cached_method_name="get_push_rules_for_user", list_name="user_ids")
    async def bulk_get_push_rules(
        self, user_ids: Collection[str]
    ) -> Dict[str, FilteredPushRules]:
        """Batched version of `get_push_rules_for_user`.

        Args:
            user_ids: The users to fetch push rules for.

        Returns:
            A map from user ID to that user's `FilteredPushRules`; every
            requested user is present, with an empty rule list if they have
            no rules.
        """
        if not user_ids:
            return {}

        # Pre-populate with an empty list per user so users with no rules
        # still get an entry in the result.
        raw_rules: Dict[str, List[JsonDict]] = {user_id: [] for user_id in user_ids}

        rows = await self.db_pool.simple_select_many_batch(
            table="push_rules",
            column="user_name",
            iterable=user_ids,
            retcols=("*",),
            desc="bulk_get_push_rules",
            batch_size=1000,
        )

        # Same ordering as get_push_rules_for_user: highest priority first.
        rows.sort(key=lambda row: (-int(row["priority_class"]), -int(row["priority"])))

        for row in rows:
            raw_rules.setdefault(row["user_name"], []).append(row)

        enabled_map_by_user = await self.bulk_get_push_rules_enabled(user_ids)

        results: Dict[str, FilteredPushRules] = {}

        for user_id, rules in raw_rules.items():
            results[user_id] = _load_rules(
                rules, enabled_map_by_user.get(user_id, {}), self.hs.config.experimental
            )

        return results

    async def bulk_get_push_rules_enabled(
        self, user_ids: Collection[str]
    ) -> Dict[str, Dict[str, bool]]:
        """Batched version of `get_push_rules_enabled_for_user`.

        Args:
            user_ids: The users to fetch enabled maps for.

        Returns:
            A map from user ID to a map of rule_id -> enabled state. Every
            requested user is present (possibly with an empty inner map).
        """
        if not user_ids:
            return {}

        results: Dict[str, Dict[str, bool]] = {user_id: {} for user_id in user_ids}

        rows = await self.db_pool.simple_select_many_batch(
            table="push_rules_enable",
            column="user_name",
            iterable=user_ids,
            retcols=("user_name", "rule_id", "enabled"),
            desc="bulk_get_push_rules_enabled",
            batch_size=1000,
        )
        for row in rows:
            enabled = bool(row["enabled"])
            results.setdefault(row["user_name"], {})[row["rule_id"]] = enabled
        return results

    async def get_all_push_rule_updates(
        self, instance_name: str, last_id: int, current_id: int, limit: int
    ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]:
        """Get updates for push_rules replication stream.

        Args:
            instance_name: The writer we want to fetch updates from. Unused
                here since there is only ever one writer.
            last_id: The token to fetch updates from. Exclusive.
            current_id: The token to fetch updates up to. Inclusive.
            limit: The requested limit for the number of rows to return. The
                function may return more or fewer rows.

        Returns:
            A tuple consisting of: the updates, a token to use to fetch
            subsequent updates, and whether we returned fewer rows than exists
            between the requested tokens due to the limit.

            The token returned can be used in a subsequent call to this
            function to get further updates.

            The updates are a list of 2-tuples of stream ID and the row data
        """
        if last_id == current_id:
            # Nothing could have changed between identical tokens.
            return [], current_id, False

        def get_all_push_rule_updates_txn(
            txn: LoggingTransaction,
        ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]:
            sql = """
                SELECT stream_id, user_id
                FROM push_rules_stream
                WHERE ? < stream_id AND stream_id <= ?
                ORDER BY stream_id ASC
                LIMIT ?
            """
            txn.execute(sql, (last_id, current_id, limit))
            updates = cast(
                List[Tuple[int, Tuple[str]]],
                [(stream_id, (user_id,)) for stream_id, user_id in txn],
            )

            limited = False
            upper_bound = current_id
            # If we filled the limit we may have missed rows; report the last
            # stream ID we actually returned so the caller can page onwards.
            if len(updates) == limit:
                limited = True
                upper_bound = updates[-1][0]

            return updates, upper_bound, limited

        return await self.db_pool.runInteraction(
            "get_all_push_rule_updates", get_all_push_rule_updates_txn
        )
class PushRuleStore(PushRulesWorkerStore):
    """Writer store for push rules; only instantiated on the main process."""

    # Because we have write access, this will be a StreamIdGenerator
    # (see PushRulesWorkerStore.__init__)
    _push_rules_stream_id_gen: AbstractStreamIdGenerator

    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)
        # Generators for the `id` columns of push_rules / push_rules_enable.
        self._push_rule_id_gen = IdGenerator(db_conn, "push_rules", "id")
        self._push_rules_enable_id_gen = IdGenerator(db_conn, "push_rules_enable", "id")

    async def add_push_rule(
        self,
        user_id: str,
        rule_id: str,
        priority_class: int,
        conditions: Sequence[Mapping[str, str]],
        actions: Sequence[Union[Mapping[str, Any], str]],
        before: Optional[str] = None,
        after: Optional[str] = None,
    ) -> None:
        """Add (or upsert) a push rule for a user.

        Args:
            user_id: The Matrix ID of the user.
            rule_id: The ID of the rule to add.
            priority_class: The priority class of the rule.
            conditions: The rule's conditions (JSON-encoded before storage).
            actions: The rule's actions (JSON-encoded before storage).
            before: If set, the rule_id of an existing rule to place this rule
                directly before (i.e. at a higher priority).
            after: If set, the rule_id of an existing rule to place this rule
                directly after.
        """
        conditions_json = json_encoder.encode(conditions)
        actions_json = json_encoder.encode(actions)
        async with self._push_rules_stream_id_gen.get_next() as stream_id:
            event_stream_ordering = self._stream_id_gen.get_current_token()

            if before or after:
                await self.db_pool.runInteraction(
                    "_add_push_rule_relative_txn",
                    self._add_push_rule_relative_txn,
                    stream_id,
                    event_stream_ordering,
                    user_id,
                    rule_id,
                    priority_class,
                    conditions_json,
                    actions_json,
                    before,
                    after,
                )
            else:
                await self.db_pool.runInteraction(
                    "_add_push_rule_highest_priority_txn",
                    self._add_push_rule_highest_priority_txn,
                    stream_id,
                    event_stream_ordering,
                    user_id,
                    rule_id,
                    priority_class,
                    conditions_json,
                    actions_json,
                )

    def _add_push_rule_relative_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        priority_class: int,
        conditions_json: str,
        actions_json: str,
        before: str,
        after: str,
    ) -> None:
        """Insert a rule positioned relative to an existing rule.

        Raises:
            RuleNotFoundException: if the before/after rule does not exist.
            InconsistentRuleException: if the relative rule is in a different
                priority class.
        """
        # Lock the table since otherwise we'll have annoying races between the
        # SELECT here and the UPSERT below.
        self.database_engine.lock_table(txn, "push_rules")

        relative_to_rule = before or after

        res = self.db_pool.simple_select_one_txn(
            txn,
            table="push_rules",
            keyvalues={"user_name": user_id, "rule_id": relative_to_rule},
            retcols=["priority_class", "priority"],
            allow_none=True,
        )

        if not res:
            raise RuleNotFoundException(
                "before/after rule not found: %s" % (relative_to_rule,)
            )

        base_priority_class = res["priority_class"]
        base_rule_priority = res["priority"]

        if base_priority_class != priority_class:
            raise InconsistentRuleException(
                "Given priority class does not match class of relative rule"
            )

        if before:
            # Higher priority rules are executed first, So adding a rule before
            # a rule means giving it a higher priority than that rule.
            new_rule_priority = base_rule_priority + 1
        else:
            # We increment the priority of the existing rules to make space for
            # the new rule. Therefore if we want this rule to appear after
            # an existing rule we give it the priority of the existing rule,
            # and then increment the priority of the existing rule.
            new_rule_priority = base_rule_priority

        # Shift every rule at or above the chosen priority up by one to make
        # room for the new rule.
        sql = (
            "UPDATE push_rules SET priority = priority + 1"
            " WHERE user_name = ? AND priority_class = ? AND priority >= ?"
        )

        txn.execute(sql, (user_id, priority_class, new_rule_priority))

        self._upsert_push_rule_txn(
            txn,
            stream_id,
            event_stream_ordering,
            user_id,
            rule_id,
            priority_class,
            new_rule_priority,
            conditions_json,
            actions_json,
        )

    def _add_push_rule_highest_priority_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        priority_class: int,
        conditions_json: str,
        actions_json: str,
    ) -> None:
        """Insert a rule at the highest priority within its priority class."""
        # Lock the table since otherwise we'll have annoying races between the
        # SELECT here and the UPSERT below.
        self.database_engine.lock_table(txn, "push_rules")

        # find the highest priority rule in that class
        sql = (
            "SELECT COUNT(*), MAX(priority) FROM push_rules"
            " WHERE user_name = ? and priority_class = ?"
        )
        txn.execute(sql, (user_id, priority_class))
        res = txn.fetchall()
        (how_many, highest_prio) = res[0]

        # If the class is empty the new rule gets priority 0, otherwise one
        # above the current maximum.
        new_prio = 0
        if how_many > 0:
            new_prio = highest_prio + 1

        self._upsert_push_rule_txn(
            txn,
            stream_id,
            event_stream_ordering,
            user_id,
            rule_id,
            priority_class,
            new_prio,
            conditions_json,
            actions_json,
        )

    def _upsert_push_rule_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        priority_class: int,
        priority: int,
        conditions_json: str,
        actions_json: str,
        update_stream: bool = True,
    ) -> None:
        """Specialised version of simple_upsert_txn that picks a push_rule_id
        using the _push_rule_id_gen if it needs to insert the rule. It assumes
        that the "push_rules" table is locked"""

        # UPDATE-then-INSERT upsert: try updating an existing row first and
        # fall back to inserting if nothing matched.
        sql = (
            "UPDATE push_rules"
            " SET priority_class = ?, priority = ?, conditions = ?, actions = ?"
            " WHERE user_name = ? AND rule_id = ?"
        )

        txn.execute(
            sql,
            (priority_class, priority, conditions_json, actions_json, user_id, rule_id),
        )

        if txn.rowcount == 0:
            # We didn't update a row with the given rule_id so insert one
            push_rule_id = self._push_rule_id_gen.get_next()

            self.db_pool.simple_insert_txn(
                txn,
                table="push_rules",
                values={
                    "id": push_rule_id,
                    "user_name": user_id,
                    "rule_id": rule_id,
                    "priority_class": priority_class,
                    "priority": priority,
                    "conditions": conditions_json,
                    "actions": actions_json,
                },
            )

        if update_stream:
            self._insert_push_rules_update_txn(
                txn,
                stream_id,
                event_stream_ordering,
                user_id,
                rule_id,
                op="ADD",
                data={
                    "priority_class": priority_class,
                    "priority": priority,
                    "conditions": conditions_json,
                    "actions": actions_json,
                },
            )

        # ensure we have a push_rules_enable row
        # enabledness defaults to true
        if isinstance(self.database_engine, PostgresEngine):
            sql = """
                INSERT INTO push_rules_enable (id, user_name, rule_id, enabled)
                VALUES (?, ?, ?, ?)
                ON CONFLICT DO NOTHING
            """
        elif isinstance(self.database_engine, Sqlite3Engine):
            sql = """
                INSERT OR IGNORE INTO push_rules_enable (id, user_name, rule_id, enabled)
                VALUES (?, ?, ?, ?)
            """
        else:
            raise RuntimeError("Unknown database engine")

        new_enable_id = self._push_rules_enable_id_gen.get_next()
        txn.execute(sql, (new_enable_id, user_id, rule_id, 1))

    async def delete_push_rule(self, user_id: str, rule_id: str) -> None:
        """
        Delete a push rule. Args specify the row to be deleted and can be
        any of the columns in the push_rule table, but below are the
        standard ones

        Args:
            user_id: The matrix ID of the push rule owner
            rule_id: The rule_id of the rule to be deleted
        """

        def delete_push_rule_txn(
            txn: LoggingTransaction,
            stream_id: int,
            event_stream_ordering: int,
        ) -> None:
            # we don't use simple_delete_one_txn because that would fail if the
            # user did not have a push_rule_enable row.
            self.db_pool.simple_delete_txn(
                txn, "push_rules_enable", {"user_name": user_id, "rule_id": rule_id}
            )

            self.db_pool.simple_delete_one_txn(
                txn, "push_rules", {"user_name": user_id, "rule_id": rule_id}
            )

            self._insert_push_rules_update_txn(
                txn, stream_id, event_stream_ordering, user_id, rule_id, op="DELETE"
            )

        async with self._push_rules_stream_id_gen.get_next() as stream_id:
            event_stream_ordering = self._stream_id_gen.get_current_token()

            await self.db_pool.runInteraction(
                "delete_push_rule",
                delete_push_rule_txn,
                stream_id,
                event_stream_ordering,
            )

    async def set_push_rule_enabled(
        self, user_id: str, rule_id: str, enabled: bool, is_default_rule: bool
    ) -> None:
        """
        Sets the `enabled` state of a push rule.

        Args:
            user_id: the user ID of the user who wishes to enable/disable the rule
                e.g. '@tina:example.org'
            rule_id: the full rule ID of the rule to be enabled/disabled
                e.g. 'global/override/.m.rule.roomnotif'
                  or 'global/override/myCustomRule'
            enabled: True if the rule is to be enabled, False if it is to be
                disabled
            is_default_rule: True if and only if this is a server-default rule.
                This skips the check for existence (as only user-created rules
                are always stored in the database `push_rules` table).

        Raises:
            RuleNotFoundException if the rule does not exist.
        """
        async with self._push_rules_stream_id_gen.get_next() as stream_id:
            event_stream_ordering = self._stream_id_gen.get_current_token()
            await self.db_pool.runInteraction(
                "_set_push_rule_enabled_txn",
                self._set_push_rule_enabled_txn,
                stream_id,
                event_stream_ordering,
                user_id,
                rule_id,
                enabled,
                is_default_rule,
            )

    def _set_push_rule_enabled_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        enabled: bool,
        is_default_rule: bool,
    ) -> None:
        """Transaction body for `set_push_rule_enabled`."""
        new_id = self._push_rules_enable_id_gen.get_next()

        if not is_default_rule:
            # first check it exists; we need to lock for key share so that a
            # transaction that deletes the push rule will conflict with this one.
            # We also need a push_rule_enable row to exist for every push_rules
            # row, otherwise it is possible to simultaneously delete a push rule
            # (that has no _enable row) and enable it, resulting in a dangling
            # _enable row. To solve this: we either need to use SERIALISABLE or
            # ensure we always have a push_rule_enable row for every push_rule
            # row. We chose the latter.
            for_key_share = "FOR KEY SHARE"
            if not isinstance(self.database_engine, PostgresEngine):
                # For key share is not applicable/available on SQLite
                for_key_share = ""
            sql = (
                """
                SELECT 1 FROM push_rules
                WHERE user_name = ? AND rule_id = ?
                %s
                """
                % for_key_share
            )
            txn.execute(sql, (user_id, rule_id))
            if txn.fetchone() is None:
                raise RuleNotFoundException("Push rule does not exist.")

        self.db_pool.simple_upsert_txn(
            txn,
            "push_rules_enable",
            {"user_name": user_id, "rule_id": rule_id},
            {"enabled": 1 if enabled else 0},
            {"id": new_id},
        )

        self._insert_push_rules_update_txn(
            txn,
            stream_id,
            event_stream_ordering,
            user_id,
            rule_id,
            op="ENABLE" if enabled else "DISABLE",
        )

    async def set_push_rule_actions(
        self,
        user_id: str,
        rule_id: str,
        actions: List[Union[dict, str]],
        is_default_rule: bool,
    ) -> None:
        """
        Sets the `actions` state of a push rule.

        Args:
            user_id: the user ID of the user who wishes to enable/disable the rule
                e.g. '@tina:example.org'
            rule_id: the full rule ID of the rule to be enabled/disabled
                e.g. 'global/override/.m.rule.roomnotif'
                  or 'global/override/myCustomRule'
            actions: A list of actions (each action being a dict or string),
                e.g. ["notify", {"set_tweak": "highlight", "value": false}]
            is_default_rule: True if and only if this is a server-default rule.
                This skips the check for existence (as only user-created rules
                are always stored in the database `push_rules` table).

        Raises:
            RuleNotFoundException if the rule does not exist.
        """
        actions_json = json_encoder.encode(actions)

        def set_push_rule_actions_txn(
            txn: LoggingTransaction,
            stream_id: int,
            event_stream_ordering: int,
        ) -> None:
            if is_default_rule:
                # Add a dummy rule to the rules table with the user specified
                # actions.
                priority_class = -1
                priority = 1
                self._upsert_push_rule_txn(
                    txn,
                    stream_id,
                    event_stream_ordering,
                    user_id,
                    rule_id,
                    priority_class,
                    priority,
                    "[]",
                    actions_json,
                    update_stream=False,
                )
            else:
                try:
                    self.db_pool.simple_update_one_txn(
                        txn,
                        "push_rules",
                        {"user_name": user_id, "rule_id": rule_id},
                        {"actions": actions_json},
                    )
                except StoreError as serr:
                    if serr.code == 404:
                        # this sets the NOT_FOUND error Code
                        raise RuleNotFoundException("Push rule does not exist")
                    else:
                        raise

            self._insert_push_rules_update_txn(
                txn,
                stream_id,
                event_stream_ordering,
                user_id,
                rule_id,
                op="ACTIONS",
                data={"actions": actions_json},
            )

        async with self._push_rules_stream_id_gen.get_next() as stream_id:
            event_stream_ordering = self._stream_id_gen.get_current_token()
            await self.db_pool.runInteraction(
                "set_push_rule_actions",
                set_push_rule_actions_txn,
                stream_id,
                event_stream_ordering,
            )

    def _insert_push_rules_update_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        op: str,
        data: Optional[JsonDict] = None,
    ) -> None:
        """Record a push-rule change on the push_rules_stream table and
        invalidate the relevant caches once the transaction commits.
        """
        values = {
            "stream_id": stream_id,
            "event_stream_ordering": event_stream_ordering,
            "user_id": user_id,
            "rule_id": rule_id,
            "op": op,
        }
        if data is not None:
            values.update(data)

        self.db_pool.simple_insert_txn(txn, "push_rules_stream", values=values)

        # Cache invalidation must only happen after the transaction commits,
        # hence call_after.
        txn.call_after(self.get_push_rules_for_user.invalidate, (user_id,))
        txn.call_after(
            self.push_rules_stream_cache.entity_has_changed, user_id, stream_id
        )

    def get_max_push_rules_stream_id(self) -> int:
        """Get the current position of the push rules stream."""
        return self._push_rules_stream_id_gen.get_current_token()

    async def copy_push_rule_from_room_to_room(
        self, new_room_id: str, user_id: str, rule: PushRule
    ) -> None:
        """Copy a single push rule from one room to another for a specific user.

        Args:
            new_room_id: ID of the new room.
            user_id : ID of user the push rule belongs to.
            rule: A push rule.
        """
        # Create new rule id
        rule_id_scope = "/".join(rule.rule_id.split("/")[:-1])
        new_rule_id = rule_id_scope + "/" + new_room_id

        new_conditions = []

        # Change room id in each condition
        for condition in rule.conditions:
            new_condition = condition
            if condition.get("key") == "room_id":
                # Copy before mutating so the original condition is untouched.
                new_condition = dict(condition)
                new_condition["pattern"] = new_room_id
            new_conditions.append(new_condition)

        # Add the rule for the new room
        await self.add_push_rule(
            user_id=user_id,
            rule_id=new_rule_id,
            priority_class=rule.priority_class,
            conditions=new_conditions,
            actions=rule.actions,
        )

    async def copy_push_rules_from_room_to_room_for_user(
        self, old_room_id: str, new_room_id: str, user_id: str
    ) -> None:
        """Copy all of the push rules from one room to another for a specific
        user.

        Args:
            old_room_id: ID of the old room.
            new_room_id: ID of the new room.
            user_id: ID of user to copy push rules for.
        """
        # Retrieve push rules for this user
        user_push_rules = await self.get_push_rules_for_user(user_id)

        # Get rules relating to the old room and copy them to the new room
        for rule, enabled in user_push_rules.rules():
            if not enabled:
                continue

            conditions = rule.conditions
            if any(
                (c.get("key") == "room_id" and c.get("pattern") == old_room_id)
                for c in conditions
            ):
                await self.copy_push_rule_from_room_to_room(new_room_id, user_id, rule)