You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 
 
 

865 lines
29 KiB

  1. # Copyright 2014-2016 OpenMarket Ltd
  2. # Copyright 2018 New Vector Ltd
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. import abc
  16. import logging
  17. from typing import (
  18. TYPE_CHECKING,
  19. Any,
  20. Collection,
  21. Dict,
  22. List,
  23. Mapping,
  24. Optional,
  25. Sequence,
  26. Tuple,
  27. Union,
  28. cast,
  29. )
  30. from synapse.api.errors import StoreError
  31. from synapse.config.homeserver import ExperimentalConfig
  32. from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
  33. from synapse.storage._base import SQLBaseStore
  34. from synapse.storage.database import (
  35. DatabasePool,
  36. LoggingDatabaseConnection,
  37. LoggingTransaction,
  38. )
  39. from synapse.storage.databases.main.appservice import ApplicationServiceWorkerStore
  40. from synapse.storage.databases.main.events_worker import EventsWorkerStore
  41. from synapse.storage.databases.main.pusher import PusherWorkerStore
  42. from synapse.storage.databases.main.receipts import ReceiptsWorkerStore
  43. from synapse.storage.databases.main.roommember import RoomMemberWorkerStore
  44. from synapse.storage.engines import PostgresEngine, Sqlite3Engine
  45. from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundException
  46. from synapse.storage.util.id_generators import (
  47. AbstractStreamIdGenerator,
  48. AbstractStreamIdTracker,
  49. IdGenerator,
  50. StreamIdGenerator,
  51. )
  52. from synapse.synapse_rust.push import FilteredPushRules, PushRule, PushRules
  53. from synapse.types import JsonDict
  54. from synapse.util import json_encoder
  55. from synapse.util.caches.descriptors import cached, cachedList
  56. from synapse.util.caches.stream_change_cache import StreamChangeCache
  57. if TYPE_CHECKING:
  58. from synapse.server import HomeServer
  59. logger = logging.getLogger(__name__)
  60. def _load_rules(
  61. rawrules: List[JsonDict],
  62. enabled_map: Dict[str, bool],
  63. experimental_config: ExperimentalConfig,
  64. ) -> FilteredPushRules:
  65. """Take the DB rows returned from the DB and convert them into a full
  66. `FilteredPushRules` object.
  67. """
  68. ruleslist = [
  69. PushRule.from_db(
  70. rule_id=rawrule["rule_id"],
  71. priority_class=rawrule["priority_class"],
  72. conditions=rawrule["conditions"],
  73. actions=rawrule["actions"],
  74. )
  75. for rawrule in rawrules
  76. ]
  77. push_rules = PushRules(
  78. ruleslist,
  79. )
  80. filtered_rules = FilteredPushRules(
  81. push_rules,
  82. enabled_map,
  83. msc3786_enabled=experimental_config.msc3786_enabled,
  84. msc3772_enabled=experimental_config.msc3772_enabled,
  85. )
  86. return filtered_rules
# The ABCMeta metaclass ensures that it cannot be instantiated without
# the abstract methods being implemented.
class PushRulesWorkerStore(
    ApplicationServiceWorkerStore,
    PusherWorkerStore,
    RoomMemberWorkerStore,
    ReceiptsWorkerStore,
    EventsWorkerStore,
    SQLBaseStore,
    metaclass=abc.ABCMeta,
):
    """This is an abstract base class where subclasses must implement
    `get_max_push_rules_stream_id` which can be called in the initializer.
    """

    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)

        if hs.config.worker.worker_app is None:
            # Only the main process writes to the push rules stream, so it
            # gets a real stream ID generator; workers merely track the
            # position advanced by the writer.
            self._push_rules_stream_id_gen: AbstractStreamIdTracker = StreamIdGenerator(
                db_conn, "push_rules_stream", "stream_id"
            )
        else:
            self._push_rules_stream_id_gen = SlavedIdTracker(
                db_conn, "push_rules_stream", "stream_id"
            )

        # Prefill the stream change cache from recent rows of
        # `push_rules_stream` so that `has_entity_changed` can answer
        # without hitting the database for recent tokens.
        push_rules_prefill, push_rules_id = self.db_pool.get_cache_dict(
            db_conn,
            "push_rules_stream",
            entity_column="user_id",
            stream_column="stream_id",
            max_value=self.get_max_push_rules_stream_id(),
        )

        self.push_rules_stream_cache = StreamChangeCache(
            "PushRulesStreamChangeCache",
            push_rules_id,
            prefilled_cache=push_rules_prefill,
        )

    @abc.abstractmethod
    def get_max_push_rules_stream_id(self) -> int:
        """Get the current position of the push rules stream.

        Returns:
            The latest stream token.
        """
        raise NotImplementedError()

    @cached(max_entries=5000)
    async def get_push_rules_for_user(self, user_id: str) -> FilteredPushRules:
        """Fetch all push rules for a single user, combined with their
        enabled/disabled state, as a `FilteredPushRules` object.
        """
        rows = await self.db_pool.simple_select_list(
            table="push_rules",
            keyvalues={"user_name": user_id},
            retcols=(
                "user_name",
                "rule_id",
                "priority_class",
                "priority",
                "conditions",
                "actions",
            ),
            desc="get_push_rules_for_user",
        )

        # Highest priority_class first, then highest priority within a class
        # (the negated keys give a descending sort).
        rows.sort(key=lambda row: (-int(row["priority_class"]), -int(row["priority"])))

        enabled_map = await self.get_push_rules_enabled_for_user(user_id)

        return _load_rules(rows, enabled_map, self.hs.config.experimental)

    async def get_push_rules_enabled_for_user(self, user_id: str) -> Dict[str, bool]:
        """Fetch the enabled/disabled state of a user's push rules.

        Returns:
            A map from rule ID to whether the rule is enabled. Rules without
            a `push_rules_enable` row do not appear in the map.
        """
        results = await self.db_pool.simple_select_list(
            table="push_rules_enable",
            keyvalues={"user_name": user_id},
            retcols=("rule_id", "enabled"),
            desc="get_push_rules_enabled_for_user",
        )
        return {r["rule_id"]: bool(r["enabled"]) for r in results}

    async def have_push_rules_changed_for_user(
        self, user_id: str, last_id: int
    ) -> bool:
        """Check whether the user's push rules have changed since the given
        stream token.

        Consults the in-memory stream change cache first and only queries
        the database when the cache cannot rule out a change.
        """
        if not self.push_rules_stream_cache.has_entity_changed(user_id, last_id):
            return False
        else:

            def have_push_rules_changed_txn(txn: LoggingTransaction) -> bool:
                sql = (
                    "SELECT COUNT(stream_id) FROM push_rules_stream"
                    " WHERE user_id = ? AND ? < stream_id"
                )
                txn.execute(sql, (user_id, last_id))
                (count,) = cast(Tuple[int], txn.fetchone())
                return bool(count)

            return await self.db_pool.runInteraction(
                "have_push_rules_changed", have_push_rules_changed_txn
            )

    @cachedList(cached_method_name="get_push_rules_for_user", list_name="user_ids")
    async def bulk_get_push_rules(
        self, user_ids: Collection[str]
    ) -> Dict[str, FilteredPushRules]:
        """Fetch push rules for many users at once.

        Returns:
            A map from user ID to that user's `FilteredPushRules`. Every
            requested user appears in the result, even with no rules.
        """
        if not user_ids:
            return {}

        raw_rules: Dict[str, List[JsonDict]] = {user_id: [] for user_id in user_ids}

        rows = await self.db_pool.simple_select_many_batch(
            table="push_rules",
            column="user_name",
            iterable=user_ids,
            retcols=("*",),
            desc="bulk_get_push_rules",
            batch_size=1000,
        )

        # Same descending (priority_class, priority) ordering as
        # `get_push_rules_for_user`.
        rows.sort(key=lambda row: (-int(row["priority_class"]), -int(row["priority"])))

        for row in rows:
            raw_rules.setdefault(row["user_name"], []).append(row)

        enabled_map_by_user = await self.bulk_get_push_rules_enabled(user_ids)

        results: Dict[str, FilteredPushRules] = {}

        for user_id, rules in raw_rules.items():
            results[user_id] = _load_rules(
                rules, enabled_map_by_user.get(user_id, {}), self.hs.config.experimental
            )

        return results

    async def bulk_get_push_rules_enabled(
        self, user_ids: Collection[str]
    ) -> Dict[str, Dict[str, bool]]:
        """Fetch the enabled/disabled maps for many users at once.

        Returns:
            A map from user ID to a map of rule ID -> enabled. Every
            requested user appears in the result.
        """
        if not user_ids:
            return {}

        results: Dict[str, Dict[str, bool]] = {user_id: {} for user_id in user_ids}

        rows = await self.db_pool.simple_select_many_batch(
            table="push_rules_enable",
            column="user_name",
            iterable=user_ids,
            retcols=("user_name", "rule_id", "enabled"),
            desc="bulk_get_push_rules_enabled",
            batch_size=1000,
        )
        for row in rows:
            enabled = bool(row["enabled"])
            results.setdefault(row["user_name"], {})[row["rule_id"]] = enabled
        return results

    async def get_all_push_rule_updates(
        self, instance_name: str, last_id: int, current_id: int, limit: int
    ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]:
        """Get updates for push_rules replication stream.

        Args:
            instance_name: The writer we want to fetch updates from. Unused
                here since there is only ever one writer.
            last_id: The token to fetch updates from. Exclusive.
            current_id: The token to fetch updates up to. Inclusive.
            limit: The requested limit for the number of rows to return. The
                function may return more or fewer rows.

        Returns:
            A tuple consisting of: the updates, a token to use to fetch
            subsequent updates, and whether we returned fewer rows than exists
            between the requested tokens due to the limit.

            The token returned can be used in a subsequent call to this
            function to get further updates.

            The updates are a list of 2-tuples of stream ID and the row data
        """

        if last_id == current_id:
            return [], current_id, False

        def get_all_push_rule_updates_txn(
            txn: LoggingTransaction,
        ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]:
            sql = """
                SELECT stream_id, user_id
                FROM push_rules_stream
                WHERE ? < stream_id AND stream_id <= ?
                ORDER BY stream_id ASC
                LIMIT ?
            """
            txn.execute(sql, (last_id, current_id, limit))
            updates = cast(
                List[Tuple[int, Tuple[str]]],
                [(stream_id, (user_id,)) for stream_id, user_id in txn],
            )

            limited = False
            upper_bound = current_id
            if len(updates) == limit:
                # We hit the limit, so report the highest stream ID we
                # actually returned as the new upper bound.
                limited = True
                upper_bound = updates[-1][0]

            return updates, upper_bound, limited

        return await self.db_pool.runInteraction(
            "get_all_push_rule_updates", get_all_push_rule_updates_txn
        )
class PushRuleStore(PushRulesWorkerStore):
    """Adds the write paths for push rules on top of `PushRulesWorkerStore`."""

    # Because we have write access, this will be a StreamIdGenerator
    # (see PushRulesWorkerStore.__init__)
    _push_rules_stream_id_gen: AbstractStreamIdGenerator

    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)

        # Row-ID generators for the `push_rules` and `push_rules_enable`
        # tables (distinct from the replication stream ID generator).
        self._push_rule_id_gen = IdGenerator(db_conn, "push_rules", "id")
        self._push_rules_enable_id_gen = IdGenerator(db_conn, "push_rules_enable", "id")

    async def add_push_rule(
        self,
        user_id: str,
        rule_id: str,
        priority_class: int,
        conditions: Sequence[Mapping[str, str]],
        actions: Sequence[Union[Mapping[str, Any], str]],
        before: Optional[str] = None,
        after: Optional[str] = None,
    ) -> None:
        """Add (or replace) a push rule for a user.

        Args:
            user_id: The Matrix ID of the rule's owner.
            rule_id: The ID of the rule to add.
            priority_class: The class of the rule (e.g. override, underride).
            conditions: The conditions under which the rule fires.
            actions: The actions to perform when the rule fires.
            before: If set, the rule_id of an existing rule to insert this
                rule directly before (i.e. at higher priority).
            after: If set, the rule_id of an existing rule to insert this
                rule directly after (i.e. at lower priority).
        """
        conditions_json = json_encoder.encode(conditions)
        actions_json = json_encoder.encode(actions)
        async with self._push_rules_stream_id_gen.get_next() as stream_id:
            # NOTE(review): `_stream_id_gen` comes from an ancestor store
            # (events); its current token is recorded alongside the rule
            # change in the stream table.
            event_stream_ordering = self._stream_id_gen.get_current_token()

            if before or after:
                await self.db_pool.runInteraction(
                    "_add_push_rule_relative_txn",
                    self._add_push_rule_relative_txn,
                    stream_id,
                    event_stream_ordering,
                    user_id,
                    rule_id,
                    priority_class,
                    conditions_json,
                    actions_json,
                    before,
                    after,
                )
            else:
                await self.db_pool.runInteraction(
                    "_add_push_rule_highest_priority_txn",
                    self._add_push_rule_highest_priority_txn,
                    stream_id,
                    event_stream_ordering,
                    user_id,
                    rule_id,
                    priority_class,
                    conditions_json,
                    actions_json,
                )

    def _add_push_rule_relative_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        priority_class: int,
        conditions_json: str,
        actions_json: str,
        before: str,
        after: str,
    ) -> None:
        """Insert a rule positioned relative to an existing rule.

        Raises:
            RuleNotFoundException: if the before/after rule does not exist.
            InconsistentRuleException: if the relative rule is in a
                different priority class.
        """
        # Lock the table since otherwise we'll have annoying races between the
        # SELECT here and the UPSERT below.
        self.database_engine.lock_table(txn, "push_rules")

        relative_to_rule = before or after

        res = self.db_pool.simple_select_one_txn(
            txn,
            table="push_rules",
            keyvalues={"user_name": user_id, "rule_id": relative_to_rule},
            retcols=["priority_class", "priority"],
            allow_none=True,
        )

        if not res:
            raise RuleNotFoundException(
                "before/after rule not found: %s" % (relative_to_rule,)
            )

        base_priority_class = res["priority_class"]
        base_rule_priority = res["priority"]

        if base_priority_class != priority_class:
            raise InconsistentRuleException(
                "Given priority class does not match class of relative rule"
            )

        if before:
            # Higher priority rules are executed first, So adding a rule before
            # a rule means giving it a higher priority than that rule.
            new_rule_priority = base_rule_priority + 1
        else:
            # We increment the priority of the existing rules to make space for
            # the new rule. Therefore if we want this rule to appear after
            # an existing rule we give it the priority of the existing rule,
            # and then increment the priority of the existing rule.
            new_rule_priority = base_rule_priority

        # Shift every rule at or above the chosen priority up by one to make
        # room for the new rule.
        sql = (
            "UPDATE push_rules SET priority = priority + 1"
            " WHERE user_name = ? AND priority_class = ? AND priority >= ?"
        )

        txn.execute(sql, (user_id, priority_class, new_rule_priority))

        self._upsert_push_rule_txn(
            txn,
            stream_id,
            event_stream_ordering,
            user_id,
            rule_id,
            priority_class,
            new_rule_priority,
            conditions_json,
            actions_json,
        )

    def _add_push_rule_highest_priority_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        priority_class: int,
        conditions_json: str,
        actions_json: str,
    ) -> None:
        """Insert a rule at the highest priority within its class."""
        # Lock the table since otherwise we'll have annoying races between the
        # SELECT here and the UPSERT below.
        self.database_engine.lock_table(txn, "push_rules")

        # find the highest priority rule in that class
        sql = (
            "SELECT COUNT(*), MAX(priority) FROM push_rules"
            " WHERE user_name = ? and priority_class = ?"
        )
        txn.execute(sql, (user_id, priority_class))
        res = txn.fetchall()
        (how_many, highest_prio) = res[0]

        # If the class is empty, MAX(priority) is NULL, so start at 0.
        new_prio = 0
        if how_many > 0:
            new_prio = highest_prio + 1

        self._upsert_push_rule_txn(
            txn,
            stream_id,
            event_stream_ordering,
            user_id,
            rule_id,
            priority_class,
            new_prio,
            conditions_json,
            actions_json,
        )

    def _upsert_push_rule_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        priority_class: int,
        priority: int,
        conditions_json: str,
        actions_json: str,
        update_stream: bool = True,
    ) -> None:
        """Specialised version of simple_upsert_txn that picks a push_rule_id
        using the _push_rule_id_gen if it needs to insert the rule. It assumes
        that the "push_rules" table is locked"""

        # UPDATE-then-INSERT upsert: try updating an existing row first.
        sql = (
            "UPDATE push_rules"
            " SET priority_class = ?, priority = ?, conditions = ?, actions = ?"
            " WHERE user_name = ? AND rule_id = ?"
        )

        txn.execute(
            sql,
            (priority_class, priority, conditions_json, actions_json, user_id, rule_id),
        )

        if txn.rowcount == 0:
            # We didn't update a row with the given rule_id so insert one
            push_rule_id = self._push_rule_id_gen.get_next()

            self.db_pool.simple_insert_txn(
                txn,
                table="push_rules",
                values={
                    "id": push_rule_id,
                    "user_name": user_id,
                    "rule_id": rule_id,
                    "priority_class": priority_class,
                    "priority": priority,
                    "conditions": conditions_json,
                    "actions": actions_json,
                },
            )

        if update_stream:
            self._insert_push_rules_update_txn(
                txn,
                stream_id,
                event_stream_ordering,
                user_id,
                rule_id,
                op="ADD",
                data={
                    "priority_class": priority_class,
                    "priority": priority,
                    "conditions": conditions_json,
                    "actions": actions_json,
                },
            )

        # ensure we have a push_rules_enable row
        # enabledness defaults to true
        if isinstance(self.database_engine, PostgresEngine):
            sql = """
                INSERT INTO push_rules_enable (id, user_name, rule_id, enabled)
                VALUES (?, ?, ?, ?)
                ON CONFLICT DO NOTHING
            """
        elif isinstance(self.database_engine, Sqlite3Engine):
            sql = """
                INSERT OR IGNORE INTO push_rules_enable (id, user_name, rule_id, enabled)
                VALUES (?, ?, ?, ?)
            """
        else:
            raise RuntimeError("Unknown database engine")

        new_enable_id = self._push_rules_enable_id_gen.get_next()
        txn.execute(sql, (new_enable_id, user_id, rule_id, 1))

    async def delete_push_rule(self, user_id: str, rule_id: str) -> None:
        """
        Delete a push rule. Args specify the row to be deleted and can be
        any of the columns in the push_rule table, but below are the
        standard ones

        Args:
            user_id: The matrix ID of the push rule owner
            rule_id: The rule_id of the rule to be deleted
        """

        def delete_push_rule_txn(
            txn: LoggingTransaction,
            stream_id: int,
            event_stream_ordering: int,
        ) -> None:
            # we don't use simple_delete_one_txn because that would fail if the
            # user did not have a push_rule_enable row.
            self.db_pool.simple_delete_txn(
                txn, "push_rules_enable", {"user_name": user_id, "rule_id": rule_id}
            )

            self.db_pool.simple_delete_one_txn(
                txn, "push_rules", {"user_name": user_id, "rule_id": rule_id}
            )

            self._insert_push_rules_update_txn(
                txn, stream_id, event_stream_ordering, user_id, rule_id, op="DELETE"
            )

        async with self._push_rules_stream_id_gen.get_next() as stream_id:
            event_stream_ordering = self._stream_id_gen.get_current_token()

            await self.db_pool.runInteraction(
                "delete_push_rule",
                delete_push_rule_txn,
                stream_id,
                event_stream_ordering,
            )

    async def set_push_rule_enabled(
        self, user_id: str, rule_id: str, enabled: bool, is_default_rule: bool
    ) -> None:
        """
        Sets the `enabled` state of a push rule.

        Args:
            user_id: the user ID of the user who wishes to enable/disable the rule
                e.g. '@tina:example.org'
            rule_id: the full rule ID of the rule to be enabled/disabled
                e.g. 'global/override/.m.rule.roomnotif'
                  or 'global/override/myCustomRule'
            enabled: True if the rule is to be enabled, False if it is to be
                disabled
            is_default_rule: True if and only if this is a server-default rule.
                This skips the check for existence (as only user-created rules
                are always stored in the database `push_rules` table).

        Raises:
            RuleNotFoundException if the rule does not exist.
        """
        async with self._push_rules_stream_id_gen.get_next() as stream_id:
            event_stream_ordering = self._stream_id_gen.get_current_token()
            await self.db_pool.runInteraction(
                "_set_push_rule_enabled_txn",
                self._set_push_rule_enabled_txn,
                stream_id,
                event_stream_ordering,
                user_id,
                rule_id,
                enabled,
                is_default_rule,
            )

    def _set_push_rule_enabled_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        enabled: bool,
        is_default_rule: bool,
    ) -> None:
        """Transaction body for `set_push_rule_enabled`."""
        new_id = self._push_rules_enable_id_gen.get_next()

        if not is_default_rule:
            # first check it exists; we need to lock for key share so that a
            # transaction that deletes the push rule will conflict with this one.
            # We also need a push_rule_enable row to exist for every push_rules
            # row, otherwise it is possible to simultaneously delete a push rule
            # (that has no _enable row) and enable it, resulting in a dangling
            # _enable row. To solve this: we either need to use SERIALISABLE or
            # ensure we always have a push_rule_enable row for every push_rule
            # row. We chose the latter.
            for_key_share = "FOR KEY SHARE"
            if not isinstance(self.database_engine, PostgresEngine):
                # For key share is not applicable/available on SQLite
                for_key_share = ""
            sql = (
                """
                SELECT 1 FROM push_rules
                WHERE user_name = ? AND rule_id = ?
                %s
                """
                % for_key_share
            )
            txn.execute(sql, (user_id, rule_id))
            if txn.fetchone() is None:
                raise RuleNotFoundException("Push rule does not exist.")

        self.db_pool.simple_upsert_txn(
            txn,
            "push_rules_enable",
            {"user_name": user_id, "rule_id": rule_id},
            {"enabled": 1 if enabled else 0},
            {"id": new_id},
        )

        self._insert_push_rules_update_txn(
            txn,
            stream_id,
            event_stream_ordering,
            user_id,
            rule_id,
            op="ENABLE" if enabled else "DISABLE",
        )

    async def set_push_rule_actions(
        self,
        user_id: str,
        rule_id: str,
        actions: List[Union[dict, str]],
        is_default_rule: bool,
    ) -> None:
        """
        Sets the `actions` state of a push rule.

        Args:
            user_id: the user ID of the user who wishes to enable/disable the rule
                e.g. '@tina:example.org'
            rule_id: the full rule ID of the rule to be enabled/disabled
                e.g. 'global/override/.m.rule.roomnotif'
                  or 'global/override/myCustomRule'
            actions: A list of actions (each action being a dict or string),
                e.g. ["notify", {"set_tweak": "highlight", "value": false}]
            is_default_rule: True if and only if this is a server-default rule.
                This skips the check for existence (as only user-created rules
                are always stored in the database `push_rules` table).

        Raises:
            RuleNotFoundException if the rule does not exist.
        """
        actions_json = json_encoder.encode(actions)

        def set_push_rule_actions_txn(
            txn: LoggingTransaction,
            stream_id: int,
            event_stream_ordering: int,
        ) -> None:
            if is_default_rule:
                # Add a dummy rule to the rules table with the user specified
                # actions.
                priority_class = -1
                priority = 1
                self._upsert_push_rule_txn(
                    txn,
                    stream_id,
                    event_stream_ordering,
                    user_id,
                    rule_id,
                    priority_class,
                    priority,
                    "[]",
                    actions_json,
                    update_stream=False,
                )
            else:
                try:
                    self.db_pool.simple_update_one_txn(
                        txn,
                        "push_rules",
                        {"user_name": user_id, "rule_id": rule_id},
                        {"actions": actions_json},
                    )
                except StoreError as serr:
                    if serr.code == 404:
                        # this sets the NOT_FOUND error Code
                        raise RuleNotFoundException("Push rule does not exist")
                    else:
                        raise

            self._insert_push_rules_update_txn(
                txn,
                stream_id,
                event_stream_ordering,
                user_id,
                rule_id,
                op="ACTIONS",
                data={"actions": actions_json},
            )

        async with self._push_rules_stream_id_gen.get_next() as stream_id:
            event_stream_ordering = self._stream_id_gen.get_current_token()
            await self.db_pool.runInteraction(
                "set_push_rule_actions",
                set_push_rule_actions_txn,
                stream_id,
                event_stream_ordering,
            )

    def _insert_push_rules_update_txn(
        self,
        txn: LoggingTransaction,
        stream_id: int,
        event_stream_ordering: int,
        user_id: str,
        rule_id: str,
        op: str,
        data: Optional[JsonDict] = None,
    ) -> None:
        """Record a push-rule change in `push_rules_stream` and invalidate
        the per-user caches once the transaction commits.

        Args:
            op: The kind of change, e.g. "ADD", "DELETE", "ENABLE",
                "DISABLE" or "ACTIONS".
            data: Extra columns to store alongside the stream row.
        """
        values = {
            "stream_id": stream_id,
            "event_stream_ordering": event_stream_ordering,
            "user_id": user_id,
            "rule_id": rule_id,
            "op": op,
        }
        if data is not None:
            values.update(data)

        self.db_pool.simple_insert_txn(txn, "push_rules_stream", values=values)

        # Only invalidate caches after the transaction has committed.
        txn.call_after(self.get_push_rules_for_user.invalidate, (user_id,))
        txn.call_after(
            self.push_rules_stream_cache.entity_has_changed, user_id, stream_id
        )

    def get_max_push_rules_stream_id(self) -> int:
        # Implements the abstract method from PushRulesWorkerStore.
        return self._push_rules_stream_id_gen.get_current_token()

    async def copy_push_rule_from_room_to_room(
        self, new_room_id: str, user_id: str, rule: PushRule
    ) -> None:
        """Copy a single push rule from one room to another for a specific user.

        Args:
            new_room_id: ID of the new room.
            user_id : ID of user the push rule belongs to.
            rule: A push rule.
        """
        # Create new rule id
        # NOTE(review): assumes the last path component of the rule_id is the
        # old room ID — confirm against the rule-ID scheme used by callers.
        rule_id_scope = "/".join(rule.rule_id.split("/")[:-1])
        new_rule_id = rule_id_scope + "/" + new_room_id

        new_conditions = []

        # Change room id in each condition
        for condition in rule.conditions:
            new_condition = condition
            if condition.get("key") == "room_id":
                # Copy before mutating so the original rule is untouched.
                new_condition = dict(condition)
                new_condition["pattern"] = new_room_id
            new_conditions.append(new_condition)

        # Add the rule for the new room
        await self.add_push_rule(
            user_id=user_id,
            rule_id=new_rule_id,
            priority_class=rule.priority_class,
            conditions=new_conditions,
            actions=rule.actions,
        )

    async def copy_push_rules_from_room_to_room_for_user(
        self, old_room_id: str, new_room_id: str, user_id: str
    ) -> None:
        """Copy all of the push rules from one room to another for a specific
        user.

        Args:
            old_room_id: ID of the old room.
            new_room_id: ID of the new room.
            user_id: ID of user to copy push rules for.
        """
        # Retrieve push rules for this user
        user_push_rules = await self.get_push_rules_for_user(user_id)

        # Get rules relating to the old room and copy them to the new room
        for rule, enabled in user_push_rules.rules():
            if not enabled:
                continue

            conditions = rule.conditions
            if any(
                (c.get("key") == "room_id" and c.get("pattern") == old_room_id)
                for c in conditions
            ):
                await self.copy_push_rule_from_room_to_room(new_room_id, user_id, rule)