# Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import TYPE_CHECKING, Any, Iterable

from synapse.replication.tcp.streams import PushersStream
from synapse.storage.database import DatabasePool, LoggingDatabaseConnection
from synapse.storage.databases.main.pusher import PusherWorkerStore

from ._base import BaseSlavedStore
from ._slaved_id_tracker import SlavedIdTracker

if TYPE_CHECKING:
    from synapse.server import HomeServer


class SlavedPusherStore(PusherWorkerStore, BaseSlavedStore):
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)
        # Track the current position of the pushers stream locally; this
        # worker does not generate new IDs, it only follows the writer.
        self._pushers_id_gen = SlavedIdTracker(  # type: ignore
            db_conn, "pushers", "id", extra_tables=[("deleted_pushers", "stream_id")]
        )

    def get_pushers_stream_token(self) -> int:
        return self._pushers_id_gen.get_current_token()

    def process_replication_rows(
        self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
    ) -> None:
        # Advance our local view of the pushers stream before delegating to
        # the parent class for the rest of the replication handling.
        if stream_name == PushersStream.NAME:
            self._pushers_id_gen.advance(instance_name, token)
        return super().process_replication_rows(stream_name, instance_name, token, rows)