Use auto_attribs/native type hints for attrs classes. (#11692)

Patrick Cloke authored on 2022-01-13 08:49:28 -05:00; committed via GitHub
parent b92a2ff797
commit 10a88ba91c
40 changed files with 300 additions and 307 deletions

changelog.d/11692.misc (new file, 1 addition)

@@ -0,0 +1 @@
+Use `auto_attribs` and native type hints for attrs classes.
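For context, the two spellings this commit converts between are equivalent; a minimal sketch (class and field names are illustrative, not from this commit):

import attr

# Old style: the type is passed to attr.ib() via the `type` argument.
@attr.s(slots=True, frozen=True)
class PointOld:
    x = attr.ib(type=int)
    y = attr.ib(type=int, default=0)

# New style: auto_attribs collects fields from native annotations;
# a plain assignment becomes the default value.
@attr.s(slots=True, frozen=True, auto_attribs=True)
class PointNew:
    x: int
    y: int = 0

assert PointOld(1, 2) == PointOld(1, 2)  # attrs-generated __eq__
assert PointNew(1).y == 0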

@@ -46,41 +46,41 @@ class RoomDisposition:
     UNSTABLE = "unstable"

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class RoomVersion:
     """An object which describes the unique attributes of a room version."""

-    identifier = attr.ib(type=str)  # the identifier for this version
-    disposition = attr.ib(type=str)  # one of the RoomDispositions
-    event_format = attr.ib(type=int)  # one of the EventFormatVersions
-    state_res = attr.ib(type=int)  # one of the StateResolutionVersions
-    enforce_key_validity = attr.ib(type=bool)
+    identifier: str  # the identifier for this version
+    disposition: str  # one of the RoomDispositions
+    event_format: int  # one of the EventFormatVersions
+    state_res: int  # one of the StateResolutionVersions
+    enforce_key_validity: bool
     # Before MSC2432, m.room.aliases had special auth rules and redaction rules
-    special_case_aliases_auth = attr.ib(type=bool)
+    special_case_aliases_auth: bool
     # Strictly enforce canonicaljson, do not allow:
     # * Integers outside the range of [-2 ^ 53 + 1, 2 ^ 53 - 1]
     # * Floats
     # * NaN, Infinity, -Infinity
-    strict_canonicaljson = attr.ib(type=bool)
+    strict_canonicaljson: bool
     # MSC2209: Check 'notifications' key while verifying
     # m.room.power_levels auth rules.
-    limit_notifications_power_levels = attr.ib(type=bool)
+    limit_notifications_power_levels: bool
     # MSC2174/MSC2176: Apply updated redaction rules algorithm.
-    msc2176_redaction_rules = attr.ib(type=bool)
+    msc2176_redaction_rules: bool
     # MSC3083: Support the 'restricted' join_rule.
-    msc3083_join_rules = attr.ib(type=bool)
+    msc3083_join_rules: bool
     # MSC3375: Support for the proper redaction rules for MSC3083. This mustn't
     # be enabled if MSC3083 is not.
-    msc3375_redaction_rules = attr.ib(type=bool)
+    msc3375_redaction_rules: bool
     # MSC2403: Allows join_rules to be set to 'knock', changes auth rules to allow sending
     # m.room.membership event with membership 'knock'.
-    msc2403_knocking = attr.ib(type=bool)
+    msc2403_knocking: bool
     # MSC2716: Adds m.room.power_levels -> content.historical field to control
     # whether "insertion", "chunk", "marker" events can be sent
-    msc2716_historical = attr.ib(type=bool)
+    msc2716_historical: bool
     # MSC2716: Adds support for redacting "insertion", "chunk", and "marker" events
-    msc2716_redactions = attr.ib(type=bool)
+    msc2716_redactions: bool

 class RoomVersions:

@@ -55,19 +55,19 @@ https://matrix-org.github.io/synapse/latest/templates.html
 ---------------------------------------------------------------------------------------"""

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class EmailSubjectConfig:
-    message_from_person_in_room = attr.ib(type=str)
-    message_from_person = attr.ib(type=str)
-    messages_from_person = attr.ib(type=str)
-    messages_in_room = attr.ib(type=str)
-    messages_in_room_and_others = attr.ib(type=str)
-    messages_from_person_and_others = attr.ib(type=str)
-    invite_from_person = attr.ib(type=str)
-    invite_from_person_to_room = attr.ib(type=str)
-    invite_from_person_to_space = attr.ib(type=str)
-    password_reset = attr.ib(type=str)
-    email_validation = attr.ib(type=str)
+    message_from_person_in_room: str
+    message_from_person: str
+    messages_from_person: str
+    messages_in_room: str
+    messages_in_room_and_others: str
+    messages_from_person_and_others: str
+    invite_from_person: str
+    invite_from_person_to_room: str
+    invite_from_person_to_space: str
+    password_reset: str
+    email_validation: str

 class EmailConfig(Config):

@@ -200,8 +200,8 @@ class HttpListenerConfig:
     """Object describing the http-specific parts of the config of a listener"""

     x_forwarded: bool = False
-    resources: List[HttpResourceConfig] = attr.ib(factory=list)
-    additional_resources: Dict[str, dict] = attr.ib(factory=dict)
+    resources: List[HttpResourceConfig] = attr.Factory(list)
+    additional_resources: Dict[str, dict] = attr.Factory(dict)
     tag: Optional[str] = None
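The hunk above swaps `attr.ib(factory=...)` for `attr.Factory(...)`, which reads as a plain default under auto_attribs. Either way each instance gets its own fresh container; a sketch with illustrative names:

import attr
from typing import Dict, List

@attr.s(slots=True, auto_attribs=True)
class ListenerExample:  # illustrative stand-in, not the real config class
    # A bare `= []` default would be evaluated once and shared by every
    # instance; attr.Factory(list) builds a new list per instance.
    resources: List[str] = attr.Factory(list)
    additional_resources: Dict[str, dict] = attr.Factory(dict)

a, b = ListenerExample(), ListenerExample()
a.resources.append("client")
assert b.resources == []  # no shared state between instances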

@@ -51,12 +51,12 @@ def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]:
     return obj

-@attr.s
+@attr.s(auto_attribs=True)
 class InstanceLocationConfig:
     """The host and port to talk to an instance via HTTP replication."""

-    host = attr.ib(type=str)
-    port = attr.ib(type=int)
+    host: str
+    port: int

 @attr.s
@@ -77,34 +77,28 @@ class WriterLocations:
     can only be a single instance.
     """

-    events = attr.ib(
+    events: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    typing = attr.ib(
+    typing: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    to_device = attr.ib(
+    to_device: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    account_data = attr.ib(
+    account_data: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    receipts = attr.ib(
+    receipts: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )
-    presence = attr.ib(
+    presence: List[str] = attr.ib(
         default=["master"],
-        type=List[str],
         converter=_instance_to_list_converter,
     )

@@ -58,7 +58,7 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)

-@attr.s(slots=True, cmp=False)
+@attr.s(slots=True, frozen=True, cmp=False, auto_attribs=True)
 class VerifyJsonRequest:
     """
     A request to verify a JSON object.
@@ -78,10 +78,10 @@ class VerifyJsonRequest:
         key_ids: The set of key_ids to that could be used to verify the JSON object
     """

-    server_name = attr.ib(type=str)
-    get_json_object = attr.ib(type=Callable[[], JsonDict])
-    minimum_valid_until_ts = attr.ib(type=int)
-    key_ids = attr.ib(type=List[str])
+    server_name: str
+    get_json_object: Callable[[], JsonDict]
+    minimum_valid_until_ts: int
+    key_ids: List[str]

     @staticmethod
     def from_json_object(
@@ -124,7 +124,7 @@ class KeyLookupError(ValueError):
     pass

-@attr.s(slots=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class _FetchKeyRequest:
     """A request for keys for a given server.
@@ -138,9 +138,9 @@ class _FetchKeyRequest:
         key_ids: The IDs of the keys to attempt to fetch
     """

-    server_name = attr.ib(type=str)
-    minimum_valid_until_ts = attr.ib(type=int)
-    key_ids = attr.ib(type=List[str])
+    server_name: str
+    minimum_valid_until_ts: int
+    key_ids: List[str]

 class Keyring:

@@ -28,7 +28,7 @@ if TYPE_CHECKING:
     from synapse.storage.databases.main import DataStore

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class EventContext:
     """
     Holds information relevant to persisting an event
@@ -103,15 +103,15 @@ class EventContext:
         accessed via get_prev_state_ids.
     """

-    rejected = attr.ib(default=False, type=Union[bool, str])
-    _state_group = attr.ib(default=None, type=Optional[int])
-    state_group_before_event = attr.ib(default=None, type=Optional[int])
-    prev_group = attr.ib(default=None, type=Optional[int])
-    delta_ids = attr.ib(default=None, type=Optional[StateMap[str]])
-    app_service = attr.ib(default=None, type=Optional[ApplicationService])
-    _current_state_ids = attr.ib(default=None, type=Optional[StateMap[str]])
-    _prev_state_ids = attr.ib(default=None, type=Optional[StateMap[str]])
+    rejected: Union[bool, str] = False
+    _state_group: Optional[int] = None
+    state_group_before_event: Optional[int] = None
+    prev_group: Optional[int] = None
+    delta_ids: Optional[StateMap[str]] = None
+    app_service: Optional[ApplicationService] = None
+    _current_state_ids: Optional[StateMap[str]] = None
+    _prev_state_ids: Optional[StateMap[str]] = None

     @staticmethod
     def with_state(

@@ -607,18 +607,18 @@ class PerDestinationQueue:
         self._pending_pdus = []

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _TransactionQueueManager:
     """A helper async context manager for pulling stuff off the queues and
     tracking what was last successfully sent, etc.
     """

-    queue = attr.ib(type=PerDestinationQueue)
-    _device_stream_id = attr.ib(type=Optional[int], default=None)
-    _device_list_id = attr.ib(type=Optional[int], default=None)
-    _last_stream_ordering = attr.ib(type=Optional[int], default=None)
-    _pdus = attr.ib(type=List[EventBase], factory=list)
+    queue: PerDestinationQueue
+    _device_stream_id: Optional[int] = None
+    _device_list_id: Optional[int] = None
+    _last_stream_ordering: Optional[int] = None
+    _pdus: List[EventBase] = attr.Factory(list)

     async def __aenter__(self) -> Tuple[List[EventBase], List[Edu]]:
         # First we calculate the EDUs we want to send, if any.

@@ -168,25 +168,25 @@ def login_id_phone_to_thirdparty(identifier: JsonDict) -> Dict[str, str]:
     }

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class SsoLoginExtraAttributes:
     """Data we track about SAML2 sessions"""

     # time the session was created, in milliseconds
-    creation_time = attr.ib(type=int)
-    extra_attributes = attr.ib(type=JsonDict)
+    creation_time: int
+    extra_attributes: JsonDict

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class LoginTokenAttributes:
     """Data we store in a short-term login token"""

-    user_id = attr.ib(type=str)
+    user_id: str

-    auth_provider_id = attr.ib(type=str)
+    auth_provider_id: str
     """The SSO Identity Provider that the user authenticated with, to get this token."""

-    auth_provider_session_id = attr.ib(type=Optional[str])
+    auth_provider_session_id: Optional[str]
     """The session ID advertised by the SSO Identity Provider."""

@@ -1321,14 +1321,14 @@ def _one_time_keys_match(old_key_json: str, new_key: JsonDict) -> bool:
     return old_key == new_key_copy

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class SignatureListItem:
     """An item in the signature list as used by upload_signatures_for_device_keys."""

-    signing_key_id = attr.ib(type=str)
-    target_user_id = attr.ib(type=str)
-    target_device_id = attr.ib(type=str)
-    signature = attr.ib(type=JsonDict)
+    signing_key_id: str
+    target_user_id: str
+    target_device_id: str
+    signature: JsonDict

 class SigningKeyEduUpdater:

@@ -126,45 +126,45 @@ class SsoIdentityProvider(Protocol):
         raise NotImplementedError()

-@attr.s
+@attr.s(auto_attribs=True)
 class UserAttributes:
     # the localpart of the mxid that the mapper has assigned to the user.
     # if `None`, the mapper has not picked a userid, and the user should be prompted to
     # enter one.
-    localpart = attr.ib(type=Optional[str])
-    display_name = attr.ib(type=Optional[str], default=None)
-    emails = attr.ib(type=Collection[str], default=attr.Factory(list))
+    localpart: Optional[str]
+    display_name: Optional[str] = None
+    emails: Collection[str] = attr.Factory(list)

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class UsernameMappingSession:
     """Data we track about SSO sessions"""

     # A unique identifier for this SSO provider, e.g. "oidc" or "saml".
-    auth_provider_id = attr.ib(type=str)
+    auth_provider_id: str
     # user ID on the IdP server
-    remote_user_id = attr.ib(type=str)
+    remote_user_id: str
     # attributes returned by the ID mapper
-    display_name = attr.ib(type=Optional[str])
-    emails = attr.ib(type=Collection[str])
+    display_name: Optional[str]
+    emails: Collection[str]
     # An optional dictionary of extra attributes to be provided to the client in the
     # login response.
-    extra_login_attributes = attr.ib(type=Optional[JsonDict])
+    extra_login_attributes: Optional[JsonDict]
     # where to redirect the client back to
-    client_redirect_url = attr.ib(type=str)
+    client_redirect_url: str
     # expiry time for the session, in milliseconds
-    expiry_time_ms = attr.ib(type=int)
+    expiry_time_ms: int
     # choices made by the user
-    chosen_localpart = attr.ib(type=Optional[str], default=None)
-    use_display_name = attr.ib(type=bool, default=True)
-    emails_to_use = attr.ib(type=Collection[str], default=())
-    terms_accepted_version = attr.ib(type=Optional[str], default=None)
+    chosen_localpart: Optional[str] = None
+    use_display_name: bool = True
+    emails_to_use: Collection[str] = ()
+    terms_accepted_version: Optional[str] = None

     # the HTTP cookie used to track the mapping session id

@@ -32,9 +32,9 @@ class ProxyConnectError(ConnectError):
     pass

-@attr.s
+@attr.s(auto_attribs=True)
 class ProxyCredentials:
-    username_password = attr.ib(type=bytes)
+    username_password: bytes

     def as_proxy_authorization_value(self) -> bytes:
         """

@@ -123,37 +123,37 @@ class ByteParser(ByteWriteable, Generic[T], abc.ABC):
     pass

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class MatrixFederationRequest:
-    method = attr.ib(type=str)
+    method: str
     """HTTP method
     """

-    path = attr.ib(type=str)
+    path: str
     """HTTP path
     """

-    destination = attr.ib(type=str)
+    destination: str
     """The remote server to send the HTTP request to.
     """

-    json = attr.ib(default=None, type=Optional[JsonDict])
+    json: Optional[JsonDict] = None
     """JSON to send in the body.
     """

-    json_callback = attr.ib(default=None, type=Optional[Callable[[], JsonDict]])
+    json_callback: Optional[Callable[[], JsonDict]] = None
     """A callback to generate the JSON.
     """

-    query = attr.ib(default=None, type=Optional[dict])
+    query: Optional[dict] = None
     """Query arguments.
     """

-    txn_id = attr.ib(default=None, type=Optional[str])
+    txn_id: Optional[str] = None
     """Unique ID for this request (for logging)
     """

-    uri = attr.ib(init=False, type=bytes)
+    uri: bytes = attr.ib(init=False)
     """The URI of this request
     """

@@ -534,9 +534,9 @@ class XForwardedForRequest(SynapseRequest):

 @implementer(IAddress)
-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class _XForwardedForAddress:
-    host = attr.ib(type=str)
+    host: str

 class SynapseSite(Site):

@@ -39,7 +39,7 @@ from twisted.python.failure import Failure
 logger = logging.getLogger(__name__)

-@attr.s
+@attr.s(slots=True, auto_attribs=True)
 @implementer(IPushProducer)
 class LogProducer:
     """
@@ -54,10 +54,10 @@ class LogProducer:
     # This is essentially ITCPTransport, but that is missing certain fields
     # (connected and registerProducer) which are part of the implementation.
-    transport = attr.ib(type=Connection)
-    _format = attr.ib(type=Callable[[logging.LogRecord], str])
-    _buffer = attr.ib(type=deque)
-    _paused = attr.ib(default=False, type=bool, init=False)
+    transport: Connection
+    _format: Callable[[logging.LogRecord], str]
+    _buffer: Deque[logging.LogRecord]
+    _paused: bool = attr.ib(default=False, init=False)

     def pauseProducing(self):
         self._paused = True

@@ -193,7 +193,7 @@ class ContextResourceUsage:
         return res

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class ContextRequest:
     """
     A bundle of attributes from the SynapseRequest object.
@@ -205,15 +205,15 @@ class ContextRequest:
        their children.
     """

-    request_id = attr.ib(type=str)
-    ip_address = attr.ib(type=str)
-    site_tag = attr.ib(type=str)
-    requester = attr.ib(type=Optional[str])
-    authenticated_entity = attr.ib(type=Optional[str])
-    method = attr.ib(type=str)
-    url = attr.ib(type=str)
-    protocol = attr.ib(type=str)
-    user_agent = attr.ib(type=str)
+    request_id: str
+    ip_address: str
+    site_tag: str
+    requester: Optional[str]
+    authenticated_entity: Optional[str]
+    method: str
+    url: str
+    protocol: str
+    user_agent: str

 LoggingContextOrSentinel = Union["LoggingContext", "_Sentinel"]

@@ -251,7 +251,7 @@ try:
     class _WrappedRustReporter(BaseReporter):
         """Wrap the reporter to ensure `report_span` never throws."""

-        _reporter = attr.ib(type=Reporter, default=attr.Factory(Reporter))
+        _reporter: Reporter = attr.Factory(Reporter)

         def set_process(self, *args, **kwargs):
             return self._reporter.set_process(*args, **kwargs)

@@ -76,19 +76,17 @@ class RegistryProxy:
             yield metric

-@attr.s(slots=True, hash=True)
+@attr.s(slots=True, hash=True, auto_attribs=True)
 class LaterGauge:
-    name = attr.ib(type=str)
-    desc = attr.ib(type=str)
-    labels = attr.ib(hash=False, type=Optional[Iterable[str]])
+    name: str
+    desc: str
+    labels: Optional[Iterable[str]] = attr.ib(hash=False)
     # callback: should either return a value (if there are no labels for this metric),
     # or dict mapping from a label tuple to a value
-    caller = attr.ib(
-        type=Callable[
-            [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]]
-        ]
-    )
+    caller: Callable[
+        [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]]
+    ]

     def collect(self) -> Iterable[Metric]:
@@ -157,7 +155,9 @@ class InFlightGauge(Generic[MetricsEntry]):
         # Create a class which have the sub_metrics values as attributes, which
         # default to 0 on initialization. Used to pass to registered callbacks.
         self._metrics_class: Type[MetricsEntry] = attr.make_class(
-            "_MetricsEntry", attrs={x: attr.ib(0) for x in sub_metrics}, slots=True
+            "_MetricsEntry",
+            attrs={x: attr.ib(default=0) for x in sub_metrics},
+            slots=True,
         )

         # Counts number of in flight blocks for a given set of label values
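The `attr.make_class` call above builds an attrs class at runtime from a dict of field definitions; the hunk also makes the `default=` keyword explicit rather than positional. A self-contained sketch of the same shape (names illustrative):

import attr

# Dynamically build an attrs class whose fields all default to 0.
sub_metrics = ["foo", "bar"]
MetricsEntry = attr.make_class(
    "MetricsEntry",
    attrs={x: attr.ib(default=0) for x in sub_metrics},
    slots=True,
)

entry = MetricsEntry()
assert entry.foo == 0 and entry.bar == 0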

@@ -193,15 +193,15 @@ class EventStreamResult:
         return bool(self.events)

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class _PendingRoomEventEntry:
-    event_pos = attr.ib(type=PersistedEventPosition)
-    extra_users = attr.ib(type=Collection[UserID])
-    room_id = attr.ib(type=str)
-    type = attr.ib(type=str)
-    state_key = attr.ib(type=Optional[str])
-    membership = attr.ib(type=Optional[str])
+    event_pos: PersistedEventPosition
+    extra_users: Collection[UserID]
+    room_id: str
+    type: str
+    state_key: Optional[str]
+    membership: Optional[str]

 class Notifier:

@@ -23,25 +23,25 @@ if TYPE_CHECKING:
     from synapse.server import HomeServer

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class PusherConfig:
     """Parameters necessary to configure a pusher."""

-    id = attr.ib(type=Optional[str])
-    user_name = attr.ib(type=str)
-    access_token = attr.ib(type=Optional[int])
-    profile_tag = attr.ib(type=str)
-    kind = attr.ib(type=str)
-    app_id = attr.ib(type=str)
-    app_display_name = attr.ib(type=str)
-    device_display_name = attr.ib(type=str)
-    pushkey = attr.ib(type=str)
-    ts = attr.ib(type=int)
-    lang = attr.ib(type=Optional[str])
-    data = attr.ib(type=Optional[JsonDict])
-    last_stream_ordering = attr.ib(type=int)
-    last_success = attr.ib(type=Optional[int])
-    failing_since = attr.ib(type=Optional[int])
+    id: Optional[str]
+    user_name: str
+    access_token: Optional[int]
+    profile_tag: str
+    kind: str
+    app_id: str
+    app_display_name: str
+    device_display_name: str
+    pushkey: str
+    ts: int
+    lang: Optional[str]
+    data: Optional[JsonDict]
+    last_stream_ordering: int
+    last_success: Optional[int]
+    failing_since: Optional[int]

     def as_dict(self) -> Dict[str, Any]:
         """Information that can be retrieved about a pusher after creation."""
@@ -57,12 +57,12 @@ class PusherConfig:
         }

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class ThrottleParams:
     """Parameters for controlling the rate of sending pushes via email."""

-    last_sent_ts = attr.ib(type=int)
-    throttle_ms = attr.ib(type=int)
+    last_sent_ts: int
+    throttle_ms: int

 class Pusher(metaclass=abc.ABCMeta):

@@ -298,7 +298,7 @@ RulesByUser = Dict[str, List[Rule]]
 StateGroup = Union[object, int]

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class RulesForRoomData:
     """The data stored in the cache by `RulesForRoom`.
@@ -307,29 +307,29 @@ class RulesForRoomData:
     """

     # event_id -> (user_id, state)
-    member_map = attr.ib(type=MemberMap, factory=dict)
+    member_map: MemberMap = attr.Factory(dict)
     # user_id -> rules
-    rules_by_user = attr.ib(type=RulesByUser, factory=dict)
+    rules_by_user: RulesByUser = attr.Factory(dict)
     # The last state group we updated the caches for. If the state_group of
     # a new event comes along, we know that we can just return the cached
     # result.
     # On invalidation of the rules themselves (if the user changes them),
     # we invalidate everything and set state_group to `object()`
-    state_group = attr.ib(type=StateGroup, factory=object)
+    state_group: StateGroup = attr.Factory(object)
     # A sequence number to keep track of when we're allowed to update the
     # cache. We bump the sequence number when we invalidate the cache. If
     # the sequence number changes while we're calculating stuff we should
     # not update the cache with it.
-    sequence = attr.ib(type=int, default=0)
+    sequence: int = 0
     # A cache of user_ids that we *know* aren't interesting, e.g. user_ids
     # owned by AS's, or remote users, etc. (I.e. users we will never need to
     # calculate push for)
     # These never need to be invalidated as we will never set up push for
     # them.
-    uninteresting_user_set = attr.ib(type=Set[str], factory=set)
+    uninteresting_user_set: Set[str] = attr.Factory(set)

 class RulesForRoom:
@@ -553,7 +553,7 @@ class RulesForRoom:
         self.data.state_group = state_group

-@attr.attrs(slots=True, frozen=True)
+@attr.attrs(slots=True, frozen=True, auto_attribs=True)
 class _Invalidation:
     # _Invalidation is passed as an `on_invalidate` callback to bulk_get_push_rules,
     # which means that it it is stored on the bulk_get_push_rules cache entry. In order
@@ -564,8 +564,8 @@ class _Invalidation:
     # attrs provides suitable __hash__ and __eq__ methods, provided we remember to
     # set `frozen=True`.

-    cache = attr.ib(type=LruCache)
-    room_id = attr.ib(type=str)
+    cache: LruCache
+    room_id: str

     def __call__(self) -> None:
         rules_data = self.cache.get(self.room_id, None, update_metrics=False)
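The `state_group: StateGroup = attr.Factory(object)` default above is a sentinel pattern: each instance gets a fresh `object()` that compares equal to nothing else, so cache checks miss until a real state group is assigned. A sketch (names illustrative, not the real RulesForRoomData):

import attr
from typing import Union

@attr.s(slots=True, auto_attribs=True)
class CacheExample:  # illustrative only
    # A fresh object() per instance: never equal to any real state group.
    state_group: Union[object, int] = attr.Factory(object)

c = CacheExample()
assert c.state_group != 1  # the sentinel matches no real state group
c.state_group = 1          # later replaced by an actual state group ID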

@@ -50,12 +50,12 @@ data part are:
 """

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class EventsStreamRow:
     """A parsed row from the events replication stream"""

-    type = attr.ib()  # str: the TypeId of one of the *EventsStreamRows
-    data = attr.ib()  # BaseEventsStreamRow
+    type: str  # the TypeId of one of the *EventsStreamRows
+    data: "BaseEventsStreamRow"

 class BaseEventsStreamRow:
@@ -79,28 +79,28 @@ class BaseEventsStreamRow:
         return cls(*data)

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class EventsStreamEventRow(BaseEventsStreamRow):
     TypeId = "ev"

-    event_id = attr.ib(type=str)
-    room_id = attr.ib(type=str)
-    type = attr.ib(type=str)
-    state_key = attr.ib(type=Optional[str])
-    redacts = attr.ib(type=Optional[str])
-    relates_to = attr.ib(type=Optional[str])
-    membership = attr.ib(type=Optional[str])
-    rejected = attr.ib(type=bool)
+    event_id: str
+    room_id: str
+    type: str
+    state_key: Optional[str]
+    redacts: Optional[str]
+    relates_to: Optional[str]
+    membership: Optional[str]
+    rejected: bool

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class EventsStreamCurrentStateRow(BaseEventsStreamRow):
     TypeId = "state"

-    room_id = attr.ib()  # str
-    type = attr.ib()  # str
-    state_key = attr.ib()  # str
-    event_id = attr.ib()  # str, optional
+    room_id: str
+    type: str
+    state_key: str
+    event_id: Optional[str]

 _EventRows: Tuple[Type[BaseEventsStreamRow], ...] = (
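The `cls(*data)` in the base class's parser relies on attrs generating `__init__` in declaration order, so the annotation order of each row class defines its wire format. A sketch of that positional construction (names illustrative, shaped like EventsStreamCurrentStateRow):

import attr
from typing import Optional

@attr.s(slots=True, frozen=True, auto_attribs=True)
class StateRowExample:  # illustrative only
    room_id: str
    type: str
    state_key: str
    event_id: Optional[str]

# Rows parsed off the replication stream can be splatted positionally,
# matching `cls(*data)` above.
row = StateRowExample(*["!room:example.org", "m.room.name", "", None])
assert row.event_id is None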

@@ -343,7 +343,7 @@ class SpamMediaException(NotFoundError):
     """

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class ReadableFileWrapper:
     """Wrapper that allows reading a file in chunks, yielding to the reactor,
     and writing to a callback.
@@ -354,8 +354,8 @@ class ReadableFileWrapper:

     CHUNK_SIZE = 2 ** 14

-    clock = attr.ib(type=Clock)
-    path = attr.ib(type=str)
+    clock: Clock
+    path: str

     async def write_chunks_to(self, callback: Callable[[bytes], None]) -> None:
         """Reads the file in chunks and calls the callback with each chunk."""

@@ -450,19 +450,19 @@ class StateHandler:
         return {key: state_map[ev_id] for key, ev_id in new_state.items()}

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _StateResMetrics:
     """Keeps track of some usage metrics about state res."""

     # System and User CPU time, in seconds
-    cpu_time = attr.ib(type=float, default=0.0)
+    cpu_time: float = 0.0
     # time spent on database transactions (excluding scheduling time). This roughly
     # corresponds to the amount of work done on the db server, excluding event fetches.
-    db_time = attr.ib(type=float, default=0.0)
+    db_time: float = 0.0
     # number of events fetched from the db.
-    db_events = attr.ib(type=int, default=0)
+    db_events: int = 0

 _biggest_room_by_cpu_counter = Counter(

@@ -143,7 +143,7 @@ def make_conn(
     return db_conn

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class LoggingDatabaseConnection:
     """A wrapper around a database connection that returns `LoggingTransaction`
     as its cursor class.
@@ -151,9 +151,9 @@ class LoggingDatabaseConnection:
     This is mainly used on startup to ensure that queries get logged correctly
     """

-    conn = attr.ib(type=Connection)
-    engine = attr.ib(type=BaseDatabaseEngine)
-    default_txn_name = attr.ib(type=str)
+    conn: Connection
+    engine: BaseDatabaseEngine
+    default_txn_name: str

     def cursor(
         self, *, txn_name=None, after_callbacks=None, exception_callbacks=None

@@ -50,16 +50,16 @@ if TYPE_CHECKING:
     from synapse.server import HomeServer

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class DeviceKeyLookupResult:
     """The type returned by get_e2e_device_keys_and_signatures"""

-    display_name = attr.ib(type=Optional[str])
+    display_name: Optional[str]

     # the key data from e2e_device_keys_json. Typically includes fields like
     # "algorithm", "keys" (including the curve25519 identity key and the ed25519 signing
     # key) and "signatures" (a map from (user id) to (key id/device_id) to signature.)
-    keys = attr.ib(type=Optional[JsonDict])
+    keys: Optional[JsonDict]

 class EndToEndKeyBackgroundStore(SQLBaseStore):

@@ -69,7 +69,7 @@ event_counter = Counter(
 )

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class DeltaState:
     """Deltas to use to update the `current_state_events` table.
@@ -80,9 +80,9 @@ class DeltaState:
         should e.g. be removed from `current_state_events` table.
     """

-    to_delete = attr.ib(type=List[Tuple[str, str]])
-    to_insert = attr.ib(type=StateMap[str])
-    no_longer_in_room = attr.ib(type=bool, default=False)
+    to_delete: List[Tuple[str, str]]
+    to_insert: StateMap[str]
+    no_longer_in_room: bool = False

 class PersistEventsStore:
@@ -2226,17 +2226,17 @@ class PersistEventsStore:
     )

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _LinkMap:
     """A helper type for tracking links between chains."""

     # Stores the set of links as nested maps: source chain ID -> target chain ID
     # -> source sequence number -> target sequence number.
-    maps = attr.ib(type=Dict[int, Dict[int, Dict[int, int]]], factory=dict)
+    maps: Dict[int, Dict[int, Dict[int, int]]] = attr.Factory(dict)

     # Stores the links that have been added (with new set to true), as tuples of
     # `(source chain ID, source sequence no, target chain ID, target sequence no.)`
-    additions = attr.ib(type=Set[Tuple[int, int, int, int]], factory=set)
+    additions: Set[Tuple[int, int, int, int]] = attr.Factory(set)

     def add_link(
         self,

@@ -65,22 +65,22 @@ class _BackgroundUpdates:
     REPLACE_STREAM_ORDERING_COLUMN = "replace_stream_ordering_column"

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class _CalculateChainCover:
     """Return value for _calculate_chain_cover_txn."""

     # The last room_id/depth/stream processed.
-    room_id = attr.ib(type=str)
-    depth = attr.ib(type=int)
-    stream = attr.ib(type=int)
+    room_id: str
+    depth: int
+    stream: int

     # Number of rows processed
-    processed_count = attr.ib(type=int)
+    processed_count: int

     # Map from room_id to last depth/stream processed for each room that we have
     # processed all events for (i.e. the rooms we can flip the
     # `has_auth_chain_index` for)
-    finished_room_map = attr.ib(type=Dict[str, Tuple[int, int]])
+    finished_room_map: Dict[str, Tuple[int, int]]

 class EventsBackgroundUpdatesStore(SQLBaseStore):

@@ -51,7 +51,7 @@ class ExternalIDReuseException(Exception):
     pass

-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class TokenLookupResult:
     """Result of looking up an access token.
@@ -69,14 +69,14 @@ class TokenLookupResult:
         cached.
     """

-    user_id = attr.ib(type=str)
-    is_guest = attr.ib(type=bool, default=False)
-    shadow_banned = attr.ib(type=bool, default=False)
-    token_id = attr.ib(type=Optional[int], default=None)
-    device_id = attr.ib(type=Optional[str], default=None)
-    valid_until_ms = attr.ib(type=Optional[int], default=None)
-    token_owner = attr.ib(type=str)
-    token_used = attr.ib(type=bool, default=False)
+    user_id: str
+    is_guest: bool = False
+    shadow_banned: bool = False
+    token_id: Optional[int] = None
+    device_id: Optional[str] = None
+    valid_until_ms: Optional[int] = None
+    token_owner: str = attr.ib()
+    token_used: bool = False

     # Make the token owner default to the user ID, which is the common case.
     @token_owner.default
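`token_owner` above keeps `= attr.ib()` because a decorator-based default (`@token_owner.default`) needs a real attr.ib object to hang off; a bare annotation has nothing to decorate. A sketch of the pattern (names illustrative, shaped like TokenLookupResult):

import attr

@attr.s(frozen=True, slots=True, auto_attribs=True)
class TokenExample:  # illustrative only
    user_id: str
    token_owner: str = attr.ib()

    # Runs during __init__ when no explicit token_owner is passed;
    # earlier fields (user_id) are already set at that point.
    @token_owner.default
    def _default_token_owner(self) -> str:
        return self.user_id

assert TokenExample(user_id="@alice:example.org").token_owner == "@alice:example.org"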

@@ -1177,18 +1177,18 @@ class RoomMemberStore(RoomMemberWorkerStore, RoomMemberBackgroundUpdateStore):
         await self.db_pool.runInteraction("forget_membership", f)

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _JoinedHostsCache:
     """The cached data used by the `_get_joined_hosts_cache`."""

     # Dict of host to the set of their users in the room at the state group.
-    hosts_to_joined_users = attr.ib(type=Dict[str, Set[str]], factory=dict)
+    hosts_to_joined_users: Dict[str, Set[str]] = attr.Factory(dict)

     # The state group `hosts_to_joined_users` is derived from. Will be an object
     # if the instance is newly created or if the state is not based on a state
     # group. (An object is used as a sentinel value to ensure that it never is
     # equal to anything else).
-    state_group = attr.ib(type=Union[object, int], factory=object)
+    state_group: Union[object, int] = attr.Factory(object)

     def __len__(self):
         return sum(len(v) for v in self.hosts_to_joined_users.values())

@@ -23,19 +23,19 @@ from synapse.types import JsonDict
 from synapse.util import json_encoder, stringutils

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class UIAuthSessionData:
-    session_id = attr.ib(type=str)
+    session_id: str
     # The dictionary from the client root level, not the 'auth' key.
-    clientdict = attr.ib(type=JsonDict)
+    clientdict: JsonDict
     # The URI and method the session was intiatied with. These are checked at
     # each stage of the authentication to ensure that the asked for operation
     # has not changed.
-    uri = attr.ib(type=str)
-    method = attr.ib(type=str)
+    uri: str
+    method: str
     # A string description of the operation that the current authentication is
     # authorising.
-    description = attr.ib(type=str)
+    description: str

 class UIAuthWorkerStore(SQLBaseStore):

@@ -21,7 +21,7 @@ from signedjson.types import VerifyKey
 logger = logging.getLogger(__name__)

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class FetchKeyResult:
-    verify_key = attr.ib(type=VerifyKey)  # the key itself
-    valid_until_ts = attr.ib(type=int)  # how long we can use this key for
+    verify_key: VerifyKey  # the key itself
+    valid_until_ts: int  # how long we can use this key for

@@ -696,7 +696,7 @@ def _get_or_create_schema_state(
 )

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _DirectoryListing:
     """Helper class to store schema file name and the
     absolute path to it.
@@ -705,5 +705,5 @@ class _DirectoryListing:
     `file_name` attr is kept first.
     """

-    file_name = attr.ib(type=str)
-    absolute_path = attr.ib(type=str)
+    file_name: str
+    absolute_path: str

@@ -23,7 +23,7 @@ from synapse.types import JsonDict
 logger = logging.getLogger(__name__)

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class PaginationChunk:
     """Returned by relation pagination APIs.
@@ -35,9 +35,9 @@ class PaginationChunk:
             None then there are no previous results.
     """

-    chunk = attr.ib(type=List[JsonDict])
-    next_batch = attr.ib(type=Optional[Any], default=None)
-    prev_batch = attr.ib(type=Optional[Any], default=None)
+    chunk: List[JsonDict]
+    next_batch: Optional[Any] = None
+    prev_batch: Optional[Any] = None

     def to_dict(self) -> Dict[str, Any]:
         d = {"chunk": self.chunk}
@@ -51,7 +51,7 @@ class PaginationChunk:
         return d

-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class RelationPaginationToken:
     """Pagination token for relation pagination API.
@@ -64,8 +64,8 @@ class RelationPaginationToken:
         stream: The stream ordering of the boundary event.
     """

-    topological = attr.ib(type=int)
-    stream = attr.ib(type=int)
+    topological: int
+    stream: int

     @staticmethod
     def from_string(string: str) -> "RelationPaginationToken":
@@ -82,7 +82,7 @@ class RelationPaginationToken:
         return attr.astuple(self)

-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class AggregationPaginationToken:
     """Pagination token for relation aggregation pagination API.
@@ -94,8 +94,8 @@ class AggregationPaginationToken:
         stream: The MAX stream ordering in the boundary group.
     """

-    count = attr.ib(type=int)
-    stream = attr.ib(type=int)
+    count: int
+    stream: int

     @staticmethod
     def from_string(string: str) -> "AggregationPaginationToken":

@@ -45,7 +45,7 @@ logger = logging.getLogger(__name__)
 T = TypeVar("T")

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class StateFilter:
     """A filter used when querying for state.
@@ -58,8 +58,8 @@ class StateFilter:
             appear in `types`.
     """

-    types = attr.ib(type="frozendict[str, Optional[FrozenSet[str]]]")
-    include_others = attr.ib(default=False, type=bool)
+    types: "frozendict[str, Optional[FrozenSet[str]]]"
+    include_others: bool = False

     def __attrs_post_init__(self):
         # If `include_others` is set we canonicalise the filter by removing

@@ -762,13 +762,13 @@ class _AsyncCtxManagerWrapper(Generic[T]):
         return self.inner.__exit__(exc_type, exc, tb)

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _MultiWriterCtxManager:
     """Async context manager returned by MultiWriterIdGenerator"""

-    id_gen = attr.ib(type=MultiWriterIdGenerator)
-    multiple_ids = attr.ib(type=Optional[int], default=None)
-    stream_ids = attr.ib(type=List[int], factory=list)
+    id_gen: MultiWriterIdGenerator
+    multiple_ids: Optional[int] = None
+    stream_ids: List[int] = attr.Factory(list)

     async def __aenter__(self) -> Union[int, List[int]]:
         # It's safe to run this in autocommit mode as fetching values from a

@@ -28,14 +28,14 @@ logger = logging.getLogger(__name__)
 MAX_LIMIT = 1000

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class PaginationConfig:
     """A configuration object which stores pagination parameters."""

-    from_token = attr.ib(type=Optional[StreamToken])
-    to_token = attr.ib(type=Optional[StreamToken])
-    direction = attr.ib(type=str)
-    limit = attr.ib(type=Optional[int])
+    from_token: Optional[StreamToken]
+    to_token: Optional[StreamToken]
+    direction: str
+    limit: Optional[int]

     @classmethod
     async def from_request(

@@ -20,6 +20,7 @@ from typing import (
     Any,
     ClassVar,
     Dict,
+    List,
     Mapping,
     Match,
     MutableMapping,
@@ -80,7 +81,7 @@ class ISynapseReactor(
     """The interfaces necessary for Synapse to function."""

-@attr.s(frozen=True, slots=True)
+@attr.s(frozen=True, slots=True, auto_attribs=True)
 class Requester:
     """
     Represents the user making a request
@@ -98,13 +99,13 @@ class Requester:
            "puppeting" the user.
     """

-    user = attr.ib(type="UserID")
-    access_token_id = attr.ib(type=Optional[int])
-    is_guest = attr.ib(type=bool)
-    shadow_banned = attr.ib(type=bool)
-    device_id = attr.ib(type=Optional[str])
-    app_service = attr.ib(type=Optional["ApplicationService"])
-    authenticated_entity = attr.ib(type=str)
+    user: "UserID"
+    access_token_id: Optional[int]
+    is_guest: bool
+    shadow_banned: bool
+    device_id: Optional[str]
+    app_service: Optional["ApplicationService"]
+    authenticated_entity: str

     def serialize(self):
         """Converts self to a type that can be serialized as JSON, and then
@@ -211,7 +212,7 @@ def get_localpart_from_id(string: str) -> str:
 DS = TypeVar("DS", bound="DomainSpecificString")

-@attr.s(slots=True, frozen=True, repr=False)
+@attr.s(slots=True, frozen=True, repr=False, auto_attribs=True)
 class DomainSpecificString(metaclass=abc.ABCMeta):
     """Common base class among ID/name strings that have a local part and a
     domain name, prefixed with a sigil.
@@ -224,8 +225,8 @@ class DomainSpecificString(metaclass=abc.ABCMeta):
     SIGIL: ClassVar[str] = abc.abstractproperty()  # type: ignore

-    localpart = attr.ib(type=str)
-    domain = attr.ib(type=str)
+    localpart: str
+    domain: str

     # Because this is a frozen class, it is deeply immutable.

     def __copy__(self):
@@ -461,14 +462,12 @@ class RoomStreamToken:
     attributes, must be hashable.
     """

-    topological = attr.ib(
-        type=Optional[int],
+    topological: Optional[int] = attr.ib(
         validator=attr.validators.optional(attr.validators.instance_of(int)),
     )
-    stream = attr.ib(type=int, validator=attr.validators.instance_of(int))
+    stream: int = attr.ib(validator=attr.validators.instance_of(int))

-    instance_map = attr.ib(
-        type="frozendict[str, int]",
+    instance_map: "frozendict[str, int]" = attr.ib(
         factory=frozendict,
         validator=attr.validators.deep_mapping(
             key_validator=attr.validators.instance_of(str),
@@ -477,7 +476,7 @@ class RoomStreamToken:
         ),
     )

-    def __attrs_post_init__(self):
+    def __attrs_post_init__(self) -> None:
         """Validates that both `topological` and `instance_map` aren't set."""
         if self.instance_map and self.topological:
@@ -593,7 +592,7 @@ class RoomStreamToken:
         return "s%d" % (self.stream,)

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class StreamToken:
     """A collection of positions within multiple streams.
@@ -601,20 +600,20 @@ class StreamToken:
     must be hashable.
     """

-    room_key = attr.ib(
-        type=RoomStreamToken, validator=attr.validators.instance_of(RoomStreamToken)
+    room_key: RoomStreamToken = attr.ib(
+        validator=attr.validators.instance_of(RoomStreamToken)
     )
-    presence_key = attr.ib(type=int)
-    typing_key = attr.ib(type=int)
-    receipt_key = attr.ib(type=int)
-    account_data_key = attr.ib(type=int)
-    push_rules_key = attr.ib(type=int)
-    to_device_key = attr.ib(type=int)
-    device_list_key = attr.ib(type=int)
-    groups_key = attr.ib(type=int)
+    presence_key: int
+    typing_key: int
+    receipt_key: int
+    account_data_key: int
+    push_rules_key: int
+    to_device_key: int
+    device_list_key: int
+    groups_key: int

     _SEPARATOR = "_"
-    START: "StreamToken"
+    START: ClassVar["StreamToken"]

     @classmethod
     async def from_string(cls, store: "DataStore", string: str) -> "StreamToken":
@@ -674,7 +673,7 @@ class StreamToken:
 StreamToken.START = StreamToken(RoomStreamToken(None, 0), 0, 0, 0, 0, 0, 0, 0, 0)

-@attr.s(slots=True, frozen=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class PersistedEventPosition:
     """Position of a newly persisted event with instance that persisted it.
@@ -682,8 +681,8 @@ class PersistedEventPosition:
     RoomStreamToken.
     """

-    instance_name = attr.ib(type=str)
-    stream = attr.ib(type=int)
+    instance_name: str
+    stream: int

     def persisted_after(self, token: RoomStreamToken) -> bool:
         return token.get_stream_pos_for_instance(self.instance_name) < self.stream
@@ -733,15 +732,15 @@ class ThirdPartyInstanceID:
     __str__ = to_string

-@attr.s(slots=True)
+@attr.s(slots=True, frozen=True, auto_attribs=True)
 class ReadReceipt:
     """Information about a read-receipt"""

-    room_id = attr.ib()
-    receipt_type = attr.ib()
-    user_id = attr.ib()
-    event_ids = attr.ib()
-    data = attr.ib()
+    room_id: str
+    receipt_type: str
+    user_id: str
+    event_ids: List[str]
+    data: JsonDict

 def get_verify_key_from_cross_signing_key(key_info):
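The `START: ClassVar["StreamToken"]` change above is the flip side of auto_attribs: every annotated class-level name becomes a field unless it is marked `ClassVar`, which keeps `START` a class attribute assigned after the class body rather than an `__init__` argument. A sketch (names illustrative, not the real StreamToken):

import attr
from typing import ClassVar

@attr.s(slots=True, frozen=True, auto_attribs=True)
class StreamTokenExample:  # illustrative only
    stream: int

    # Skipped by auto_attribs: stays a class attribute, not a field.
    START: ClassVar["StreamTokenExample"]

# Class-level assignment works even with slots=True/frozen=True, since
# those only constrain instance attributes.
StreamTokenExample.START = StreamTokenExample(0)
assert len(attr.fields(StreamTokenExample)) == 1  # only `stream` is a field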

@@ -309,12 +309,12 @@ def gather_results(  # type: ignore[misc]
     return deferred.addCallback(tuple)

-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class _LinearizerEntry:
     # The number of things executing.
-    count = attr.ib(type=int)
+    count: int
     # Deferreds for the things blocked from executing.
-    deferreds = attr.ib(type=collections.OrderedDict)
+    deferreds: collections.OrderedDict

 class Linearizer:

@@ -33,7 +33,7 @@ DV = TypeVar("DV")

 # This class can't be generic because it uses slots with attrs.
 # See: https://github.com/python-attrs/attrs/issues/313
-@attr.s(slots=True)
+@attr.s(slots=True, auto_attribs=True)
 class DictionaryEntry:  # should be: Generic[DKT, DV].
     """Returned when getting an entry from the cache
@@ -41,14 +41,13 @@ class DictionaryEntry:  # should be: Generic[DKT, DV].
         full: Whether the cache has the full or dict or just some keys.
             If not full then not all requested keys will necessarily be present
             in `value`
-        known_absent: Keys that were looked up in the dict and were not
-            there.
+        known_absent: Keys that were looked up in the dict and were not there.
         value: The full or partial dict value
     """

-    full = attr.ib(type=bool)
-    known_absent = attr.ib(type=Set[Any])  # should be: Set[DKT]
-    value = attr.ib(type=Dict[Any, Any])  # should be: Dict[DKT, DV]
+    full: bool
+    known_absent: Set[Any]  # should be: Set[DKT]
+    value: Dict[Any, Any]  # should be: Dict[DKT, DV]

     def __len__(self) -> int:
         return len(self.value)
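Most classes in this commit pass `slots=True`, which is why stray attribute assignment fails fast on them; a minimal sketch of that behaviour (names illustrative):

import attr

@attr.s(slots=True, auto_attribs=True)
class SlottedExample:  # illustrative only
    full: bool

entry = SlottedExample(True)
try:
    entry.typo = 1  # slots=True means no instance __dict__
except AttributeError:
    pass  # misspelled attributes raise instead of silently sticking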