
Merge pull request #7519 from matrix-org/rav/kill_py2_code

Kill off some old python 2 code
Richard van der Hoff 2020-05-18 10:45:30 +01:00 committed by GitHub
commit 4d1afb1dfe
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
16 changed files with 48 additions and 196 deletions

changelog.d/7519.misc Normal file
View file

@@ -0,0 +1 @@
+Remove some redundant Python 2 support code.

View file

@@ -3,8 +3,6 @@ import json
 import sys
 import time

-import six
-
 import psycopg2
 import yaml
 from canonicaljson import encode_canonical_json
@@ -12,10 +10,7 @@ from signedjson.key import read_signing_keys
 from signedjson.sign import sign_json
 from unpaddedbase64 import encode_base64

-if six.PY2:
-    db_type = six.moves.builtins.buffer
-else:
-    db_type = memoryview
+db_binary_type = memoryview


 def select_v1_keys(connection):
@@ -72,7 +67,7 @@ def rows_v2(server, json):
     valid_until = json["valid_until_ts"]
     key_json = encode_canonical_json(json)
     for key_id in json["verify_keys"]:
-        yield (server, key_id, "-", valid_until, valid_until, db_type(key_json))
+        yield (server, key_id, "-", valid_until, valid_until, db_binary_type(key_json))


 def main():
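
This buffer/memoryview switch is the pattern the whole PR removes. As a rough illustration of why a bare memoryview suffices on Python 3 (my sketch, not part of the commit — psycopg2 accepts memoryview for BYTEA parameters, and wrapping is lossless):

key_json = b'{"verify_keys": {}}'
wrapped = memoryview(key_json)     # what db_binary_type(key_json) now produces
assert bytes(wrapped) == key_json  # round-trips without loss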

View file

@@ -19,7 +19,7 @@ import random
 import sys
 from io import BytesIO

-from six import PY3, raise_from, string_types
+from six import raise_from, string_types
 from six.moves import urllib

 import attr
@@ -70,11 +70,7 @@ incoming_responses_counter = Counter(
 MAX_LONG_RETRIES = 10
 MAX_SHORT_RETRIES = 3

-if PY3:
-    MAXINT = sys.maxsize
-else:
-    MAXINT = sys.maxint
+MAXINT = sys.maxsize


 _next_id = 1
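
Background for this hunk (not from the diff): Python 3 dropped sys.maxint because its integers are arbitrary-precision; sys.maxsize is the conventional large-bound replacement.

import sys

print(sys.maxsize)      # 9223372036854775807 on 64-bit CPython
print(sys.maxsize + 1)  # no overflow: Python 3 ints simply grow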

View file

@@ -20,8 +20,6 @@ import time
 from functools import wraps
 from inspect import getcallargs

-from six import PY3
-

 _TIME_FUNC_ID = 0
@@ -30,12 +28,8 @@ def _log_debug_as_f(f, msg, msg_args):
     logger = logging.getLogger(name)

     if logger.isEnabledFor(logging.DEBUG):
-        if PY3:
-            lineno = f.__code__.co_firstlineno
-            pathname = f.__code__.co_filename
-        else:
-            lineno = f.func_code.co_firstlineno
-            pathname = f.func_code.co_filename
+        lineno = f.__code__.co_firstlineno
+        pathname = f.__code__.co_filename

         record = logging.LogRecord(
             name=name,
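
The PY3 branch survives because the func_code alias was removed in Python 3; __code__ is the only spelling. A standalone sketch (hypothetical function, not from the commit):

def example():
    pass

# __code__ supplies the definition site that _log_debug_as_f feeds
# into its hand-built LogRecord.
print(example.__code__.co_filename, example.__code__.co_firstlineno)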

View file

@@ -15,8 +15,6 @@
 # limitations under the License.
 import logging

-import six
-
 from prometheus_client import Counter

 from twisted.internet import defer
@@ -28,9 +26,6 @@ from synapse.push import PusherConfigException
 from . import push_rule_evaluator, push_tools

-if six.PY3:
-    long = int
-
 logger = logging.getLogger(__name__)

 http_push_processed_counter = Counter(
@@ -318,7 +313,7 @@ class HttpPusher(object):
                 {
                     "app_id": self.app_id,
                     "pushkey": self.pushkey,
-                    "pushkey_ts": long(self.pushkey_ts / 1000),
+                    "pushkey_ts": int(self.pushkey_ts / 1000),
                     "data": self.data_minus_url,
                 }
             ],
@@ -347,7 +342,7 @@ class HttpPusher(object):
                 {
                     "app_id": self.app_id,
                     "pushkey": self.pushkey,
-                    "pushkey_ts": long(self.pushkey_ts / 1000),
+                    "pushkey_ts": int(self.pushkey_ts / 1000),
                     "data": self.data_minus_url,
                     "tweaks": tweaks,
                 }
@@ -409,7 +404,7 @@ class HttpPusher(object):
                 {
                     "app_id": self.app_id,
                     "pushkey": self.pushkey,
-                    "pushkey_ts": long(self.pushkey_ts / 1000),
+                    "pushkey_ts": int(self.pushkey_ts / 1000),
                     "data": self.data_minus_url,
                 }
             ],
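
The long() → int() substitutions lean on two Python 3 facts (illustrative values below, not from the diff): int and long are unified, and / is true division, so int() still matters for getting whole seconds:

pushkey_ts = 1589795130123     # milliseconds since the epoch

print(pushkey_ts / 1000)       # 1589795130.123 -- true division gives a float
print(int(pushkey_ts / 1000))  # 1589795130 -- truncated to whole seconds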

View file

@@ -16,8 +16,6 @@
 import logging
 from typing import Optional

-import six
-
 from synapse.storage.data_stores.main.cache import CacheInvalidationWorkerStore
 from synapse.storage.database import Database
 from synapse.storage.engines import PostgresEngine
@@ -26,13 +24,6 @@ from synapse.storage.util.id_generators import MultiWriterIdGenerator
 logger = logging.getLogger(__name__)


-def __func__(inp):
-    if six.PY3:
-        return inp
-    else:
-        return inp.__func__
-
-
 class BaseSlavedStore(CacheInvalidationWorkerStore):
     def __init__(self, database: Database, db_conn, hs):
         super(BaseSlavedStore, self).__init__(database, db_conn, hs)
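
The deleted __func__ helper papered over Python 2's unbound methods. On Python 3, attribute access on the class already yields a plain function, so it can be re-assigned directly; a sketch with hypothetical classes:

class DataStoreLike:
    def get_thing(self):
        return "thing"

class SlaveLike:
    # No unwrapping needed: DataStoreLike.get_thing is a plain function.
    get_thing = DataStoreLike.get_thing

print(SlaveLike().get_thing())  # thing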

View file

@@ -18,7 +18,7 @@ from synapse.storage.data_stores.main.presence import PresenceStore
 from synapse.storage.database import Database
 from synapse.util.caches.stream_change_cache import StreamChangeCache

-from ._base import BaseSlavedStore, __func__
+from ._base import BaseSlavedStore
 from ._slaved_id_tracker import SlavedIdTracker
@@ -27,14 +27,14 @@ class SlavedPresenceStore(BaseSlavedStore):
         super(SlavedPresenceStore, self).__init__(database, db_conn, hs)
         self._presence_id_gen = SlavedIdTracker(db_conn, "presence_stream", "stream_id")

-        self._presence_on_startup = self._get_active_presence(db_conn)
+        self._presence_on_startup = self._get_active_presence(db_conn)  # type: ignore

         self.presence_stream_cache = StreamChangeCache(
             "PresenceStreamChangeCache", self._presence_id_gen.get_current_token()
         )

-    _get_active_presence = __func__(DataStore._get_active_presence)
-    take_presence_startup_info = __func__(DataStore.take_presence_startup_info)
+    _get_active_presence = DataStore._get_active_presence
+    take_presence_startup_info = DataStore.take_presence_startup_info
     _get_presence_for_user = PresenceStore.__dict__["_get_presence_for_user"]
     get_presence_for_users = PresenceStore.__dict__["get_presence_for_users"]

View file

@@ -17,7 +17,6 @@
 import logging
 import os

-from six import PY3
 from six.moves import urllib

 from twisted.internet import defer
@@ -324,7 +323,6 @@ def get_filename_from_headers(headers):
             upload_name_utf8 = upload_name_utf8[7:]
             # We have a filename*= section. This MUST be ASCII, and any UTF-8
             # bytes are %-quoted.
-            if PY3:
             try:
                 # Once it is decoded, we can then unquote the %-encoded
                 # parts strictly into a unicode string.
@@ -334,13 +332,6 @@ def get_filename_from_headers(headers):
             except UnicodeDecodeError:
                 # Incorrect UTF-8.
                 pass
-            else:
-                # On Python 2, we first unquote the %-encoded parts and then
-                # decode it strictly using UTF-8.
-                try:
-                    upload_name = urllib.parse.unquote(upload_name_utf8).decode("utf8")
-                except UnicodeDecodeError:
-                    pass

     # If there isn't check for an ascii name.
     if not upload_name:
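
Only the Python 3 decoding path for RFC 5987 filename*= values remains. A self-contained sketch of that path (hypothetical header value, not from the commit):

import urllib.parse

raw = b"utf-8''caf%C3%A9.png"  # a filename*= value from Content-Disposition
value = raw[7:]                # strip the "utf-8''" prefix
# Decode the ASCII shell, then strictly unquote the %-escaped UTF-8.
print(urllib.parse.unquote(value.decode("ascii"), errors="strict"))  # café.png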

View file

@@ -19,9 +19,6 @@ import random
 from abc import ABCMeta
 from typing import Any, Optional

-from six import PY2
-from six.moves import builtins
-
 from canonicaljson import json

 from synapse.storage.database import LoggingTransaction  # noqa: F401
@@ -103,11 +100,6 @@ def db_to_json(db_content):
     if isinstance(db_content, memoryview):
         db_content = db_content.tobytes()

-    # psycopg2 on Python 2 returns buffer objects, which we need to cast to
-    # bytes to decode
-    if PY2 and isinstance(db_content, builtins.buffer):
-        db_content = bytes(db_content)
-
     # Decode it to a Unicode string before feeding it to json.loads, so we
     # consistenty get a Unicode-containing object out.
     if isinstance(db_content, (bytes, bytearray)):
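
After this change db_to_json handles just memoryview (psycopg2) and bytes (sqlite3). A minimal sketch of the surviving conversion chain (illustrative row content, not from the diff):

import json

db_content = memoryview(b'{"user_id": "@alice:example.com"}')
if isinstance(db_content, memoryview):
    db_content = db_content.tobytes()          # memoryview -> bytes
print(json.loads(db_content.decode("utf-8")))  # bytes -> str -> dict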

View file

@@ -17,8 +17,6 @@
 import itertools
 import logging

-import six
-
 from signedjson.key import decode_verify_key_bytes

 from synapse.storage._base import SQLBaseStore
@@ -28,11 +26,7 @@ from synapse.util.iterutils import batch_iter
 logger = logging.getLogger(__name__)

-# py2 sqlite has buffer hardcoded as only binary type, so we must use it,
-# despite being deprecated and removed in favor of memoryview
-if six.PY2:
-    db_binary_type = six.moves.builtins.buffer
-else:
-    db_binary_type = memoryview
+db_binary_type = memoryview

View file

@@ -45,7 +45,6 @@ from synapse.util.async_helpers import Linearizer
 from synapse.util.caches import intern_string
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
 from synapse.util.metrics import Measure
-from synapse.util.stringutils import to_ascii

 logger = logging.getLogger(__name__)
@@ -179,7 +178,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             """

             txn.execute(sql, (room_id, Membership.JOIN))
-            return [to_ascii(r[0]) for r in txn]
+            return [r[0] for r in txn]

     @cached(max_entries=100000)
     def get_room_summary(self, room_id):
@@ -223,7 +222,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             txn.execute(sql, (room_id,))
             res = {}
             for count, membership in txn:
-                summary = res.setdefault(to_ascii(membership), MemberSummary([], count))
+                summary = res.setdefault(membership, MemberSummary([], count))

             # we order by membership and then fairly arbitrarily by event_id so
             # heroes are consistent
@@ -255,11 +254,11 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             # 6 is 5 (number of heroes) plus 1, in case one of them is the calling user.
             txn.execute(sql, (room_id, Membership.JOIN, Membership.INVITE, 6))
             for user_id, membership, event_id in txn:
-                summary = res[to_ascii(membership)]
+                summary = res[membership]
                 # we will always have a summary for this membership type at this
                 # point given the summary currently contains the counts.
                 members = summary.members
-                members.append((to_ascii(user_id), to_ascii(event_id)))
+                members.append((user_id, event_id))

             return res
@@ -584,13 +583,9 @@ class RoomMemberWorkerStore(EventsWorkerStore):
                 ev_entry = event_map.get(event_id)
                 if ev_entry:
                     if ev_entry.event.membership == Membership.JOIN:
-                        users_in_room[to_ascii(ev_entry.event.state_key)] = ProfileInfo(
-                            display_name=to_ascii(
-                                ev_entry.event.content.get("displayname", None)
-                            ),
-                            avatar_url=to_ascii(
-                                ev_entry.event.content.get("avatar_url", None)
-                            ),
+                        users_in_room[ev_entry.event.state_key] = ProfileInfo(
+                            display_name=ev_entry.event.content.get("displayname", None),
+                            avatar_url=ev_entry.event.content.get("avatar_url", None),
                         )
                 else:
                     missing_member_event_ids.append(event_id)
@@ -604,9 +599,9 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             if event is not None and event.type == EventTypes.Member:
                 if event.membership == Membership.JOIN:
                     if event.event_id in member_event_ids:
-                        users_in_room[to_ascii(event.state_key)] = ProfileInfo(
-                            display_name=to_ascii(event.content.get("displayname", None)),
-                            avatar_url=to_ascii(event.content.get("avatar_url", None)),
+                        users_in_room[event.state_key] = ProfileInfo(
+                            display_name=event.content.get("displayname", None),
+                            avatar_url=event.content.get("avatar_url", None),
                         )

         return users_in_room
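
Every hunk in this file drops a to_ascii() call. As the stringutils diff below shows, the Python 3 branch of that helper was a pure pass-through, so these deletions cannot change behaviour; roughly:

def to_ascii(s):  # the entire Python 3 branch of the removed helper
    return s

membership = "join"
assert to_ascii(membership) is membership  # identical object: a no-op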

View file

@@ -29,7 +29,6 @@ from synapse.storage.database import Database
 from synapse.storage.state import StateFilter
 from synapse.util.caches import intern_string
 from synapse.util.caches.descriptors import cached, cachedList
-from synapse.util.stringutils import to_ascii

 logger = logging.getLogger(__name__)
@@ -185,9 +184,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
                 (room_id,),
             )

-            return {
-                (intern_string(r[0]), intern_string(r[1])): to_ascii(r[2]) for r in txn
-            }
+            return {(intern_string(r[0]), intern_string(r[1])): r[2] for r in txn}

         return self.db.runInteraction(
             "get_current_state_ids", _get_current_state_ids_txn

View file

@@ -16,8 +16,6 @@
 import logging
 from collections import namedtuple

-import six
-
 from canonicaljson import encode_canonical_json

 from twisted.internet import defer
@@ -27,11 +25,6 @@ from synapse.storage._base import SQLBaseStore, db_to_json
 from synapse.storage.database import Database
 from synapse.util.caches.expiringcache import ExpiringCache

-# py2 sqlite has buffer hardcoded as only binary type, so we must use it,
-# despite being deprecated and removed in favor of memoryview
-if six.PY2:
-    db_binary_type = six.moves.builtins.buffer
-else:
-    db_binary_type = memoryview
+db_binary_type = memoryview

 logger = logging.getLogger(__name__)

View file

@@ -50,7 +50,6 @@ from synapse.storage.background_updates import BackgroundUpdater
 from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
 from synapse.storage.types import Connection, Cursor
 from synapse.types import Collection
-from synapse.util.stringutils import exception_to_unicode

 logger = logging.getLogger(__name__)
@@ -424,20 +423,14 @@ class Database(object):
                     # This can happen if the database disappears mid
                     # transaction.
                     logger.warning(
-                        "[TXN OPERROR] {%s} %s %d/%d",
-                        name,
-                        exception_to_unicode(e),
-                        i,
-                        N,
+                        "[TXN OPERROR] {%s} %s %d/%d", name, e, i, N,
                     )
                     if i < N:
                         i += 1
                         try:
                             conn.rollback()
                         except self.engine.module.Error as e1:
-                            logger.warning(
-                                "[TXN EROLL] {%s} %s", name, exception_to_unicode(e1)
-                            )
+                            logger.warning("[TXN EROLL] {%s} %s", name, e1)
                         continue
                     raise
                 except self.engine.module.DatabaseError as e:
@@ -449,9 +442,7 @@ class Database(object):
                             conn.rollback()
                         except self.engine.module.Error as e1:
                             logger.warning(
-                                "[TXN EROLL] {%s} %s",
-                                name,
-                                exception_to_unicode(e1),
+                                "[TXN EROLL] {%s} %s", name, e1,
                             )
                         continue
                     raise
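
exception_to_unicode() existed because Python 2's str(exception) could yield raw non-ASCII bytes. On Python 3, str() of an exception is always text, so the exception can be logged directly (illustrative values, not from the diff):

e = RuntimeError("café is unavailable")

# %-formatting an exception into a log line is safe on Python 3, even
# with non-ASCII message text.
print("[TXN OPERROR] {%s} %s %d/%d" % ("persist_events", e, 1, 5))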

View file

@@ -15,11 +15,9 @@
 # limitations under the License.

 import logging
+from sys import intern
 from typing import Callable, Dict, Optional

-import six
-from six.moves import intern
-
 import attr
 from prometheus_client.core import Gauge
@@ -154,9 +152,6 @@ def intern_string(string):
         return None

     try:
-        if six.PY2:
-            string = string.encode("ascii")
-
         return intern(string)
     except UnicodeEncodeError:
         return string
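
intern() lives at sys.intern on Python 3 (six.moves pointed at the py2 builtin) and accepts only str, which is why the encode() call goes too. A small sketch of what interning buys intern_string's callers:

from sys import intern

a = intern("m.room.member")
b = intern("m.room." + "member")
# Equal interned strings are one object, so hot dict lookups can
# short-circuit on identity before falling back to equality.
print(a is b)  # True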

View file

@@ -19,10 +19,6 @@ import re
 import string
 from collections import Iterable

-import six
-from six import PY2, PY3
-from six.moves import range
-
 from synapse.api.errors import Codes, SynapseError

 _string_with_symbols = string.digits + string.ascii_letters + ".,;:^&*-_+=#~@"
@@ -47,8 +43,6 @@ def random_string_with_symbols(length):

 def is_ascii(s):
-    if PY3:
     if isinstance(s, bytes):
         try:
             s.decode("ascii").encode("ascii")
@@ -58,68 +52,6 @@ def is_ascii(s):
             return False
         return True

-    try:
-        s.encode("ascii")
-    except UnicodeEncodeError:
-        return False
-    except UnicodeDecodeError:
-        return False
-    else:
-        return True
-
-
-def to_ascii(s):
-    """Converts a string to ascii if it is ascii, otherwise leave it alone.
-
-    If given None then will return None.
-    """
-    if PY3:
-        return s
-
-    if s is None:
-        return None
-
-    try:
-        return s.encode("ascii")
-    except UnicodeEncodeError:
-        return s
-
-
-def exception_to_unicode(e):
-    """Helper function to extract the text of an exception as a unicode string
-
-    Args:
-        e (Exception): exception to be stringified
-
-    Returns:
-        unicode
-    """
-    # urgh, this is a mess. The basic problem here is that psycopg2 constructs its
-    # exceptions with PyErr_SetString, with a (possibly non-ascii) argument. str() will
-    # then produce the raw byte sequence. Under Python 2, this will then cause another
-    # error if it gets mixed with a `unicode` object, as per
-    # https://github.com/matrix-org/synapse/issues/4252
-
-    # First of all, if we're under python3, everything is fine because it will sort this
-    # nonsense out for us.
-    if not PY2:
-        return str(e)
-
-    # otherwise let's have a stab at decoding the exception message. We'll circumvent
-    # Exception.__str__(), which would explode if someone raised Exception(u'non-ascii')
-    # and instead look at what is in the args member.
-
-    if len(e.args) == 0:
-        return ""
-    elif len(e.args) > 1:
-        return six.text_type(repr(e.args))
-
-    msg = e.args[0]
-    if isinstance(msg, bytes):
-        return msg.decode("utf-8", errors="replace")
-    else:
-        return msg
-
-
 def assert_valid_client_secret(client_secret):
     """Validate that a given string matches the client_secret regex defined by the spec"""