0
0
Fork 1
mirror of https://mau.dev/maunium/synapse.git synced 2024-12-15 15:33:50 +01:00

Clean up the blacklist/whitelist handling.

Always set the config key with an empty list, even if a list isn't specified.
This means that the codepaths are the same for both the empty list and
for a missing key. Since the behaviour is the same for both cases this
makes the code somewhat easier to reason about.
This commit is contained in:
Mark Haines 2016-05-16 13:03:59 +01:00
parent dd95eb4cb5
commit eb79110beb
3 changed files with 36 additions and 40 deletions

View file

@@ -100,13 +100,13 @@ class ContentRepositoryConfig(Config):
"to work" "to work"
) )
if "url_preview_ip_range_whitelist" in config: self.url_preview_ip_range_whitelist = IPSet(
self.url_preview_ip_range_whitelist = IPSet( config.get("url_preview_ip_range_whitelist", ())
config["url_preview_ip_range_whitelist"] )
)
if "url_preview_url_blacklist" in config: self.url_preview_url_blacklist = config.get(
self.url_preview_url_blacklist = config["url_preview_url_blacklist"] "url_preview_url_blacklist", ()
)
def default_config(self, **kwargs): def default_config(self, **kwargs):
media_store = self.default_path("media_store") media_store = self.default_path("media_store")

View file

@@ -380,8 +380,7 @@ class CaptchaServerHttpClient(SimpleHttpClient):
class SpiderEndpointFactory(object): class SpiderEndpointFactory(object):
def __init__(self, hs): def __init__(self, hs):
self.blacklist = hs.config.url_preview_ip_range_blacklist self.blacklist = hs.config.url_preview_ip_range_blacklist
if hasattr(hs.config, "url_preview_ip_range_whitelist"): self.whitelist = hs.config.url_preview_ip_range_whitelist
self.whitelist = hs.config.url_preview_ip_range_whitelist
self.policyForHTTPS = hs.get_http_client_context_factory() self.policyForHTTPS = hs.get_http_client_context_factory()
def endpointForURI(self, uri): def endpointForURI(self, uri):

View file

@@ -56,8 +56,7 @@ class PreviewUrlResource(Resource):
self.client = SpiderHttpClient(hs) self.client = SpiderHttpClient(hs)
self.media_repo = media_repo self.media_repo = media_repo
if hasattr(hs.config, "url_preview_url_blacklist"): self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist
self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist
# simple memory cache mapping urls to OG metadata # simple memory cache mapping urls to OG metadata
self.cache = ExpiringCache( self.cache = ExpiringCache(
@@ -86,39 +85,37 @@ class PreviewUrlResource(Resource):
else: else:
ts = self.clock.time_msec() ts = self.clock.time_msec()
# impose the URL pattern blacklist url_tuple = urlparse.urlsplit(url)
if hasattr(self, "url_preview_url_blacklist"): for entry in self.url_preview_url_blacklist:
url_tuple = urlparse.urlsplit(url) match = True
for entry in self.url_preview_url_blacklist: for attrib in entry:
match = True pattern = entry[attrib]
for attrib in entry: value = getattr(url_tuple, attrib)
pattern = entry[attrib] logger.debug((
value = getattr(url_tuple, attrib) "Matching attrib '%s' with value '%s' against"
logger.debug(( " pattern '%s'"
"Matching attrib '%s' with value '%s' against" ) % (attrib, value, pattern))
" pattern '%s'"
) % (attrib, value, pattern))
if value is None: if value is None:
match = False
continue
if pattern.startswith('^'):
if not re.match(pattern, getattr(url_tuple, attrib)):
match = False match = False
continue continue
else:
if pattern.startswith('^'): if not fnmatch.fnmatch(getattr(url_tuple, attrib), pattern):
if not re.match(pattern, getattr(url_tuple, attrib)): match = False
match = False continue
continue if match:
else: logger.warn(
if not fnmatch.fnmatch(getattr(url_tuple, attrib), pattern): "URL %s blocked by url_blacklist entry %s", url, entry
match = False )
continue raise SynapseError(
if match: 403, "URL blocked by url pattern blacklist entry",
logger.warn( Codes.UNKNOWN
"URL %s blocked by url_blacklist entry %s", url, entry )
)
raise SynapseError(
403, "URL blocked by url pattern blacklist entry",
Codes.UNKNOWN
)
# first check the memory cache - good to handle all the clients on this # first check the memory cache - good to handle all the clients on this
# HS thundering away to preview the same URL at the same time. # HS thundering away to preview the same URL at the same time.