Fix empty url_cache_thumbnails/yyyy-mm-dd/ directories being left behind (#10924)

This commit is contained in:
Sean Quah 2021-09-29 10:24:37 +01:00 committed by GitHub
parent 9fd057b8c5
commit 2be0fde3d6
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 75 additions and 31 deletions

1
changelog.d/10924.bugfix Normal file
View file

@@ -0,0 +1 @@
Fix a bug where empty `yyyy-mm-dd/` directories would be left behind in the media store's `url_cache_thumbnails/` directory.

View file

@@ -73,6 +73,7 @@ OG_TAG_VALUE_MAXLEN = 1000
ONE_HOUR = 60 * 60 * 1000 ONE_HOUR = 60 * 60 * 1000
ONE_DAY = 24 * ONE_HOUR ONE_DAY = 24 * ONE_HOUR
IMAGE_CACHE_EXPIRY_MS = 2 * ONE_DAY
@attr.s(slots=True, frozen=True, auto_attribs=True) @attr.s(slots=True, frozen=True, auto_attribs=True)
@@ -496,6 +497,27 @@ class PreviewUrlResource(DirectServeJsonResource):
logger.info("Still running DB updates; skipping expiry") logger.info("Still running DB updates; skipping expiry")
return return
def try_remove_parent_dirs(dirs: Iterable[str]) -> None:
    """Attempt to remove the given chain of parent directories.

    Each path is removed with `os.rmdir`, which only succeeds on empty
    directories, so reaching a still-populated ancestor safely stops the
    walk without touching its contents.

    Args:
        dirs: The list of directory paths to delete, with children appearing
            before their parents.
    """
    # NOTE: loop variable renamed from `dir` to avoid shadowing the builtin.
    for directory in dirs:
        try:
            os.rmdir(directory)
        except FileNotFoundError:
            # Already deleted, continue with deleting the rest
            pass
        except OSError as e:
            # A non-empty directory is expected once we reach a shared
            # ancestor; anything else is worth logging. Either way, parents
            # of this directory cannot be empty, so stop here.
            if e.errno != errno.ENOTEMPTY:
                logger.warning(
                    "Failed to remove media directory: %r: %s", directory, e
                )
            break
# First we delete expired url cache entries # First we delete expired url cache entries
media_ids = await self.store.get_expired_url_cache(now) media_ids = await self.store.get_expired_url_cache(now)
@@ -504,20 +526,16 @@ class PreviewUrlResource(DirectServeJsonResource):
fname = self.filepaths.url_cache_filepath(media_id) fname = self.filepaths.url_cache_filepath(media_id)
try: try:
os.remove(fname) os.remove(fname)
except FileNotFoundError:
pass # If the path doesn't exist, meh
except OSError as e: except OSError as e:
# If the path doesn't exist, meh logger.warning("Failed to remove media: %r: %s", media_id, e)
if e.errno != errno.ENOENT: continue
logger.warning("Failed to remove media: %r: %s", media_id, e)
continue
removed_media.append(media_id) removed_media.append(media_id)
try: dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id)
dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id) try_remove_parent_dirs(dirs)
for dir in dirs:
os.rmdir(dir)
except Exception:
pass
await self.store.delete_url_cache(removed_media) await self.store.delete_url_cache(removed_media)
@@ -530,7 +548,7 @@ class PreviewUrlResource(DirectServeJsonResource):
# These may be cached for a bit on the client (i.e., they # These may be cached for a bit on the client (i.e., they
# may have a room open with a preview url thing open). # may have a room open with a preview url thing open).
# So we wait a couple of days before deleting, just in case. # So we wait a couple of days before deleting, just in case.
expire_before = now - 2 * ONE_DAY expire_before = now - IMAGE_CACHE_EXPIRY_MS
media_ids = await self.store.get_url_cache_media_before(expire_before) media_ids = await self.store.get_url_cache_media_before(expire_before)
removed_media = [] removed_media = []
@@ -538,36 +556,30 @@ class PreviewUrlResource(DirectServeJsonResource):
fname = self.filepaths.url_cache_filepath(media_id) fname = self.filepaths.url_cache_filepath(media_id)
try: try:
os.remove(fname) os.remove(fname)
except FileNotFoundError:
pass # If the path doesn't exist, meh
except OSError as e: except OSError as e:
# If the path doesn't exist, meh logger.warning("Failed to remove media: %r: %s", media_id, e)
if e.errno != errno.ENOENT: continue
logger.warning("Failed to remove media: %r: %s", media_id, e)
continue
try: dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id)
dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id) try_remove_parent_dirs(dirs)
for dir in dirs:
os.rmdir(dir)
except Exception:
pass
thumbnail_dir = self.filepaths.url_cache_thumbnail_directory(media_id) thumbnail_dir = self.filepaths.url_cache_thumbnail_directory(media_id)
try: try:
shutil.rmtree(thumbnail_dir) shutil.rmtree(thumbnail_dir)
except FileNotFoundError:
pass # If the path doesn't exist, meh
except OSError as e: except OSError as e:
# If the path doesn't exist, meh logger.warning("Failed to remove media: %r: %s", media_id, e)
if e.errno != errno.ENOENT: continue
logger.warning("Failed to remove media: %r: %s", media_id, e)
continue
removed_media.append(media_id) removed_media.append(media_id)
try: dirs = self.filepaths.url_cache_thumbnail_dirs_to_delete(media_id)
dirs = self.filepaths.url_cache_thumbnail_dirs_to_delete(media_id) # Note that one of the directories to be deleted has already been
for dir in dirs: # removed by the `rmtree` above.
os.rmdir(dir) try_remove_parent_dirs(dirs)
except Exception:
pass
await self.store.delete_url_cache_media(removed_media) await self.store.delete_url_cache_media(removed_media)

View file

@@ -21,11 +21,13 @@ from twisted.internet.error import DNSLookupError
from twisted.test.proto_helpers import AccumulatingProtocol from twisted.test.proto_helpers import AccumulatingProtocol
from synapse.config.oembed import OEmbedEndpointConfig from synapse.config.oembed import OEmbedEndpointConfig
from synapse.rest.media.v1.preview_url_resource import IMAGE_CACHE_EXPIRY_MS
from synapse.util.stringutils import parse_and_validate_mxc_uri from synapse.util.stringutils import parse_and_validate_mxc_uri
from tests import unittest from tests import unittest
from tests.server import FakeTransport from tests.server import FakeTransport
from tests.test_utils import SMALL_PNG from tests.test_utils import SMALL_PNG
from tests.utils import MockClock
try: try:
import lxml import lxml
@@ -851,3 +853,32 @@ class URLPreviewTests(unittest.HomeserverTestCase):
404, 404,
"URL cache thumbnail was unexpectedly retrieved from a storage provider", "URL cache thumbnail was unexpectedly retrieved from a storage provider",
) )
def test_cache_expiry(self):
    """Expiring the URL cache must delete files, thumbnails and their dirs."""
    # Swap in a controllable clock so we can fast-forward past the expiry
    # threshold without actually waiting.
    self.preview_url.clock = MockClock()

    _host, media_id = self._download_image()

    filepaths = self.preview_url.filepaths
    cache_file = filepaths.url_cache_filepath(media_id)
    cache_file_dirs = filepaths.url_cache_filepath_dirs_to_delete(media_id)
    thumb_dir = filepaths.url_cache_thumbnail_directory(media_id)
    thumb_dirs = filepaths.url_cache_thumbnail_dirs_to_delete(media_id)

    # Sanity check: the download must have produced both on-disk artifacts.
    self.assertTrue(os.path.isfile(cache_file))
    self.assertTrue(os.path.isdir(thumb_dir))

    # Jump past the expiry window and run the cleanup job.
    self.preview_url.clock.advance_time_msec(IMAGE_CACHE_EXPIRY_MS + 1)
    self.get_success(self.preview_url._expire_url_cache_data())

    # Every file and every (now-empty) parent directory should be gone.
    for path in [cache_file, *cache_file_dirs, thumb_dir, *thumb_dirs]:
        self.assertFalse(
            os.path.exists(path),
            f"{os.path.relpath(path, self.media_store_path)} was not deleted",
        )