split S3.sync to different methods

Evgenii Alekseev 2021-08-18 12:13:11 +03:00
parent 3d10fa472b
commit bb45b1d868
2 changed files with 53 additions and 15 deletions

ahriman/core/upload/s3.py

@@ -81,6 +81,18 @@ class S3(Upload):
             aws_secret_access_key=configuration.get("s3", "secret_key"))
         return client.Bucket(configuration.get("s3", "bucket"))
 
+    @staticmethod
+    def remove_files(local_files: Dict[Path, str], remote_objects: Dict[Path, Any]) -> None:
+        """
+        remove files which have been removed locally
+        :param local_files: map of local path object to its checksum
+        :param remote_objects: map of remote path object to the remote s3 object
+        """
+        for local_file, remote_object in remote_objects.items():
+            if local_file in local_files:
+                continue
+            remote_object.delete()
+
     def get_local_files(self, path: Path) -> Dict[Path, str]:
         """
         get all local files and their calculated checksums
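
In isolation, the new remove_files helper deletes every remote object whose key no longer exists locally. A minimal sketch of that behavior; the FakeRemoteObject stand-in is hypothetical, replacing the real boto3 object summary:

```python
from pathlib import Path
from typing import Any, Dict


class FakeRemoteObject:
    """hypothetical stand-in for a boto3 ObjectSummary; only delete() matters here"""

    def __init__(self, key: str) -> None:
        self.key = key
        self.deleted = False

    def delete(self) -> None:
        self.deleted = True


local_files: Dict[Path, str] = {Path("x86_64/b"): "checksum-of-b"}
remote_objects: Dict[Path, Any] = {
    Path("x86_64/a"): FakeRemoteObject("x86_64/a"),  # no local counterpart, must be deleted
    Path("x86_64/b"): FakeRemoteObject("x86_64/b"),  # still present locally, must stay
}

# same logic as S3.remove_files: skip keys which still exist locally, delete the rest
for local_file, remote_object in remote_objects.items():
    if local_file in local_files:
        continue
    remote_object.delete()

assert remote_objects[Path("x86_64/a")].deleted
assert not remote_objects[Path("x86_64/b")].deleted
```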
@@ -116,7 +128,16 @@ class S3(Upload):
         remote_objects = self.get_remote_objects()
         local_files = self.get_local_files(path)
 
-        # sync to remotes first
+        self.upload_files(path, local_files, remote_objects)
+        self.remove_files(local_files, remote_objects)
+
+    def upload_files(self, path: Path, local_files: Dict[Path, str], remote_objects: Dict[Path, Any]) -> None:
+        """
+        upload changed files to s3
+        :param path: local path to sync
+        :param local_files: map of local path object to its checksum
+        :param remote_objects: map of remote path object to the remote s3 object
+        """
         for local_file, checksum in local_files.items():
             remote_object = remote_objects.get(local_file)
             # 0 and -1 elements are " (double quote)
@@ -130,9 +151,3 @@ class S3(Upload):
             extra_args = {"Content-Type": mime} if mime is not None else None
             self.bucket.upload_file(Filename=str(local_path), Key=str(remote_path), ExtraArgs=extra_args)
-
-        # remove files which were removed locally
-        for local_file, remote_object in remote_objects.items():
-            if local_file in local_files:
-                continue
-            remote_object.delete()
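
With both helpers in place, sync reduces to a thin orchestrator over them. A sketch of the resulting flow under this commit; the def line and docstring are a reconstruction, and the type of the second parameter is not shown in this diff, so it is typed loosely here:

```python
def sync(self, path: Path, built_packages: Any) -> None:
    """
    sync local files to the remote s3 bucket (reconstruction of the method after this commit)
    """
    remote_objects = self.get_remote_objects()
    local_files = self.get_local_files(path)

    # upload changed files first, then drop remote objects removed locally
    self.upload_files(path, local_files, remote_objects)
    self.remove_files(local_files, remote_objects)
```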

tests for ahriman.core.upload.s3

@@ -34,6 +34,19 @@ def test_calculate_etag_small(resource_path_root: Path) -> None:
     assert S3.calculate_etag(path, _chunk_size) == "04e75b4aa0fe6033e711e8ea98e059b2"
 
 
+def test_remove_files(s3_remote_objects: List[Any]) -> None:
+    """
+    must remove remote objects
+    """
+    local_files = {
+        Path(item.key): item.e_tag for item in s3_remote_objects if item.key != "x86_64/a"
+    }
+    remote_objects = {Path(item.key): item for item in s3_remote_objects}
+
+    S3.remove_files(local_files, remote_objects)
+    remote_objects[Path("x86_64/a")].delete.assert_called_once()
+
+
 def test_get_local_files(s3: S3, resource_path_root: Path) -> None:
     """
     must get all local files recursively
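
test_remove_files relies on the s3_remote_objects fixture, which this diff does not touch. A plausible shape for it, inferred from the attributes the tests access (key, e_tag, delete); the actual fixture in the repository may differ:

```python
from typing import Any, List
from unittest.mock import MagicMock

import pytest


@pytest.fixture
def s3_remote_objects() -> List[Any]:
    """hypothetical fixture: mocks exposing the key, e_tag and delete() used by the tests"""
    objects = []
    for key in ("x86_64/a", "x86_64/b"):
        item = MagicMock()
        item.key = key
        item.e_tag = f'"checksum-{key}"'  # S3 ETags come wrapped in double quotes
        objects.append(item)
    return objects
```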
@@ -70,10 +83,26 @@ def test_get_remote_objects(s3: S3, s3_remote_objects: List[Any]) -> None:
     assert s3.get_remote_objects() == expected
 
 
-def test_sync(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None:
+def test_sync(s3: S3, mocker: MockerFixture) -> None:
     """
     must run sync command
     """
+    local_files_mock = mocker.patch("ahriman.core.upload.s3.S3.get_local_files")
+    remote_objects_mock = mocker.patch("ahriman.core.upload.s3.S3.get_remote_objects")
+    remove_files_mock = mocker.patch("ahriman.core.upload.s3.S3.remove_files")
+    upload_files_mock = mocker.patch("ahriman.core.upload.s3.S3.upload_files")
+
+    s3.sync(Path("root"), [])
+    local_files_mock.assert_called_once()
+    remote_objects_mock.assert_called_once()
+    remove_files_mock.assert_called_once()
+    upload_files_mock.assert_called_once()
+
+
+def test_upload_files(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None:
+    """
+    must upload changed files
+    """
     def mimetype(path: Path) -> Tuple[Optional[str], None]:
         return ("text/html", None) if path.name == "b" else (None, None)
@@ -85,14 +114,9 @@ def test_sync(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None:
     remote_objects = {Path(item.key): item for item in s3_remote_objects}
 
     mocker.patch("mimetypes.guess_type", side_effect=mimetype)
-    local_files_mock = mocker.patch("ahriman.core.upload.s3.S3.get_local_files", return_value=local_files)
-    remote_objects_mock = mocker.patch("ahriman.core.upload.s3.S3.get_remote_objects", return_value=remote_objects)
     upload_mock = s3.bucket = MagicMock()
 
-    s3.sync(root, [])
-
-    local_files_mock.assert_called_once()
-    remote_objects_mock.assert_called_once()
+    s3.upload_files(root, local_files, remote_objects)
     upload_mock.upload_file.assert_has_calls(
         [
             mock.call(
@@ -105,4 +129,3 @@ def test_sync(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None:
                 ExtraArgs=None),
         ],
         any_order=True)
-    remote_objects[Path("x86_64/a")].delete.assert_called_once()
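
The hunks above cut upload_files off right after the quoted-ETag comment. Judging by that comment, the elided comparison presumably strips the surrounding double quotes from the remote e_tag before matching it against the locally calculated checksum, along these lines (a reconstruction, not part of this diff):

```python
for local_file, checksum in local_files.items():
    remote_object = remote_objects.get(local_file)
    # S3 wraps the ETag in double quotes, i.e. its 0 and -1 characters are ";
    # strip them before comparing against the local checksum
    remote_checksum = remote_object.e_tag[1:-1] if remote_object is not None else None
    if remote_checksum == checksum:
        continue  # unchanged file, nothing to upload
    ...  # otherwise fall through to bucket.upload_file as shown above
```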