mirror of https://github.com/arcan1s/ahriman.git
fix paths reading
parent 47a56fdc1c
commit b620de8bb2
@@ -91,7 +91,7 @@ class S3(Upload):
                 if element.is_dir():
                     yield from walk(element)
                     continue
-                yield element.resolve()
+                yield element
         return {
             local_file.relative_to(path): self.calculate_etag(local_file, self.chunk_size)
             for local_file in walk(path)
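A plausible reading of the one-line change above: get_local_files later calls relative_to(path) on every yielded element (visible in the same hunk), and Path.resolve() turns the element into an absolute path that no longer matches a relative sync root. A minimal sketch of that failure mode, with made-up directory names that are not from the repository:

# Hypothetical illustration only: why yielding element.resolve() can break the
# later relative_to(path) call when the sync root is given as a relative path.
from pathlib import Path

path = Path("repository")                        # made-up relative sync root
element = path / "x86_64" / "package.pkg.tar.zst"

print(element.relative_to(path))                 # x86_64/package.pkg.tar.zst
try:
    element.resolve().relative_to(path)          # absolute vs relative path
except ValueError as error:
    print(error)                                 # relative_to() refuses the mix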
@@ -102,8 +102,8 @@ class S3(Upload):
         get all remote objects and their checksums
         :return: map of path object to the remote s3 object
         """
-        objects = self.bucket.objects.all()
-        return {Path(item.key): item for item in objects}
+        objects = self.bucket.objects.filter(Prefix=self.architecture)
+        return {Path(item.key).relative_to(self.architecture): item for item in objects}

     def sync(self, path: Path, built_packages: Iterable[Package]) -> None:
         """
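For context, a hedged sketch of what the new key handling does, using invented object keys rather than real bucket contents: filtering by the architecture prefix and then stripping it keeps the remote map keyed by the same relative paths that get_local_files produces.

# Made-up keys; only the Path handling mirrors the diff above.
from pathlib import Path

architecture = "x86_64"
remote_keys = ["x86_64/ahriman-1.6.0-1-any.pkg.tar.zst", "x86_64/aur.db.tar.gz"]

# bucket.objects.filter(Prefix=architecture) would return only keys under the
# architecture directory; relative_to() then strips that directory so the map
# lines up with the local file map.
remote = {Path(key).relative_to(architecture): key for key in remote_keys}
print(remote)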
@@ -121,7 +121,8 @@ class S3(Upload):
             remote_checksum = remote_object.e_tag[1:-1] if remote_object is not None else None
             if remote_checksum == checksum:
                 continue
-            self.bucket.upload_file(str(path / local_file), str(local_file))
+            remote_path = Path(self.architecture) / local_file
+            self.bucket.upload_file(str(path / local_file), str(remote_path))

         # remove files which were removed locally
         for local_file, remote_object in remote_objects.items():
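A small hedged illustration of the comparison in this hunk: S3 reports ETags wrapped in double quotes, hence the [1:-1] slice before comparing with the locally calculated checksum, and files that do need uploading now go under an architecture-prefixed key. All values below are invented.

# Invented values; only the slicing and the key construction follow the diff.
from pathlib import Path

architecture = "x86_64"
local_file = Path("package.pkg.tar.zst")
checksum = "d41d8cd98f00b204e9800998ecf8427e"          # what calculate_etag would return
remote_e_tag = "\"d41d8cd98f00b204e9800998ecf8427e\""  # e_tag as exposed by boto3

if remote_e_tag[1:-1] == checksum:
    print("unchanged, skip upload")
else:
    remote_path = Path(architecture) / local_file      # x86_64/package.pkg.tar.zst
    print(f"upload as {remote_path}")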
@@ -19,4 +19,4 @@ def s3(configuration: Configuration) -> S3:
 @pytest.fixture
 def s3_remote_objects() -> List[_s3_object]:
     delete_mock = MagicMock()
-    return list(map(lambda item: _s3_object(item, f"\"{item}\"", delete_mock), ["a", "b", "c"]))
+    return list(map(lambda item: _s3_object(f"x86_64/{item}", f"\"{item}\"", delete_mock), ["a", "b", "c"]))
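_s3_object itself is not part of this diff; judging by the attributes the tests touch (key, e_tag, delete), a namedtuple stand-in along these lines would behave equivalently. This is an assumption for illustration, not the repository's actual definition.

# Assumed stand-in for _s3_object; the real helper in the test suite may differ.
from collections import namedtuple
from unittest.mock import MagicMock

_s3_object = namedtuple("_s3_object", ["key", "e_tag", "delete"])

delete_mock = MagicMock()
objects = [_s3_object(f"x86_64/{item}", f"\"{item}\"", delete_mock) for item in ["a", "b", "c"]]
print(objects[0].key, objects[0].e_tag)  # x86_64/a "a"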
@@ -62,10 +62,10 @@ def test_get_remote_objects(s3: S3, s3_remote_objects: List[Any]) -> None:
     """
     must generate list of remote objects by calling boto3 function
     """
-    expected = {Path(item.key): item for item in s3_remote_objects}
+    expected = {Path(item.key).relative_to(s3.architecture): item for item in s3_remote_objects}

     s3.bucket = MagicMock()
-    s3.bucket.objects.all.return_value = s3_remote_objects
+    s3.bucket.objects.filter.return_value = s3_remote_objects

     assert s3.get_remote_objects() == expected
@@ -75,8 +75,13 @@ def test_sync(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None:
     must run sync command
     """
     root = Path("path")
-    local_files = {Path(item.key.replace("a", "d")): item.key.replace("b", "d") for item in s3_remote_objects}
+    local_files = {
+        Path(item.key.replace("a", "d")): item.e_tag.replace("b", "d").replace("\"", "")
+        for item in s3_remote_objects
+    }
     remote_objects = {Path(item.key): item for item in s3_remote_objects}
+    print(local_files)
+    print(remote_objects)

     local_files_mock = mocker.patch("ahriman.core.upload.s3.S3.get_local_files", return_value=local_files)
     remote_objects_mock = mocker.patch("ahriman.core.upload.s3.S3.get_remote_objects", return_value=remote_objects)
@@ -87,7 +92,7 @@ def test_sync(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None:
     local_files_mock.assert_called_once()
     remote_objects_mock.assert_called_once()
     upload_mock.upload_file.assert_has_calls([
-        mock.call(str(root / Path("b")), str(Path("b"))),
-        mock.call(str(root / Path("d")), str(Path("d"))),
+        mock.call(str(root / s3.architecture / "b"), f"{s3.architecture}/{s3.architecture}/b"),
+        mock.call(str(root / s3.architecture / "d"), f"{s3.architecture}/{s3.architecture}/d"),
     ], any_order=True)
-    remote_objects[Path("a")].delete.assert_called_once()
+    remote_objects[Path("x86_64/a")].delete.assert_called_once()
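The doubled x86_64/x86_64/... keys in the expected calls are not a typo: the fixture keys already contain the architecture directory, and sync() prepends the architecture once more when it builds the remote key. A quick sketch reproducing the expected key from the assertion above:

# Reproduces the asserted remote key using the fixture's own key value.
from pathlib import Path

architecture = "x86_64"
local_file = Path("x86_64/b")                  # key as emitted by s3_remote_objects
remote_path = Path(architecture) / local_file
print(remote_path)                             # x86_64/x86_64/b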