Compare commits

..

4 Commits
1.2.2 ... 1.2.3

Author SHA1 Message Date
91f66fdcee Release 1.2.3 2021-08-19 00:18:12 +03:00
bb45b1d868 split S3.sync to different methods 2021-08-18 23:59:18 +03:00
3d10fa472b guess mime type for local files 2021-08-18 05:04:26 +03:00
a90c93bbc4 add manpage generator 2021-08-17 04:05:18 +03:00
13 changed files with 115 additions and 49 deletions

View File

@@ -18,7 +18,7 @@ jobs:
docker run \ docker run \
-v ${{ github.workspace }}:/build -w /build \ -v ${{ github.workspace }}:/build -w /build \
archlinux:latest \ archlinux:latest \
/bin/bash -c "pacman --noconfirm -Syu base-devel python python-pip && \ /bin/bash -c "pacman --noconfirm -Syu base-devel python-argparse-manpage python-pip && \
pip install -e .[web] && \ pip install -e .[web] && \
pip install -e .[check] && \ pip install -e .[check] && \
pip install -e .[s3] && \ pip install -e .[s3] && \

2
.gitignore vendored
View File

@@ -94,3 +94,5 @@ ENV/
.venv/ .venv/
*.tar.xz *.tar.xz
man/

View File

@@ -1,6 +1,7 @@
# ArcHlinux ReposItory MANager # ArcHlinux ReposItory MANager
[![build status](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml/badge.svg)](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml) [![build status](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml/badge.svg)](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml)
[![CodeFactor](https://www.codefactor.io/repository/github/arcan1s/ahriman/badge)](https://www.codefactor.io/repository/github/arcan1s/ahriman)
Wrapper for managing custom repository inspired by [repo-scripts](https://github.com/arcan1s/repo-scripts). Wrapper for managing custom repository inspired by [repo-scripts](https://github.com/arcan1s/repo-scripts).

View File

@@ -1,14 +1,14 @@
# Maintainer: Evgeniy Alekseev # Maintainer: Evgeniy Alekseev
pkgname='ahriman' pkgname='ahriman'
pkgver=1.2.2 pkgver=1.2.3
pkgrel=1 pkgrel=1
pkgdesc="ArcHlinux ReposItory MANager" pkgdesc="ArcHlinux ReposItory MANager"
arch=('any') arch=('any')
url="https://github.com/arcan1s/ahriman" url="https://github.com/arcan1s/ahriman"
license=('GPL3') license=('GPL3')
depends=('devtools' 'git' 'pyalpm' 'python-aur' 'python-srcinfo') depends=('devtools' 'git' 'pyalpm' 'python-aur' 'python-srcinfo')
makedepends=('python-pip') makedepends=('python-argparse-manpage' 'python-pip')
optdepends=('breezy: -bzr packages support' optdepends=('breezy: -bzr packages support'
'darcs: -darcs packages support' 'darcs: -darcs packages support'
'gnupg: package and repository sign' 'gnupg: package and repository sign'
@@ -22,9 +22,6 @@ optdepends=('breezy: -bzr packages support'
source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver-src.tar.xz" source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver-src.tar.xz"
'ahriman.sysusers' 'ahriman.sysusers'
'ahriman.tmpfiles') 'ahriman.tmpfiles')
sha512sums=('6ab741bfb42f92ab00d1b6ecfc44426c00e5c433486e014efbdb585715d9a12dbbafc280e5a9f85b941c8681b13a9dad41327a3e3c44a9683ae30c1d6f017f50'
'13718afec2c6786a18f0b223ef8e58dccf0688bca4cdbe203f14071f5031ed20120eb0ce38b52c76cfd6e8b6581a9c9eaa2743eb11abbaca637451a84c33f075'
'55b20f6da3d66e7bbf2add5d95a3b60632df121717d25a993e56e737d14f51fe063eb6f1b38bd81cc32e05db01c0c1d80aaa720c45cde87f238d8b46cdb8cbc4')
backup=('etc/ahriman.ini' backup=('etc/ahriman.ini'
'etc/ahriman.ini.d/logging.ini') 'etc/ahriman.ini.d/logging.ini')
@@ -42,3 +39,7 @@ package() {
install -Dm644 "$srcdir/$pkgname.sysusers" "$pkgdir/usr/lib/sysusers.d/$pkgname.conf" install -Dm644 "$srcdir/$pkgname.sysusers" "$pkgdir/usr/lib/sysusers.d/$pkgname.conf"
install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf" install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf"
} }
sha512sums=('6ab741bfb42f92ab00d1b6ecfc44426c00e5c433486e014efbdb585715d9a12dbbafc280e5a9f85b941c8681b13a9dad41327a3e3c44a9683ae30c1d6f017f50'
'13718afec2c6786a18f0b223ef8e58dccf0688bca4cdbe203f14071f5031ed20120eb0ce38b52c76cfd6e8b6581a9c9eaa2743eb11abbaca637451a84c33f075'
'55b20f6da3d66e7bbf2add5d95a3b60632df121717d25a993e56e737d14f51fe063eb6f1b38bd81cc32e05db01c0c1d80aaa720c45cde87f238d8b46cdb8cbc4')

View File

@@ -3,3 +3,6 @@ test = pytest
[tool:pytest] [tool:pytest]
addopts = --cov=ahriman --cov-report term-missing:skip-covered --pspec addopts = --cov=ahriman --cov-report term-missing:skip-covered --pspec
[build_manpages]
manpages = man/ahriman.1:module=ahriman.application.ahriman:function=_parser

View File

@@ -1,11 +1,16 @@
from distutils.util import convert_path from build_manpages.build_manpages import build_manpages, get_build_py_cmd, get_install_cmd
from pathlib import Path
from setuptools import setup, find_packages from setuptools import setup, find_packages
from os import path from setuptools.command.build_py import build_py
from setuptools.command.install import install
from typing import Any, Dict
metadata_path = Path(__file__).resolve().parent / "src/ahriman/version.py"
metadata: Dict[str, Any] = dict()
with metadata_path.open() as metadata_file:
exec(metadata_file.read(), metadata) # pylint: disable=exec-used
here = path.abspath(path.dirname(__file__))
metadata = dict()
with open(convert_path("src/ahriman/version.py")) as metadata_file:
exec(metadata_file.read(), metadata)
setup( setup(
name="ahriman", name="ahriman",
@@ -96,4 +101,10 @@ setup(
"aiohttp_jinja2", "aiohttp_jinja2",
], ],
}, },
cmdclass={
"build_manpages": build_manpages,
"build_py": get_build_py_cmd(build_py),
"install": get_install_cmd(install),
}
) )

View File

@@ -30,8 +30,7 @@ from ahriman.models.sign_settings import SignSettings
# pylint thinks it is bad idea, but get the fuck off # pylint thinks it is bad idea, but get the fuck off
# pylint: disable=protected-access SubParserAction = argparse._SubParsersAction # pylint: disable=protected-access
SubParserAction = argparse._SubParsersAction
def _parser() -> argparse.ArgumentParser: def _parser() -> argparse.ArgumentParser:

View File

@@ -53,8 +53,7 @@ class Client:
:param status: current package build status :param status: current package build status
""" """
# pylint: disable=no-self-use def get(self, base: Optional[str]) -> List[Tuple[Package, BuildStatus]]: # pylint: disable=no-self-use
def get(self, base: Optional[str]) -> List[Tuple[Package, BuildStatus]]:
""" """
get package status get package status
:param base: package base to get :param base: package base to get
@@ -63,16 +62,14 @@ class Client:
del base del base
return [] return []
# pylint: disable=no-self-use def get_internal(self) -> InternalStatus: # pylint: disable=no-self-use
def get_internal(self) -> InternalStatus:
""" """
get internal service status get internal service status
:return: current internal (web) service status :return: current internal (web) service status
""" """
return InternalStatus() return InternalStatus()
# pylint: disable=no-self-use def get_self(self) -> BuildStatus: # pylint: disable=no-self-use
def get_self(self) -> BuildStatus:
""" """
get ahriman status itself get ahriman status itself
:return: current ahriman status :return: current ahriman status

View File

@@ -19,6 +19,7 @@
# #
import boto3 # type: ignore import boto3 # type: ignore
import hashlib import hashlib
import mimetypes
from pathlib import Path from pathlib import Path
from typing import Any, Dict, Generator, Iterable from typing import Any, Dict, Generator, Iterable
@@ -80,6 +81,18 @@ class S3(Upload):
aws_secret_access_key=configuration.get("s3", "secret_key")) aws_secret_access_key=configuration.get("s3", "secret_key"))
return client.Bucket(configuration.get("s3", "bucket")) return client.Bucket(configuration.get("s3", "bucket"))
@staticmethod
def remove_files(local_files: Dict[Path, str], remote_objects: Dict[Path, Any]) -> None:
"""
remove files which have been removed locally
:param local_files: map of local path object to its checksum
:param remote_objects: map of remote path object to the remote s3 object
"""
for local_file, remote_object in remote_objects.items():
if local_file in local_files:
continue
remote_object.delete()
def get_local_files(self, path: Path) -> Dict[Path, str]: def get_local_files(self, path: Path) -> Dict[Path, str]:
""" """
get all local files and their calculated checksums get all local files and their calculated checksums
@@ -115,18 +128,26 @@ class S3(Upload):
remote_objects = self.get_remote_objects() remote_objects = self.get_remote_objects()
local_files = self.get_local_files(path) local_files = self.get_local_files(path)
# sync to remotes first self.upload_files(path, local_files, remote_objects)
self.remove_files(local_files, remote_objects)
def upload_files(self, path: Path, local_files: Dict[Path, str], remote_objects: Dict[Path, Any]) -> None:
"""
upload changed files to s3
:param path: local path to sync
:param local_files: map of local path object to its checksum
:param remote_objects: map of remote path object to the remote s3 object
"""
for local_file, checksum in local_files.items(): for local_file, checksum in local_files.items():
remote_object = remote_objects.get(local_file) remote_object = remote_objects.get(local_file)
# 0 and -1 elements are " (double quote) # 0 and -1 elements are " (double quote)
remote_checksum = remote_object.e_tag[1:-1] if remote_object is not None else None remote_checksum = remote_object.e_tag[1:-1] if remote_object is not None else None
if remote_checksum == checksum: if remote_checksum == checksum:
continue continue
remote_path = Path(self.architecture) / local_file
self.bucket.upload_file(str(path / local_file), str(remote_path))
# remove files which were removed locally local_path = path / local_file
for local_file, remote_object in remote_objects.items(): remote_path = Path(self.architecture) / local_file
if local_file in local_files: (mime, _) = mimetypes.guess_type(local_path)
continue extra_args = {"Content-Type": mime} if mime is not None else None
remote_object.delete()
self.bucket.upload_file(Filename=str(local_path), Key=str(remote_path), ExtraArgs=extra_args)

View File

@@ -17,4 +17,4 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
__version__ = "1.2.2" __version__ = "1.2.3"

View File

@@ -40,7 +40,7 @@ def exception_handler(logger: Logger) -> Callable[[Request, HandlerType], Awaita
except HTTPClientError: except HTTPClientError:
raise raise
except Exception: except Exception:
logger.exception(f"exception during performing request to {request.path}") logger.exception("exception during performing request to %s", request.path)
raise raise
return handle return handle

View File

@@ -1,6 +1,6 @@
from pathlib import Path from pathlib import Path
from pytest_mock import MockerFixture from pytest_mock import MockerFixture
from typing import Any, List from typing import Any, List, Optional, Tuple
from unittest import mock from unittest import mock
from unittest.mock import MagicMock from unittest.mock import MagicMock
@@ -34,6 +34,19 @@ def test_calculate_etag_small(resource_path_root: Path) -> None:
assert S3.calculate_etag(path, _chunk_size) == "04e75b4aa0fe6033e711e8ea98e059b2" assert S3.calculate_etag(path, _chunk_size) == "04e75b4aa0fe6033e711e8ea98e059b2"
def test_remove_files(s3_remote_objects: List[Any]) -> None:
"""
must remove remote objects
"""
local_files = {
Path(item.key): item.e_tag for item in s3_remote_objects if item.key != "x86_64/a"
}
remote_objects = {Path(item.key): item for item in s3_remote_objects}
S3.remove_files(local_files, remote_objects)
remote_objects[Path("x86_64/a")].delete.assert_called_once()
def test_get_local_files(s3: S3, resource_path_root: Path) -> None: def test_get_local_files(s3: S3, resource_path_root: Path) -> None:
""" """
must get all local files recursively must get all local files recursively
@@ -70,29 +83,49 @@ def test_get_remote_objects(s3: S3, s3_remote_objects: List[Any]) -> None:
assert s3.get_remote_objects() == expected assert s3.get_remote_objects() == expected
def test_sync(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None: def test_sync(s3: S3, mocker: MockerFixture) -> None:
""" """
must run sync command must run sync command
""" """
local_files_mock = mocker.patch("ahriman.core.upload.s3.S3.get_local_files")
remote_objects_mock = mocker.patch("ahriman.core.upload.s3.S3.get_remote_objects")
remove_files_mock = mocker.patch("ahriman.core.upload.s3.S3.remove_files")
upload_files_mock = mocker.patch("ahriman.core.upload.s3.S3.upload_files")
s3.sync(Path("root"), [])
local_files_mock.assert_called_once()
remote_objects_mock.assert_called_once()
remove_files_mock.assert_called_once()
upload_files_mock.assert_called_once()
def test_upload_files(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None:
"""
must upload changed files
"""
def mimetype(path: Path) -> Tuple[Optional[str], None]:
return ("text/html", None) if path.name == "b" else (None, None)
root = Path("path") root = Path("path")
local_files = { local_files = {
Path(item.key.replace("a", "d")): item.e_tag.replace("b", "d").replace("\"", "") Path(item.key.replace("a", "d")): item.e_tag.replace("b", "d").replace("\"", "")
for item in s3_remote_objects for item in s3_remote_objects
} }
remote_objects = {Path(item.key): item for item in s3_remote_objects} remote_objects = {Path(item.key): item for item in s3_remote_objects}
print(local_files)
print(remote_objects)
local_files_mock = mocker.patch("ahriman.core.upload.s3.S3.get_local_files", return_value=local_files) mocker.patch("mimetypes.guess_type", side_effect=mimetype)
remote_objects_mock = mocker.patch("ahriman.core.upload.s3.S3.get_remote_objects", return_value=remote_objects)
upload_mock = s3.bucket = MagicMock() upload_mock = s3.bucket = MagicMock()
s3.sync(root, []) s3.upload_files(root, local_files, remote_objects)
upload_mock.upload_file.assert_has_calls(
local_files_mock.assert_called_once() [
remote_objects_mock.assert_called_once() mock.call(
upload_mock.upload_file.assert_has_calls([ Filename=str(root / s3.architecture / "b"),
mock.call(str(root / s3.architecture / "b"), f"{s3.architecture}/{s3.architecture}/b"), Key=f"{s3.architecture}/{s3.architecture}/b",
mock.call(str(root / s3.architecture / "d"), f"{s3.architecture}/{s3.architecture}/d"), ExtraArgs={"Content-Type": "text/html"}),
], any_order=True) mock.call(
remote_objects[Path("x86_64/a")].delete.assert_called_once() Filename=str(root / s3.architecture / "d"),
Key=f"{s3.architecture}/{s3.architecture}/d",
ExtraArgs=None),
],
any_order=True)

View File

@@ -25,8 +25,7 @@ async def test_exception_handler_client_error(aiohttp_request: Any, mocker: Mock
""" """
must pass client exception must pass client exception
""" """
request_handler = AsyncMock() request_handler = AsyncMock(side_effect=HTTPBadRequest())
request_handler.side_effect = HTTPBadRequest()
logging_mock = mocker.patch("logging.Logger.exception") logging_mock = mocker.patch("logging.Logger.exception")
handler = exception_handler(logging.getLogger()) handler = exception_handler(logging.getLogger())
@@ -39,8 +38,7 @@ async def test_exception_handler_server_error(aiohttp_request: Any, mocker: Mock
""" """
must log server exception and re-raise it must log server exception and re-raise it
""" """
request_handler = AsyncMock() request_handler = AsyncMock(side_effect=Exception())
request_handler.side_effect = Exception()
logging_mock = mocker.patch("logging.Logger.exception") logging_mock = mocker.patch("logging.Logger.exception")
handler = exception_handler(logging.getLogger()) handler = exception_handler(logging.getLogger())