Compare commits

...

14 Commits
1.2.0 ... 1.2.6

Author SHA1 Message Date
9d2a3bcbc1 Release 1.2.6 2021-08-21 16:19:21 +03:00
a5455b697d Release 1.2.5 2021-08-19 02:36:05 +03:00
0bfb763b2a disable manpages building for now since it requires installed distribution 2021-08-19 02:35:48 +03:00
9f3566a150 Release 1.2.4 2021-08-19 00:45:58 +03:00
16a6c4fdd7 include setup.cfg to tarball 2021-08-19 00:45:26 +03:00
91f66fdcee Release 1.2.3 2021-08-19 00:18:12 +03:00
bb45b1d868 split S3.sync to different methods 2021-08-18 23:59:18 +03:00
3d10fa472b guess mime type for local files 2021-08-18 05:04:26 +03:00
a90c93bbc4 add manpage generator 2021-08-17 04:05:18 +03:00
41a3c08d9f Release 1.2.2 2021-08-17 01:03:04 +03:00
cb328ad797 fix typo in log naming 2021-08-17 01:02:42 +03:00
810091cde9 Release 1.2.1 2021-08-17 00:52:09 +03:00
fc0474fa8f logging rethink
* well lets replace f-strings by %s as it is originally recommended
* use syslog handler by default
2021-08-17 00:23:34 +03:00
b94179e071 use asyncmock from unittest library 2021-08-11 21:09:10 +03:00
28 changed files with 176 additions and 111 deletions

View File

@ -18,7 +18,7 @@ jobs:
docker run \ docker run \
-v ${{ github.workspace }}:/build -w /build \ -v ${{ github.workspace }}:/build -w /build \
archlinux:latest \ archlinux:latest \
/bin/bash -c "pacman --noconfirm -Syu base-devel python python-pip && \ /bin/bash -c "pacman --noconfirm -Syu base-devel python-argparse-manpage python-pip && \
pip install -e .[web] && \ pip install -e .[web] && \
pip install -e .[check] && \ pip install -e .[check] && \
pip install -e .[s3] && \ pip install -e .[s3] && \

2
.gitignore vendored
View File

@ -94,3 +94,5 @@ ENV/
.venv/ .venv/
*.tar.xz *.tar.xz
man/

View File

@ -149,7 +149,6 @@ disable=print-statement,
too-few-public-methods, too-few-public-methods,
too-many-instance-attributes, too-many-instance-attributes,
broad-except, broad-except,
logging-fstring-interpolation,
too-many-ancestors, too-many-ancestors,
fixme, fixme,
too-many-arguments, too-many-arguments,

View File

@ -3,7 +3,7 @@
PROJECT := ahriman PROJECT := ahriman
FILES := AUTHORS COPYING CONFIGURING.md README.md package src setup.py FILES := AUTHORS COPYING CONFIGURING.md README.md package src setup.cfg setup.py
TARGET_FILES := $(addprefix $(PROJECT)/, $(FILES)) TARGET_FILES := $(addprefix $(PROJECT)/, $(FILES))
IGNORE_FILES := package/archlinux src/.mypy_cache IGNORE_FILES := package/archlinux src/.mypy_cache

View File

@ -1,6 +1,7 @@
# ArcHlinux ReposItory MANager # ArcHlinux ReposItory MANager
[![build status](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml/badge.svg)](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml) [![build status](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml/badge.svg)](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml)
[![CodeFactor](https://www.codefactor.io/repository/github/arcan1s/ahriman/badge)](https://www.codefactor.io/repository/github/arcan1s/ahriman)
Wrapper for managing custom repository inspired by [repo-scripts](https://github.com/arcan1s/repo-scripts). Wrapper for managing custom repository inspired by [repo-scripts](https://github.com/arcan1s/repo-scripts).

View File

@ -1,14 +1,14 @@
# Maintainer: Evgeniy Alekseev # Maintainer: Evgeniy Alekseev
pkgname='ahriman' pkgname='ahriman'
pkgver=1.2.0 pkgver=1.2.6
pkgrel=1 pkgrel=1
pkgdesc="ArcHlinux ReposItory MANager" pkgdesc="ArcHlinux ReposItory MANager"
arch=('any') arch=('any')
url="https://github.com/arcan1s/ahriman" url="https://github.com/arcan1s/ahriman"
license=('GPL3') license=('GPL3')
depends=('devtools' 'git' 'pyalpm' 'python-aur' 'python-srcinfo') depends=('devtools' 'git' 'pyalpm' 'python-aur' 'python-srcinfo')
makedepends=('python-pip') makedepends=('python-argparse-manpage' 'python-pip')
optdepends=('breezy: -bzr packages support' optdepends=('breezy: -bzr packages support'
'darcs: -darcs packages support' 'darcs: -darcs packages support'
'gnupg: package and repository sign' 'gnupg: package and repository sign'
@ -22,9 +22,6 @@ optdepends=('breezy: -bzr packages support'
source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver-src.tar.xz" source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver-src.tar.xz"
'ahriman.sysusers' 'ahriman.sysusers'
'ahriman.tmpfiles') 'ahriman.tmpfiles')
sha512sums=('6ab741bfb42f92ab00d1b6ecfc44426c00e5c433486e014efbdb585715d9a12dbbafc280e5a9f85b941c8681b13a9dad41327a3e3c44a9683ae30c1d6f017f50'
'13718afec2c6786a18f0b223ef8e58dccf0688bca4cdbe203f14071f5031ed20120eb0ce38b52c76cfd6e8b6581a9c9eaa2743eb11abbaca637451a84c33f075'
'55b20f6da3d66e7bbf2add5d95a3b60632df121717d25a993e56e737d14f51fe063eb6f1b38bd81cc32e05db01c0c1d80aaa720c45cde87f238d8b46cdb8cbc4')
backup=('etc/ahriman.ini' backup=('etc/ahriman.ini'
'etc/ahriman.ini.d/logging.ini') 'etc/ahriman.ini.d/logging.ini')
@ -42,3 +39,7 @@ package() {
install -Dm644 "$srcdir/$pkgname.sysusers" "$pkgdir/usr/lib/sysusers.d/$pkgname.conf" install -Dm644 "$srcdir/$pkgname.sysusers" "$pkgdir/usr/lib/sysusers.d/$pkgname.conf"
install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf" install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf"
} }
sha512sums=('6ab741bfb42f92ab00d1b6ecfc44426c00e5c433486e014efbdb585715d9a12dbbafc280e5a9f85b941c8681b13a9dad41327a3e3c44a9683ae30c1d6f017f50'
'13718afec2c6786a18f0b223ef8e58dccf0688bca4cdbe203f14071f5031ed20120eb0ce38b52c76cfd6e8b6581a9c9eaa2743eb11abbaca637451a84c33f075'
'55b20f6da3d66e7bbf2add5d95a3b60632df121717d25a993e56e737d14f51fe063eb6f1b38bd81cc32e05db01c0c1d80aaa720c45cde87f238d8b46cdb8cbc4')

View File

@ -1,2 +1 @@
d /var/lib/ahriman 0775 ahriman log
d /var/log/ahriman 0755 ahriman ahriman d /var/log/ahriman 0755 ahriman ahriman

View File

@ -2,10 +2,10 @@
keys = root,builder,build_details,http keys = root,builder,build_details,http
[handlers] [handlers]
keys = console_handler,build_file_handler,file_handler,http_handler keys = console_handler,build_file_handler,file_handler,http_handler,syslog_handler
[formatters] [formatters]
keys = generic_format keys = generic_format,syslog_format
[handler_console_handler] [handler_console_handler]
class = StreamHandler class = StreamHandler
@ -31,29 +31,39 @@ level = DEBUG
formatter = generic_format formatter = generic_format
args = ("/var/log/ahriman/http.log", "a", 20971520, 20) args = ("/var/log/ahriman/http.log", "a", 20971520, 20)
[handler_syslog_handler]
class = logging.handlers.SysLogHandler
level = DEBUG
formatter = syslog_format
args = ("/dev/log",)
[formatter_generic_format] [formatter_generic_format]
format = [%(levelname)s %(asctime)s] [%(filename)s:%(lineno)d] [%(funcName)s]: %(message)s format = [%(levelname)s %(asctime)s] [%(filename)s:%(lineno)d] [%(funcName)s]: %(message)s
datefmt = datefmt =
[formatter_syslog_format]
format = [%(levelname)s] [%(filename)s:%(lineno)d] [%(funcName)s]: %(message)s
datefmt =
[logger_root] [logger_root]
level = DEBUG level = DEBUG
handlers = file_handler handlers = syslog_handler
qualname = root qualname = root
[logger_builder] [logger_builder]
level = DEBUG level = DEBUG
handlers = file_handler handlers = syslog_handler
qualname = builder qualname = builder
propagate = 0 propagate = 0
[logger_build_details] [logger_build_details]
level = DEBUG level = DEBUG
handlers = build_file_handler handlers = syslog_handler
qualname = build_details qualname = build_details
propagate = 0 propagate = 0
[logger_http] [logger_http]
level = DEBUG level = DEBUG
handlers = http_handler handlers = syslog_handler
qualname = http qualname = http
propagate = 0 propagate = 0

View File

@ -3,3 +3,6 @@ test = pytest
[tool:pytest] [tool:pytest]
addopts = --cov=ahriman --cov-report term-missing:skip-covered --pspec addopts = --cov=ahriman --cov-report term-missing:skip-covered --pspec
[build_manpages]
manpages = man/ahriman.1:module=ahriman.application.ahriman:function=_parser

View File

@ -1,11 +1,14 @@
from distutils.util import convert_path from build_manpages import build_manpages
from pathlib import Path
from setuptools import setup, find_packages from setuptools import setup, find_packages
from os import path from typing import Any, Dict
metadata_path = Path(__file__).resolve().parent / "src/ahriman/version.py"
metadata: Dict[str, Any] = dict()
with metadata_path.open() as metadata_file:
exec(metadata_file.read(), metadata) # pylint: disable=exec-used
here = path.abspath(path.dirname(__file__))
metadata = dict()
with open(convert_path("src/ahriman/version.py")) as metadata_file:
exec(metadata_file.read(), metadata)
setup( setup(
name="ahriman", name="ahriman",
@ -96,4 +99,8 @@ setup(
"aiohttp_jinja2", "aiohttp_jinja2",
], ],
}, },
cmdclass={
"build_manpages": build_manpages.build_manpages,
}
) )

View File

@ -30,8 +30,7 @@ from ahriman.models.sign_settings import SignSettings
# pylint thinks it is bad idea, but get the fuck off # pylint thinks it is bad idea, but get the fuck off
# pylint: disable=protected-access SubParserAction = argparse._SubParsersAction # pylint: disable=protected-access
SubParserAction = argparse._SubParsersAction
def _parser() -> argparse.ArgumentParser: def _parser() -> argparse.ArgumentParser:
@ -50,9 +49,7 @@ def _parser() -> argparse.ArgumentParser:
"--lock", "--lock",
help="lock file", help="lock file",
type=Path, type=Path,
default=Path( default=Path(tempfile.gettempdir()) / "ahriman.lock")
tempfile.gettempdir()) /
"ahriman.lock")
parser.add_argument("--no-log", help="redirect all log messages to stderr", action="store_true") parser.add_argument("--no-log", help="redirect all log messages to stderr", action="store_true")
parser.add_argument("--no-report", help="force disable reporting to web service", action="store_true") parser.add_argument("--no-report", help="force disable reporting to web service", action="store_true")
parser.add_argument("--unsafe", help="allow to run ahriman as non-ahriman user", action="store_true") parser.add_argument("--unsafe", help="allow to run ahriman as non-ahriman user", action="store_true")

View File

@ -185,7 +185,7 @@ class Application:
continue continue
for archive in package.packages.values(): for archive in package.packages.values():
if archive.filepath is None: if archive.filepath is None:
self.logger.warning(f"filepath is empty for {package.base}") self.logger.warning("filepath is empty for %s", package.base)
continue # avoid mypy warning continue # avoid mypy warning
src = self.repository.paths.repository / archive.filepath src = self.repository.paths.repository / archive.filepath
dst = self.repository.paths.packages / archive.filepath dst = self.repository.paths.packages / archive.filepath
@ -224,6 +224,6 @@ class Application:
# process manual packages # process manual packages
tree = Tree.load(updates) tree = Tree.load(updates)
for num, level in enumerate(tree.levels()): for num, level in enumerate(tree.levels()):
self.logger.info(f"processing level #{num} {[package.base for package in level]}") self.logger.info("processing level #%i %s", num, [package.base for package in level])
packages = self.repository.process_build(level) packages = self.repository.process_build(level)
process_update(packages) process_update(packages)

View File

@ -94,8 +94,10 @@ class Lock:
""" """
status = self.reporter.get_internal() status = self.reporter.get_internal()
if status.version is not None and status.version != version.__version__: if status.version is not None and status.version != version.__version__:
logging.getLogger("root").warning(f"status watcher version mismatch, " logging.getLogger("root").warning(
f"our {version.__version__}, their {status.version}") "status watcher version mismatch, our %s, their %s",
version.__version__,
status.version)
def check_user(self) -> None: def check_user(self) -> None:
""" """

View File

@ -99,7 +99,7 @@ class Task:
command.extend(self.archbuild_flags) command.extend(self.archbuild_flags)
command.extend(["--"] + self.makechrootpkg_flags) command.extend(["--"] + self.makechrootpkg_flags)
command.extend(["--"] + self.makepkg_flags) command.extend(["--"] + self.makepkg_flags)
self.logger.info(f"using {command} for {self.package.base}") self.logger.info("using %s for %s", command, self.package.base)
Task._check_output( Task._check_output(
*command, *command,

View File

@ -61,7 +61,7 @@ class Executor(Cleaner):
build_single(single) build_single(single)
except Exception: except Exception:
self.reporter.set_failed(single.base) self.reporter.set_failed(single.base)
self.logger.exception(f"{single.base} ({self.architecture}) build exception") self.logger.exception("%s (%s) build exception", single.base, self.architecture)
self.clear_build() self.clear_build()
return self.packages_built() return self.packages_built()
@ -76,7 +76,7 @@ class Executor(Cleaner):
try: try:
self.repo.remove(package, fn) self.repo.remove(package, fn)
except Exception: except Exception:
self.logger.exception(f"could not remove {package}") self.logger.exception("could not remove %s", package)
requested = set(packages) requested = set(packages)
for local in self.packages(): for local in self.packages():
@ -132,7 +132,7 @@ class Executor(Cleaner):
""" """
def update_single(fn: Optional[str], base: str) -> None: def update_single(fn: Optional[str], base: str) -> None:
if fn is None: if fn is None:
self.logger.warning(f"received empty package name for base {base}") self.logger.warning("received empty package name for base %s", base)
return # suppress type checking, it never can be none actually return # suppress type checking, it never can be none actually
# in theory it might be NOT packages directory, but we suppose it is # in theory it might be NOT packages directory, but we suppose it is
full_path = self.paths.packages / fn full_path = self.paths.packages / fn
@ -150,7 +150,7 @@ class Executor(Cleaner):
local = Package.load(filename, self.pacman, self.aur_url) local = Package.load(filename, self.pacman, self.aur_url)
updates.setdefault(local.base, local).packages.update(local.packages) updates.setdefault(local.base, local).packages.update(local.packages)
except Exception: except Exception:
self.logger.exception(f"could not load package from {filename}") self.logger.exception("could not load package from %s", filename)
for local in updates.values(): for local in updates.values():
try: try:
@ -159,7 +159,7 @@ class Executor(Cleaner):
self.reporter.set_success(local) self.reporter.set_success(local)
except Exception: except Exception:
self.reporter.set_failed(local.base) self.reporter.set_failed(local.base)
self.logger.exception(f"could not process {local.base}") self.logger.exception("could not process %s", local.base)
self.clear_packages() self.clear_packages()
return self.repo.repo_path return self.repo.repo_path

View File

@ -42,7 +42,7 @@ class Repository(Executor, UpdateHandler):
local = Package.load(full_path, self.pacman, self.aur_url) local = Package.load(full_path, self.pacman, self.aur_url)
result.setdefault(local.base, local).packages.update(local.packages) result.setdefault(local.base, local).packages.update(local.packages)
except Exception: except Exception:
self.logger.exception(f"could not load package from {full_path}") self.logger.exception("could not load package from %s", full_path)
continue continue
return list(result.values()) return list(result.values())

View File

@ -59,7 +59,7 @@ class UpdateHandler(Cleaner):
result.append(remote) result.append(remote)
except Exception: except Exception:
self.reporter.set_failed(local.base) self.reporter.set_failed(local.base)
self.logger.exception(f"could not load remote package {local.base}") self.logger.exception("could not load remote package %s", local.base)
continue continue
return result return result
@ -81,7 +81,7 @@ class UpdateHandler(Cleaner):
else: else:
self.reporter.set_pending(local.base) self.reporter.set_pending(local.base)
except Exception: except Exception:
self.logger.exception(f"could not add package from {fn}") self.logger.exception("could not add package from %s", fn)
self.clear_manual() self.clear_manual()
return result return result

View File

@ -104,7 +104,7 @@ class GPG:
}) })
response.raise_for_status() response.raise_for_status()
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
self.logger.exception(f"could not download key {key} from {server}: {exception_response_text(e)}") self.logger.exception("could not download key %s from %s: %s", key, server, exception_response_text(e))
raise raise
return response.text return response.text
@ -142,7 +142,7 @@ class GPG:
return [path] return [path]
key = self.configuration.get("sign", f"key_{base}", fallback=self.default_key) key = self.configuration.get("sign", f"key_{base}", fallback=self.default_key)
if key is None: if key is None:
self.logger.error(f"no default key set, skip package {path} sign") self.logger.error("no default key set, skip package %s sign", path)
return [path] return [path]
return self.process(path, key) return self.process(path, key)

View File

@ -53,8 +53,7 @@ class Client:
:param status: current package build status :param status: current package build status
""" """
# pylint: disable=no-self-use def get(self, base: Optional[str]) -> List[Tuple[Package, BuildStatus]]: # pylint: disable=no-self-use
def get(self, base: Optional[str]) -> List[Tuple[Package, BuildStatus]]:
""" """
get package status get package status
:param base: package base to get :param base: package base to get
@ -63,16 +62,14 @@ class Client:
del base del base
return [] return []
# pylint: disable=no-self-use def get_internal(self) -> InternalStatus: # pylint: disable=no-self-use
def get_internal(self) -> InternalStatus:
""" """
get internal service status get internal service status
:return: current internal (web) service status :return: current internal (web) service status
""" """
return InternalStatus() return InternalStatus()
# pylint: disable=no-self-use def get_self(self) -> BuildStatus: # pylint: disable=no-self-use
def get_self(self) -> BuildStatus:
""" """
get ahriman status itself get ahriman status itself
:return: current ahriman status :return: current ahriman status

View File

@ -90,7 +90,7 @@ class Watcher:
try: try:
parse_single(item) parse_single(item)
except Exception: except Exception:
self.logger.exception(f"cannot parse item f{item} to package") self.logger.exception("cannot parse item %s to package", item)
def _cache_save(self) -> None: def _cache_save(self) -> None:
""" """

View File

@ -84,9 +84,9 @@ class WebClient(Client):
response = requests.post(self._package_url(package.base), json=payload) response = requests.post(self._package_url(package.base), json=payload)
response.raise_for_status() response.raise_for_status()
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
self.logger.exception(f"could not add {package.base}: {exception_response_text(e)}") self.logger.exception("could not add %s: %s", package.base, exception_response_text(e))
except Exception: except Exception:
self.logger.exception(f"could not add {package.base}") self.logger.exception("could not add %s", package.base)
def get(self, base: Optional[str]) -> List[Tuple[Package, BuildStatus]]: def get(self, base: Optional[str]) -> List[Tuple[Package, BuildStatus]]:
""" """
@ -104,9 +104,9 @@ class WebClient(Client):
for package in status_json for package in status_json
] ]
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
self.logger.exception(f"could not get {base}: {exception_response_text(e)}") self.logger.exception("could not get %s: %s", base, exception_response_text(e))
except Exception: except Exception:
self.logger.exception(f"could not get {base}") self.logger.exception("could not get %s", base)
return [] return []
def get_internal(self) -> InternalStatus: def get_internal(self) -> InternalStatus:
@ -121,7 +121,7 @@ class WebClient(Client):
status_json = response.json() status_json = response.json()
return InternalStatus.from_json(status_json) return InternalStatus.from_json(status_json)
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
self.logger.exception(f"could not get web service status: {exception_response_text(e)}") self.logger.exception("could not get web service status: %s", exception_response_text(e))
except Exception: except Exception:
self.logger.exception("could not get web service status") self.logger.exception("could not get web service status")
return InternalStatus() return InternalStatus()
@ -138,7 +138,7 @@ class WebClient(Client):
status_json = response.json() status_json = response.json()
return BuildStatus.from_json(status_json) return BuildStatus.from_json(status_json)
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
self.logger.exception(f"could not get service status: {exception_response_text(e)}") self.logger.exception("could not get service status: %s", exception_response_text(e))
except Exception: except Exception:
self.logger.exception("could not get service status") self.logger.exception("could not get service status")
return BuildStatus() return BuildStatus()
@ -152,9 +152,9 @@ class WebClient(Client):
response = requests.delete(self._package_url(base)) response = requests.delete(self._package_url(base))
response.raise_for_status() response.raise_for_status()
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
self.logger.exception(f"could not delete {base}: {exception_response_text(e)}") self.logger.exception("could not delete %s: %s", base, exception_response_text(e))
except Exception: except Exception:
self.logger.exception(f"could not delete {base}") self.logger.exception("could not delete %s", base)
def update(self, base: str, status: BuildStatusEnum) -> None: def update(self, base: str, status: BuildStatusEnum) -> None:
""" """
@ -168,9 +168,9 @@ class WebClient(Client):
response = requests.post(self._package_url(base), json=payload) response = requests.post(self._package_url(base), json=payload)
response.raise_for_status() response.raise_for_status()
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
self.logger.exception(f"could not update {base}: {exception_response_text(e)}") self.logger.exception("could not update %s: %s", base, exception_response_text(e))
except Exception: except Exception:
self.logger.exception(f"could not update {base}") self.logger.exception("could not update %s", base)
def update_self(self, status: BuildStatusEnum) -> None: def update_self(self, status: BuildStatusEnum) -> None:
""" """
@ -183,6 +183,6 @@ class WebClient(Client):
response = requests.post(self._ahriman_url(), json=payload) response = requests.post(self._ahriman_url(), json=payload)
response.raise_for_status() response.raise_for_status()
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
self.logger.exception(f"could not update service status: {exception_response_text(e)}") self.logger.exception("could not update service status: %s", exception_response_text(e))
except Exception: except Exception:
self.logger.exception("could not update service status") self.logger.exception("could not update service status")

View File

@ -19,6 +19,7 @@
# #
import boto3 # type: ignore import boto3 # type: ignore
import hashlib import hashlib
import mimetypes
from pathlib import Path from pathlib import Path
from typing import Any, Dict, Generator, Iterable from typing import Any, Dict, Generator, Iterable
@ -80,6 +81,18 @@ class S3(Upload):
aws_secret_access_key=configuration.get("s3", "secret_key")) aws_secret_access_key=configuration.get("s3", "secret_key"))
return client.Bucket(configuration.get("s3", "bucket")) return client.Bucket(configuration.get("s3", "bucket"))
@staticmethod
def remove_files(local_files: Dict[Path, str], remote_objects: Dict[Path, Any]) -> None:
"""
remove files which have been removed locally
:param local_files: map of local path object to its checksum
:param remote_objects: map of remote path object to the remote s3 object
"""
for local_file, remote_object in remote_objects.items():
if local_file in local_files:
continue
remote_object.delete()
def get_local_files(self, path: Path) -> Dict[Path, str]: def get_local_files(self, path: Path) -> Dict[Path, str]:
""" """
get all local files and their calculated checksums get all local files and their calculated checksums
@ -115,18 +128,26 @@ class S3(Upload):
remote_objects = self.get_remote_objects() remote_objects = self.get_remote_objects()
local_files = self.get_local_files(path) local_files = self.get_local_files(path)
# sync to remotes first self.upload_files(path, local_files, remote_objects)
self.remove_files(local_files, remote_objects)
def upload_files(self, path: Path, local_files: Dict[Path, str], remote_objects: Dict[Path, Any]) -> None:
"""
upload changed files to s3
:param path: local path to sync
:param local_files: map of local path object to its checksum
:param remote_objects: map of remote path object to the remote s3 object
"""
for local_file, checksum in local_files.items(): for local_file, checksum in local_files.items():
remote_object = remote_objects.get(local_file) remote_object = remote_objects.get(local_file)
# 0 and -1 elements are " (double quote) # 0 and -1 elements are " (double quote)
remote_checksum = remote_object.e_tag[1:-1] if remote_object is not None else None remote_checksum = remote_object.e_tag[1:-1] if remote_object is not None else None
if remote_checksum == checksum: if remote_checksum == checksum:
continue continue
remote_path = Path(self.architecture) / local_file
self.bucket.upload_file(str(path / local_file), str(remote_path))
# remove files which were removed locally local_path = path / local_file
for local_file, remote_object in remote_objects.items(): remote_path = Path(self.architecture) / local_file
if local_file in local_files: (mime, _) = mimetypes.guess_type(local_path)
continue extra_args = {"ContentType": mime} if mime is not None else None
remote_object.delete()
self.bucket.upload_file(Filename=str(local_path), Key=str(remote_path), ExtraArgs=extra_args)

View File

@ -17,4 +17,4 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
__version__ = "1.2.0" __version__ = "1.2.6"

View File

@ -40,7 +40,7 @@ def exception_handler(logger: Logger) -> Callable[[Request, HandlerType], Awaita
except HTTPClientError: except HTTPClientError:
raise raise
except Exception: except Exception:
logger.exception(f"exception during performing request to {request.path}") logger.exception("exception during performing request to %s", request.path)
raise raise
return handle return handle

View File

@ -42,22 +42,6 @@ def anyvar(cls: Type[T], strict: bool = False) -> T:
return AnyVar() return AnyVar()
@pytest.helpers.register
class AsyncMock(MagicMock):
"""
async magic mock object
"""
async def __call__(self, *args: Any, **kwargs: Any) -> Any:
"""
async call function
:param args:
:param kwargs:
:return:
"""
return MagicMock.__call__(self, *args, **kwargs)
# generic fixtures # generic fixtures
@pytest.fixture @pytest.fixture
def configuration(resource_path_root: Path) -> Configuration: def configuration(resource_path_root: Path) -> Configuration:

View File

@ -1,6 +1,6 @@
from pathlib import Path from pathlib import Path
from pytest_mock import MockerFixture from pytest_mock import MockerFixture
from typing import Any, List from typing import Any, List, Optional, Tuple
from unittest import mock from unittest import mock
from unittest.mock import MagicMock from unittest.mock import MagicMock
@ -34,6 +34,19 @@ def test_calculate_etag_small(resource_path_root: Path) -> None:
assert S3.calculate_etag(path, _chunk_size) == "04e75b4aa0fe6033e711e8ea98e059b2" assert S3.calculate_etag(path, _chunk_size) == "04e75b4aa0fe6033e711e8ea98e059b2"
def test_remove_files(s3_remote_objects: List[Any]) -> None:
"""
must remove remote objects
"""
local_files = {
Path(item.key): item.e_tag for item in s3_remote_objects if item.key != "x86_64/a"
}
remote_objects = {Path(item.key): item for item in s3_remote_objects}
S3.remove_files(local_files, remote_objects)
remote_objects[Path("x86_64/a")].delete.assert_called_once()
def test_get_local_files(s3: S3, resource_path_root: Path) -> None: def test_get_local_files(s3: S3, resource_path_root: Path) -> None:
""" """
must get all local files recursively must get all local files recursively
@ -70,29 +83,49 @@ def test_get_remote_objects(s3: S3, s3_remote_objects: List[Any]) -> None:
assert s3.get_remote_objects() == expected assert s3.get_remote_objects() == expected
def test_sync(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None: def test_sync(s3: S3, mocker: MockerFixture) -> None:
""" """
must run sync command must run sync command
""" """
local_files_mock = mocker.patch("ahriman.core.upload.s3.S3.get_local_files")
remote_objects_mock = mocker.patch("ahriman.core.upload.s3.S3.get_remote_objects")
remove_files_mock = mocker.patch("ahriman.core.upload.s3.S3.remove_files")
upload_files_mock = mocker.patch("ahriman.core.upload.s3.S3.upload_files")
s3.sync(Path("root"), [])
local_files_mock.assert_called_once()
remote_objects_mock.assert_called_once()
remove_files_mock.assert_called_once()
upload_files_mock.assert_called_once()
def test_upload_files(s3: S3, s3_remote_objects: List[Any], mocker: MockerFixture) -> None:
"""
must upload changed files
"""
def mimetype(path: Path) -> Tuple[Optional[str], None]:
return ("text/html", None) if path.name == "b" else (None, None)
root = Path("path") root = Path("path")
local_files = { local_files = {
Path(item.key.replace("a", "d")): item.e_tag.replace("b", "d").replace("\"", "") Path(item.key.replace("a", "d")): item.e_tag.replace("b", "d").replace("\"", "")
for item in s3_remote_objects for item in s3_remote_objects
} }
remote_objects = {Path(item.key): item for item in s3_remote_objects} remote_objects = {Path(item.key): item for item in s3_remote_objects}
print(local_files)
print(remote_objects)
local_files_mock = mocker.patch("ahriman.core.upload.s3.S3.get_local_files", return_value=local_files) mocker.patch("mimetypes.guess_type", side_effect=mimetype)
remote_objects_mock = mocker.patch("ahriman.core.upload.s3.S3.get_remote_objects", return_value=remote_objects)
upload_mock = s3.bucket = MagicMock() upload_mock = s3.bucket = MagicMock()
s3.sync(root, []) s3.upload_files(root, local_files, remote_objects)
upload_mock.upload_file.assert_has_calls(
local_files_mock.assert_called_once() [
remote_objects_mock.assert_called_once() mock.call(
upload_mock.upload_file.assert_has_calls([ Filename=str(root / s3.architecture / "b"),
mock.call(str(root / s3.architecture / "b"), f"{s3.architecture}/{s3.architecture}/b"), Key=f"{s3.architecture}/{s3.architecture}/b",
mock.call(str(root / s3.architecture / "d"), f"{s3.architecture}/{s3.architecture}/d"), ExtraArgs={"ContentType": "text/html"}),
], any_order=True) mock.call(
remote_objects[Path("x86_64/a")].delete.assert_called_once() Filename=str(root / s3.architecture / "d"),
Key=f"{s3.architecture}/{s3.architecture}/d",
ExtraArgs=None),
],
any_order=True)

View File

@ -4,6 +4,7 @@ import pytest
from aiohttp.web_exceptions import HTTPBadRequest from aiohttp.web_exceptions import HTTPBadRequest
from pytest_mock import MockerFixture from pytest_mock import MockerFixture
from typing import Any from typing import Any
from unittest.mock import AsyncMock
from ahriman.web.middlewares.exception_handler import exception_handler from ahriman.web.middlewares.exception_handler import exception_handler
@ -12,7 +13,7 @@ async def test_exception_handler(aiohttp_request: Any, mocker: MockerFixture) ->
""" """
must pass success response must pass success response
""" """
request_handler = pytest.helpers.AsyncMock() request_handler = AsyncMock()
logging_mock = mocker.patch("logging.Logger.exception") logging_mock = mocker.patch("logging.Logger.exception")
handler = exception_handler(logging.getLogger()) handler = exception_handler(logging.getLogger())
@ -24,8 +25,7 @@ async def test_exception_handler_client_error(aiohttp_request: Any, mocker: Mock
""" """
must pass client exception must pass client exception
""" """
request_handler = pytest.helpers.AsyncMock() request_handler = AsyncMock(side_effect=HTTPBadRequest())
request_handler.side_effect = HTTPBadRequest()
logging_mock = mocker.patch("logging.Logger.exception") logging_mock = mocker.patch("logging.Logger.exception")
handler = exception_handler(logging.getLogger()) handler = exception_handler(logging.getLogger())
@ -38,8 +38,7 @@ async def test_exception_handler_server_error(aiohttp_request: Any, mocker: Mock
""" """
must log server exception and re-raise it must log server exception and re-raise it
""" """
request_handler = pytest.helpers.AsyncMock() request_handler = AsyncMock(side_effect=Exception())
request_handler.side_effect = Exception()
logging_mock = mocker.patch("logging.Logger.exception") logging_mock = mocker.patch("logging.Logger.exception")
handler = exception_handler(logging.getLogger()) handler = exception_handler(logging.getLogger())

View File

@ -2,10 +2,10 @@
keys = root,builder,build_details,http keys = root,builder,build_details,http
[handlers] [handlers]
keys = console_handler,build_file_handler,file_handler,http_handler keys = console_handler,build_file_handler,file_handler,http_handler,syslog_handler
[formatters] [formatters]
keys = generic_format keys = generic_format,syslog_format
[handler_console_handler] [handler_console_handler]
class = StreamHandler class = StreamHandler
@ -31,29 +31,39 @@ level = DEBUG
formatter = generic_format formatter = generic_format
args = ("/var/log/ahriman/http.log", "a", 20971520, 20) args = ("/var/log/ahriman/http.log", "a", 20971520, 20)
[handler_syslog_handler]
class = logging.handlers.SysLogHandler
level = DEBUG
formatter = syslog_format
args = ("/dev/log",)
[formatter_generic_format] [formatter_generic_format]
format = [%(levelname)s %(asctime)s] [%(filename)s:%(lineno)d] [%(funcName)s]: %(message)s format = [%(levelname)s %(asctime)s] [%(filename)s:%(lineno)d] [%(funcName)s]: %(message)s
datefmt = datefmt =
[formatter_syslog_format]
format = [%(levelname)s] [%(filename)s:%(lineno)d] [%(funcName)s]: %(message)s
datefmt =
[logger_root] [logger_root]
level = DEBUG level = DEBUG
handlers = file_handler handlers = syslog_handler
qualname = root qualname = root
[logger_builder] [logger_builder]
level = DEBUG level = DEBUG
handlers = file_handler handlers = syslog_handler
qualname = builder qualname = builder
propagate = 0 propagate = 0
[logger_build_details] [logger_build_details]
level = DEBUG level = DEBUG
handlers = build_file_handler handlers = syslog_handler
qualname = build_details qualname = build_details
propagate = 0 propagate = 0
[logger_http] [logger_http]
level = DEBUG level = DEBUG
handlers = http_handler handlers = syslog_handler
qualname = http qualname = http
propagate = 0 propagate = 0