Compare commits


11 Commits

47 changed files with 1415 additions and 220 deletions

View File

@ -165,6 +165,11 @@ Again, the most checks can be performed by `tox` command, though some additional
# Blank line again and package imports
from ahriman.core.configuration import Configuration
# Multiline import example
from ahriman.core.database.operations import (
AuthOperations,
BuildOperations,
)
```
* One file should define only one class; an exception is made for class satellites, as long as the file remains shorter than 400 lines.

View File

@ -132,6 +132,14 @@ ahriman.core.database.migrations.m015\_logs\_process\_id module
:no-undoc-members:
:show-inheritance:
ahriman.core.database.migrations.m016\_archive module
-----------------------------------------------------
.. automodule:: ahriman.core.database.migrations.m016_archive
:members:
:no-undoc-members:
:show-inheritance:
Module contents
---------------

View File

@ -4,6 +4,14 @@ ahriman.core.housekeeping package
Submodules
----------
ahriman.core.housekeeping.archive\_rotation\_trigger module
-----------------------------------------------------------
.. automodule:: ahriman.core.housekeeping.archive_rotation_trigger
:members:
:no-undoc-members:
:show-inheritance:
ahriman.core.housekeeping.logs\_rotation\_trigger module
--------------------------------------------------------

View File

@ -65,6 +65,8 @@ will try to read value from ``SECRET`` environment variable. In case if the requ
will eventually lead to the ``key`` option in section ``section1`` being set to the value of the ``HOME`` environment variable (if available).
Moreover, configuration can be read directly from environment variables following the same naming convention, e.g. in the example above one can set an environment variable named ``section1:key`` (e.g. ``section1:key=$HOME``) and it will be substituted into the configuration with the highest priority.
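A minimal sketch of this convention in code (the section, key and value below are purely illustrative), using the ``load_environment`` helper introduced in this changeset:

.. code-block:: python

    import os

    from ahriman.core.configuration import Configuration

    # emulate a variable following the ``section:key`` naming convention
    os.environ["section1:key"] = "/home/ahriman"

    configuration = Configuration()
    configuration.load_environment()  # names are split on the last ``:`` into section and key
    print(configuration.get("section1", "key"))  # /home/ahriman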
There is also an additional subcommand which allows validating the configuration and printing found errors. In order to do so, run ``service-config-validate`` subcommand, e.g.:
.. code-block:: shell
@ -95,6 +97,13 @@ libalpm and AUR related configuration. Group name can refer to architecture, e.g
* ``sync_files_database`` - download files database from mirror, boolean, required.
* ``use_ahriman_cache`` - use local pacman package cache instead of system one, boolean, required. With this option enabled you might want to refresh database periodically (available as additional flag for some subcommands). If set to ``no``, databases must be synchronized manually.
``archive`` group
-----------------
Describes settings for packages archives management extensions.
* ``keep_built_packages`` - number of built package versions to keep, integer, required. ``0`` (or a negative number) effectively disables archive removal.
``auth`` group
--------------
@ -137,6 +146,8 @@ Build related configuration. Group name can refer to architecture, e.g. ``build:
Base repository settings.
* ``architecture`` - repository architecture, string. This field is read-only and generated automatically from run options if possible.
* ``name`` - repository name, string. This field is read-only and generated automatically from run options if possible.
* ``root`` - root path for application, string, required.
``sign:*`` groups

View File

@ -44,9 +44,11 @@ triggers[] = ahriman.core.report.ReportTrigger
triggers[] = ahriman.core.upload.UploadTrigger
triggers[] = ahriman.core.gitremote.RemotePushTrigger
triggers[] = ahriman.core.housekeeping.LogsRotationTrigger
triggers[] = ahriman.core.housekeeping.ArchiveRotationTrigger
; List of well-known triggers. Used only for configuration purposes.
triggers_known[] = ahriman.core.gitremote.RemotePullTrigger
triggers_known[] = ahriman.core.gitremote.RemotePushTrigger
triggers_known[] = ahriman.core.housekeeping.ArchiveRotationTrigger
triggers_known[] = ahriman.core.housekeeping.LogsRotationTrigger
triggers_known[] = ahriman.core.report.ReportTrigger
triggers_known[] = ahriman.core.upload.UploadTrigger

View File

@ -1,3 +1,7 @@
[archive]
; Number of last built package versions to keep in archive. 0 means keep all packages
keep_built_packages = 1
[logs-rotation]
; Keep last build logs for each package
keep_last_logs = 5

View File

@ -1,5 +1,6 @@
[build]
; List of well-known triggers. Used only for configuration purposes.
triggers_known[] = ahriman.core.archive.ArchiveTrigger
triggers_known[] = ahriman.core.distributed.WorkerLoaderTrigger
triggers_known[] = ahriman.core.distributed.WorkerTrigger
triggers_known[] = ahriman.core.support.KeyringTrigger

View File

@ -66,7 +66,7 @@ class Status(Handler):
Status.check_status(args.exit_code, packages)
comparator: Callable[[tuple[Package, BuildStatus]], Comparable] = lambda item: item[0].base
filter_fn: Callable[[tuple[Package, BuildStatus]], bool] =\
filter_fn: Callable[[tuple[Package, BuildStatus]], bool] = \
lambda item: args.status is None or item[1].status == args.status
for package, package_status in sorted(filter(filter_fn, packages), key=comparator):
PackagePrinter(package, package_status)(verbose=args.info)

View File

@ -21,6 +21,7 @@ import argparse
from ahriman.application.handlers.handler import Handler, SubParserAction
from ahriman.core.configuration import Configuration
from ahriman.core.utils import walk
from ahriman.models.repository_id import RepositoryId
from ahriman.models.repository_paths import RepositoryPaths
@ -49,6 +50,7 @@ class TreeMigrate(Handler):
target_tree.tree_create()
# perform migration
TreeMigrate.tree_move(current_tree, target_tree)
TreeMigrate.fix_symlinks(target_tree)
@staticmethod
def _set_service_tree_migrate_parser(root: SubParserAction) -> argparse.ArgumentParser:
@ -66,6 +68,22 @@ class TreeMigrate(Handler):
parser.set_defaults(lock=None, quiet=True, report=False)
return parser
@staticmethod
def fix_symlinks(paths: RepositoryPaths) -> None:
"""
fix package archive symlinks
Args:
paths(RepositoryPaths): new repository paths
"""
archives = {path.name: path for path in walk(paths.archive)}
for symlink in walk(paths.repository):
if symlink.exists(): # no need to check for symlinks as we have just walked through the tree
continue
if (source_archive := archives.get(symlink.name)) is not None:
symlink.unlink()
symlink.symlink_to(source_archive.relative_to(symlink.parent, walk_up=True))
@staticmethod
def tree_move(from_tree: RepositoryPaths, to_tree: RepositoryPaths) -> None:
"""

View File

@ -52,7 +52,7 @@ class Validate(Handler):
"""
from ahriman.core.configuration.validator import Validator
schema = Validate.schema(repository_id, configuration)
schema = Validate.schema(configuration)
validator = Validator(configuration=configuration, schema=schema)
if validator.validate(configuration.dump()):
@ -83,12 +83,11 @@ class Validate(Handler):
return parser
@staticmethod
def schema(repository_id: RepositoryId, configuration: Configuration) -> ConfigurationSchema:
def schema(configuration: Configuration) -> ConfigurationSchema:
"""
get schema with triggers
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance
Returns:
@ -107,12 +106,12 @@ class Validate(Handler):
continue
# default settings if any
for schema_name, schema in trigger_class.configuration_schema(repository_id, None).items():
for schema_name, schema in trigger_class.configuration_schema(None).items():
erased = Validate.schema_erase_required(copy.deepcopy(schema))
root[schema_name] = Validate.schema_merge(root.get(schema_name, {}), erased)
# settings according to enabled triggers
for schema_name, schema in trigger_class.configuration_schema(repository_id, configuration).items():
for schema_name, schema in trigger_class.configuration_schema(configuration).items():
root[schema_name] = Validate.schema_merge(root.get(schema_name, {}), copy.deepcopy(schema))
return root

View File

@ -59,7 +59,7 @@ class Repo(LazyLogging):
"""
return self.root / f"{self.name}.db.tar.gz"
def add(self, path: Path, remove: bool = True) -> None:
def add(self, path: Path, *, remove: bool = True) -> None:
"""
add new package to repository
@ -88,22 +88,24 @@ class Repo(LazyLogging):
check_output("repo-add", *self.sign_args, str(self.repo_path),
cwd=self.root, logger=self.logger, user=self.uid)
def remove(self, package: str, filename: Path) -> None:
def remove(self, package_name: str | None, filename: Path) -> None:
"""
remove package from repository
Args:
package(str): package name to remove
package_name(str | None): package name to remove. If not set, it will be guessed from the filename
filename(Path): package filename to remove
"""
package_name = package_name or filename.name.rsplit("-", maxsplit=3)[0]
# remove package and signature (if any) from filesystem
for full_path in self.root.glob(f"**/{filename}*"):
for full_path in self.root.glob(f"**/{filename.name}*"):
full_path.unlink()
# remove package from registry
check_output(
"repo-remove", *self.sign_args, str(self.repo_path), package,
exception=BuildError.from_process(package),
"repo-remove", *self.sign_args, str(self.repo_path), package_name,
exception=BuildError.from_process(package_name),
cwd=self.root,
logger=self.logger,
user=self.uid,
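As a side note, the new fallback relies on the conventional ``name-version-release-architecture`` package file naming; a rough sketch with a hypothetical filename (not taken from the change above):

```python
from pathlib import Path

filename = Path("ahriman-core-2.13.3-1-any.pkg.tar.zst")  # hypothetical example

# rsplit with maxsplit=3 strips the version, release and architecture suffixes
package_name = filename.name.rsplit("-", maxsplit=3)[0]
print(package_name)  # ahriman-core
```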

View File

@ -0,0 +1,20 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.core.archive.archive_trigger import ArchiveTrigger

View File

@ -0,0 +1,127 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import datetime
from pathlib import Path
from ahriman.core.alpm.repo import Repo
from ahriman.core.log import LazyLogging
from ahriman.core.utils import utcnow
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
class ArchiveTree(LazyLogging):
"""
wrapper around archive tree
Attributes:
paths(RepositoryPaths): repository paths instance
repository_id(RepositoryId): repository unique identifier
sign_args(list[str]): additional args which have to be used to sign repository archive
"""
def __init__(self, repository_path: RepositoryPaths, sign_args: list[str]) -> None:
"""
Args:
repository_path(RepositoryPaths): repository paths instance
sign_args(list[str]): additional args which have to be used to sign repository archive
"""
self.paths = repository_path
self.repository_id = repository_path.repository_id
self.sign_args = sign_args
def repository_for(self, date: datetime.date | None = None) -> Path:
"""
get full path to repository at the specified date
Args:
date(datetime.date | None, optional): date to generate the path for. If not supplied, today's date will be used
Returns:
Path: path to the repository root
"""
date = date or utcnow().today()
return (
self.paths.archive
/ "repos"
/ date.strftime("%Y")
/ date.strftime("%m")
/ date.strftime("%d")
/ self.repository_id.name
/ self.repository_id.architecture
)
def symlinks_create(self, packages: list[Package]) -> None:
"""
create symlinks for the specified packages in today's repository
Args:
packages(list[Package]): list of packages to be updated
"""
root = self.repository_for()
repo = Repo(self.repository_id.name, self.paths, self.sign_args, root)
for package in packages:
archive = self.paths.archive_for(package.base)
for package_name, single in package.packages.items():
if single.filename is None:
self.logger.warning("received empty package filename for %s", package_name)
continue
has_file = False
for file in archive.glob(f"{single.filename}*"):
if not (symlink := root / file.name).exists():
has_file |= True
symlink.symlink_to(file.relative_to(symlink.parent, walk_up=True))
if has_file:
repo.add(root / single.filename)
def symlinks_fix(self) -> None:
"""
remove broken symlinks across all repositories
"""
for root, _, files in self.paths.archive.walk():
*_, name, architecture = root.parts
if self.repository_id.name != name or self.repository_id.architecture != architecture:
continue # we only process same name repositories
for file in files:
path = root / file
if not path.is_symlink():
continue # find symlinks only
if path.exists():
continue # filter out not broken symlinks
repo = Repo(self.repository_id.name, self.paths, self.sign_args, root)
repo.remove(None, path)
def tree_create(self) -> None:
"""
create repository tree for current repository
"""
path = self.repository_for()
if path.exists():
return
with self.paths.preserve_owner(self.paths.archive):
path.mkdir(0o755, parents=True)
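For illustration, a short sketch of the date-based layout produced by ``repository_for`` (repository name, architecture and date are assumed here, not part of the change):

```python
import datetime

# given an ArchiveTree instance ``tree`` for repository "aur" and architecture "x86_64"
path = tree.repository_for(datetime.date(2025, 1, 31))
print(path)  # <root>/archive/repos/2025/01/31/aur/x86_64
```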

View File

@ -0,0 +1,71 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.core import context
from ahriman.core.archive.archive_tree import ArchiveTree
from ahriman.core.configuration import Configuration
from ahriman.core.sign.gpg import GPG
from ahriman.core.triggers import Trigger
from ahriman.models.package import Package
from ahriman.models.repository_id import RepositoryId
from ahriman.models.result import Result
class ArchiveTrigger(Trigger):
"""
archive repository extension
Attributes:
paths(RepositoryPaths): repository paths instance
"""
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance
"""
Trigger.__init__(self, repository_id, configuration)
self.paths = configuration.repository_paths
ctx = context.get()
self.tree = ArchiveTree(self.paths, ctx.get(GPG).repository_sign_args)
def on_result(self, result: Result, packages: list[Package]) -> None:
"""
run trigger
Args:
result(Result): build result
packages(list[Package]): list of all available packages
"""
self.tree.symlinks_create(packages)
def on_start(self) -> None:
"""
trigger action which will be called at the start of the application
"""
self.tree.tree_create()
def on_stop(self) -> None:
"""
trigger action which will be called before the stop of the application
"""
self.tree.symlinks_fix()

View File

@ -19,6 +19,7 @@
#
# pylint: disable=too-many-public-methods
import configparser
import os
import shlex
import sys
@ -42,7 +43,6 @@ class Configuration(configparser.RawConfigParser):
SYSTEM_CONFIGURATION_PATH(Path): (class attribute) default system configuration path distributed by package
includes(list[Path]): list of includes which were read
path(Path | None): path to root configuration file
repository_id(RepositoryId | None): repository unique identifier
Examples:
Configuration class provides additional method in order to handle application configuration. Since this class is
@ -93,7 +93,7 @@ class Configuration(configparser.RawConfigParser):
},
)
self.repository_id: RepositoryId | None = None
self._repository_id: RepositoryId | None = None
self.path: Path | None = None
self.includes: list[Path] = []
@ -128,6 +128,32 @@ class Configuration(configparser.RawConfigParser):
"""
return self.getpath("settings", "logging")
@property
def repository_id(self) -> RepositoryId | None:
"""
repository identifier
Returns:
RepositoryId: repository unique identifier
"""
return self._repository_id
@repository_id.setter
def repository_id(self, repository_id: RepositoryId | None) -> None:
"""
setter for repository identifier
Args:
repository_id(RepositoryId | None): repository unique identifier
"""
self._repository_id = repository_id
if repository_id is None or repository_id.is_empty:
self.remove_option("repository", "name")
self.remove_option("repository", "architecture")
else:
self.set_option("repository", "name", repository_id.name)
self.set_option("repository", "architecture", repository_id.architecture)
@property
def repository_name(self) -> str:
"""
@ -164,6 +190,7 @@ class Configuration(configparser.RawConfigParser):
"""
configuration = cls()
configuration.load(path)
configuration.load_environment()
configuration.merge_sections(repository_id)
return configuration
@ -288,6 +315,16 @@ class Configuration(configparser.RawConfigParser):
self.read(self.path)
self.load_includes() # load includes
def load_environment(self) -> None:
"""
load environment variables into configuration
"""
for name, value in os.environ.items():
if ":" not in name:
continue
section, key = name.rsplit(":", maxsplit=1)
self.set_option(section, key, value)
def load_includes(self, path: Path | None = None) -> None:
"""
load configuration includes from specified path
@ -356,11 +393,16 @@ class Configuration(configparser.RawConfigParser):
"""
reload configuration if possible or raise exception otherwise
"""
# get current properties and validate input
path, repository_id = self.check_loaded()
for section in self.sections(): # clear current content
# clear current content
for section in self.sections():
self.remove_section(section)
self.load(path)
self.merge_sections(repository_id)
# create another instance and copy values from there
instance = self.from_path(path, repository_id)
self.copy_from(instance)
def set_option(self, section: str, option: str, value: str) -> None:
"""

View File

@ -249,6 +249,10 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
"repository": {
"type": "dict",
"schema": {
"architecture": {
"type": "string",
"empty": False,
},
"name": {
"type": "string",
"empty": False,

View File

@ -0,0 +1,84 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import argparse
from dataclasses import replace
from sqlite3 import Connection
from ahriman.application.handlers.handler import Handler
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.models.package import Package
from ahriman.models.pacman_synchronization import PacmanSynchronization
from ahriman.models.repository_paths import RepositoryPaths
__all__ = ["migrate_data"]
def migrate_data(connection: Connection, configuration: Configuration) -> None:
"""
perform data migration
Args:
connection(Connection): database connection
configuration(Configuration): configuration instance
"""
del connection
config_path, _ = configuration.check_loaded()
args = argparse.Namespace(configuration=config_path, architecture=None, repository=None, repository_id=None)
for repository_id in Handler.repositories_extract(args):
paths = replace(configuration.repository_paths, repository_id=repository_id)
pacman = Pacman(repository_id, configuration, refresh_database=PacmanSynchronization.Disabled)
# create archive directory if required
if not paths.archive.is_dir():
with paths.preserve_owner(paths.archive):
paths.archive.mkdir(mode=0o755, parents=True)
move_packages(paths, pacman)
def move_packages(repository_paths: RepositoryPaths, pacman: Pacman) -> None:
"""
move packages from repository to archive and create symbolic links
Args:
repository_paths(RepositoryPaths): repository paths instance
pacman(Pacman): alpm wrapper instance
"""
for source in repository_paths.repository.iterdir():
if not source.is_file(follow_symlinks=False):
continue # skip symbolic links if any
filename = source.name
if filename.startswith(".") or ".pkg." not in filename:
# we don't use package_like method here, because it also filters out signatures
continue
package = Package.from_archive(source, pacman)
# move package to the archive directory
target = repository_paths.archive_for(package.base) / filename
source.rename(target)
# create symlink to the archive
source.symlink_to(target.relative_to(source.parent, walk_up=True))

View File

@ -25,8 +25,16 @@ from typing import Self
from ahriman.core.configuration import Configuration
from ahriman.core.database.migrations import Migrations
from ahriman.core.database.operations import AuthOperations, BuildOperations, ChangesOperations, \
DependenciesOperations, EventOperations, LogsOperations, PackageOperations, PatchOperations
from ahriman.core.database.operations import (
AuthOperations,
BuildOperations,
ChangesOperations,
DependenciesOperations,
EventOperations,
LogsOperations,
PackageOperations,
PatchOperations,
)
from ahriman.models.repository_id import RepositoryId

View File

@ -17,4 +17,5 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.core.housekeeping.archive_rotation_trigger import ArchiveRotationTrigger
from ahriman.core.housekeeping.logs_rotation_trigger import LogsRotationTrigger

View File

@ -0,0 +1,115 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from collections.abc import Callable
from functools import cmp_to_key
from ahriman.core import context
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.core.triggers import Trigger
from ahriman.core.utils import package_like
from ahriman.models.package import Package
from ahriman.models.repository_id import RepositoryId
from ahriman.models.result import Result
class ArchiveRotationTrigger(Trigger):
"""
remove packages from archive
Attributes:
keep_built_packages(int): number of last packages to keep
paths(RepositoryPaths): repository paths instance
"""
CONFIGURATION_SCHEMA = {
"archive": {
"type": "dict",
"schema": {
"keep_built_packages": {
"type": "integer",
"required": True,
"coerce": "integer",
"min": 0,
},
},
},
}
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance
"""
Trigger.__init__(self, repository_id, configuration)
section = next(iter(self.configuration_sections(configuration)))
self.keep_built_packages = max(configuration.getint(section, "keep_built_packages"), 0)
self.paths = configuration.repository_paths
@classmethod
def configuration_sections(cls, configuration: Configuration) -> list[str]:
"""
extract configuration sections from configuration
Args:
configuration(Configuration): configuration instance
Returns:
list[str]: configuration sections that belong to this trigger
"""
return list(cls.CONFIGURATION_SCHEMA.keys())
def archives_remove(self, package: Package, pacman: Pacman) -> None:
"""
remove older versions of the specified package
Args:
package(Package): package which has been updated to check for older versions
pacman(Pacman): alpm wrapper instance
"""
packages: dict[tuple[str, str], Package] = {}
# we can't use load_archives here, because it ignores versions
for full_path in filter(package_like, self.paths.archive_for(package.base).iterdir()):
local = Package.from_archive(full_path, pacman)
packages.setdefault((local.base, local.version), local).packages.update(local.packages)
comparator: Callable[[Package, Package], int] = lambda left, right: left.vercmp(right.version)
to_remove = sorted(packages.values(), key=cmp_to_key(comparator))
for single in to_remove[:-self.keep_built_packages]:
self.logger.info("removing version %s of package %s", single.version, single.base)
for archive in single.packages.values():
for path in self.paths.archive_for(single.base).glob(f"{archive.filename}*"):
path.unlink()
def on_result(self, result: Result, packages: list[Package]) -> None:
"""
run trigger
Args:
result(Result): build result
packages(list[Package]): list of all available packages
"""
ctx = context.get()
pacman = ctx.get(Pacman)
for package in result.success:
self.archives_remove(package, pacman)
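The retention logic boils down to plain list slicing over the version-sorted archive; a tiny standalone sketch of that slice (versions are illustrative):

```python
keep_built_packages = 2
versions = ["1.0.0-1", "1.0.0-2", "1.0.0-3", "1.0.1-1"]  # already sorted oldest to newest

# everything except the last ``keep_built_packages`` entries is removed;
# with 0 the slice becomes [:0], so nothing is removed and all versions are kept
to_remove = versions[:-keep_built_packages]
print(to_remove)  # ['1.0.0-1', '1.0.0-2']
```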

View File

@ -17,9 +17,9 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import shutil # shutil.move is used here to ensure cross fs file movement
import shutil
from collections.abc import Iterable
from collections.abc import Generator, Iterable
from pathlib import Path
from tempfile import TemporaryDirectory
@ -27,7 +27,7 @@ from ahriman.core.build_tools.package_archive import PackageArchive
from ahriman.core.build_tools.task import Task
from ahriman.core.repository.cleaner import Cleaner
from ahriman.core.repository.package_info import PackageInfo
from ahriman.core.utils import safe_filename
from ahriman.core.utils import atomic_move, filelock, package_like, safe_filename
from ahriman.models.changes import Changes
from ahriman.models.event import EventType
from ahriman.models.package import Package
@ -41,7 +41,37 @@ class Executor(PackageInfo, Cleaner):
trait for common repository update processes
"""
def _archive_remove(self, description: PackageDescription, package_base: str) -> None:
def _archive_lookup(self, package: Package) -> Generator[Path, None, None]:
"""
check if there is a rebuilt package already
Args:
package(Package): package to check
Yields:
Path: paths to already built packages and their signatures, if available (nothing is yielded otherwise)
"""
archive = self.paths.archive_for(package.base)
# find all packages which have same version
same_version = [
built
for path in filter(package_like, archive.iterdir())
if (built := Package.from_archive(path, self.pacman)).version == package.version
]
# no packages of the same version found
if not same_version:
return
packages = [single for built in same_version for single in built.packages.values()]
# all packages must be either any or same architecture
if not all(single.architecture in ("any", self.architecture) for single in packages):
return
for single in packages:
yield from archive.glob(f"{single.filename}*")
def _archive_rename(self, description: PackageDescription, package_base: str) -> None:
"""
rename package archive removing special symbols
@ -50,11 +80,11 @@ class Executor(PackageInfo, Cleaner):
package_base(str): package base name
"""
if description.filename is None:
self.logger.warning("received empty package name for base %s", package_base)
self.logger.warning("received empty package filename for base %s", package_base)
return # suppress type checking, it never can be none actually
if (safe := safe_filename(description.filename)) != description.filename:
(self.paths.packages / description.filename).rename(self.paths.packages / safe)
atomic_move(self.paths.packages / description.filename, self.paths.packages / safe)
description.filename = safe
def _package_build(self, package: Package, path: Path, packager: str | None,
@ -76,12 +106,22 @@ class Executor(PackageInfo, Cleaner):
task = Task(package, self.configuration, self.architecture, self.paths)
patches = self.reporter.package_patches_get(package.base, None)
commit_sha = task.init(path, patches, local_version)
built = task.build(path, PACKAGER=packager)
loaded_package = Package.from_build(path, self.architecture, None)
if prebuilt := list(self._archive_lookup(loaded_package)):
self.logger.info("using prebuilt packages for %s-%s", loaded_package.base, loaded_package.version)
built = []
for artefact in prebuilt:
with filelock(artefact):
shutil.copy(artefact, path)
built.append(path / artefact.name)
else:
built = task.build(path, PACKAGER=packager)
package.with_packages(built, self.pacman)
for src in built:
dst = self.paths.packages / src.name
shutil.move(src, dst)
atomic_move(src, dst)
return commit_sha
@ -121,7 +161,7 @@ class Executor(PackageInfo, Cleaner):
packager_key(str | None): packager key identifier
"""
if filename is None:
self.logger.warning("received empty package name for base %s", package_base)
self.logger.warning("received empty package filename for base %s", package_base)
return # suppress type checking, it never can be none actually
# in theory, it might be NOT packages directory, but we suppose it is
@ -129,10 +169,10 @@ class Executor(PackageInfo, Cleaner):
files = self.sign.process_sign_package(full_path, packager_key)
for src in files:
archive = self.paths.archive_for(package_base) / src.name
shutil.move(src, archive) # move package to archive directory
if not (symlink := self.paths.repository / archive.name).exists():
symlink.symlink_to(archive.relative_to(symlink.parent, walk_up=True)) # create link to archive
dst = self.paths.archive_for(package_base) / src.name
atomic_move(src, dst) # move package to archive directory
if not (symlink := self.paths.repository / dst.name).exists():
symlink.symlink_to(dst.relative_to(symlink.parent, walk_up=True)) # create link to archive
self.repo.add(self.paths.repository / filename)
@ -259,7 +299,7 @@ class Executor(PackageInfo, Cleaner):
packager = self.packager(packagers, local.base)
for description in local.packages.values():
self._archive_remove(description, local.base)
self._archive_rename(description, local.base)
self._package_update(description.filename, local.base, packager.key)
self.reporter.set_success(local)
result.add_updated(local)

View File

@ -80,8 +80,7 @@ class Trigger(LazyLogging):
return self.repository_id.architecture
@classmethod
def configuration_schema(cls, repository_id: RepositoryId,
configuration: Configuration | None) -> ConfigurationSchema:
def configuration_schema(cls, configuration: Configuration | None) -> ConfigurationSchema:
"""
configuration schema based on supplied service configuration
@ -89,7 +88,6 @@ class Trigger(LazyLogging):
Schema must be in cerberus format, for details and examples you can check built-in triggers.
Args:
repository_id(str): repository unique identifier
configuration(Configuration | None): configuration instance. If set to None, the default schema
should be returned
@ -101,13 +99,15 @@ class Trigger(LazyLogging):
result: ConfigurationSchema = {}
for target in cls.configuration_sections(configuration):
if not configuration.has_section(target):
continue
section, schema_name = configuration.gettype(
target, repository_id, fallback=cls.CONFIGURATION_SCHEMA_FALLBACK)
if schema_name not in cls.CONFIGURATION_SCHEMA:
continue
result[section] = cls.CONFIGURATION_SCHEMA[schema_name]
for section in configuration.sections():
if not (section == target or section.startswith(f"{target}:")):
# either repository specific or exact name
continue
schema_name = configuration.get(section, "type", fallback=section)
if schema_name not in cls.CONFIGURATION_SCHEMA:
continue
result[section] = cls.CONFIGURATION_SCHEMA[schema_name]
return result
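The new matching rule accepts both the exact trigger section and its repository-specific variants; a small standalone sketch of the predicate used above (section names are made up):

```python
target = "github"
sections = ["github", "github:x86_64", "github:aur:x86_64", "github-release", "report"]

matched = [
    section
    for section in sections
    if section == target or section.startswith(f"{target}:")
]
print(matched)  # ['github', 'github:x86_64', 'github:aur:x86_64']
```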

View File

@ -17,6 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import atexit
import contextlib
import os
@ -60,17 +61,8 @@ class TriggerLoader(LazyLogging):
def __init__(self) -> None:
""""""
self._on_stop_requested = False
self.triggers: list[Trigger] = []
def __del__(self) -> None:
"""
custom destructor object which calls on_stop in case if it was requested
"""
if not self._on_stop_requested:
return
self.on_stop()
@classmethod
def load(cls, repository_id: RepositoryId, configuration: Configuration) -> Self:
"""
@ -250,10 +242,11 @@ class TriggerLoader(LazyLogging):
run triggers on load
"""
self.logger.debug("executing triggers on start")
self._on_stop_requested = True
for trigger in self.triggers:
with self.__execute_trigger(trigger):
trigger.on_start()
# register on_stop call
atexit.register(self.on_stop)
def on_stop(self) -> None:
"""

View File

@ -18,13 +18,16 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# pylint: disable=too-many-lines
import contextlib
import datetime
import fcntl
import io
import itertools
import logging
import os
import re
import selectors
import shutil
import subprocess
from collections.abc import Callable, Generator, Iterable, Mapping
@ -39,11 +42,13 @@ from ahriman.models.repository_paths import RepositoryPaths
__all__ = [
"atomic_move",
"check_output",
"check_user",
"dataclass_view",
"enum_values",
"extract_user",
"filelock",
"filter_json",
"full_version",
"minmax",
@ -65,6 +70,25 @@ __all__ = [
T = TypeVar("T")
def atomic_move(src: Path, dst: Path) -> None:
"""
move file from ``src`` location to ``dst``. This method uses a lock and :func:`shutil.move` to ensure that the
file is moved (copied, if it cannot be renamed) atomically. This method blocks execution until the lock is available
Args:
src(Path): path to the source file
dst(Path): path to the destination
Examples:
This method is a drop-in replacement for :func:`shutil.move` (except that it doesn't allow overriding the copy
function) which first locks the destination file. To use it, simply call the method with arguments::
>>> atomic_move(src, dst)
"""
with filelock(dst):
shutil.move(src, dst)
# pylint: disable=too-many-locals
def check_output(*args: str, exception: Exception | Callable[[int, list[str], str, str], Exception] | None = None,
cwd: Path | None = None, input_data: str | None = None,
@ -233,6 +257,27 @@ def extract_user() -> str | None:
return os.getenv("SUDO_USER") or os.getenv("DOAS_USER") or os.getenv("USER")
@contextlib.contextmanager
def filelock(path: Path) -> Generator[None, None, None]:
"""
acquire an exclusive lock on the file passed as argument
Args:
path(Path): path object on which lock must be performed
"""
lock_path = path.with_name(f".{path.name}")
try:
with lock_path.open("ab") as lock_file:
fd = lock_file.fileno()
try:
fcntl.flock(fd, fcntl.LOCK_EX)  # lock file and wait until the lock is available
yield
finally:
fcntl.flock(fd, fcntl.LOCK_UN) # unlock file first
finally:
lock_path.unlink(missing_ok=True) # remove lock file at the end
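A minimal usage sketch for the helper above (the path is hypothetical); ``atomic_move`` wraps the same lock around :func:`shutil.move`:

```python
from pathlib import Path

from ahriman.core.utils import filelock

target = Path("/var/lib/ahriman/repository/some-package.pkg.tar.zst")  # hypothetical path

# the hidden lock file ``.some-package.pkg.tar.zst`` is held exclusively inside the block
with filelock(target):
    data = target.read_bytes()
```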
def filter_json(source: dict[str, Any], known_fields: Iterable[str]) -> dict[str, Any]:
"""
filter json object by fields used for json-to-object conversion

View File

@ -520,8 +520,7 @@ class Package(LazyLogging):
else:
remote_version = remote.version
result: int = vercmp(self.version, remote_version)
return result < 0
return self.vercmp(remote_version) < 0
def next_pkgrel(self, local_version: str | None) -> str | None:
"""
@ -540,7 +539,7 @@ class Package(LazyLogging):
if local_version is None:
return None # local version not found, keep upstream pkgrel
if vercmp(self.version, local_version) > 0:
if self.vercmp(local_version) > 0:
return None # upstream version is newer than local one, keep upstream pkgrel
*_, local_pkgrel = parse_version(local_version)
@ -561,6 +560,19 @@ class Package(LazyLogging):
details = "" if self.is_single_package else f""" ({" ".join(sorted(self.packages.keys()))})"""
return f"{self.base}{details}"
def vercmp(self, version: str) -> int:
"""
typed wrapper around :func:`pyalpm.vercmp()`
Args:
version(str): version to compare
Returns:
int: negative if the current version is less than the provided one, positive if it is greater, and zero if they are equal
"""
result: int = vercmp(self.version, version)
return result
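For completeness, a tiny sketch showing how the sign convention reads in calling code (the helper below is hypothetical, not part of the change):

```python
from ahriman.models.package import Package

def is_older_than(package: Package, version: str) -> bool:
    # negative result means the package version is less than the supplied one
    return package.vercmp(version) < 0
```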
def view(self) -> dict[str, Any]:
"""
generate json package view

View File

@ -93,7 +93,7 @@ class RepositoryPaths(LazyLogging):
Returns:
Path: archive directory root
"""
return self.root / "archive" / self._suffix
return self.root / "archive"
@property
def build_root(self) -> Path:
@ -237,7 +237,7 @@ class RepositoryPaths(LazyLogging):
set owner of path recursively (from root) to root owner
Notes:
More likely you don't want to call this method explicitly, consider using :func:`preserve_owner`
More likely you don't want to call this method explicitly, consider using :func:`preserve_owner()`
as context manager instead
Args:
@ -309,6 +309,10 @@ class RepositoryPaths(LazyLogging):
path = path or self.root
def walk(root: Path) -> Generator[Path, None, None]:
yield root
if not root.exists():
return
# basically walk, but skipping some content
for child in root.iterdir():
yield child

View File

@ -21,8 +21,18 @@ import aiohttp_jinja2
import logging
from aiohttp.typedefs import Middleware
from aiohttp.web import HTTPClientError, HTTPException, HTTPMethodNotAllowed, HTTPNoContent, HTTPServerError, \
HTTPUnauthorized, Request, StreamResponse, json_response, middleware
from aiohttp.web import (
HTTPClientError,
HTTPException,
HTTPMethodNotAllowed,
HTTPNoContent,
HTTPServerError,
HTTPUnauthorized,
Request,
StreamResponse,
json_response,
middleware,
)
from ahriman.web.middlewares import HandlerType

View File

@ -25,8 +25,12 @@ from ahriman.models.build_status import BuildStatusEnum
from ahriman.models.package import Package
from ahriman.models.user_access import UserAccess
from ahriman.web.apispec.decorators import apidocs
from ahriman.web.schemas import PackageNameSchema, PackageStatusSchema, PackageStatusSimplifiedSchema, \
RepositoryIdSchema
from ahriman.web.schemas import (
PackageNameSchema,
PackageStatusSchema,
PackageStatusSimplifiedSchema,
RepositoryIdSchema,
)
from ahriman.web.views.base import BaseView
from ahriman.web.views.status_view_guard import StatusViewGuard

View File

@ -26,6 +26,7 @@ from tempfile import NamedTemporaryFile
from typing import ClassVar
from ahriman.core.configuration import Configuration
from ahriman.core.utils import atomic_move
from ahriman.models.repository_paths import RepositoryPaths
from ahriman.models.user_access import UserAccess
from ahriman.web.apispec.decorators import apidocs
@ -152,10 +153,8 @@ class UploadView(BaseView):
files.append(await self.save_file(part, target, max_body_size=max_body_size))
# and now we can rename files, which is relatively fast operation
# it is probably good way to call lock here, however
for filename, current_location in files:
target_location = current_location.parent / filename
current_location.rename(target_location)
atomic_move(current_location, target_location)
raise HTTPCreated

View File

@ -37,6 +37,7 @@ SUBPACKAGES = {
"ahriman-triggers": [
prefix / "share" / "ahriman" / "settings" / "ahriman.ini.d" / "00-triggers.ini",
site_packages / "ahriman" / "application" / "handlers" / "triggers_support.py",
site_packages / "ahriman" / "core" / "archive",
site_packages / "ahriman" / "core" / "distributed",
site_packages / "ahriman" / "core" / "support",
],

View File

@ -6,6 +6,7 @@ from unittest.mock import call as MockCall
from ahriman.application.handlers.tree_migrate import TreeMigrate
from ahriman.core.configuration import Configuration
from ahriman.models.package import Package
from ahriman.models.repository_id import RepositoryId
from ahriman.models.repository_paths import RepositoryPaths
@ -16,6 +17,7 @@ def test_run(args: argparse.Namespace, configuration: Configuration, mocker: Moc
"""
tree_create_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.tree_create")
application_mock = mocker.patch("ahriman.application.handlers.tree_migrate.TreeMigrate.tree_move")
symlinks_mock = mocker.patch("ahriman.application.handlers.tree_migrate.TreeMigrate.fix_symlinks")
_, repository_id = configuration.check_loaded()
old_paths = configuration.repository_paths
new_paths = RepositoryPaths(old_paths.root, old_paths.repository_id, _force_current_tree=True)
@ -23,6 +25,37 @@ def test_run(args: argparse.Namespace, configuration: Configuration, mocker: Moc
TreeMigrate.run(args, repository_id, configuration, report=False)
tree_create_mock.assert_called_once_with()
application_mock.assert_called_once_with(old_paths, new_paths)
symlinks_mock.assert_called_once_with(new_paths)
def test_fix_symlinks(repository_paths: RepositoryPaths, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must replace symlinks during migration
"""
mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
mocker.patch("ahriman.application.handlers.tree_migrate.walk", side_effect=[
[
repository_paths.archive_for(package_ahriman.base) / "file",
repository_paths.archive_for(package_ahriman.base) / "symlink",
],
[
repository_paths.repository / "file",
repository_paths.repository / "symlink",
],
])
mocker.patch("pathlib.Path.exists", autospec=True, side_effect=lambda p: p.name == "file")
unlink_mock = mocker.patch("pathlib.Path.unlink")
symlink_mock = mocker.patch("pathlib.Path.symlink_to")
TreeMigrate.fix_symlinks(repository_paths)
unlink_mock.assert_called_once_with()
symlink_mock.assert_called_once_with(
Path("..") /
".." /
".." /
repository_paths.archive_for(package_ahriman.base).relative_to(repository_paths.root) /
"symlink"
)
def test_move_tree(mocker: MockerFixture) -> None:

View File

@ -2,6 +2,7 @@ import argparse
import json
import pytest
from pathlib import Path
from pytest_mock import MockerFixture
from ahriman.application.handlers.validate import Validate
@ -53,12 +54,50 @@ def test_run_skip(args: argparse.Namespace, configuration: Configuration, mocker
print_mock.assert_not_called()
def test_run_default(args: argparse.Namespace, configuration: Configuration) -> None:
"""
must run on default configuration without errors
"""
args.exit_code = True
_, repository_id = configuration.check_loaded()
default = Configuration.from_path(Configuration.SYSTEM_CONFIGURATION_PATH, repository_id)
# copy autogenerated values
for section, key in (("build", "build_command"), ("repository", "root")):
value = configuration.get(section, key)
default.set_option(section, key, value)
Validate.run(args, repository_id, default, report=False)
def test_run_repo_specific_triggers(args: argparse.Namespace, configuration: Configuration,
resource_path_root: Path) -> None:
"""
must correctly insert repo specific triggers
"""
args.exit_code = True
_, repository_id = configuration.check_loaded()
# remove unused sections
for section in ("archive", "customs3", "github:x86_64", "logs-rotation", "mirrorlist"):
configuration.remove_section(section)
configuration.set_option("report", "target", "test")
for section in ("test", "test:i686", "test:another-repo:x86_64"):
configuration.set_option(section, "type", "html")
configuration.set_option(section, "link_path", "http://link_path")
configuration.set_option(section, "path", "path")
configuration.set_option(section, "template", "template")
configuration.set_option(section, "templates", str(resource_path_root))
Validate.run(args, repository_id, configuration, report=False)
def test_schema(configuration: Configuration) -> None:
"""
must generate full schema correctly
"""
_, repository_id = configuration.check_loaded()
schema = Validate.schema(repository_id, configuration)
schema = Validate.schema(configuration)
# defaults
assert schema.pop("console")
@ -91,9 +130,7 @@ def test_schema_invalid_trigger(configuration: Configuration) -> None:
"""
configuration.set_option("build", "triggers", "some.invalid.trigger.path.Trigger")
configuration.remove_option("build", "triggers_known")
_, repository_id = configuration.check_loaded()
assert Validate.schema(repository_id, configuration) == CONFIGURATION_SCHEMA
assert Validate.schema(configuration) == CONFIGURATION_SCHEMA
def test_schema_erase_required() -> None:

View File

@ -1,8 +1,10 @@
import datetime
import pytest
from dataclasses import replace
from pathlib import Path
from pytest_mock import MockerFixture
from sqlite3 import Cursor
from typing import Any, TypeVar
from unittest.mock import MagicMock, PropertyMock
@ -11,12 +13,14 @@ from ahriman.core.alpm.remote import AUR
from ahriman.core.auth import Auth
from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite
from ahriman.core.database.migrations import Migrations
from ahriman.core.repository import Repository
from ahriman.core.spawn import Spawn
from ahriman.core.status import Client
from ahriman.core.status.watcher import Watcher
from ahriman.models.aur_package import AURPackage
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
from ahriman.models.migration import Migration
from ahriman.models.package import Package
from ahriman.models.package_description import PackageDescription
from ahriman.models.package_source import PackageSource
@ -48,7 +52,9 @@ def anyvar(cls: type[T], strict: bool = False) -> T:
T: any wrapper
"""
class AnyVar(cls):
"""any value wrapper"""
"""
any value wrapper
"""
def __eq__(self, other: Any) -> bool:
"""
@ -271,16 +277,23 @@ def configuration(repository_id: RepositoryId, tmp_path: Path, resource_path_roo
@pytest.fixture
def database(configuration: Configuration) -> SQLite:
def database(configuration: Configuration, mocker: MockerFixture) -> SQLite:
"""
database fixture
Args:
configuration(Configuration): configuration fixture
mocker(MockerFixture): mocker object
Returns:
SQLite: database test instance
"""
original_method = Migrations.perform_migration
def perform_migration(self: Migrations, cursor: Cursor, migration: Migration) -> None:
original_method(self, cursor, replace(migration, migrate_data=lambda *args: None))
mocker.patch.object(Migrations, "perform_migration", autospec=True, side_effect=perform_migration)
return SQLite.load(configuration)

View File

@ -4,6 +4,15 @@ from pathlib import Path
from pytest_mock import MockerFixture
from ahriman.core.alpm.repo import Repo
from ahriman.models.repository_paths import RepositoryPaths
def test_root(repository_paths: RepositoryPaths) -> None:
"""
must correctly define repository root
"""
assert Repo(repository_paths.repository_id.name, repository_paths, []).root == repository_paths.repository
assert Repo(repository_paths.repository_id.name, repository_paths, [], Path("path")).root == Path("path")
def test_repo_path(repo: Repo) -> None:
@ -22,6 +31,18 @@ def test_repo_add(repo: Repo, mocker: MockerFixture) -> None:
repo.add(Path("path"))
check_output_mock.assert_called_once() # it will be checked later
assert check_output_mock.call_args[0][0] == "repo-add"
assert "--remove" in check_output_mock.call_args[0]
def test_repo_add_no_remove(repo: Repo, mocker: MockerFixture) -> None:
"""
must call repo-add without remove flag
"""
check_output_mock = mocker.patch("ahriman.core.alpm.repo.check_output")
repo.add(Path("path"), remove=False)
check_output_mock.assert_called_once() # it will be checked later
assert "--remove" not in check_output_mock.call_args[0]
def test_repo_init(repo: Repo, mocker: MockerFixture) -> None:

View File

@ -1,8 +1,8 @@
import configparser
from io import StringIO
import pytest
import os
from io import StringIO
from pathlib import Path
from pytest_mock import MockerFixture
from unittest.mock import call as MockCall
@ -20,6 +20,40 @@ def test_architecture(configuration: Configuration) -> None:
assert configuration.architecture == "x86_64"
def test_repository_id(configuration: Configuration, repository_id: RepositoryId) -> None:
"""
must return repository identifier
"""
assert configuration.repository_id == repository_id
assert configuration.get("repository", "name") == repository_id.name
assert configuration.get("repository", "architecture") == repository_id.architecture
def test_repository_id_erase(configuration: Configuration) -> None:
"""
must remove repository identifier properties if empty identifier supplied
"""
configuration.repository_id = None
assert configuration.get("repository", "name", fallback=None) is None
assert configuration.get("repository", "architecture", fallback=None) is None
configuration.repository_id = RepositoryId("", "")
assert configuration.get("repository", "name", fallback=None) is None
assert configuration.get("repository", "architecture", fallback=None) is None
def test_repository_id_update(configuration: Configuration, repository_id: RepositoryId) -> None:
"""
must update repository identifier and related configuration options
"""
repository_id = RepositoryId("i686", repository_id.name)
configuration.repository_id = repository_id
assert configuration.repository_id == repository_id
assert configuration.get("repository", "name") == repository_id.name
assert configuration.get("repository", "architecture") == repository_id.architecture
def test_repository_name(configuration: Configuration) -> None:
"""
must return valid repository name
@ -42,12 +76,16 @@ def test_from_path(repository_id: RepositoryId, mocker: MockerFixture) -> None:
mocker.patch("ahriman.core.configuration.Configuration.get", return_value="ahriman.ini.d")
read_mock = mocker.patch("ahriman.core.configuration.Configuration.read")
load_includes_mock = mocker.patch("ahriman.core.configuration.Configuration.load_includes")
merge_mock = mocker.patch("ahriman.core.configuration.Configuration.merge_sections")
environment_mock = mocker.patch("ahriman.core.configuration.Configuration.load_environment")
path = Path("path")
configuration = Configuration.from_path(path, repository_id)
assert configuration.path == path
read_mock.assert_called_once_with(path)
load_includes_mock.assert_called_once_with()
merge_mock.assert_called_once_with(repository_id)
environment_mock.assert_called_once_with()
def test_from_path_file_missing(repository_id: RepositoryId, mocker: MockerFixture) -> None:
@ -324,6 +362,18 @@ def test_gettype_from_section_no_section(configuration: Configuration) -> None:
configuration.gettype("rsync:x86_64", configuration.repository_id)
def test_load_environment(configuration: Configuration) -> None:
"""
must load environment variables
"""
os.environ["section:key"] = "value1"
os.environ["section:identifier:key"] = "value2"
configuration.load_environment()
assert configuration.get("section", "key") == "value1"
assert configuration.get("section:identifier", "key") == "value2"
def test_load_includes(mocker: MockerFixture) -> None:
"""
must load includes
@ -444,10 +494,12 @@ def test_reload(configuration: Configuration, mocker: MockerFixture) -> None:
"""
load_mock = mocker.patch("ahriman.core.configuration.Configuration.load")
merge_mock = mocker.patch("ahriman.core.configuration.Configuration.merge_sections")
environment_mock = mocker.patch("ahriman.core.configuration.Configuration.load_environment")
configuration.reload()
load_mock.assert_called_once_with(configuration.path)
merge_mock.assert_called_once_with(configuration.repository_id)
environment_mock.assert_called_once_with()
def test_reload_clear(configuration: Configuration, mocker: MockerFixture) -> None:

View File

@ -0,0 +1,82 @@
import pytest
from dataclasses import replace
from pathlib import Path
from pytest_mock import MockerFixture
from sqlite3 import Connection
from typing import Any
from unittest.mock import call as MockCall
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.core.database.migrations.m016_archive import migrate_data, move_packages
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
def test_migrate_data(connection: Connection, configuration: Configuration, mocker: MockerFixture) -> None:
"""
must perform data migration
"""
_, repository_id = configuration.check_loaded()
repositories = [
repository_id,
replace(repository_id, architecture="i686"),
]
mocker.patch("ahriman.application.handlers.handler.Handler.repositories_extract", return_value=repositories)
migration_mock = mocker.patch("ahriman.core.database.migrations.m016_archive.move_packages")
migrate_data(connection, configuration)
migration_mock.assert_has_calls([
MockCall(replace(configuration.repository_paths, repository_id=repository), pytest.helpers.anyvar(int))
for repository in repositories
])
def test_move_packages(repository_paths: RepositoryPaths, pacman: Pacman, package_ahriman: Package,
mocker: MockerFixture) -> None:
"""
must move packages to the archive directory
"""
def is_file(self: Path, *args: Any, **kwargs: Any) -> bool:
return "file" in self.name
mocker.patch("pathlib.Path.iterdir", return_value=[
repository_paths.repository / ".hidden-file.pkg.tar.xz",
repository_paths.repository / "directory",
repository_paths.repository / "file.pkg.tar.xz",
repository_paths.repository / "file.pkg.tar.xz.sig",
repository_paths.repository / "symlink.pkg.tar.xz",
])
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("pathlib.Path.is_file", autospec=True, side_effect=is_file)
archive_mock = mocker.patch("ahriman.models.package.Package.from_archive", return_value=package_ahriman)
rename_mock = mocker.patch("pathlib.Path.rename")
symlink_mock = mocker.patch("pathlib.Path.symlink_to")
move_packages(repository_paths, pacman)
archive_mock.assert_has_calls([
MockCall(repository_paths.repository / "file.pkg.tar.xz", pacman),
MockCall(repository_paths.repository / "file.pkg.tar.xz.sig", pacman),
])
rename_mock.assert_has_calls([
MockCall(repository_paths.archive_for(package_ahriman.base) / "file.pkg.tar.xz"),
MockCall(repository_paths.archive_for(package_ahriman.base) / "file.pkg.tar.xz.sig"),
])
symlink_mock.assert_has_calls([
MockCall(
Path("..") /
".." /
".." /
repository_paths.archive_for(package_ahriman.base).relative_to(repository_paths.root) /
"file.pkg.tar.xz"
),
MockCall(
Path("..") /
".." /
".." /
repository_paths.archive_for(package_ahriman.base).relative_to(repository_paths.root) /
"file.pkg.tar.xz.sig"
),
])

View File

@ -1,13 +1,28 @@
import pytest
from ahriman.core.configuration import Configuration
from ahriman.core.housekeeping import LogsRotationTrigger
from ahriman.core.housekeeping import ArchiveRotationTrigger, LogsRotationTrigger
@pytest.fixture
def archive_rotation_trigger(configuration: Configuration) -> ArchiveRotationTrigger:
"""
archive rotation trigger fixture
Args:
configuration(Configuration): configuration fixture
Returns:
ArchiveRotationTrigger: archive rotation trigger test instance
"""
_, repository_id = configuration.check_loaded()
return ArchiveRotationTrigger(repository_id, configuration)
@pytest.fixture
def logs_rotation_trigger(configuration: Configuration) -> LogsRotationTrigger:
"""
logs roration trigger fixture
logs rotation trigger fixture
Args:
configuration(Configuration): configuration fixture

View File

@ -0,0 +1,83 @@
import pytest
from dataclasses import replace
from pathlib import Path
from pytest_mock import MockerFixture
from typing import Any
from unittest.mock import call as MockCall
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.core.housekeeping import ArchiveRotationTrigger
from ahriman.models.package import Package
from ahriman.models.result import Result
def test_configuration_sections(configuration: Configuration) -> None:
"""
must correctly parse target list
"""
assert ArchiveRotationTrigger.configuration_sections(configuration) == ["archive"]
def test_archives_remove(archive_rotation_trigger: ArchiveRotationTrigger, package_ahriman: Package,
pacman: Pacman, mocker: MockerFixture) -> None:
"""
must remove older packages
"""
def package(version: Any, *args: Any, **kwargs: Any) -> Package:
generated = replace(package_ahriman, version=str(version))
generated.packages = {
key: replace(value, filename=str(version))
for key, value in generated.packages.items()
}
return generated
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("ahriman.core.housekeeping.archive_rotation_trigger.package_like", return_value=True)
mocker.patch("pathlib.Path.glob", return_value=[Path(str(i)) for i in range(5)])
mocker.patch("pathlib.Path.iterdir", return_value=[Path(str(i)) for i in range(5)])
mocker.patch("ahriman.models.package.Package.from_archive", side_effect=package)
unlink_mock = mocker.patch("pathlib.Path.unlink", autospec=True)
archive_rotation_trigger.archives_remove(package_ahriman, pacman)
unlink_mock.assert_has_calls([
MockCall(Path("0")),
MockCall(Path("1")),
])
def test_archives_remove_keep(archive_rotation_trigger: ArchiveRotationTrigger, package_ahriman: Package,
pacman: Pacman, mocker: MockerFixture) -> None:
"""
must keep all packages if keep_built_packages is set to zero
"""
def package(version: Any, *args: Any, **kwargs: Any) -> Package:
generated = replace(package_ahriman, version=str(version))
generated.packages = {
key: replace(value, filename=str(version))
for key, value in generated.packages.items()
}
return generated
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("ahriman.core.housekeeping.archive_rotation_trigger.package_like", return_value=True)
mocker.patch("pathlib.Path.glob", return_value=[Path(str(i)) for i in range(5)])
mocker.patch("pathlib.Path.iterdir", return_value=[Path(str(i)) for i in range(5)])
mocker.patch("ahriman.models.package.Package.from_archive", side_effect=package)
unlink_mock = mocker.patch("pathlib.Path.unlink", autospec=True)
archive_rotation_trigger.keep_built_packages = 0
archive_rotation_trigger.archives_remove(package_ahriman, pacman)
unlink_mock.assert_not_called()
def test_on_result(archive_rotation_trigger: ArchiveRotationTrigger, package_ahriman: Package,
package_python_schedule: Package, mocker: MockerFixture) -> None:
"""
must rotate archives
"""
mocker.patch("ahriman.core._Context.get")
remove_mock = mocker.patch("ahriman.core.housekeeping.ArchiveRotationTrigger.archives_remove")
archive_rotation_trigger.on_result(Result(added=[package_ahriman], failed=[package_python_schedule]), [])
remove_mock.assert_called_once_with(package_ahriman, pytest.helpers.anyvar(int))

View File

@ -14,7 +14,7 @@ def test_configuration_sections(configuration: Configuration) -> None:
assert LogsRotationTrigger.configuration_sections(configuration) == ["logs-rotation"]
def test_rotate(logs_rotation_trigger: LogsRotationTrigger, mocker: MockerFixture) -> None:
def test_on_result(logs_rotation_trigger: LogsRotationTrigger, mocker: MockerFixture) -> None:
"""
must rotate logs
"""

View File

@ -1,5 +1,6 @@
import pytest
from dataclasses import replace
from pathlib import Path
from pytest_mock import MockerFixture
from typing import Any
@ -13,34 +14,214 @@ from ahriman.models.packagers import Packagers
from ahriman.models.user import User
def test_archive_lookup(executor: Executor, package_ahriman: Package, package_python_schedule: Package,
mocker: MockerFixture) -> None:
"""
must return existing packages which match the version
"""
mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
mocker.patch("pathlib.Path.iterdir", return_value=[
Path("1.pkg.tar.zst"),
Path("2.pkg.tar.zst"),
Path("3.pkg.tar.zst"),
])
mocker.patch("ahriman.models.package.Package.from_archive", side_effect=[
package_ahriman,
package_python_schedule,
replace(package_ahriman, version="1"),
])
glob_mock = mocker.patch("pathlib.Path.glob", return_value=[Path("1.pkg.tar.xz")])
assert list(executor._archive_lookup(package_ahriman)) == [Path("1.pkg.tar.xz")]
glob_mock.assert_called_once_with(f"{package_ahriman.packages[package_ahriman.base].filename}*")
def test_archive_lookup_version_mismatch(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must return nothing if no packages found with the same version
"""
mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
mocker.patch("pathlib.Path.iterdir", return_value=[
Path("1.pkg.tar.zst"),
])
mocker.patch("ahriman.models.package.Package.from_archive", return_value=replace(package_ahriman, version="1"))
assert list(executor._archive_lookup(package_ahriman)) == []
def test_archive_lookup_architecture_mismatch(executor: Executor, package_ahriman: Package,
mocker: MockerFixture) -> None:
"""
must return nothing if architecture doesn't match
"""
package_ahriman.packages[package_ahriman.base].architecture = "x86_64"
mocker.patch("ahriman.core.repository.executor.Executor.architecture", return_value="i686")
mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
mocker.patch("pathlib.Path.iterdir", return_value=[
Path("1.pkg.tar.zst"),
])
mocker.patch("ahriman.models.package.Package.from_archive", return_value=package_ahriman)
assert list(executor._archive_lookup(package_ahriman)) == []
def test_archive_rename(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must correctly rename package archive
"""
path = "gconf-3.2.6+11+g07808097-10-x86_64.pkg.tar.zst"
safe_path = "gconf-3.2.6-11-g07808097-10-x86_64.pkg.tar.zst"
package_ahriman.packages[package_ahriman.base].filename = path
rename_mock = mocker.patch("ahriman.core.repository.executor.atomic_move")
executor._archive_rename(package_ahriman.packages[package_ahriman.base], package_ahriman.base)
rename_mock.assert_called_once_with(executor.paths.packages / path, executor.paths.packages / safe_path)
assert package_ahriman.packages[package_ahriman.base].filename == safe_path
def test_archive_rename_empty_filename(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip renaming if filename is not set
"""
package_ahriman.packages[package_ahriman.base].filename = None
rename_mock = mocker.patch("ahriman.core.repository.executor.atomic_move")
executor._archive_rename(package_ahriman.packages[package_ahriman.base], package_ahriman.base)
rename_mock.assert_not_called()
def test_package_build(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must build single package
"""
mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
status_client_mock = mocker.patch("ahriman.core.status.Client.set_building")
init_mock = mocker.patch("ahriman.core.build_tools.task.Task.init", return_value="sha")
package_mock = mocker.patch("ahriman.models.package.Package.from_build", return_value=package_ahriman)
lookup_mock = mocker.patch("ahriman.core.repository.executor.Executor._archive_lookup", return_value=[])
with_packages_mock = mocker.patch("ahriman.models.package.Package.with_packages")
rename_mock = mocker.patch("ahriman.core.repository.executor.atomic_move")
assert executor._package_build(package_ahriman, Path("local"), "packager", None) == "sha"
status_client_mock.assert_called_once_with(package_ahriman.base)
init_mock.assert_called_once_with(pytest.helpers.anyvar(int), pytest.helpers.anyvar(int), None)
package_mock.assert_called_once_with(Path("local"), executor.architecture, None)
lookup_mock.assert_called_once_with(package_ahriman)
with_packages_mock.assert_called_once_with([Path(package_ahriman.base)], executor.pacman)
rename_mock.assert_called_once_with(Path(package_ahriman.base), executor.paths.packages / package_ahriman.base)
def test_package_build_copy(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must copy package from archive if there are already built ones
"""
path = package_ahriman.packages[package_ahriman.base].filepath
mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
mocker.patch("ahriman.core.build_tools.task.Task.init")
mocker.patch("ahriman.models.package.Package.from_build", return_value=package_ahriman)
mocker.patch("ahriman.core.repository.executor.Executor._archive_lookup", return_value=[path])
mocker.patch("ahriman.core.repository.executor.atomic_move")
mocker.patch("ahriman.models.package.Package.with_packages")
copy_mock = mocker.patch("shutil.copy")
rename_mock = mocker.patch("ahriman.core.repository.executor.atomic_move")
executor._package_build(package_ahriman, Path("local"), "packager", None)
copy_mock.assert_called_once_with(path, Path("local"))
rename_mock.assert_called_once_with(Path("local") / path, executor.paths.packages / path)
def test_package_remove(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must run remove for packages
"""
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
executor._package_remove(package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath)
repo_remove_mock.assert_called_once_with(
package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath)
def test_package_remove_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must suppress errors during archive removal
"""
mocker.patch("ahriman.core.alpm.repo.Repo.remove", side_effect=Exception)
executor._package_remove(package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath)
def test_package_remove_base(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must run remove base from status client
"""
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
executor._package_remove_base(package_ahriman.base)
status_client_mock.assert_called_once_with(package_ahriman.base)
def test_package_remove_base_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must suppress errors during base removal
"""
mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove", side_effect=Exception)
executor._package_remove_base(package_ahriman.base)
def test_package_update(executor: Executor, package_ahriman: Package, user: User, mocker: MockerFixture) -> None:
"""
must update built package in repository
"""
rename_mock = mocker.patch("ahriman.core.repository.executor.atomic_move")
symlink_mock = mocker.patch("pathlib.Path.symlink_to")
repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
sign_package_mock = mocker.patch("ahriman.core.sign.gpg.GPG.process_sign_package", side_effect=lambda fn, _: [fn])
filepath = next(package.filepath for package in package_ahriman.packages.values())
executor._package_update(filepath, package_ahriman.base, user.key)
# must move files (once)
rename_mock.assert_called_once_with(
executor.paths.packages / filepath, executor.paths.archive_for(package_ahriman.base) / filepath)
# must sign package
sign_package_mock.assert_called_once_with(executor.paths.packages / filepath, user.key)
# symlink to the archive
symlink_mock.assert_called_once_with(
Path("..") /
".." /
".." /
executor.paths.archive_for(
package_ahriman.base).relative_to(
executor.paths.root) /
filepath)
# must add package
repo_add_mock.assert_called_once_with(executor.paths.repository / filepath)
def test_package_update_empty_filename(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip update for package which does not have a path
"""
rename_mock = mocker.patch("ahriman.core.repository.executor.atomic_move")
executor._package_update(None, package_ahriman.base, None)
rename_mock.assert_not_called()
def test_process_build(executor: Executor, package_ahriman: Package, passwd: Any, mocker: MockerFixture) -> None:
"""
must run build process
"""
mocker.patch("ahriman.models.repository_paths.getpwuid", return_value=passwd)
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
init_mock = mocker.patch("ahriman.core.build_tools.task.Task.init", return_value="sha")
move_mock = mocker.patch("shutil.move")
status_client_mock = mocker.patch("ahriman.core.status.Client.set_building")
changes_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_changes_get",
return_value=Changes("commit", "change"))
commit_sha_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_changes_update")
depends_on_mock = mocker.patch("ahriman.core.build_tools.package_archive.PackageArchive.depends_on",
return_value=Dependencies())
dependencies_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_dependencies_update")
with_packages_mock = mocker.patch("ahriman.models.package.Package.with_packages")
build_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_build", return_value="sha")
executor.process_build([package_ahriman], Packagers("packager"), bump_pkgrel=False)
init_mock.assert_called_once_with(pytest.helpers.anyvar(int), pytest.helpers.anyvar(int), None)
with_packages_mock.assert_called_once_with([Path(package_ahriman.base)], executor.pacman)
changes_mock.assert_called_once_with(package_ahriman.base)
build_mock.assert_called_once_with(package_ahriman, pytest.helpers.anyvar(Path, strict=True), None, None)
depends_on_mock.assert_called_once_with()
dependencies_mock.assert_called_once_with(package_ahriman.base, Dependencies())
# must move files (once)
move_mock.assert_called_once_with(Path(package_ahriman.base), executor.paths.packages / package_ahriman.base)
# must update status
status_client_mock.assert_called_once_with(package_ahriman.base)
commit_sha_mock.assert_called_once_with(package_ahriman.base, Changes("sha", "change"))
@ -50,7 +231,7 @@ def test_process_build_bump_pkgrel(executor: Executor, package_ahriman: Package,
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
mocker.patch("shutil.move")
mocker.patch("ahriman.core.repository.executor.atomic_move")
init_mock = mocker.patch("ahriman.core.build_tools.task.Task.init")
executor.process_build([package_ahriman], Packagers("packager"), bump_pkgrel=True)
@ -67,7 +248,7 @@ def test_process_build_failure(executor: Executor, package_ahriman: Package, moc
mocker.patch("ahriman.core.repository.executor.Executor.packages_built")
mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
mocker.patch("ahriman.core.build_tools.task.Task.init")
mocker.patch("shutil.move", side_effect=Exception)
mocker.patch("ahriman.core.repository.executor.atomic_move", side_effect=Exception)
status_client_mock = mocker.patch("ahriman.core.status.Client.set_failed")
executor.process_build([package_ahriman])
@ -79,15 +260,15 @@ def test_process_remove_base(executor: Executor, package_ahriman: Package, mocke
must run remove process for whole base
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
base_remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
executor.process_remove([package_ahriman.base])
# must remove via alpm wrapper
repo_remove_mock.assert_called_once_with(
remove_mock.assert_called_once_with(
package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath)
# must update status and remove package files
status_client_mock.assert_called_once_with(package_ahriman.base)
base_remove_mock.assert_called_once_with(package_ahriman.base)
def test_process_remove_with_debug(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
@ -99,12 +280,12 @@ def test_process_remove_with_debug(executor: Executor, package_ahriman: Package,
f"{package_ahriman.base}-debug": package_ahriman.packages[package_ahriman.base],
}
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
executor.process_remove([package_ahriman.base])
# must remove via alpm wrapper
repo_remove_mock.assert_has_calls([
remove_mock.assert_has_calls([
MockCall(package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath),
MockCall(f"{package_ahriman.base}-debug", package_ahriman.packages[package_ahriman.base].filepath),
])
@ -116,12 +297,12 @@ def test_process_remove_base_multiple(executor: Executor, package_python_schedul
must run remove process for whole base with multiple packages
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
status_client_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
executor.process_remove([package_python_schedule.base])
# must remove via alpm wrapper
repo_remove_mock.assert_has_calls([
remove_mock.assert_has_calls([
MockCall(package, props.filepath)
for package, props in package_python_schedule.packages.items()
], any_order=True)
@ -135,45 +316,27 @@ def test_process_remove_base_single(executor: Executor, package_python_schedule:
must run remove process for single package in base
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
status_client_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
executor.process_remove(["python2-schedule"])
# must remove via alpm wrapper
repo_remove_mock.assert_called_once_with(
remove_mock.assert_called_once_with(
"python2-schedule", package_python_schedule.packages["python2-schedule"].filepath)
# must not update status
status_client_mock.assert_not_called()
def test_process_remove_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must suppress tree clear errors during package base removal
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove", side_effect=Exception)
executor.process_remove([package_ahriman.base])
def test_process_remove_tree_clear_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must suppress remove errors
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
mocker.patch("ahriman.core.alpm.repo.Repo.remove", side_effect=Exception)
executor.process_remove([package_ahriman.base])
def test_process_remove_nothing(executor: Executor, package_ahriman: Package, package_python_schedule: Package,
mocker: MockerFixture) -> None:
"""
must not remove anything if it was not requested
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
executor.process_remove([package_python_schedule.base])
repo_remove_mock.assert_not_called()
remove_mock.assert_not_called()
def test_process_remove_unknown(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
@ -181,11 +344,11 @@ def test_process_remove_unknown(executor: Executor, package_ahriman: Package, mo
must remove unknown package base
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[])
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
status_client_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
executor.process_remove([package_ahriman.base])
repo_remove_mock.assert_not_called()
remove_mock.assert_not_called()
status_client_mock.assert_called_once_with(package_ahriman.base)
@ -195,9 +358,8 @@ def test_process_update(executor: Executor, package_ahriman: Package, user: User
"""
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_ahriman])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
move_mock = mocker.patch("shutil.move")
repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
sign_package_mock = mocker.patch("ahriman.core.sign.gpg.GPG.process_sign_package", side_effect=lambda fn, _: [fn])
rename_mock = mocker.patch("ahriman.core.repository.executor.Executor._archive_rename")
update_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_update")
status_client_mock = mocker.patch("ahriman.core.status.Client.set_success")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor.process_remove")
packager_mock = mocker.patch("ahriman.core.repository.executor.Executor.packager", return_value=user)
@ -206,12 +368,8 @@ def test_process_update(executor: Executor, package_ahriman: Package, user: User
# must return complete
assert executor.process_update([filepath], Packagers("packager"))
packager_mock.assert_called_once_with(Packagers("packager"), "ahriman")
# must move files (once)
move_mock.assert_called_once_with(executor.paths.packages / filepath, executor.paths.repository / filepath)
# must sign package
sign_package_mock.assert_called_once_with(executor.paths.packages / filepath, user.key)
# must add package
repo_add_mock.assert_called_once_with(executor.paths.repository / filepath)
rename_mock.assert_called_once_with(package_ahriman.packages[package_ahriman.base], package_ahriman.base)
update_mock.assert_called_once_with(filepath.name, package_ahriman.base, user.key)
# must update status
status_client_mock.assert_called_once_with(package_ahriman)
# must clear directory
@ -226,58 +384,26 @@ def test_process_update_group(executor: Executor, package_python_schedule: Packa
"""
must group single packages under one base
"""
mocker.patch("shutil.move")
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_python_schedule])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
update_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_update")
status_client_mock = mocker.patch("ahriman.core.status.Client.set_success")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor.process_remove")
executor.process_update([package.filepath for package in package_python_schedule.packages.values()])
repo_add_mock.assert_has_calls([
MockCall(executor.paths.repository / package.filepath)
update_mock.assert_has_calls([
MockCall(package.filename, package_python_schedule.base, None)
for package in package_python_schedule.packages.values()
], any_order=True)
status_client_mock.assert_called_once_with(package_python_schedule)
remove_mock.assert_called_once_with([])
def test_process_update_unsafe(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must encode file name
"""
path = "gconf-3.2.6+11+g07808097-10-x86_64.pkg.tar.zst"
safe_path = "gconf-3.2.6-11-g07808097-10-x86_64.pkg.tar.zst"
package_ahriman.packages[package_ahriman.base].filename = path
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_ahriman])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
move_mock = mocker.patch("shutil.move")
repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
executor.process_update([Path(path)])
move_mock.assert_has_calls([
MockCall(executor.paths.packages / path, executor.paths.packages / safe_path),
MockCall(executor.paths.packages / safe_path, executor.paths.repository / safe_path)
])
repo_add_mock.assert_called_once_with(executor.paths.repository / safe_path)
def test_process_empty_filename(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip update for package which does not have a path
"""
package_ahriman.packages[package_ahriman.base].filename = None
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_ahriman])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
executor.process_update([package.filepath for package in package_ahriman.packages.values()])
def test_process_update_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must process update for failed package
"""
mocker.patch("shutil.move", side_effect=Exception)
mocker.patch("ahriman.core.repository.executor.Executor._package_update", side_effect=Exception)
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_ahriman])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
status_client_mock = mocker.patch("ahriman.core.status.Client.set_failed")
@ -294,8 +420,7 @@ def test_process_update_removed_package(executor: Executor, package_python_sched
without_python2 = Package.from_json(package_python_schedule.view())
del without_python2.packages["python2-schedule"]
mocker.patch("shutil.move")
mocker.patch("ahriman.core.alpm.repo.Repo.add")
mocker.patch("ahriman.core.repository.executor.Executor._package_update")
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[without_python2])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor.process_remove")

View File

@ -1,4 +1,5 @@
import datetime
import fcntl
import logging
import os
import pytest
@ -9,15 +10,48 @@ from typing import Any
from unittest.mock import call as MockCall
from ahriman.core.exceptions import BuildError, CalledProcessError, OptionError, UnsafeRunError
from ahriman.core.utils import check_output, check_user, dataclass_view, enum_values, extract_user, filter_json, \
full_version, minmax, package_like, parse_version, partition, pretty_datetime, pretty_interval, pretty_size, \
safe_filename, srcinfo_property, srcinfo_property_list, trim_package, utcnow, walk
from ahriman.core.utils import (
atomic_move,
check_output,
check_user,
dataclass_view,
enum_values,
extract_user,
filelock,
filter_json,
full_version,
minmax,
package_like,
parse_version,
partition,
pretty_datetime,
pretty_interval,
pretty_size,
safe_filename,
srcinfo_property,
srcinfo_property_list,
trim_package,
utcnow,
walk,
)
from ahriman.models.package import Package
from ahriman.models.package_source import PackageSource
from ahriman.models.repository_id import RepositoryId
from ahriman.models.repository_paths import RepositoryPaths
def test_atomic_move(mocker: MockerFixture) -> None:
"""
must move file with locking
"""
filelock_mock = mocker.patch("ahriman.core.utils.filelock")
move_mock = mocker.patch("shutil.move")
atomic_move(Path("source"), Path("destination"))
filelock_mock.assert_called_once_with(Path("destination"))
move_mock.assert_called_once_with(Path("source"), Path("destination"))
def test_check_output(mocker: MockerFixture) -> None:
"""
must run command and log result
@ -237,6 +271,53 @@ def test_extract_user() -> None:
assert extract_user() == "doas"
def test_filelock(mocker: MockerFixture) -> None:
"""
must perform file locking
"""
lock_mock = mocker.patch("fcntl.flock")
open_mock = mocker.patch("pathlib.Path.open", autospec=True)
unlink_mock = mocker.patch("pathlib.Path.unlink")
with filelock(Path("local")):
pass
open_mock.assert_called_once_with(Path(".local"), "ab")
lock_mock.assert_has_calls([
MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
])
unlink_mock.assert_called_once_with(missing_ok=True)
def test_filelock_remove_lock(mocker: MockerFixture) -> None:
"""
must remove lock file in case of exception
"""
mocker.patch("pathlib.Path.open", side_effect=Exception)
unlink_mock = mocker.patch("pathlib.Path.unlink")
with pytest.raises(Exception):
with filelock(Path("local")):
pass
unlink_mock.assert_called_once_with(missing_ok=True)
def test_filelock_unlock(mocker: MockerFixture) -> None:
"""
must unlock file in case of exception
"""
mocker.patch("pathlib.Path.open")
lock_mock = mocker.patch("fcntl.flock")
with pytest.raises(Exception):
with filelock(Path("local")):
raise Exception
lock_mock.assert_has_calls([
MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
])
def test_filter_json(package_ahriman: Package) -> None:
"""
must filter fields by known list

View File

@ -19,10 +19,9 @@ def test_configuration_schema(configuration: Configuration) -> None:
"""
section = "console"
configuration.set_option("report", "target", section)
_, repository_id = configuration.check_loaded()
expected = {section: ReportTrigger.CONFIGURATION_SCHEMA[section]}
assert ReportTrigger.configuration_schema(repository_id, configuration) == expected
assert ReportTrigger.configuration_schema(configuration) == expected
def test_configuration_schema_no_section(configuration: Configuration) -> None:
@ -31,9 +30,7 @@ def test_configuration_schema_no_section(configuration: Configuration) -> None:
"""
section = "abracadabra"
configuration.set_option("report", "target", section)
_, repository_id = configuration.check_loaded()
assert ReportTrigger.configuration_schema(repository_id, configuration) == {}
assert ReportTrigger.configuration_schema(configuration) == {}
def test_configuration_schema_no_schema(configuration: Configuration) -> None:
@ -43,17 +40,15 @@ def test_configuration_schema_no_schema(configuration: Configuration) -> None:
section = "abracadabra"
configuration.set_option("report", "target", section)
configuration.set_option(section, "key", "value")
_, repository_id = configuration.check_loaded()
assert ReportTrigger.configuration_schema(repository_id, configuration) == {}
assert ReportTrigger.configuration_schema(configuration) == {}
def test_configuration_schema_empty(configuration: Configuration) -> None:
"""
must return default schema if no configuration set
"""
_, repository_id = configuration.check_loaded()
assert ReportTrigger.configuration_schema(repository_id, None) == ReportTrigger.CONFIGURATION_SCHEMA
assert ReportTrigger.configuration_schema(None) == ReportTrigger.CONFIGURATION_SCHEMA
def test_configuration_schema_variables() -> None:

View File

@ -153,38 +153,12 @@ def test_on_start(trigger_loader: TriggerLoader, mocker: MockerFixture) -> None:
"""
upload_mock = mocker.patch("ahriman.core.upload.UploadTrigger.on_start")
report_mock = mocker.patch("ahriman.core.report.ReportTrigger.on_start")
atexit_mock = mocker.patch("atexit.register")
trigger_loader.on_start()
assert trigger_loader._on_stop_requested
report_mock.assert_called_once_with()
upload_mock.assert_called_once_with()
def test_on_stop_with_on_start(configuration: Configuration, mocker: MockerFixture) -> None:
"""
must call on_stop on exit if on_start was called
"""
mocker.patch("ahriman.core.upload.UploadTrigger.on_start")
mocker.patch("ahriman.core.report.ReportTrigger.on_start")
on_stop_mock = mocker.patch("ahriman.core.triggers.trigger_loader.TriggerLoader.on_stop")
_, repository_id = configuration.check_loaded()
trigger_loader = TriggerLoader.load(repository_id, configuration)
trigger_loader.on_start()
del trigger_loader
on_stop_mock.assert_called_once_with()
def test_on_stop_without_on_start(configuration: Configuration, mocker: MockerFixture) -> None:
"""
must not call on_stop on exit if on_start wasn't called
"""
on_stop_mock = mocker.patch("ahriman.core.triggers.trigger_loader.TriggerLoader.on_stop")
_, repository_id = configuration.check_loaded()
trigger_loader = TriggerLoader.load(repository_id, configuration)
del trigger_loader
on_stop_mock.assert_not_called()
atexit_mock.assert_called_once_with(trigger_loader.on_stop)
def test_on_stop(trigger_loader: TriggerLoader, mocker: MockerFixture) -> None:

View File

@ -516,6 +516,15 @@ def test_build_status_pretty_print(package_ahriman: Package) -> None:
assert isinstance(package_ahriman.pretty_print(), str)
def test_vercmp(package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must call vercmp
"""
vercmp_mock = mocker.patch("ahriman.models.package.vercmp")
package_ahriman.vercmp("version")
vercmp_mock.assert_called_once_with(package_ahriman.version, "version")
def test_with_packages(package_ahriman: Package, package_python_schedule: Package, pacman: Pacman,
mocker: MockerFixture) -> None:
"""

View File

@ -248,6 +248,28 @@ def test_chown_invalid_path(repository_paths: RepositoryPaths) -> None:
repository_paths._chown(repository_paths.root.parent)
def test_archive_for(repository_paths: RepositoryPaths, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must correctly define archive path
"""
mocker.patch("pathlib.Path.is_dir", return_value=True)
path = repository_paths.archive_for(package_ahriman.base)
assert path == repository_paths.archive / "packages" / "a" / package_ahriman.base
def test_archive_for_create_tree(repository_paths: RepositoryPaths, package_ahriman: Package,
mocker: MockerFixture) -> None:
"""
must create archive directory if it doesn't exist
"""
owner_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
mkdir_mock = mocker.patch("pathlib.Path.mkdir")
repository_paths.archive_for(package_ahriman.base)
owner_mock.assert_called_once_with(repository_paths.archive)
mkdir_mock.assert_called_once_with(mode=0o755, parents=True)
def test_cache_for(repository_paths: RepositoryPaths, package_ahriman: Package) -> None:
"""
must return correct path for cache directory
@ -287,13 +309,24 @@ def test_preserve_owner_specific(tmp_path: Path, repository_id: RepositoryId, mo
chown_mock.assert_has_calls([MockCall(repository_paths.root / "content" / "created2")])
def test_preserve_owner_no_directory(tmp_path: Path, repository_id: RepositoryId, mocker: MockerFixture) -> None:
"""
must skip directory scan if it does not exist
"""
repository_paths = RepositoryPaths(tmp_path, repository_id)
chown_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths._chown")
with repository_paths.preserve_owner(Path("empty")):
(repository_paths.root / "created1").touch()
chown_mock.assert_not_called()
def test_tree_clear(repository_paths: RepositoryPaths, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must remove any package related files
"""
paths = {
getattr(repository_paths, prop)(package_ahriman.base)
for prop in dir(repository_paths) if prop.endswith("_for")
repository_paths.cache_for(package_ahriman.base),
}
rmtree_mock = mocker.patch("shutil.rmtree")
@ -313,6 +346,7 @@ def test_tree_create(repository_paths: RepositoryPaths, mocker: MockerFixture) -
for prop in dir(repository_paths)
if not prop.startswith("_")
and prop not in (
"archive_for",
"build_root",
"logger_name",
"logger",

View File

@ -109,7 +109,7 @@ async def test_post(client: TestClient, repository_paths: RepositoryPaths, mocke
local = Path("local")
save_mock = pytest.helpers.patch_view(client.app, "save_file",
AsyncMock(return_value=("filename", local / ".filename")))
rename_mock = mocker.patch("pathlib.Path.rename")
rename_mock = mocker.patch("ahriman.web.views.v1.service.upload.atomic_move")
# no content validation here because it has invalid schema
data = FormData()
@ -118,7 +118,7 @@ async def test_post(client: TestClient, repository_paths: RepositoryPaths, mocke
response = await client.post("/api/v1/service/upload", data=data)
assert response.ok
save_mock.assert_called_once_with(pytest.helpers.anyvar(int), repository_paths.packages, max_body_size=None)
rename_mock.assert_called_once_with(local / "filename")
rename_mock.assert_called_once_with(local / ".filename", local / "filename")
async def test_post_with_sig(client: TestClient, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
@ -131,7 +131,7 @@ async def test_post_with_sig(client: TestClient, repository_paths: RepositoryPat
("filename", local / ".filename"),
("filename.sig", local / ".filename.sig"),
]))
rename_mock = mocker.patch("pathlib.Path.rename")
rename_mock = mocker.patch("ahriman.web.views.v1.service.upload.atomic_move")
# no content validation here because it has invalid schema
data = FormData()
@ -145,8 +145,8 @@ async def test_post_with_sig(client: TestClient, repository_paths: RepositoryPat
MockCall(pytest.helpers.anyvar(int), repository_paths.packages, max_body_size=None),
])
rename_mock.assert_has_calls([
MockCall(local / "filename"),
MockCall(local / "filename.sig"),
MockCall(local / ".filename", local / "filename"),
MockCall(local / ".filename.sig", local / "filename.sig"),
])

View File

@ -10,6 +10,9 @@ root = /
sync_files_database = no
use_ahriman_cache = no
[archive]
keep_built_packages = 3
[auth]
client_id = client_id
client_secret = client_secret