Compare commits


2 Commits

Author SHA1 Message Date
10798b9ba3 fix: correctly process trigger repo specific settings in validator (see #154) 2025-08-01 16:53:15 +03:00
358e3dc4d2 feat: expose repository name and architecture in configuration if available
In some cases there are references to the current repository settings. In
order to handle them correctly, two read-only options have been added

Related to #154
2025-07-31 14:14:22 +03:00
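The feature commit above exposes the active repository identifier through the repository configuration section. A minimal sketch of that behaviour, assuming the Configuration.repository_id setter shown further down in this diff and that a bare Configuration() instance can be created outside the usual load path (the repository name "aur" and architecture "x86_64" are invented for illustration):

from ahriman.core.configuration import Configuration
from ahriman.models.repository_id import RepositoryId

configuration = Configuration()

# attaching an identifier generates the read-only repository options
configuration.repository_id = RepositoryId("x86_64", "aur")
assert configuration.get("repository", "name") == "aur"
assert configuration.get("repository", "architecture") == "x86_64"

# an empty identifier removes them again (see the setter and the related tests below)
configuration.repository_id = RepositoryId("", "")
assert configuration.get("repository", "name", fallback=None) is None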
31 changed files with 301 additions and 902 deletions

View File

@ -132,14 +132,6 @@ ahriman.core.database.migrations.m015_logs_process_id module
:no-undoc-members:
:show-inheritance:
ahriman.core.database.migrations.m016_archive module
-----------------------------------------------------
.. automodule:: ahriman.core.database.migrations.m016_archive
:members:
:no-undoc-members:
:show-inheritance:
Module contents
---------------

View File

@ -4,14 +4,6 @@ ahriman.core.housekeeping package
Submodules
----------
ahriman.core.housekeeping.archive_rotation_trigger module
-----------------------------------------------------------
.. automodule:: ahriman.core.housekeeping.archive_rotation_trigger
:members:
:no-undoc-members:
:show-inheritance:
ahriman.core.housekeeping.logs_rotation_trigger module
--------------------------------------------------------

View File

@ -97,13 +97,6 @@ libalpm and AUR related configuration. Group name can refer to architecture, e.g
* ``sync_files_database`` - download files database from mirror, boolean, required.
* ``use_ahriman_cache`` - use local pacman package cache instead of system one, boolean, required. With this option enabled you might want to refresh database periodically (available as additional flag for some subcommands). If set to ``no``, databases must be synchronized manually.
``archive`` group
-----------------
Describes settings for packages archives management extensions.
* ``keep_built_packages`` - keep this amount of built packages with different versions, integer, required. ``0`` (or negative number) will effectively disable archives removal.
``auth`` group
--------------
@ -146,6 +139,8 @@ Build related configuration. Group name can refer to architecture, e.g. ``build:
Base repository settings.
* ``architecture`` - repository architecture, string. This field is read-only and generated automatically from run options if possible.
* ``name`` - repository name, string. This field is read-only and generated automatically from run options if possible.
* ``root`` - root path for application, string, required.
``sign:*`` groups

View File

@ -44,11 +44,9 @@ triggers[] = ahriman.core.report.ReportTrigger
triggers[] = ahriman.core.upload.UploadTrigger
triggers[] = ahriman.core.gitremote.RemotePushTrigger
triggers[] = ahriman.core.housekeeping.LogsRotationTrigger
triggers[] = ahriman.core.housekeeping.ArchiveRotationTrigger
; List of well-known triggers. Used only for configuration purposes.
triggers_known[] = ahriman.core.gitremote.RemotePullTrigger
triggers_known[] = ahriman.core.gitremote.RemotePushTrigger
triggers_known[] = ahriman.core.housekeeping.ArchiveRotationTrigger
triggers_known[] = ahriman.core.housekeeping.LogsRotationTrigger
triggers_known[] = ahriman.core.report.ReportTrigger
triggers_known[] = ahriman.core.upload.UploadTrigger

View File

@ -1,7 +1,3 @@
[archive]
; Keep amount of last built packages in archive. 0 means keep all packages
keep_built_packages = 1
[logs-rotation]
; Keep last build logs for each package
keep_last_logs = 5

View File

@ -21,7 +21,6 @@ import argparse
from ahriman.application.handlers.handler import Handler, SubParserAction
from ahriman.core.configuration import Configuration
from ahriman.core.utils import walk
from ahriman.models.repository_id import RepositoryId
from ahriman.models.repository_paths import RepositoryPaths
@ -50,7 +49,6 @@ class TreeMigrate(Handler):
target_tree.tree_create()
# perform migration
TreeMigrate.tree_move(current_tree, target_tree)
TreeMigrate.fix_symlinks(target_tree)
@staticmethod
def _set_service_tree_migrate_parser(root: SubParserAction) -> argparse.ArgumentParser:
@ -68,22 +66,6 @@ class TreeMigrate(Handler):
parser.set_defaults(lock=None, quiet=True, report=False)
return parser
@staticmethod
def fix_symlinks(paths: RepositoryPaths) -> None:
"""
fix packages archives symlinks
Args:
paths(RepositoryPaths): new repository paths
"""
archives = {path.name: path for path in walk(paths.archive)}
for symlink in walk(paths.repository):
if symlink.exists(): # no need to check for symlinks as we have just walked through the tree
continue
if (source_archive := archives.get(symlink.name)) is not None:
symlink.unlink()
symlink.symlink_to(source_archive.relative_to(symlink.parent, walk_up=True))
@staticmethod
def tree_move(from_tree: RepositoryPaths, to_tree: RepositoryPaths) -> None:
"""
@ -95,7 +77,6 @@ class TreeMigrate(Handler):
"""
# we don't care about devtools chroot
for attribute in (
RepositoryPaths.archive,
RepositoryPaths.packages,
RepositoryPaths.pacman,
RepositoryPaths.repository,

View File

@ -52,7 +52,7 @@ class Validate(Handler):
"""
from ahriman.core.configuration.validator import Validator
schema = Validate.schema(repository_id, configuration)
schema = Validate.schema(configuration)
validator = Validator(configuration=configuration, schema=schema)
if validator.validate(configuration.dump()):
@ -83,12 +83,11 @@ class Validate(Handler):
return parser
@staticmethod
def schema(repository_id: RepositoryId, configuration: Configuration) -> ConfigurationSchema:
def schema(configuration: Configuration) -> ConfigurationSchema:
"""
get schema with triggers
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance
Returns:
@ -107,12 +106,12 @@ class Validate(Handler):
continue
# default settings if any
for schema_name, schema in trigger_class.configuration_schema(repository_id, None).items():
for schema_name, schema in trigger_class.configuration_schema(None).items():
erased = Validate.schema_erase_required(copy.deepcopy(schema))
root[schema_name] = Validate.schema_merge(root.get(schema_name, {}), erased)
# settings according to enabled triggers
for schema_name, schema in trigger_class.configuration_schema(repository_id, configuration).items():
for schema_name, schema in trigger_class.configuration_schema(configuration).items():
root[schema_name] = Validate.schema_merge(root.get(schema_name, {}), copy.deepcopy(schema))
return root

View File

@ -31,21 +31,20 @@ class Repo(LazyLogging):
Attributes:
name(str): repository name
root(Path): repository root
paths(RepositoryPaths): repository paths instance
sign_args(list[str]): additional args which have to be used to sign repository archive
uid(int): uid of the repository owner user
"""
def __init__(self, name: str, paths: RepositoryPaths, sign_args: list[str], root: Path | None = None) -> None:
def __init__(self, name: str, paths: RepositoryPaths, sign_args: list[str]) -> None:
"""
Args:
name(str): repository name
paths(RepositoryPaths): repository paths instance
sign_args(list[str]): additional args which have to be used to sign repository archive
root(Path | None, optional): repository root. If none set, the default will be used (Default value = None)
"""
self.name = name
self.root = root or paths.repository
self.paths = paths
self.uid, _ = paths.root_owner
self.sign_args = sign_args
@ -57,36 +56,28 @@ class Repo(LazyLogging):
Returns:
Path: path to repository database
"""
return self.root / f"{self.name}.db.tar.gz"
return self.paths.repository / f"{self.name}.db.tar.gz"
def add(self, path: Path, *, remove: bool = True) -> None:
def add(self, path: Path) -> None:
"""
add new package to repository
Args:
path(Path): path to archive to add
remove(bool, optional): whether to remove old packages or not (Default value = True)
"""
command = ["repo-add", *self.sign_args]
if remove:
command.extend(["--remove"])
command.extend([str(self.repo_path), str(path)])
# add to repository
check_output(
*command,
"repo-add", *self.sign_args, "-R", str(self.repo_path), str(path),
exception=BuildError.from_process(path.name),
cwd=self.root,
cwd=self.paths.repository,
logger=self.logger,
user=self.uid,
)
user=self.uid)
def init(self) -> None:
"""
create empty repository database. It just calls add with empty arguments
"""
check_output("repo-add", *self.sign_args, str(self.repo_path),
cwd=self.root, logger=self.logger, user=self.uid)
cwd=self.paths.repository, logger=self.logger, user=self.uid)
def remove(self, package: str, filename: Path) -> None:
"""
@ -97,14 +88,13 @@ class Repo(LazyLogging):
filename(Path): package filename to remove
"""
# remove package and signature (if any) from filesystem
for full_path in self.root.glob(f"**/{filename.name}*"):
for full_path in self.paths.repository.glob(f"{filename}*"):
full_path.unlink()
# remove package from registry
check_output(
"repo-remove", *self.sign_args, str(self.repo_path), package,
exception=BuildError.from_process(package),
cwd=self.root,
cwd=self.paths.repository,
logger=self.logger,
user=self.uid,
)
user=self.uid)

View File

@ -43,7 +43,6 @@ class Configuration(configparser.RawConfigParser):
SYSTEM_CONFIGURATION_PATH(Path): (class attribute) default system configuration path distributed by package
includes(list[Path]): list of includes which were read
path(Path | None): path to root configuration file
repository_id(RepositoryId | None): repository unique identifier
Examples:
Configuration class provides additional method in order to handle application configuration. Since this class is
@ -94,7 +93,7 @@ class Configuration(configparser.RawConfigParser):
},
)
self.repository_id: RepositoryId | None = None
self._repository_id: RepositoryId | None = None
self.path: Path | None = None
self.includes: list[Path] = []
@ -129,6 +128,32 @@ class Configuration(configparser.RawConfigParser):
"""
return self.getpath("settings", "logging")
@property
def repository_id(self) -> RepositoryId | None:
"""
repository identifier
Returns:
RepositoryId: repository unique identifier
"""
return self._repository_id
@repository_id.setter
def repository_id(self, repository_id: RepositoryId | None) -> None:
"""
setter for repository identifier
Args:
repository_id(RepositoryId | None): repository unique identifier
"""
self._repository_id = repository_id
if repository_id is None or repository_id.is_empty:
self.remove_option("repository", "name")
self.remove_option("repository", "architecture")
else:
self.set_option("repository", "name", repository_id.name)
self.set_option("repository", "architecture", repository_id.architecture)
@property
def repository_name(self) -> str:
"""

View File

@ -249,6 +249,10 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
"repository": {
"type": "dict",
"schema": {
"architecture": {
"type": "string",
"empty": False,
},
"name": {
"type": "string",
"empty": False,

View File

@ -1,84 +0,0 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import argparse
from dataclasses import replace
from sqlite3 import Connection
from ahriman.application.handlers.handler import Handler
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.models.package import Package
from ahriman.models.pacman_synchronization import PacmanSynchronization
from ahriman.models.repository_paths import RepositoryPaths
__all__ = ["migrate_data"]
def migrate_data(connection: Connection, configuration: Configuration) -> None:
"""
perform data migration
Args:
connection(Connection): database connection
configuration(Configuration): configuration instance
"""
del connection
config_path, _ = configuration.check_loaded()
args = argparse.Namespace(configuration=config_path, architecture=None, repository=None, repository_id=None)
for repository_id in Handler.repositories_extract(args):
paths = replace(configuration.repository_paths, repository_id=repository_id)
pacman = Pacman(repository_id, configuration, refresh_database=PacmanSynchronization.Disabled)
# create archive directory if required
if not paths.archive.is_dir():
with paths.preserve_owner(paths.root / "archive"):
paths.archive.mkdir(mode=0o755, parents=True)
move_packages(paths, pacman)
def move_packages(repository_paths: RepositoryPaths, pacman: Pacman) -> None:
"""
move packages from repository to archive and create symbolic links
Args:
repository_paths(RepositoryPaths): repository paths instance
pacman(Pacman): alpm wrapper instance
"""
for source in repository_paths.repository.iterdir():
if not source.is_file(follow_symlinks=False):
continue # skip symbolic links if any
filename = source.name
if filename.startswith(".") or ".pkg." not in filename:
# we don't use package_like method here, because it also filters out signatures
continue
package = Package.from_archive(source, pacman)
# move package to the archive directory
target = repository_paths.archive_for(package.base) / filename
source.rename(target)
# create symlink to the archive
source.symlink_to(target.relative_to(source.parent, walk_up=True))

View File

@ -17,5 +17,4 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.core.housekeeping.archive_rotation_trigger import ArchiveRotationTrigger
from ahriman.core.housekeeping.logs_rotation_trigger import LogsRotationTrigger

View File

@ -1,115 +0,0 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from collections.abc import Callable
from functools import cmp_to_key
from ahriman.core import context
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.core.triggers import Trigger
from ahriman.core.utils import package_like
from ahriman.models.package import Package
from ahriman.models.repository_id import RepositoryId
from ahriman.models.result import Result
class ArchiveRotationTrigger(Trigger):
"""
remove packages from archive
Attributes:
keep_built_packages(int): number of last packages to keep
paths(RepositoryPaths): repository paths instance
"""
CONFIGURATION_SCHEMA = {
"archive": {
"type": "dict",
"schema": {
"keep_built_packages": {
"type": "integer",
"required": True,
"coerce": "integer",
"min": 0,
},
},
},
}
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance
"""
Trigger.__init__(self, repository_id, configuration)
section = next(iter(self.configuration_sections(configuration)))
self.keep_built_packages = max(configuration.getint(section, "keep_built_packages"), 0)
self.paths = configuration.repository_paths
@classmethod
def configuration_sections(cls, configuration: Configuration) -> list[str]:
"""
extract configuration sections from configuration
Args:
configuration(Configuration): configuration instance
Returns:
list[str]: read configuration sections belong to this trigger
"""
return list(cls.CONFIGURATION_SCHEMA.keys())
def archives_remove(self, package: Package, pacman: Pacman) -> None:
"""
remove older versions of the specified package
Args:
package(Package): package which has been updated to check for older versions
pacman(Pacman): alpm wrapper instance
"""
packages: dict[tuple[str, str], Package] = {}
# we can't use here load_archives, because it ignores versions
for full_path in filter(package_like, self.paths.archive_for(package.base).iterdir()):
local = Package.from_archive(full_path, pacman)
packages.setdefault((local.base, local.version), local).packages.update(local.packages)
comparator: Callable[[Package, Package], int] = lambda left, right: left.vercmp(right.version)
to_remove = sorted(packages.values(), key=cmp_to_key(comparator))
for single in to_remove[:-self.keep_built_packages]:
self.logger.info("removing version %s of package %s", single.version, single.base)
for archive in single.packages.values():
for path in self.paths.archive_for(single.base).glob(f"{archive.filename}*"):
path.unlink()
def on_result(self, result: Result, packages: list[Package]) -> None:
"""
run trigger
Args:
result(Result): build result
packages(list[Package]): list of all available packages
"""
ctx = context.get()
pacman = ctx.get(Pacman)
for package in result.success:
self.archives_remove(package, pacman)

View File

@ -17,7 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import shutil # shutil.move is used here to ensure cross fs file movement
import shutil
from collections.abc import Iterable
from pathlib import Path
@ -41,101 +41,6 @@ class Executor(PackageInfo, Cleaner):
trait for common repository update processes
"""
def _archive_rename(self, description: PackageDescription, package_base: str) -> None:
"""
rename package archive removing special symbols
Args:
description(PackageDescription): package description
package_base(str): package base name
"""
if description.filename is None:
self.logger.warning("received empty package name for base %s", package_base)
return # suppress type checking, it never can be none actually
if (safe := safe_filename(description.filename)) != description.filename:
(self.paths.packages / description.filename).rename(self.paths.packages / safe)
description.filename = safe
def _package_build(self, package: Package, path: Path, packager: str | None,
local_version: str | None) -> str | None:
"""
build single package
Args:
package(Package): package to build
path(Path): path to directory with package files
packager(str | None): packager identifier used for this package
local_version(str | None): local version of the package
Returns:
str | None: current commit sha if available
"""
self.reporter.set_building(package.base)
task = Task(package, self.configuration, self.architecture, self.paths)
patches = self.reporter.package_patches_get(package.base, None)
commit_sha = task.init(path, patches, local_version)
built = task.build(path, PACKAGER=packager)
package.with_packages(built, self.pacman)
for src in built:
dst = self.paths.packages / src.name
shutil.move(src, dst)
return commit_sha
def _package_remove(self, package_name: str, path: Path) -> None:
"""
remove single package from repository
Args:
package_name(str): package name
path(Path): path to package archive
"""
try:
self.repo.remove(package_name, path)
except Exception:
self.logger.exception("could not remove %s", package_name)
def _package_remove_base(self, package_base: str) -> None:
"""
remove package base from repository
Args:
package_base(str): package base name:
"""
try:
with self.in_event(package_base, EventType.PackageRemoved):
self.reporter.package_remove(package_base)
except Exception:
self.logger.exception("could not remove base %s", package_base)
def _package_update(self, filename: str | None, package_base: str, packager_key: str | None) -> None:
"""
update built package in repository database
Args:
filename(str | None): archive filename
package_base(str): package base name
packager_key(str | None): packager key identifier
"""
if filename is None:
self.logger.warning("received empty package name for base %s", package_base)
return # suppress type checking, it never can be none actually
# in theory, it might be NOT packages directory, but we suppose it is
full_path = self.paths.packages / filename
files = self.sign.process_sign_package(full_path, packager_key)
for src in files:
dst = self.paths.archive_for(package_base) / src.name
src.rename(dst) # move package to archive directory
if not (symlink := self.paths.repository / dst.name).exists():
symlink.symlink_to(dst.relative_to(symlink.parent, walk_up=True)) # create link to archive
self.repo.add(self.paths.repository / filename)
def process_build(self, updates: Iterable[Package], packagers: Packagers | None = None, *,
bump_pkgrel: bool = False) -> Result:
"""
@ -150,6 +55,21 @@ class Executor(PackageInfo, Cleaner):
Returns:
Result: build result
"""
def build_single(package: Package, local_path: Path, packager_id: str | None) -> str | None:
self.reporter.set_building(package.base)
task = Task(package, self.configuration, self.architecture, self.paths)
local_version = local_versions.get(package.base) if bump_pkgrel else None
patches = self.reporter.package_patches_get(package.base, None)
commit_sha = task.init(local_path, patches, local_version)
built = task.build(local_path, PACKAGER=packager_id)
package.with_packages(built, self.pacman)
for src in built:
dst = self.paths.packages / src.name
shutil.move(src, dst)
return commit_sha
packagers = packagers or Packagers()
local_versions = {package.base: package.version for package in self.packages()}
@ -160,21 +80,16 @@ class Executor(PackageInfo, Cleaner):
try:
with self.in_event(single.base, EventType.PackageUpdated, failure=EventType.PackageUpdateFailed):
packager = self.packager(packagers, single.base)
local_version = local_versions.get(single.base) if bump_pkgrel else None
commit_sha = self._package_build(single, Path(dir_name), packager.packager_id, local_version)
last_commit_sha = build_single(single, Path(dir_name), packager.packager_id)
# update commit hash for changes keeping current diff if there is any
changes = self.reporter.package_changes_get(single.base)
self.reporter.package_changes_update(single.base, Changes(commit_sha, changes.changes))
self.reporter.package_changes_update(single.base, Changes(last_commit_sha, changes.changes))
# update dependencies list
package_archive = PackageArchive(self.paths.build_root, single, self.pacman, self.scan_paths)
dependencies = package_archive.depends_on()
self.reporter.package_dependencies_update(single.base, dependencies)
# update result set
result.add_updated(single)
except Exception:
self.reporter.set_failed(single.base)
result.add_failed(single)
@ -192,6 +107,19 @@ class Executor(PackageInfo, Cleaner):
Returns:
Result: remove result
"""
def remove_base(package_base: str) -> None:
try:
with self.in_event(package_base, EventType.PackageRemoved):
self.reporter.package_remove(package_base)
except Exception:
self.logger.exception("could not remove base %s", package_base)
def remove_package(package: str, archive_path: Path) -> None:
try:
self.repo.remove(package, archive_path) # remove the package itself
except Exception:
self.logger.exception("could not remove %s", package)
packages_to_remove: dict[str, Path] = {}
bases_to_remove: list[str] = []
@ -208,7 +136,6 @@ class Executor(PackageInfo, Cleaner):
})
bases_to_remove.append(local.base)
result.add_removed(local)
elif requested.intersection(local.packages.keys()):
packages_to_remove.update({
package: properties.filepath
@ -225,11 +152,11 @@ class Executor(PackageInfo, Cleaner):
# remove packages from repository files
for package, filename in packages_to_remove.items():
self._package_remove(package, filename)
remove_package(package, filename)
# remove bases from registered
for package in bases_to_remove:
self._package_remove_base(package)
remove_base(package)
return result
@ -245,6 +172,27 @@ class Executor(PackageInfo, Cleaner):
Returns:
Result: path to repository database
"""
def rename(archive: PackageDescription, package_base: str) -> None:
if archive.filename is None:
self.logger.warning("received empty package name for base %s", package_base)
return # suppress type checking, it never can be none actually
if (safe := safe_filename(archive.filename)) != archive.filename:
shutil.move(self.paths.packages / archive.filename, self.paths.packages / safe)
archive.filename = safe
def update_single(name: str | None, package_base: str, packager_key: str | None) -> None:
if name is None:
self.logger.warning("received empty package name for base %s", package_base)
return # suppress type checking, it never can be none actually
# in theory, it might be NOT packages directory, but we suppose it is
full_path = self.paths.packages / name
files = self.sign.process_sign_package(full_path, packager_key)
for src in files:
dst = self.paths.repository / safe_filename(src.name)
shutil.move(src, dst)
package_path = self.paths.repository / safe_filename(name)
self.repo.add(package_path)
current_packages = {package.base: package for package in self.packages()}
local_versions = {package_base: package.version for package_base, package in current_packages.items()}
@ -259,8 +207,8 @@ class Executor(PackageInfo, Cleaner):
packager = self.packager(packagers, local.base)
for description in local.packages.values():
self._archive_rename(description, local.base)
self._package_update(description.filename, local.base, packager.key)
rename(description, local.base)
update_single(description.filename, local.base, packager.key)
self.reporter.set_success(local)
result.add_updated(local)
@ -268,13 +216,12 @@ class Executor(PackageInfo, Cleaner):
if local.base in current_packages:
current_package_archives = set(current_packages[local.base].packages.keys())
removed_packages.extend(current_package_archives.difference(local.packages))
except Exception:
self.reporter.set_failed(local.base)
result.add_failed(local)
self.logger.exception("could not process %s", local.base)
self.clear_packages()
self.process_remove(removed_packages)
return result

View File

@ -80,8 +80,7 @@ class Trigger(LazyLogging):
return self.repository_id.architecture
@classmethod
def configuration_schema(cls, repository_id: RepositoryId,
configuration: Configuration | None) -> ConfigurationSchema:
def configuration_schema(cls, configuration: Configuration | None) -> ConfigurationSchema:
"""
configuration schema based on supplied service configuration
@ -89,7 +88,6 @@ class Trigger(LazyLogging):
Schema must be in cerberus format, for details and examples you can check built-in triggers.
Args:
repository_id(str): repository unique identifier
configuration(Configuration | None): configuration instance. If set to None, the default schema
should be returned
@ -101,13 +99,15 @@ class Trigger(LazyLogging):
result: ConfigurationSchema = {}
for target in cls.configuration_sections(configuration):
if not configuration.has_section(target):
continue
section, schema_name = configuration.gettype(
target, repository_id, fallback=cls.CONFIGURATION_SCHEMA_FALLBACK)
if schema_name not in cls.CONFIGURATION_SCHEMA:
continue
result[section] = cls.CONFIGURATION_SCHEMA[schema_name]
for section in configuration.sections():
if not (section == target or section.startswith(f"{target}:")):
# either repository specific or exact name
continue
schema_name = configuration.get(section, "type", fallback=section)
if schema_name not in cls.CONFIGURATION_SCHEMA:
continue
result[section] = cls.CONFIGURATION_SCHEMA[schema_name]
return result
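The validator fix from the first commit comes down to the section matching above: instead of resolving a single section name, every configuration section that either equals the trigger target or starts with the target followed by a colon is now checked, and its schema name is taken from the section's type option (falling back to the section name). A small sketch of that predicate, using section names borrowed from the tests below:

def matches(section: str, target: str) -> bool:
    # same rule as in Trigger.configuration_schema above:
    # exact name, architecture specific or repository specific variant
    return section == target or section.startswith(f"{target}:")

assert matches("test", "test")
assert matches("test:i686", "test")                    # architecture specific
assert matches("test:another-repo:x86_64", "test")     # repository specific
assert not matches("testing", "test")                  # unrelated section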

View File

@ -520,7 +520,8 @@ class Package(LazyLogging):
else:
remote_version = remote.version
return self.vercmp(remote_version) < 0
result: int = vercmp(self.version, remote_version)
return result < 0
def next_pkgrel(self, local_version: str | None) -> str | None:
"""
@ -539,7 +540,7 @@ class Package(LazyLogging):
if local_version is None:
return None # local version not found, keep upstream pkgrel
if self.vercmp(local_version) > 0:
if vercmp(self.version, local_version) > 0:
return None # upstream version is newer than local one, keep upstream pkgrel
*_, local_pkgrel = parse_version(local_version)
@ -560,19 +561,6 @@ class Package(LazyLogging):
details = "" if self.is_single_package else f""" ({" ".join(sorted(self.packages.keys()))})"""
return f"{self.base}{details}"
def vercmp(self, version: str) -> int:
"""
typed wrapper around :func:`pyalpm.vercmp()`
Args:
version(str): version to compare
Returns:
int: negative if current version is less than provided, positive if greater than and zero if equals
"""
result: int = vercmp(self.version, version)
return result
def view(self) -> dict[str, Any]:
"""
generate json package view

View File

@ -85,16 +85,6 @@ class RepositoryPaths(LazyLogging):
return Path(self.repository_id.architecture) # legacy tree suffix
return Path(self.repository_id.name) / self.repository_id.architecture
@property
def archive(self) -> Path:
"""
archive directory root
Returns:
Path: archive directory root
"""
return self.root / "archive" / self._suffix
@property
def build_root(self) -> Path:
"""
@ -237,7 +227,7 @@ class RepositoryPaths(LazyLogging):
set owner of path recursively (from root) to root owner
Notes:
More likely you don't want to call this method explicitly, consider using :func:`preserve_owner()`
More likely you don't want to call this method explicitly, consider using :func:`preserve_owner`
as context manager instead
Args:
@ -259,23 +249,6 @@ class RepositoryPaths(LazyLogging):
set_owner(path)
path = path.parent
def archive_for(self, package_base: str) -> Path:
"""
get path to archive specified search criteria
Args:
package_base(str): package base name
Returns:
Path: path to archive directory for package base
"""
directory = self.archive / "packages" / package_base[0] / package_base
if not directory.is_dir(): # create if not exists
with self.preserve_owner(self.archive):
directory.mkdir(mode=0o755, parents=True)
return directory
def cache_for(self, package_base: str) -> Path:
"""
get path to cached PKGBUILD and package sources for the package base
@ -309,9 +282,6 @@ class RepositoryPaths(LazyLogging):
path = path or self.root
def walk(root: Path) -> Generator[Path, None, None]:
if not root.exists():
return
# basically walk, but skipping some content
for child in root.iterdir():
yield child
@ -350,7 +320,6 @@ class RepositoryPaths(LazyLogging):
with self.preserve_owner():
for directory in (
self.archive,
self.cache,
self.chroot,
self.packages,

View File

@ -6,7 +6,6 @@ from unittest.mock import call as MockCall
from ahriman.application.handlers.tree_migrate import TreeMigrate
from ahriman.core.configuration import Configuration
from ahriman.models.package import Package
from ahriman.models.repository_id import RepositoryId
from ahriman.models.repository_paths import RepositoryPaths
@ -17,7 +16,6 @@ def test_run(args: argparse.Namespace, configuration: Configuration, mocker: Moc
"""
tree_create_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.tree_create")
application_mock = mocker.patch("ahriman.application.handlers.tree_migrate.TreeMigrate.tree_move")
symlinks_mock = mocker.patch("ahriman.application.handlers.tree_migrate.TreeMigrate.fix_symlinks")
_, repository_id = configuration.check_loaded()
old_paths = configuration.repository_paths
new_paths = RepositoryPaths(old_paths.root, old_paths.repository_id, _force_current_tree=True)
@ -25,37 +23,6 @@ def test_run(args: argparse.Namespace, configuration: Configuration, mocker: Moc
TreeMigrate.run(args, repository_id, configuration, report=False)
tree_create_mock.assert_called_once_with()
application_mock.assert_called_once_with(old_paths, new_paths)
symlinks_mock.assert_called_once_with(new_paths)
def test_fix_symlinks(repository_paths: RepositoryPaths, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must replace symlinks during migration
"""
mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
mocker.patch("ahriman.application.handlers.tree_migrate.walk", side_effect=[
[
repository_paths.archive_for(package_ahriman.base) / "file",
repository_paths.archive_for(package_ahriman.base) / "symlink",
],
[
repository_paths.repository / "file",
repository_paths.repository / "symlink",
],
])
mocker.patch("pathlib.Path.exists", autospec=True, side_effect=lambda p: p.name == "file")
unlink_mock = mocker.patch("pathlib.Path.unlink")
symlink_mock = mocker.patch("pathlib.Path.symlink_to")
TreeMigrate.fix_symlinks(repository_paths)
unlink_mock.assert_called_once_with()
symlink_mock.assert_called_once_with(
Path("..") /
".." /
".." /
repository_paths.archive_for(package_ahriman.base).relative_to(repository_paths.root) /
"symlink"
)
def test_move_tree(mocker: MockerFixture) -> None:
@ -69,7 +36,6 @@ def test_move_tree(mocker: MockerFixture) -> None:
TreeMigrate.tree_move(from_paths, to_paths)
rename_mock.assert_has_calls([
MockCall(from_paths.archive, to_paths.archive),
MockCall(from_paths.packages, to_paths.packages),
MockCall(from_paths.pacman, to_paths.pacman),
MockCall(from_paths.repository, to_paths.repository),

View File

@ -2,6 +2,7 @@ import argparse
import json
import pytest
from pathlib import Path
from pytest_mock import MockerFixture
from ahriman.application.handlers.validate import Validate
@ -53,12 +54,50 @@ def test_run_skip(args: argparse.Namespace, configuration: Configuration, mocker
print_mock.assert_not_called()
def test_run_default(args: argparse.Namespace, configuration: Configuration) -> None:
"""
must run on default configuration without errors
"""
args.exit_code = True
_, repository_id = configuration.check_loaded()
default = Configuration.from_path(Configuration.SYSTEM_CONFIGURATION_PATH, repository_id)
# copy autogenerated values
for section, key in (("build", "build_command"), ("repository", "root")):
value = configuration.get(section, key)
default.set_option(section, key, value)
Validate.run(args, repository_id, default, report=False)
def test_run_repo_specific_triggers(args: argparse.Namespace, configuration: Configuration,
resource_path_root: Path) -> None:
"""
must correctly insert repo specific triggers
"""
args.exit_code = True
_, repository_id = configuration.check_loaded()
# remove unused sections
for section in ("customs3", "github:x86_64", "logs-rotation", "mirrorlist"):
configuration.remove_section(section)
configuration.set_option("report", "target", "test")
for section in ("test", "test:i686", "test:another-repo:x86_64"):
configuration.set_option(section, "type", "html")
configuration.set_option(section, "link_path", "http://link_path")
configuration.set_option(section, "path", "path")
configuration.set_option(section, "template", "template")
configuration.set_option(section, "templates", str(resource_path_root))
Validate.run(args, repository_id, configuration, report=False)
def test_schema(configuration: Configuration) -> None:
"""
must generate full schema correctly
"""
_, repository_id = configuration.check_loaded()
schema = Validate.schema(repository_id, configuration)
schema = Validate.schema(configuration)
# defaults
assert schema.pop("console")
@ -91,9 +130,7 @@ def test_schema_invalid_trigger(configuration: Configuration) -> None:
"""
configuration.set_option("build", "triggers", "some.invalid.trigger.path.Trigger")
configuration.remove_option("build", "triggers_known")
_, repository_id = configuration.check_loaded()
assert Validate.schema(repository_id, configuration) == CONFIGURATION_SCHEMA
assert Validate.schema(configuration) == CONFIGURATION_SCHEMA
def test_schema_erase_required() -> None:

View File

@ -1,10 +1,8 @@
import datetime
import pytest
from dataclasses import replace
from pathlib import Path
from pytest_mock import MockerFixture
from sqlite3 import Cursor
from typing import Any, TypeVar
from unittest.mock import MagicMock, PropertyMock
@ -13,14 +11,12 @@ from ahriman.core.alpm.remote import AUR
from ahriman.core.auth import Auth
from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite
from ahriman.core.database.migrations import Migrations
from ahriman.core.repository import Repository
from ahriman.core.spawn import Spawn
from ahriman.core.status import Client
from ahriman.core.status.watcher import Watcher
from ahriman.models.aur_package import AURPackage
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
from ahriman.models.migration import Migration
from ahriman.models.package import Package
from ahriman.models.package_description import PackageDescription
from ahriman.models.package_source import PackageSource
@ -52,9 +48,7 @@ def anyvar(cls: type[T], strict: bool = False) -> T:
T: any wrapper
"""
class AnyVar(cls):
"""
any value wrapper
"""
"""any value wrapper"""
def __eq__(self, other: Any) -> bool:
"""
@ -277,23 +271,16 @@ def configuration(repository_id: RepositoryId, tmp_path: Path, resource_path_roo
@pytest.fixture
def database(configuration: Configuration, mocker: MockerFixture) -> SQLite:
def database(configuration: Configuration) -> SQLite:
"""
database fixture
Args:
configuration(Configuration): configuration fixture
mocker(MockerFixture): mocker object
Returns:
SQLite: database test instance
"""
original_method = Migrations.perform_migration
def perform_migration(self: Migrations, cursor: Cursor, migration: Migration) -> None:
original_method(self, cursor, replace(migration, migrate_data=lambda *args: None))
mocker.patch.object(Migrations, "perform_migration", autospec=True, side_effect=perform_migration)
return SQLite.load(configuration)

View File

@ -4,15 +4,6 @@ from pathlib import Path
from pytest_mock import MockerFixture
from ahriman.core.alpm.repo import Repo
from ahriman.models.repository_paths import RepositoryPaths
def test_root(repository_paths: RepositoryPaths) -> None:
"""
must correctly define repository root
"""
assert Repo(repository_paths.repository_id.name, repository_paths, []).root == repository_paths.repository
assert Repo(repository_paths.repository_id.name, repository_paths, [], Path("path")).root == Path("path")
def test_repo_path(repo: Repo) -> None:
@ -31,18 +22,6 @@ def test_repo_add(repo: Repo, mocker: MockerFixture) -> None:
repo.add(Path("path"))
check_output_mock.assert_called_once() # it will be checked later
assert check_output_mock.call_args[0][0] == "repo-add"
assert "--remove" in check_output_mock.call_args[0]
def test_repo_add_no_remove(repo: Repo, mocker: MockerFixture) -> None:
"""
must call repo-add without remove flag
"""
check_output_mock = mocker.patch("ahriman.core.alpm.repo.check_output")
repo.add(Path("path"), remove=False)
check_output_mock.assert_called_once() # it will be checked later
assert "--remove" not in check_output_mock.call_args[0]
def test_repo_init(repo: Repo, mocker: MockerFixture) -> None:

View File

@ -20,6 +20,40 @@ def test_architecture(configuration: Configuration) -> None:
assert configuration.architecture == "x86_64"
def test_repository_id(configuration: Configuration, repository_id: RepositoryId) -> None:
"""
must return repository identifier
"""
assert configuration.repository_id == repository_id
assert configuration.get("repository", "name") == repository_id.name
assert configuration.get("repository", "architecture") == repository_id.architecture
def test_repository_id_erase(configuration: Configuration) -> None:
"""
must remove repository identifier properties if empty identifier supplied
"""
configuration.repository_id = None
assert configuration.get("repository", "name", fallback=None) is None
assert configuration.get("repository", "architecture", fallback=None) is None
configuration.repository_id = RepositoryId("", "")
assert configuration.get("repository", "name", fallback=None) is None
assert configuration.get("repository", "architecture", fallback=None) is None
def test_repository_id_update(configuration: Configuration, repository_id: RepositoryId) -> None:
"""
must update repository identifier and related configuration options
"""
repository_id = RepositoryId("i686", repository_id.name)
configuration.repository_id = repository_id
assert configuration.repository_id == repository_id
assert configuration.get("repository", "name") == repository_id.name
assert configuration.get("repository", "architecture") == repository_id.architecture
def test_repository_name(configuration: Configuration) -> None:
"""
must return valid repository name

View File

@ -1,82 +0,0 @@
import pytest
from dataclasses import replace
from pathlib import Path
from pytest_mock import MockerFixture
from sqlite3 import Connection
from typing import Any
from unittest.mock import call as MockCall
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.core.database.migrations.m016_archive import migrate_data, move_packages
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
def test_migrate_data(connection: Connection, configuration: Configuration, mocker: MockerFixture) -> None:
"""
must perform data migration
"""
_, repository_id = configuration.check_loaded()
repositories = [
repository_id,
replace(repository_id, architecture="i686"),
]
mocker.patch("ahriman.application.handlers.handler.Handler.repositories_extract", return_value=repositories)
migration_mock = mocker.patch("ahriman.core.database.migrations.m016_archive.move_packages")
migrate_data(connection, configuration)
migration_mock.assert_has_calls([
MockCall(replace(configuration.repository_paths, repository_id=repository), pytest.helpers.anyvar(int))
for repository in repositories
])
def test_move_packages(repository_paths: RepositoryPaths, pacman: Pacman, package_ahriman: Package,
mocker: MockerFixture) -> None:
"""
must move packages to the archive directory
"""
def is_file(self: Path, *args: Any, **kwargs: Any) -> bool:
return "file" in self.name
mocker.patch("pathlib.Path.iterdir", return_value=[
repository_paths.repository / ".hidden-file.pkg.tar.xz",
repository_paths.repository / "directory",
repository_paths.repository / "file.pkg.tar.xz",
repository_paths.repository / "file.pkg.tar.xz.sig",
repository_paths.repository / "symlink.pkg.tar.xz",
])
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("pathlib.Path.is_file", autospec=True, side_effect=is_file)
archive_mock = mocker.patch("ahriman.models.package.Package.from_archive", return_value=package_ahriman)
rename_mock = mocker.patch("pathlib.Path.rename")
symlink_mock = mocker.patch("pathlib.Path.symlink_to")
move_packages(repository_paths, pacman)
archive_mock.assert_has_calls([
MockCall(repository_paths.repository / "file.pkg.tar.xz", pacman),
MockCall(repository_paths.repository / "file.pkg.tar.xz.sig", pacman),
])
rename_mock.assert_has_calls([
MockCall(repository_paths.archive_for(package_ahriman.base) / "file.pkg.tar.xz"),
MockCall(repository_paths.archive_for(package_ahriman.base) / "file.pkg.tar.xz.sig"),
])
symlink_mock.assert_has_calls([
MockCall(
Path("..") /
".." /
".." /
repository_paths.archive_for(package_ahriman.base).relative_to(repository_paths.root) /
"file.pkg.tar.xz"
),
MockCall(
Path("..") /
".." /
".." /
repository_paths.archive_for(package_ahriman.base).relative_to(repository_paths.root) /
"file.pkg.tar.xz.sig"
),
])

View File

@ -1,28 +1,13 @@
import pytest
from ahriman.core.configuration import Configuration
from ahriman.core.housekeeping import ArchiveRotationTrigger, LogsRotationTrigger
@pytest.fixture
def archive_rotation_trigger(configuration: Configuration) -> ArchiveRotationTrigger:
"""
archive rotation trigger fixture
Args:
configuration(Configuration): configuration fixture
Returns:
ArchiveRotationTrigger: archive rotation trigger test instance
"""
_, repository_id = configuration.check_loaded()
return ArchiveRotationTrigger(repository_id, configuration)
from ahriman.core.housekeeping import LogsRotationTrigger
@pytest.fixture
def logs_rotation_trigger(configuration: Configuration) -> LogsRotationTrigger:
"""
logs rotation trigger fixture
logs roration trigger fixture
Args:
configuration(Configuration): configuration fixture

View File

@ -1,83 +0,0 @@
import pytest
from dataclasses import replace
from pathlib import Path
from pytest_mock import MockerFixture
from typing import Any
from unittest.mock import call as MockCall
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.core.housekeeping import ArchiveRotationTrigger
from ahriman.models.package import Package
from ahriman.models.result import Result
def test_configuration_sections(configuration: Configuration) -> None:
"""
must correctly parse target list
"""
assert ArchiveRotationTrigger.configuration_sections(configuration) == ["archive"]
def test_archives_remove(archive_rotation_trigger: ArchiveRotationTrigger, package_ahriman: Package,
pacman: Pacman, mocker: MockerFixture) -> None:
"""
must remove older packages
"""
def package(version: Any, *args: Any, **kwargs: Any) -> Package:
generated = replace(package_ahriman, version=str(version))
generated.packages = {
key: replace(value, filename=str(version))
for key, value in generated.packages.items()
}
return generated
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("ahriman.core.housekeeping.archive_rotation_trigger.package_like", return_value=True)
mocker.patch("pathlib.Path.glob", return_value=[Path(str(i)) for i in range(5)])
mocker.patch("pathlib.Path.iterdir", return_value=[Path(str(i)) for i in range(5)])
mocker.patch("ahriman.models.package.Package.from_archive", side_effect=package)
unlink_mock = mocker.patch("pathlib.Path.unlink", autospec=True)
archive_rotation_trigger.archives_remove(package_ahriman, pacman)
unlink_mock.assert_has_calls([
MockCall(Path("0")),
MockCall(Path("1")),
])
def test_archives_remove_keep(archive_rotation_trigger: ArchiveRotationTrigger, package_ahriman: Package,
pacman: Pacman, mocker: MockerFixture) -> None:
"""
must keep all packages if set to
"""
def package(version: Any, *args: Any, **kwargs: Any) -> Package:
generated = replace(package_ahriman, version=str(version))
generated.packages = {
key: replace(value, filename=str(version))
for key, value in generated.packages.items()
}
return generated
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("ahriman.core.housekeeping.archive_rotation_trigger.package_like", return_value=True)
mocker.patch("pathlib.Path.glob", return_value=[Path(str(i)) for i in range(5)])
mocker.patch("pathlib.Path.iterdir", return_value=[Path(str(i)) for i in range(5)])
mocker.patch("ahriman.models.package.Package.from_archive", side_effect=package)
unlink_mock = mocker.patch("pathlib.Path.unlink", autospec=True)
archive_rotation_trigger.keep_built_packages = 0
archive_rotation_trigger.archives_remove(package_ahriman, pacman)
unlink_mock.assert_not_called()
def test_on_result(archive_rotation_trigger: ArchiveRotationTrigger, package_ahriman: Package,
package_python_schedule: Package, mocker: MockerFixture) -> None:
"""
must rotate archives
"""
mocker.patch("ahriman.core._Context.get")
remove_mock = mocker.patch("ahriman.core.housekeeping.ArchiveRotationTrigger.archives_remove")
archive_rotation_trigger.on_result(Result(added=[package_ahriman], failed=[package_python_schedule]), [])
remove_mock.assert_called_once_with(package_ahriman, pytest.helpers.anyvar(int))

View File

@ -14,7 +14,7 @@ def test_configuration_sections(configuration: Configuration) -> None:
assert LogsRotationTrigger.configuration_sections(configuration) == ["logs-rotation"]
def test_on_result(logs_rotation_trigger: LogsRotationTrigger, mocker: MockerFixture) -> None:
def test_rotate(logs_rotation_trigger: LogsRotationTrigger, mocker: MockerFixture) -> None:
"""
must rotate logs
"""

View File

@ -13,139 +13,34 @@ from ahriman.models.packagers import Packagers
from ahriman.models.user import User
def test_archive_rename(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must correctly remove package archive
"""
path = "gconf-3.2.6+11+g07808097-10-x86_64.pkg.tar.zst"
safe_path = "gconf-3.2.6-11-g07808097-10-x86_64.pkg.tar.zst"
package_ahriman.packages[package_ahriman.base].filename = path
rename_mock = mocker.patch("pathlib.Path.rename")
executor._archive_rename(package_ahriman.packages[package_ahriman.base], package_ahriman.base)
rename_mock.assert_called_once_with(executor.paths.packages / safe_path)
assert package_ahriman.packages[package_ahriman.base].filename == safe_path
def test_archive_rename_empty_filename(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip renaming if filename is not set
"""
package_ahriman.packages[package_ahriman.base].filename = None
rename_mock = mocker.patch("pathlib.Path.rename")
executor._archive_rename(package_ahriman.packages[package_ahriman.base], package_ahriman.base)
rename_mock.assert_not_called()
def test_package_build(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must build single package
"""
mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
status_client_mock = mocker.patch("ahriman.core.status.Client.set_building")
init_mock = mocker.patch("ahriman.core.build_tools.task.Task.init", return_value="sha")
with_packages_mock = mocker.patch("ahriman.models.package.Package.with_packages")
move_mock = mocker.patch("shutil.move")
assert executor._package_build(package_ahriman, Path("local"), "packager", None) == "sha"
status_client_mock.assert_called_once_with(package_ahriman.base)
init_mock.assert_called_once_with(pytest.helpers.anyvar(int), pytest.helpers.anyvar(int), None)
with_packages_mock.assert_called_once_with([Path(package_ahriman.base)], executor.pacman)
move_mock.assert_called_once_with(Path(package_ahriman.base), executor.paths.packages / package_ahriman.base)
def test_package_remove(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must run remove for packages
"""
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
executor._package_remove(package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath)
repo_remove_mock.assert_called_once_with(
package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath)
def test_package_remove_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must suppress errors during archive removal
"""
mocker.patch("ahriman.core.alpm.repo.Repo.remove", side_effect=Exception)
executor._package_remove(package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath)
def test_package_remove_base(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must run remove base from status client
"""
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
executor._package_remove_base(package_ahriman.base)
status_client_mock.assert_called_once_with(package_ahriman.base)
def test_package_remove_base_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must suppress errors during base removal
"""
mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove", side_effect=Exception)
executor._package_remove_base(package_ahriman.base)
def test_package_update(executor: Executor, package_ahriman: Package, user: User, mocker: MockerFixture) -> None:
"""
must update built package in repository
"""
rename_mock = mocker.patch("pathlib.Path.rename")
symlink_mock = mocker.patch("pathlib.Path.symlink_to")
repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
sign_package_mock = mocker.patch("ahriman.core.sign.gpg.GPG.process_sign_package", side_effect=lambda fn, _: [fn])
filepath = next(package.filepath for package in package_ahriman.packages.values())
executor._package_update(filepath, package_ahriman.base, user.key)
# must move files (once)
rename_mock.assert_called_once_with(executor.paths.archive_for(package_ahriman.base) / filepath)
# must sign package
sign_package_mock.assert_called_once_with(executor.paths.packages / filepath, user.key)
# symlink to the archive
symlink_mock.assert_called_once_with(
Path("..") /
".." /
".." /
executor.paths.archive_for(
package_ahriman.base).relative_to(
executor.paths.root) /
filepath)
# must add package
repo_add_mock.assert_called_once_with(executor.paths.repository / filepath)
def test_package_update_empty_filename(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip update for package which does not have path
"""
rename_mock = mocker.patch("pathlib.Path.rename")
executor._package_update(None, package_ahriman.base, None)
rename_mock.assert_not_called()
def test_process_build(executor: Executor, package_ahriman: Package, passwd: Any, mocker: MockerFixture) -> None:
"""
must run build process
"""
mocker.patch("ahriman.models.repository_paths.getpwuid", return_value=passwd)
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
init_mock = mocker.patch("ahriman.core.build_tools.task.Task.init", return_value="sha")
move_mock = mocker.patch("shutil.move")
status_client_mock = mocker.patch("ahriman.core.status.Client.set_building")
changes_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_changes_get",
return_value=Changes("commit", "change"))
commit_sha_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_changes_update")
depends_on_mock = mocker.patch("ahriman.core.build_tools.package_archive.PackageArchive.depends_on",
return_value=Dependencies())
dependencies_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_dependencies_update")
build_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_build", return_value="sha")
with_packages_mock = mocker.patch("ahriman.models.package.Package.with_packages")
executor.process_build([package_ahriman], Packagers("packager"), bump_pkgrel=False)
init_mock.assert_called_once_with(pytest.helpers.anyvar(int), pytest.helpers.anyvar(int), None)
with_packages_mock.assert_called_once_with([Path(package_ahriman.base)], executor.pacman)
changes_mock.assert_called_once_with(package_ahriman.base)
build_mock.assert_called_once_with(package_ahriman, pytest.helpers.anyvar(Path, strict=True), None, None)
depends_on_mock.assert_called_once_with()
dependencies_mock.assert_called_once_with(package_ahriman.base, Dependencies())
# must move files (once)
move_mock.assert_called_once_with(Path(package_ahriman.base), executor.paths.packages / package_ahriman.base)
# must update status
status_client_mock.assert_called_once_with(package_ahriman.base)
commit_sha_mock.assert_called_once_with(package_ahriman.base, Changes("sha", "change"))
@ -184,15 +79,15 @@ def test_process_remove_base(executor: Executor, package_ahriman: Package, mocke
must run remove process for whole base
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
base_remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
executor.process_remove([package_ahriman.base])
# must remove via alpm wrapper
remove_mock.assert_called_once_with(
repo_remove_mock.assert_called_once_with(
package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath)
# must update status and remove package files
base_remove_mock.assert_called_once_with(package_ahriman.base)
status_client_mock.assert_called_once_with(package_ahriman.base)
def test_process_remove_with_debug(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
@ -204,12 +99,12 @@ def test_process_remove_with_debug(executor: Executor, package_ahriman: Package,
f"{package_ahriman.base}-debug": package_ahriman.packages[package_ahriman.base],
}
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
executor.process_remove([package_ahriman.base])
# must remove via alpm wrapper
remove_mock.assert_has_calls([
repo_remove_mock.assert_has_calls([
MockCall(package_ahriman.base, package_ahriman.packages[package_ahriman.base].filepath),
MockCall(f"{package_ahriman.base}-debug", package_ahriman.packages[package_ahriman.base].filepath),
])
@ -221,12 +116,12 @@ def test_process_remove_base_multiple(executor: Executor, package_python_schedul
must run remove process for whole base with multiple packages
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
status_client_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
executor.process_remove([package_python_schedule.base])
# must remove via alpm wrapper
remove_mock.assert_has_calls([
repo_remove_mock.assert_has_calls([
MockCall(package, props.filepath)
for package, props in package_python_schedule.packages.items()
], any_order=True)
@ -240,27 +135,45 @@ def test_process_remove_base_single(executor: Executor, package_python_schedule:
must run remove process for single package in base
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
status_client_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
executor.process_remove(["python2-schedule"])
# must remove via alpm wrapper
remove_mock.assert_called_once_with(
repo_remove_mock.assert_called_once_with(
"python2-schedule", package_python_schedule.packages["python2-schedule"].filepath)
# must not update status
status_client_mock.assert_not_called()
def test_process_remove_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must suppress remove errors
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove", side_effect=Exception)
executor.process_remove([package_ahriman.base])
def test_process_remove_tree_clear_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must suppress tree clear errors during package base removal
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
mocker.patch("ahriman.core.alpm.repo.Repo.remove", side_effect=Exception)
executor.process_remove([package_ahriman.base])
def test_process_remove_nothing(executor: Executor, package_ahriman: Package, package_python_schedule: Package,
mocker: MockerFixture) -> None:
"""
must not remove anything if it was not requested
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
executor.process_remove([package_python_schedule.base])
remove_mock.assert_not_called()
repo_remove_mock.assert_not_called()
def test_process_remove_unknown(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
@ -268,11 +181,11 @@ def test_process_remove_unknown(executor: Executor, package_ahriman: Package, mo
must remove unknown package base
"""
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[])
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove")
status_client_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_remove_base")
repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
status_client_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_remove")
executor.process_remove([package_ahriman.base])
remove_mock.assert_not_called()
repo_remove_mock.assert_not_called()
status_client_mock.assert_called_once_with(package_ahriman.base)
@ -282,8 +195,9 @@ def test_process_update(executor: Executor, package_ahriman: Package, user: User
"""
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_ahriman])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
rename_mock = mocker.patch("ahriman.core.repository.executor.Executor._archive_rename")
update_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_update")
move_mock = mocker.patch("shutil.move")
repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
sign_package_mock = mocker.patch("ahriman.core.sign.gpg.GPG.process_sign_package", side_effect=lambda fn, _: [fn])
status_client_mock = mocker.patch("ahriman.core.status.Client.set_success")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor.process_remove")
packager_mock = mocker.patch("ahriman.core.repository.executor.Executor.packager", return_value=user)
@ -292,8 +206,12 @@ def test_process_update(executor: Executor, package_ahriman: Package, user: User
# must return complete
assert executor.process_update([filepath], Packagers("packager"))
packager_mock.assert_called_once_with(Packagers("packager"), "ahriman")
rename_mock.assert_called_once_with(package_ahriman.packages[package_ahriman.base], package_ahriman.base)
update_mock.assert_called_once_with(filepath.name, package_ahriman.base, user.key)
# must move files (once)
move_mock.assert_called_once_with(executor.paths.packages / filepath, executor.paths.repository / filepath)
# must sign package
sign_package_mock.assert_called_once_with(executor.paths.packages / filepath, user.key)
# must add package
repo_add_mock.assert_called_once_with(executor.paths.repository / filepath)
# must update status
status_client_mock.assert_called_once_with(package_ahriman)
# must clear directory
@ -308,26 +226,58 @@ def test_process_update_group(executor: Executor, package_python_schedule: Packa
"""
must group single packages under one base
"""
mocker.patch("shutil.move")
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_python_schedule])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
update_mock = mocker.patch("ahriman.core.repository.executor.Executor._package_update")
repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
status_client_mock = mocker.patch("ahriman.core.status.Client.set_success")
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor.process_remove")
executor.process_update([package.filepath for package in package_python_schedule.packages.values()])
update_mock.assert_has_calls([
MockCall(package.filename, package_python_schedule.base, None)
repo_add_mock.assert_has_calls([
MockCall(executor.paths.repository / package.filepath)
for package in package_python_schedule.packages.values()
], any_order=True)
status_client_mock.assert_called_once_with(package_python_schedule)
remove_mock.assert_called_once_with([])
def test_process_update_unsafe(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must encode file name
"""
path = "gconf-3.2.6+11+g07808097-10-x86_64.pkg.tar.zst"
safe_path = "gconf-3.2.6-11-g07808097-10-x86_64.pkg.tar.zst"
package_ahriman.packages[package_ahriman.base].filename = path
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_ahriman])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
move_mock = mocker.patch("shutil.move")
repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
executor.process_update([Path(path)])
move_mock.assert_has_calls([
MockCall(executor.paths.packages / path, executor.paths.packages / safe_path),
MockCall(executor.paths.packages / safe_path, executor.paths.repository / safe_path)
])
repo_add_mock.assert_called_once_with(executor.paths.repository / safe_path)
def test_process_empty_filename(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip update for package which does not have path
"""
package_ahriman.packages[package_ahriman.base].filename = None
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_ahriman])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
executor.process_update([package.filepath for package in package_ahriman.packages.values()])
def test_process_update_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must process update for failed package
"""
mocker.patch("ahriman.core.repository.executor.Executor._package_update", side_effect=Exception)
mocker.patch("shutil.move", side_effect=Exception)
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[package_ahriman])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
status_client_mock = mocker.patch("ahriman.core.status.Client.set_failed")
@ -344,7 +294,8 @@ def test_process_update_removed_package(executor: Executor, package_python_sched
without_python2 = Package.from_json(package_python_schedule.view())
del without_python2.packages["python2-schedule"]
mocker.patch("ahriman.core.repository.executor.Executor._package_update")
mocker.patch("shutil.move")
mocker.patch("ahriman.core.alpm.repo.Repo.add")
mocker.patch("ahriman.core.repository.executor.Executor.load_archives", return_value=[without_python2])
mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
remove_mock = mocker.patch("ahriman.core.repository.executor.Executor.process_remove")

View File

@ -19,10 +19,9 @@ def test_configuration_schema(configuration: Configuration) -> None:
"""
section = "console"
configuration.set_option("report", "target", section)
_, repository_id = configuration.check_loaded()
expected = {section: ReportTrigger.CONFIGURATION_SCHEMA[section]}
assert ReportTrigger.configuration_schema(repository_id, configuration) == expected
assert ReportTrigger.configuration_schema(configuration) == expected
def test_configuration_schema_no_section(configuration: Configuration) -> None:
@ -31,9 +30,7 @@ def test_configuration_schema_no_section(configuration: Configuration) -> None:
"""
section = "abracadabra"
configuration.set_option("report", "target", section)
_, repository_id = configuration.check_loaded()
assert ReportTrigger.configuration_schema(repository_id, configuration) == {}
assert ReportTrigger.configuration_schema(configuration) == {}
def test_configuration_schema_no_schema(configuration: Configuration) -> None:
@ -43,17 +40,15 @@ def test_configuration_schema_no_schema(configuration: Configuration) -> None:
section = "abracadabra"
configuration.set_option("report", "target", section)
configuration.set_option(section, "key", "value")
_, repository_id = configuration.check_loaded()
assert ReportTrigger.configuration_schema(repository_id, configuration) == {}
assert ReportTrigger.configuration_schema(configuration) == {}
def test_configuration_schema_empty(configuration: Configuration) -> None:
"""
must return default schema if no configuration set
"""
_, repository_id = configuration.check_loaded()
assert ReportTrigger.configuration_schema(repository_id, None) == ReportTrigger.CONFIGURATION_SCHEMA
assert ReportTrigger.configuration_schema(None) == ReportTrigger.CONFIGURATION_SCHEMA
def test_configuration_schema_variables() -> None:

View File

@ -516,15 +516,6 @@ def test_build_status_pretty_print(package_ahriman: Package) -> None:
assert isinstance(package_ahriman.pretty_print(), str)
def test_vercmp(package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must call vercmp
"""
vercmp_mock = mocker.patch("ahriman.models.package.vercmp")
package_ahriman.vercmp("version")
vercmp_mock.assert_called_once_with(package_ahriman.version, "version")
def test_with_packages(package_ahriman: Package, package_python_schedule: Package, pacman: Pacman,
mocker: MockerFixture) -> None:
"""

View File

@ -248,28 +248,6 @@ def test_chown_invalid_path(repository_paths: RepositoryPaths) -> None:
repository_paths._chown(repository_paths.root.parent)
def test_archive_for(repository_paths: RepositoryPaths, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must correctly define archive path
"""
mocker.patch("pathlib.Path.is_dir", return_value=True)
path = repository_paths.archive_for(package_ahriman.base)
assert path == repository_paths.archive / "packages" / "a" / package_ahriman.base
def test_archive_for_create_tree(repository_paths: RepositoryPaths, package_ahriman: Package,
mocker: MockerFixture) -> None:
"""
must create archive directory if it doesn't exist
"""
owner_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
mkdir_mock = mocker.patch("pathlib.Path.mkdir")
repository_paths.archive_for(package_ahriman.base)
owner_mock.assert_called_once_with(repository_paths.archive)
mkdir_mock.assert_called_once_with(mode=0o755, parents=True)
def test_cache_for(repository_paths: RepositoryPaths, package_ahriman: Package) -> None:
"""
must return correct path for cache directory
@ -309,24 +287,13 @@ def test_preserve_owner_specific(tmp_path: Path, repository_id: RepositoryId, mo
chown_mock.assert_has_calls([MockCall(repository_paths.root / "content" / "created2")])
def test_preserve_owner_no_directory(tmp_path: Path, repository_id: RepositoryId, mocker: MockerFixture) -> None:
"""
must skip directory scan if it does not exist
"""
repository_paths = RepositoryPaths(tmp_path, repository_id)
chown_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths._chown")
with repository_paths.preserve_owner(Path("empty")):
(repository_paths.root / "created1").touch()
chown_mock.assert_not_called()
def test_tree_clear(repository_paths: RepositoryPaths, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must remove any package related files
"""
paths = {
repository_paths.cache_for(package_ahriman.base),
getattr(repository_paths, prop)(package_ahriman.base)
for prop in dir(repository_paths) if prop.endswith("_for")
}
rmtree_mock = mocker.patch("shutil.rmtree")
@ -346,7 +313,6 @@ def test_tree_create(repository_paths: RepositoryPaths, mocker: MockerFixture) -
for prop in dir(repository_paths)
if not prop.startswith("_")
and prop not in (
"archive_for",
"build_root",
"logger_name",
"logger",

View File

@ -10,9 +10,6 @@ root = /
sync_files_database = no
use_ahriman_cache = no
[archive]
keep_built_packages = 3
[auth]
client_id = client_id
client_secret = client_secret