Compare commits


1 commit

SHA1: 693c6161ef | Message: tree demo | Date: 2025-07-23 14:50:30 +03:00
26 changed files with 281 additions and 275 deletions

View File

@@ -139,8 +139,6 @@ Build related configuration. Group name can refer to architecture, e.g. ``build:
 Base repository settings.
-* ``architecture`` - repository architecture, string. This field is read-only and generated automatically from run options if possible.
-* ``name`` - repository name, string. This field is read-only and generated automatically from run options if possible.
 * ``root`` - root path for application, string, required.
 ``sign:*`` groups

View File

@@ -52,7 +52,7 @@ class Validate(Handler):
         """
         from ahriman.core.configuration.validator import Validator

-        schema = Validate.schema(configuration)
+        schema = Validate.schema(repository_id, configuration)
         validator = Validator(configuration=configuration, schema=schema)
         if validator.validate(configuration.dump()):
@@ -83,11 +83,12 @@ class Validate(Handler):
         return parser

     @staticmethod
-    def schema(configuration: Configuration) -> ConfigurationSchema:
+    def schema(repository_id: RepositoryId, configuration: Configuration) -> ConfigurationSchema:
         """
         get schema with triggers

         Args:
+            repository_id(RepositoryId): repository unique identifier
             configuration(Configuration): configuration instance

         Returns:
@@ -106,12 +107,12 @@ class Validate(Handler):
                 continue

             # default settings if any
-            for schema_name, schema in trigger_class.configuration_schema(None).items():
+            for schema_name, schema in trigger_class.configuration_schema(repository_id, None).items():
                 erased = Validate.schema_erase_required(copy.deepcopy(schema))
                 root[schema_name] = Validate.schema_merge(root.get(schema_name, {}), erased)

             # settings according to enabled triggers
-            for schema_name, schema in trigger_class.configuration_schema(configuration).items():
+            for schema_name, schema in trigger_class.configuration_schema(repository_id, configuration).items():
                 root[schema_name] = Validate.schema_merge(root.get(schema_name, {}), copy.deepcopy(schema))

         return root
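A usage sketch of the updated entry point, not part of the diff. It assumes `configuration` is an already loaded Configuration instance; the `check_loaded()` step mirrors the updated tests further down.

    from ahriman.application.handlers.validate import Validate

    _, repository_id = configuration.check_loaded()  # repository the configuration was loaded for
    schema = Validate.schema(repository_id, configuration)  # schema is now resolved per repository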

View File

@@ -31,20 +31,21 @@ class Repo(LazyLogging):
     Attributes:
         name(str): repository name
-        paths(RepositoryPaths): repository paths instance
+        root(Path): repository root
         sign_args(list[str]): additional args which have to be used to sign repository archive
         uid(int): uid of the repository owner user
     """

-    def __init__(self, name: str, paths: RepositoryPaths, sign_args: list[str]) -> None:
+    def __init__(self, name: str, paths: RepositoryPaths, sign_args: list[str], root: Path | None = None) -> None:
         """
         Args:
             name(str): repository name
             paths(RepositoryPaths): repository paths instance
             sign_args(list[str]): additional args which have to be used to sign repository archive
+            root(Path | None, optional): repository root. If none set, the default will be used (Default value = None)
         """
         self.name = name
-        self.paths = paths
+        self.root = root or paths.repository
         self.uid, _ = paths.root_owner
         self.sign_args = sign_args
@@ -56,28 +57,36 @@
         Returns:
             Path: path to repository database
         """
-        return self.paths.repository / f"{self.name}.db.tar.gz"
+        return self.root / f"{self.name}.db.tar.gz"

-    def add(self, path: Path) -> None:
+    def add(self, path: Path, remove: bool = True) -> None:
         """
         add new package to repository

         Args:
             path(Path): path to archive to add
+            remove(bool, optional): whether to remove old packages or not (Default value = True)
         """
+        command = ["repo-add", *self.sign_args]
+        if remove:
+            command.extend(["--remove"])
+        command.extend([str(self.repo_path), str(path)])
+
+        # add to repository
         check_output(
-            "repo-add", *self.sign_args, "-R", str(self.repo_path), str(path),
+            *command,
             exception=BuildError.from_process(path.name),
-            cwd=self.paths.repository,
+            cwd=self.root,
             logger=self.logger,
-            user=self.uid)
+            user=self.uid,
+        )

     def init(self) -> None:
         """
         create empty repository database. It just calls add with empty arguments
         """
         check_output("repo-add", *self.sign_args, str(self.repo_path),
-                     cwd=self.paths.repository, logger=self.logger, user=self.uid)
+                     cwd=self.root, logger=self.logger, user=self.uid)
@@ -88,13 +97,14 @@ class Repo(LazyLogging):
             filename(Path): package filename to remove
         """
         # remove package and signature (if any) from filesystem
-        for full_path in self.paths.repository.glob(f"{filename}*"):
+        for full_path in self.root.glob(f"**/{filename}*"):
             full_path.unlink()

         # remove package from registry
         check_output(
             "repo-remove", *self.sign_args, str(self.repo_path), package,
             exception=BuildError.from_process(package),
-            cwd=self.paths.repository,
+            cwd=self.root,
             logger=self.logger,
-            user=self.uid)
+            user=self.uid,
+        )
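Constructor and add() sketch under the new signature, not part of the diff; `paths` is assumed to be a RepositoryPaths instance and the repository and archive names are invented for the example.

    repo = Repo("demo-repo", paths, sign_args=[])  # root defaults to paths.repository when omitted
    repo.init()  # creates demo-repo.db.tar.gz under the selected root
    repo.add(paths.packages / "demo-1.0.0-1-x86_64.pkg.tar.zst", remove=False)  # skips the --remove flag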

View File

@@ -43,6 +43,7 @@ class Configuration(configparser.RawConfigParser):
         SYSTEM_CONFIGURATION_PATH(Path): (class attribute) default system configuration path distributed by package
         includes(list[Path]): list of includes which were read
         path(Path | None): path to root configuration file
+        repository_id(RepositoryId | None): repository unique identifier

     Examples:
         Configuration class provides additional method in order to handle application configuration. Since this class is
@@ -93,7 +94,7 @@ class Configuration(configparser.RawConfigParser):
             },
         )

-        self._repository_id: RepositoryId | None = None
+        self.repository_id: RepositoryId | None = None
         self.path: Path | None = None
         self.includes: list[Path] = []
@@ -128,32 +129,6 @@ class Configuration(configparser.RawConfigParser):
         """
         return self.getpath("settings", "logging")

-    @property
-    def repository_id(self) -> RepositoryId | None:
-        """
-        repository identifier
-
-        Returns:
-            RepositoryId: repository unique identifier
-        """
-        return self._repository_id
-
-    @repository_id.setter
-    def repository_id(self, repository_id: RepositoryId | None) -> None:
-        """
-        setter for repository identifier
-
-        Args:
-            repository_id(RepositoryId | None): repository unique identifier
-        """
-        self._repository_id = repository_id
-        if repository_id is None or repository_id.is_empty:
-            self.remove_option("repository", "name")
-            self.remove_option("repository", "architecture")
-        else:
-            self.set_option("repository", "name", repository_id.name)
-            self.set_option("repository", "architecture", repository_id.architecture)
-
     @property
     def repository_name(self) -> str:
         """

View File

@@ -57,7 +57,7 @@ class ConfigurationMultiDict(dict[str, Any]):
             OptionError: if the key already exists in the dictionary, but not a single value list or a string
         """
         match self.get(key):
-            case [current_value] | (str() as current_value):
+            case [current_value] | str(current_value):
                 value = f"{current_value} {value}"
             case None:
                 pass
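The replacement pattern relies on built-in types being self-matching in structural pattern matching: a class pattern with a single positional subpattern such as str(current_value) binds the whole subject, just like str() as current_value. A standalone illustration (not part of the diff):

    def describe(value):
        match value:
            case [current] | str(current):
                return f"single value: {current}"
            case None:
                return "missing"
            case _:
                return "unsupported"

    assert describe(["first"]) == "single value: first"
    assert describe("first") == "single value: first"
    assert describe(None) == "missing"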

View File

@@ -249,10 +249,6 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
     "repository": {
         "type": "dict",
         "schema": {
-            "architecture": {
-                "type": "string",
-                "empty": False,
-            },
             "name": {
                 "type": "string",
                 "empty": False,

View File

@@ -47,7 +47,6 @@ class LogsRotationTrigger(Trigger):
             },
         },
     }
-    REQUIRES_REPOSITORY = True

     def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
         """

View File

@@ -336,7 +336,6 @@ class ReportTrigger(Trigger):
             },
         },
     }
-    REQUIRES_REPOSITORY = True

     def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
         """

View File

@@ -17,7 +17,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
-import shutil
+import shutil  # shutil.move is used here to ensure cross fs file movement
 from collections.abc import Iterable
 from pathlib import Path
@@ -41,6 +41,101 @@ class Executor(PackageInfo, Cleaner):
     trait for common repository update processes
     """

+    def _archive_remove(self, description: PackageDescription, package_base: str) -> None:
+        """
+        rename package archive removing special symbols
+
+        Args:
+            description(PackageDescription): package description
+            package_base(str): package base name
+        """
+        if description.filename is None:
+            self.logger.warning("received empty package name for base %s", package_base)
+            return  # suppress type checking, it never can be none actually
+
+        if (safe := safe_filename(description.filename)) != description.filename:
+            (self.paths.packages / description.filename).rename(self.paths.packages / safe)
+            description.filename = safe
+
+    def _package_build(self, package: Package, path: Path, packager: str | None,
+                       local_version: str | None) -> str | None:
+        """
+        build single package
+
+        Args:
+            package(Package): package to build
+            path(Path): path to directory with package files
+            packager(str | None): packager identifier used for this package
+            local_version(str | None): local version of the package
+
+        Returns:
+            str | None: current commit sha if available
+        """
+        self.reporter.set_building(package.base)
+
+        task = Task(package, self.configuration, self.architecture, self.paths)
+        patches = self.reporter.package_patches_get(package.base, None)
+        commit_sha = task.init(path, patches, local_version)
+        built = task.build(path, PACKAGER=packager)
+
+        package.with_packages(built, self.pacman)
+        for src in built:
+            dst = self.paths.packages / src.name
+            shutil.move(src, dst)
+
+        return commit_sha
+
+    def _package_remove(self, package_name: str, path: Path) -> None:
+        """
+        remove single package from repository
+
+        Args:
+            package_name(str): package name
+            path(Path): path to package archive
+        """
+        try:
+            self.repo.remove(package_name, path)
+        except Exception:
+            self.logger.exception("could not remove %s", package_name)
+
+    def _package_remove_base(self, package_base: str) -> None:
+        """
+        remove package base from repository
+
+        Args:
+            package_base(str): package base name
+        """
+        try:
+            with self.in_event(package_base, EventType.PackageRemoved):
+                self.reporter.package_remove(package_base)
+        except Exception:
+            self.logger.exception("could not remove base %s", package_base)
+
+    def _package_update(self, filename: str | None, package_base: str, packager_key: str | None) -> None:
+        """
+        update built package in repository database
+
+        Args:
+            filename(str | None): archive filename
+            package_base(str): package base name
+            packager_key(str | None): packager key identifier
+        """
+        if filename is None:
+            self.logger.warning("received empty package name for base %s", package_base)
+            return  # suppress type checking, it never can be none actually
+
+        # in theory, it might be NOT packages directory, but we suppose it is
+        full_path = self.paths.packages / filename
+        files = self.sign.process_sign_package(full_path, packager_key)
+
+        for src in files:
+            archive = self.paths.archive_for(package_base) / src.name
+            shutil.move(src, archive)  # move package to archive directory
+
+            if not (symlink := self.paths.repository / archive.name).exists():
+                symlink.symlink_to(archive.relative_to(symlink.parent, walk_up=True))  # create link to archive
+
+        self.repo.add(self.paths.repository / filename)
+
     def process_build(self, updates: Iterable[Package], packagers: Packagers | None = None, *,
                       bump_pkgrel: bool = False) -> Result:
         """
@@ -55,21 +150,6 @@ class Executor(PackageInfo, Cleaner):
         Returns:
             Result: build result
         """
-        def build_single(package: Package, local_path: Path, packager_id: str | None) -> str | None:
-            self.reporter.set_building(package.base)
-
-            task = Task(package, self.configuration, self.architecture, self.paths)
-            local_version = local_versions.get(package.base) if bump_pkgrel else None
-            patches = self.reporter.package_patches_get(package.base, None)
-            commit_sha = task.init(local_path, patches, local_version)
-            built = task.build(local_path, PACKAGER=packager_id)
-
-            package.with_packages(built, self.pacman)
-            for src in built:
-                dst = self.paths.packages / src.name
-                shutil.move(src, dst)
-            return commit_sha
-
         packagers = packagers or Packagers()
         local_versions = {package.base: package.version for package in self.packages()}
@@ -80,16 +160,21 @@ class Executor(PackageInfo, Cleaner):
                 try:
                     with self.in_event(single.base, EventType.PackageUpdated, failure=EventType.PackageUpdateFailed):
                         packager = self.packager(packagers, single.base)
-                        last_commit_sha = build_single(single, Path(dir_name), packager.packager_id)
+                        local_version = local_versions.get(single.base) if bump_pkgrel else None
+                        commit_sha = self._package_build(single, Path(dir_name), packager.packager_id, local_version)
+
                         # update commit hash for changes keeping current diff if there is any
                         changes = self.reporter.package_changes_get(single.base)
-                        self.reporter.package_changes_update(single.base, Changes(last_commit_sha, changes.changes))
+                        self.reporter.package_changes_update(single.base, Changes(commit_sha, changes.changes))
+
                         # update dependencies list
                         package_archive = PackageArchive(self.paths.build_root, single, self.pacman, self.scan_paths)
                         dependencies = package_archive.depends_on()
                         self.reporter.package_dependencies_update(single.base, dependencies)
+
                         # update result set
                         result.add_updated(single)
                 except Exception:
                     self.reporter.set_failed(single.base)
                     result.add_failed(single)
@@ -107,19 +192,6 @@ class Executor(PackageInfo, Cleaner):
         Returns:
             Result: remove result
         """
-        def remove_base(package_base: str) -> None:
-            try:
-                with self.in_event(package_base, EventType.PackageRemoved):
-                    self.reporter.package_remove(package_base)
-            except Exception:
-                self.logger.exception("could not remove base %s", package_base)
-
-        def remove_package(package: str, archive_path: Path) -> None:
-            try:
-                self.repo.remove(package, archive_path)  # remove the package itself
-            except Exception:
-                self.logger.exception("could not remove %s", package)
-
         packages_to_remove: dict[str, Path] = {}
         bases_to_remove: list[str] = []
@@ -136,6 +208,7 @@ class Executor(PackageInfo, Cleaner):
                 })
                 bases_to_remove.append(local.base)
                 result.add_removed(local)
+
             elif requested.intersection(local.packages.keys()):
                 packages_to_remove.update({
                     package: properties.filepath
@@ -152,11 +225,11 @@ class Executor(PackageInfo, Cleaner):
         # remove packages from repository files
         for package, filename in packages_to_remove.items():
-            remove_package(package, filename)
+            self._package_remove(package, filename)

         # remove bases from registered
         for package in bases_to_remove:
-            remove_base(package)
+            self._package_remove_base(package)

         return result
@@ -172,27 +245,6 @@ class Executor(PackageInfo, Cleaner):
         Returns:
             Result: path to repository database
         """
-        def rename(archive: PackageDescription, package_base: str) -> None:
-            if archive.filename is None:
-                self.logger.warning("received empty package name for base %s", package_base)
-                return  # suppress type checking, it never can be none actually
-            if (safe := safe_filename(archive.filename)) != archive.filename:
-                shutil.move(self.paths.packages / archive.filename, self.paths.packages / safe)
-                archive.filename = safe
-
-        def update_single(name: str | None, package_base: str, packager_key: str | None) -> None:
-            if name is None:
-                self.logger.warning("received empty package name for base %s", package_base)
-                return  # suppress type checking, it never can be none actually
-            # in theory, it might be NOT packages directory, but we suppose it is
-            full_path = self.paths.packages / name
-            files = self.sign.process_sign_package(full_path, packager_key)
-            for src in files:
-                dst = self.paths.repository / safe_filename(src.name)
-                shutil.move(src, dst)
-            package_path = self.paths.repository / safe_filename(name)
-            self.repo.add(package_path)
-
         current_packages = {package.base: package for package in self.packages()}
         local_versions = {package_base: package.version for package_base, package in current_packages.items()}
@@ -207,8 +259,8 @@ class Executor(PackageInfo, Cleaner):
                 packager = self.packager(packagers, local.base)

                 for description in local.packages.values():
-                    rename(description, local.base)
-                    update_single(description.filename, local.base, packager.key)
+                    self._archive_remove(description, local.base)
+                    self._package_update(description.filename, local.base, packager.key)
                 self.reporter.set_success(local)
                 result.add_updated(local)
@@ -216,12 +268,13 @@ class Executor(PackageInfo, Cleaner):
                 if local.base in current_packages:
                     current_package_archives = set(current_packages[local.base].packages.keys())
                     removed_packages.extend(current_package_archives.difference(local.packages))
+
             except Exception:
                 self.reporter.set_failed(local.base)
                 result.add_failed(local)
                 self.logger.exception("could not process %s", local.base)
-        self.clear_packages()

+        self.clear_packages()
         self.process_remove(removed_packages)
         return result
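Net effect of the new _package_update flow, shown in isolation and not part of the diff; `paths` stands for a RepositoryPaths instance and the package base and file name are invented:

    archive = paths.archive_for("demo") / "demo-1.0.0-1-x86_64.pkg.tar.zst"  # real file lives in the archive tree
    symlink = paths.repository / archive.name
    if not symlink.exists():
        # walk_up=True (Python 3.12+) lets a relative link point outside the repository directory
        symlink.symlink_to(archive.relative_to(symlink.parent, walk_up=True))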

View File

@@ -103,7 +103,6 @@ class KeyringTrigger(Trigger):
             },
         },
     }
-    REQUIRES_REPOSITORY = True

     def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
         """

View File

@@ -90,7 +90,6 @@ class MirrorlistTrigger(Trigger):
             },
         },
     }
-    REQUIRES_REPOSITORY = True

     def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
         """

View File

@@ -36,7 +36,6 @@ class Trigger(LazyLogging):
         CONFIGURATION_SCHEMA(ConfigurationSchema): (class attribute) configuration schema template
         CONFIGURATION_SCHEMA_FALLBACK(str | None): (class attribute) optional fallback option for defining
             configuration schema type used
-        REQUIRES_REPOSITORY(bool): (class attribute) either trigger requires loaded repository or not
         configuration(Configuration): configuration instance
         repository_id(RepositoryId): repository unique identifier
@@ -60,7 +59,6 @@ class Trigger(LazyLogging):
     CONFIGURATION_SCHEMA: ClassVar[ConfigurationSchema] = {}
     CONFIGURATION_SCHEMA_FALLBACK: ClassVar[str | None] = None
-    REQUIRES_REPOSITORY: ClassVar[bool] = True

     def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
         """
@@ -81,18 +79,9 @@ class Trigger(LazyLogging):
         """
         return self.repository_id.architecture

-    @property
-    def is_allowed_to_run(self) -> bool:
-        """
-        whether trigger allowed to run or not
-
-        Returns:
-            bool: ``True`` in case if trigger allowed to run and ``False`` otherwise
-        """
-        return not (self.REQUIRES_REPOSITORY and self.repository_id.is_empty)
-
     @classmethod
-    def configuration_schema(cls, configuration: Configuration | None) -> ConfigurationSchema:
+    def configuration_schema(cls, repository_id: RepositoryId,
+                             configuration: Configuration | None) -> ConfigurationSchema:
         """
         configuration schema based on supplied service configuration
@@ -100,6 +89,7 @@ class Trigger(LazyLogging):
         Schema must be in cerberus format, for details and examples you can check built-in triggers.

         Args:
+            repository_id(str): repository unique identifier
             configuration(Configuration | None): configuration instance. If set to None, the default schema
                 should be returned
@@ -111,12 +101,10 @@ class Trigger(LazyLogging):
         result: ConfigurationSchema = {}
         for target in cls.configuration_sections(configuration):
-            for section in configuration.sections():
-                if not (section == target or section.startswith(f"{target}:")):
-                    # either repository specific or exact name
-                    continue
-                schema_name = configuration.get(section, "type", fallback=section)
-                if schema_name not in cls.CONFIGURATION_SCHEMA:
-                    continue
-                result[section] = cls.CONFIGURATION_SCHEMA[schema_name]
+            if not configuration.has_section(target):
+                continue
+
+            section, schema_name = configuration.gettype(
+                target, repository_id, fallback=cls.CONFIGURATION_SCHEMA_FALLBACK)
+            if schema_name not in cls.CONFIGURATION_SCHEMA:
+                continue
+            result[section] = cls.CONFIGURATION_SCHEMA[schema_name]
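Callers now pass the repository identifier explicitly; passing None as configuration still yields the default schema. Sketch, not part of the diff, assuming `repository_id` and `configuration` come from an already loaded configuration:

    schema = ReportTrigger.configuration_schema(repository_id, configuration)  # enabled, repository-specific sections
    default = ReportTrigger.configuration_schema(repository_id, None)          # full built-in schema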

View File

@@ -17,7 +17,6 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
-import atexit
 import contextlib
 import os
@@ -61,8 +60,17 @@ class TriggerLoader(LazyLogging):
     def __init__(self) -> None:
         """"""
+        self._on_stop_requested = False
         self.triggers: list[Trigger] = []

+    def __del__(self) -> None:
+        """
+        custom destructor object which calls on_stop in case if it was requested
+        """
+        if not self._on_stop_requested:
+            return
+        self.on_stop()
+
     @classmethod
     def load(cls, repository_id: RepositoryId, configuration: Configuration) -> Self:
         """
@@ -77,9 +85,8 @@ class TriggerLoader(LazyLogging):
         """
         instance = cls()
         instance.triggers = [
-            trigger
-            for trigger_name in instance.selected_triggers(configuration)
-            if (trigger := instance.load_trigger(trigger_name, repository_id, configuration)).is_allowed_to_run
+            instance.load_trigger(trigger, repository_id, configuration)
+            for trigger in instance.selected_triggers(configuration)
         ]

         return instance
@@ -243,11 +250,10 @@
         run triggers on load
         """
         self.logger.debug("executing triggers on start")
+        self._on_stop_requested = True
         for trigger in self.triggers:
             with self.__execute_trigger(trigger):
                 trigger.on_start()

-        # register on_stop call
-        atexit.register(self.on_stop)

     def on_stop(self) -> None:
         """

View File

@@ -160,7 +160,6 @@
             },
         },
     }
-    REQUIRES_REPOSITORY = True

     def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
         """

View File

@@ -85,6 +85,16 @@ class RepositoryPaths(LazyLogging):
             return Path(self.repository_id.architecture)  # legacy tree suffix
         return Path(self.repository_id.name) / self.repository_id.architecture

+    @property
+    def archive(self) -> Path:
+        """
+        archive directory root
+
+        Returns:
+            Path: archive directory root
+        """
+        return self.root / "archive" / self._suffix
+
     @property
     def build_root(self) -> Path:
         """
@@ -249,6 +259,23 @@
             set_owner(path)
             path = path.parent

+    def archive_for(self, package_base: str) -> Path:
+        """
+        get path to archive specified search criteria
+
+        Args:
+            package_base(str): package base name
+
+        Returns:
+            Path: path to archive directory for package base
+        """
+        directory = self.archive / "packages" / package_base[0] / package_base
+
+        if not directory.is_dir():  # create if not exists
+            with self.preserve_owner(self.archive):
+                directory.mkdir(mode=0o755, parents=True)
+
+        return directory
+
     def cache_for(self, package_base: str) -> Path:
         """
         get path to cached PKGBUILD and package sources for the package base
@@ -320,6 +347,7 @@
         with self.preserve_owner():
             for directory in (
+                self.archive,
                 self.cache,
                 self.chroot,
                 self.packages,
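Resulting layout sketch, not part of the diff; the root, repository name and package base are hypothetical, while the structure follows the properties above:

    # root = /var/lib/ahriman, repository "demo-repo", architecture "x86_64"
    paths.archive              # /var/lib/ahriman/archive/demo-repo/x86_64
    paths.archive_for("demo")  # .../archive/demo-repo/x86_64/packages/d/demo, created on demand with mode 0o755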

View File

@@ -2,7 +2,6 @@ import argparse
 import json
 import pytest

-from pathlib import Path
 from pytest_mock import MockerFixture

 from ahriman.application.handlers.validate import Validate
@@ -54,50 +53,12 @@ def test_run_skip(args: argparse.Namespace, configuration: Configuration, mocker
     print_mock.assert_not_called()


-def test_run_default(args: argparse.Namespace, configuration: Configuration) -> None:
-    """
-    must run on default configuration without errors
-    """
-    args.exit_code = True
-    _, repository_id = configuration.check_loaded()
-
-    default = Configuration.from_path(Configuration.SYSTEM_CONFIGURATION_PATH, repository_id)
-    # copy autogenerated values
-    for section, key in (("build", "build_command"), ("repository", "root")):
-        value = configuration.get(section, key)
-        default.set_option(section, key, value)
-
-    Validate.run(args, repository_id, default, report=False)
-
-
-def test_run_repo_specific_triggers(args: argparse.Namespace, configuration: Configuration,
-                                    resource_path_root: Path) -> None:
-    """
-    must correctly insert repo specific triggers
-    """
-    args.exit_code = True
-    _, repository_id = configuration.check_loaded()
-
-    # remove unused sections
-    for section in ("customs3", "github:x86_64", "logs-rotation", "mirrorlist"):
-        configuration.remove_section(section)
-
-    configuration.set_option("report", "target", "test")
-    for section in ("test", "test:i686", "test:another-repo:x86_64"):
-        configuration.set_option(section, "type", "html")
-        configuration.set_option(section, "link_path", "http://link_path")
-        configuration.set_option(section, "path", "path")
-        configuration.set_option(section, "template", "template")
-        configuration.set_option(section, "templates", str(resource_path_root))
-
-    Validate.run(args, repository_id, configuration, report=False)
-
-
 def test_schema(configuration: Configuration) -> None:
     """
     must generate full schema correctly
     """
-    schema = Validate.schema(configuration)
+    _, repository_id = configuration.check_loaded()
+    schema = Validate.schema(repository_id, configuration)

     # defaults
     assert schema.pop("console")
@@ -130,7 +91,9 @@ def test_schema_invalid_trigger(configuration: Configuration) -> None:
     """
     configuration.set_option("build", "triggers", "some.invalid.trigger.path.Trigger")
     configuration.remove_option("build", "triggers_known")
-    assert Validate.schema(configuration) == CONFIGURATION_SCHEMA
+
+    _, repository_id = configuration.check_loaded()
+    assert Validate.schema(repository_id, configuration) == CONFIGURATION_SCHEMA


 def test_schema_erase_required() -> None:

View File

@@ -20,40 +20,6 @@ def test_architecture(configuration: Configuration) -> None:
     assert configuration.architecture == "x86_64"


-def test_repository_id(configuration: Configuration, repository_id: RepositoryId) -> None:
-    """
-    must return repository identifier
-    """
-    assert configuration.repository_id == repository_id
-    assert configuration.get("repository", "name") == repository_id.name
-    assert configuration.get("repository", "architecture") == repository_id.architecture
-
-
-def test_repository_id_erase(configuration: Configuration) -> None:
-    """
-    must remove repository identifier properties if empty identifier supplied
-    """
-    configuration.repository_id = None
-    assert configuration.get("repository", "name", fallback=None) is None
-    assert configuration.get("repository", "architecture", fallback=None) is None
-
-    configuration.repository_id = RepositoryId("", "")
-    assert configuration.get("repository", "name", fallback=None) is None
-    assert configuration.get("repository", "architecture", fallback=None) is None
-
-
-def test_repository_id_update(configuration: Configuration, repository_id: RepositoryId) -> None:
-    """
-    must update repository identifier and related configuration options
-    """
-    repository_id = RepositoryId("i686", repository_id.name)
-
-    configuration.repository_id = repository_id
-    assert configuration.repository_id == repository_id
-    assert configuration.get("repository", "name") == repository_id.name
-    assert configuration.get("repository", "architecture") == repository_id.architecture
-
-
 def test_repository_name(configuration: Configuration) -> None:
     """
     must return valid repository name

View File

@@ -7,13 +7,6 @@ from ahriman.core.status import Client
 from ahriman.models.result import Result


-def test_requires_repository() -> None:
-    """
-    must require repository identifier to be set to start
-    """
-    assert LogsRotationTrigger.REQUIRES_REPOSITORY
-
-
 def test_configuration_sections(configuration: Configuration) -> None:
     """
     must correctly parse target list

View File

@@ -5,13 +5,6 @@ from ahriman.core.report import ReportTrigger
 from ahriman.models.result import Result


-def test_requires_repository() -> None:
-    """
-    must require repository identifier to be set to start
-    """
-    assert ReportTrigger.REQUIRES_REPOSITORY
-
-
 def test_configuration_sections(configuration: Configuration) -> None:
     """
     must correctly parse target list

View File

@@ -7,13 +7,6 @@ from ahriman.core.sign.gpg import GPG
 from ahriman.core.support import KeyringTrigger


-def test_requires_repository() -> None:
-    """
-    must require repository identifier to be set to start
-    """
-    assert KeyringTrigger.REQUIRES_REPOSITORY
-
-
 def test_configuration_sections(configuration: Configuration) -> None:
     """
     must correctly parse target list

View File

@@ -4,13 +4,6 @@ from ahriman.core.configuration import Configuration
 from ahriman.core.support import MirrorlistTrigger


-def test_requires_repository() -> None:
-    """
-    must require repository identifier to be set to start
-    """
-    assert MirrorlistTrigger.REQUIRES_REPOSITORY
-
-
 def test_configuration_sections(configuration: Configuration) -> None:
     """
     must correctly parse target list

View File

@@ -3,7 +3,6 @@ from unittest.mock import MagicMock
 from ahriman.core.configuration import Configuration
 from ahriman.core.report import ReportTrigger
 from ahriman.core.triggers import Trigger
-from ahriman.models.repository_id import RepositoryId
 from ahriman.models.result import Result
@@ -14,28 +13,16 @@ def test_architecture(trigger: Trigger) -> None:
     assert trigger.architecture == trigger.repository_id.architecture


-def test_is_allowed_to_run(trigger: Trigger) -> None:
-    """
-    must return flag correctly
-    """
-    assert trigger.is_allowed_to_run
-
-    trigger.repository_id = RepositoryId("", "")
-    assert not trigger.is_allowed_to_run
-
-    trigger.REQUIRES_REPOSITORY = False
-    assert trigger.is_allowed_to_run
-
-
 def test_configuration_schema(configuration: Configuration) -> None:
     """
     must return used configuration schema
     """
     section = "console"
     configuration.set_option("report", "target", section)
+    _, repository_id = configuration.check_loaded()

     expected = {section: ReportTrigger.CONFIGURATION_SCHEMA[section]}
-    assert ReportTrigger.configuration_schema(configuration) == expected
+    assert ReportTrigger.configuration_schema(repository_id, configuration) == expected
@@ -44,7 +31,9 @@ def test_configuration_schema_no_section(configuration: Configuration) -> None:
     """
     section = "abracadabra"
     configuration.set_option("report", "target", section)
-    assert ReportTrigger.configuration_schema(configuration) == {}
+
+    _, repository_id = configuration.check_loaded()
+    assert ReportTrigger.configuration_schema(repository_id, configuration) == {}


 def test_configuration_schema_no_schema(configuration: Configuration) -> None:
@@ -54,15 +43,17 @@ def test_configuration_schema_no_schema(configuration: Configuration) -> None:
     section = "abracadabra"
     configuration.set_option("report", "target", section)
     configuration.set_option(section, "key", "value")
-    assert ReportTrigger.configuration_schema(configuration) == {}
+    _, repository_id = configuration.check_loaded()
+    assert ReportTrigger.configuration_schema(repository_id, configuration) == {}


 def test_configuration_schema_empty(configuration: Configuration) -> None:
     """
     must return default schema if no configuration set
     """
-    assert ReportTrigger.configuration_schema(None) == ReportTrigger.CONFIGURATION_SCHEMA
+    _, repository_id = configuration.check_loaded()
+    assert ReportTrigger.configuration_schema(repository_id, None) == ReportTrigger.CONFIGURATION_SCHEMA


 def test_configuration_schema_variables() -> None:

View File

@@ -153,12 +153,38 @@ def test_on_start(trigger_loader: TriggerLoader, mocker: MockerFixture) -> None:
     """
     upload_mock = mocker.patch("ahriman.core.upload.UploadTrigger.on_start")
     report_mock = mocker.patch("ahriman.core.report.ReportTrigger.on_start")
-    atexit_mock = mocker.patch("atexit.register")

     trigger_loader.on_start()
+    assert trigger_loader._on_stop_requested
     report_mock.assert_called_once_with()
     upload_mock.assert_called_once_with()
-    atexit_mock.assert_called_once_with(trigger_loader.on_stop)
+
+
+def test_on_stop_with_on_start(configuration: Configuration, mocker: MockerFixture) -> None:
+    """
+    must call on_stop on exit if on_start was called
+    """
+    mocker.patch("ahriman.core.upload.UploadTrigger.on_start")
+    mocker.patch("ahriman.core.report.ReportTrigger.on_start")
+    on_stop_mock = mocker.patch("ahriman.core.triggers.trigger_loader.TriggerLoader.on_stop")
+
+    _, repository_id = configuration.check_loaded()
+    trigger_loader = TriggerLoader.load(repository_id, configuration)
+    trigger_loader.on_start()
+    del trigger_loader
+    on_stop_mock.assert_called_once_with()
+
+
+def test_on_stop_without_on_start(configuration: Configuration, mocker: MockerFixture) -> None:
+    """
+    must call not on_stop on exit if on_start wasn't called
+    """
+    on_stop_mock = mocker.patch("ahriman.core.triggers.trigger_loader.TriggerLoader.on_stop")
+
+    _, repository_id = configuration.check_loaded()
+    trigger_loader = TriggerLoader.load(repository_id, configuration)
+    del trigger_loader
+    on_stop_mock.assert_not_called()


 def test_on_stop(trigger_loader: TriggerLoader, mocker: MockerFixture) -> None:

View File

@@ -5,13 +5,6 @@ from ahriman.core.upload import UploadTrigger
 from ahriman.models.result import Result


-def test_requires_repository() -> None:
-    """
-    must require repository identifier to be set to start
-    """
-    assert UploadTrigger.REQUIRES_REPOSITORY
-
-
 def test_configuration_sections(configuration: Configuration) -> None:
     """
     must correctly parse target list

View File

@@ -140,6 +140,8 @@ dynamic_version = "{[project]name}.__version__"
 extras = [
     { replace = "ref", of = ["project", "extras"], extend = true },
 ]
+# TODO: steamline shlex usage after https://github.com/iterative/shtab/pull/192 merge
+handle_redirect = true
 pip_pre = true
 set_env.PYTHONPATH = "src"
 set_env.SPHINX_APIDOC_OPTIONS = "members,no-undoc-members,show-inheritance"
@@ -147,14 +149,18 @@ commands = [
     [
         "shtab",
         { replace = "ref", of = ["flags", "shtab"], extend = true },
-        "--shell", "bash",
-        "--output", "package/share/bash-completion/completions/_ahriman",
+        "--shell",
+        "bash",
+        ">",
+        "package/share/bash-completion/completions/_ahriman",
     ],
     [
         "shtab",
         { replace = "ref", of = ["flags", "shtab"], extend = true },
-        "--shell", "zsh",
-        "--output", "package/share/zsh/site-functions/_ahriman",
+        "--shell",
+        "zsh",
+        ">",
+        "package/share/zsh/site-functions/_ahriman",
     ],
     [
         "argparse-manpage",

View File

@@ -18,9 +18,11 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 import importlib
+import shlex
 import sys

 from tox.config.sets import EnvConfigSet
+from tox.config.types import Command
 from tox.plugin import impl
 from tox.session.state import State
 from tox.tox_env.api import ToxEnv
@@ -54,6 +56,35 @@ def _extract_version(env_conf: EnvConfigSet, python_path: str | None = None) ->
     return {"VERSION": version}


+def _wrap_commands(env_conf: EnvConfigSet, shell: str = "bash") -> None:
+    """
+    wrap commands into shell if there is redirect
+
+    Args:
+        env_conf(EnvConfigSet): the core configuration object
+        shell(str, optional): shell command to use (Default value = "bash")
+    """
+    if not env_conf["handle_redirect"]:
+        return
+
+    # append shell just in case
+    env_conf["allowlist_externals"].append(shell)
+
+    for command in env_conf["commands"]:
+        if len(command.args) < 3:  # command itself, redirect and output
+            continue
+
+        redirect, output = command.args[-2:]
+        if redirect not in (">", "2>", "&>"):
+            continue
+
+        command.args = [
+            shell,
+            "-c",
+            f"{Command(command.args[:-2]).shell} {redirect} {shlex.quote(output)}",
+        ]
+
+
 @impl
 def tox_add_env_config(env_conf: EnvConfigSet, state: State) -> None:
     """
@@ -72,6 +103,12 @@ def tox_add_env_config(env_conf: EnvConfigSet, state: State) -> None:
         default="",
         desc="import path for the version variable",
     )
+    env_conf.add_config(
+        keys=["handle_redirect"],
+        of_type=bool,
+        default=False,
+        desc="wrap commands to handle redirects if any",
+    )


 @impl
@@ -87,3 +124,5 @@ def tox_before_run_commands(tox_env: ToxEnv) -> None:
     python_path = set_env.load("PYTHONPATH") if "PYTHONPATH" in set_env else None
     set_env.update(_extract_version(env_conf, python_path))
+
+    _wrap_commands(env_conf)
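Standalone illustration of the rewrite performed by _wrap_commands, not part of the diff; shlex.join stands in here for the Command(...).shell rendering used above, which is an assumption made for the example:

    import shlex

    args = ["shtab", "--shell", "bash", ">", "package/share/bash-completion/completions/_ahriman"]
    redirect, output = args[-2:]
    wrapped = ["bash", "-c", f"{shlex.join(args[:-2])} {redirect} {shlex.quote(output)}"]
    # wrapped == ["bash", "-c", "shtab --shell bash > package/share/bash-completion/completions/_ahriman"]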