From 05f87a36d6beb828cf4edef861b0eac531ca0cdd Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Fri, 6 Sep 2024 16:16:52 +0300 Subject: [PATCH 01/10] generate filenames without using makepkg --- src/ahriman/core/build_tools/task.py | 49 +++++++++++++++++++--------- 1 file changed, 34 insertions(+), 15 deletions(-) diff --git a/src/ahriman/core/build_tools/task.py b/src/ahriman/core/build_tools/task.py index 17fec01c..ab6816e5 100644 --- a/src/ahriman/core/build_tools/task.py +++ b/src/ahriman/core/build_tools/task.py @@ -17,13 +17,14 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . # +from collections.abc import Generator from pathlib import Path from ahriman.core.build_tools.sources import Sources from ahriman.core.configuration import Configuration from ahriman.core.exceptions import BuildError from ahriman.core.log import LazyLogging -from ahriman.core.utils import check_output +from ahriman.core.utils import check_output, package_like from ahriman.models.package import Package from ahriman.models.pkgbuild_patch import PkgbuildPatch from ahriman.models.repository_paths import RepositoryPaths @@ -67,6 +68,36 @@ class Task(LazyLogging): self.makepkg_flags = configuration.getlist("build", "makepkg_flags", fallback=[]) self.makechrootpkg_flags = configuration.getlist("build", "makechrootpkg_flags", fallback=[]) + def _package_archives(self, sources_dir: Path, source_files: list[Path]) -> list[Path]: + """ + extract package archives from the directory + + Args: + sources_dir(Path): path to where sources are + source_files(list[Path]): list of files which were initially in the directory + + Returns: + list[Path]: list of file paths which looks like freshly generated archives + """ + def files() -> Generator[Path, None, None]: + for filepath in sources_dir.iterdir(): + if filepath in source_files: + continue # skip files which were already there + if filepath.suffix == ".log": + continue # skip log files + if not package_like(filepath): + continue # path doesn't look like a package + yield filepath + + # debug packages are always formed as package.base-debug + # see /usr/share/makepkg/util/pkgbuild.sh for more details + debug_package_prefix = f"{self.package.base}-debug-" + return [ + package + for package in files() + if self.include_debug_packages or not package.name.startswith(debug_package_prefix) + ] + def build(self, sources_dir: Path, **kwargs: str | None) -> list[Path]: """ run package build @@ -91,6 +122,7 @@ class Task(LazyLogging): } self.logger.info("using environment variables %s", environment) + source_files = list(sources_dir.iterdir()) check_output( *command, exception=BuildError.from_process(self.package.base), @@ -100,20 +132,7 @@ class Task(LazyLogging): environment=environment, ) - package_list_command = ["makepkg", "--packagelist"] - if not self.include_debug_packages: - package_list_command.append("OPTIONS=(!debug)") # disable debug flag manually - packages = check_output( - *package_list_command, - exception=BuildError.from_process(self.package.base), - cwd=sources_dir, - logger=self.logger, - environment=environment, - ).splitlines() - # some dirty magic here - # the filter is applied in order to make sure that result will only contain packages which were actually built - # e.g. 
in some cases packagelist command produces debug packages which were not actually built - return list(filter(lambda path: path.is_file(), map(Path, packages))) + return self._package_archives(sources_dir, source_files) def init(self, sources_dir: Path, patches: list[PkgbuildPatch], local_version: str | None) -> str | None: """ From 6e232f0cd6d62d50f8d4bf5de6c2ffa9f9d09ca8 Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Mon, 9 Sep 2024 18:43:39 +0300 Subject: [PATCH 02/10] pkgbuild parser impl --- src/ahriman/core/utils.py | 14 +- src/ahriman/models/package.py | 47 ++--- src/ahriman/models/pkgbuild.py | 298 +++++++++++++++++++++++++++ src/ahriman/models/pkgbuild_patch.py | 15 ++ 4 files changed, 338 insertions(+), 36 deletions(-) create mode 100644 src/ahriman/models/pkgbuild.py diff --git a/src/ahriman/core/utils.py b/src/ahriman/core/utils.py index ab1651ef..bf50f9e5 100644 --- a/src/ahriman/core/utils.py +++ b/src/ahriman/core/utils.py @@ -27,7 +27,7 @@ import re import selectors import subprocess -from collections.abc import Callable, Generator, Iterable +from collections.abc import Callable, Generator, Iterable, Mapping from dataclasses import asdict from enum import Enum from pathlib import Path @@ -407,7 +407,7 @@ def safe_filename(source: str) -> str: return re.sub(r"[^A-Za-z\d\-._~:\[\]@]", "-", source) -def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any], *, +def srcinfo_property(key: str, srcinfo: Mapping[str, Any], package_srcinfo: Mapping[str, Any], *, default: Any = None) -> Any: """ extract property from SRCINFO. This method extracts property from package if this property is presented in @@ -416,8 +416,8 @@ def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[st Args: key(str): key to extract - srcinfo(dict[str, Any]): root structure of SRCINFO - package_srcinfo(dict[str, Any]): package specific SRCINFO + srcinfo(Mapping[str, Any]): root structure of SRCINFO + package_srcinfo(Mapping[str, Any]): package specific SRCINFO default(Any, optional): the default value for the specified key (Default value = None) Returns: @@ -426,7 +426,7 @@ def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[st return package_srcinfo.get(key) or srcinfo.get(key) or default -def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any], *, +def srcinfo_property_list(key: str, srcinfo: Mapping[str, Any], package_srcinfo: Mapping[str, Any], *, architecture: str | None = None) -> list[Any]: """ extract list property from SRCINFO. 
Unlike :func:`srcinfo_property()` it supposes that default return value is @@ -435,8 +435,8 @@ def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: di Args: key(str): key to extract - srcinfo(dict[str, Any]): root structure of SRCINFO - package_srcinfo(dict[str, Any]): package specific SRCINFO + srcinfo(Mapping[str, Any]): root structure of SRCINFO + package_srcinfo(Mapping[str, Any]): package specific SRCINFO architecture(str | None, optional): package architecture if set (Default value = None) Returns: diff --git a/src/ahriman/models/package.py b/src/ahriman/models/package.py index 9f988aba..cf4c0649 100644 --- a/src/ahriman/models/package.py +++ b/src/ahriman/models/package.py @@ -37,6 +37,7 @@ from ahriman.core.log import LazyLogging from ahriman.core.utils import check_output, dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow from ahriman.models.package_description import PackageDescription from ahriman.models.package_source import PackageSource +from ahriman.models.pkgbuild import Pkgbuild from ahriman.models.remote_source import RemoteSource from ahriman.models.repository_paths import RepositoryPaths @@ -255,25 +256,23 @@ class Package(LazyLogging): Returns: Self: package properties - - Raises: - PackageInfoError: if there are parsing errors """ - srcinfo_source = check_output("makepkg", "--printsrcinfo", cwd=path) - srcinfo, errors = parse_srcinfo(srcinfo_source) - if errors: - raise PackageInfoError(errors) + pkgbuild = Pkgbuild.from_file(path / "PKGBUILD") packages = { package: PackageDescription( - depends=srcinfo_property_list("depends", srcinfo, properties, architecture=architecture), - make_depends=srcinfo_property_list("makedepends", srcinfo, properties, architecture=architecture), - opt_depends=srcinfo_property_list("optdepends", srcinfo, properties, architecture=architecture), - check_depends=srcinfo_property_list("checkdepends", srcinfo, properties, architecture=architecture), + depends=srcinfo_property_list("depends", pkgbuild, properties, architecture=architecture), + make_depends=srcinfo_property_list("makedepends", pkgbuild, properties, architecture=architecture), + opt_depends=srcinfo_property_list("optdepends", pkgbuild, properties, architecture=architecture), + check_depends=srcinfo_property_list("checkdepends", pkgbuild, properties, architecture=architecture), ) - for package, properties in srcinfo["packages"].items() + for package, properties in pkgbuild.packages().items() } - version = full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"]) + version = full_version( + pkgbuild.get_as("epoch", str, default=None), + pkgbuild.get_as("pkgver", str), + pkgbuild.get_as("pkgrel", str), + ) remote = RemoteSource( source=PackageSource.Local, @@ -284,7 +283,7 @@ class Package(LazyLogging): ) return cls( - base=srcinfo["pkgbase"], + base=pkgbuild.get_as("pkgbase", str), version=version, remote=remote, packages=packages, @@ -363,16 +362,12 @@ class Package(LazyLogging): Raises: PackageInfoError: if there are parsing errors """ - srcinfo_source = check_output("makepkg", "--printsrcinfo", cwd=path) - srcinfo, errors = parse_srcinfo(srcinfo_source) - if errors: - raise PackageInfoError(errors) - + pkgbuild = Pkgbuild.from_file(path / "PKGBUILD") # we could use arch property, but for consistency it is better to call special method architectures = Package.supported_architectures(path) for architecture in architectures: - for source in srcinfo_property_list("source", srcinfo, {}, architecture=architecture): + for 
source in srcinfo_property_list("source", pkgbuild, {}, architecture=architecture): if "::" in source: _, source = source.split("::", 1) # in case if filename is specified, remove it @@ -383,7 +378,7 @@ class Package(LazyLogging): yield Path(source) - if (install := srcinfo.get("install", None)) is not None: + if isinstance(install := pkgbuild.get("install"), str): # well, in reality it is either None or str yield Path(install) @staticmethod @@ -396,15 +391,9 @@ class Package(LazyLogging): Returns: set[str]: list of package supported architectures - - Raises: - PackageInfoError: if there are parsing errors """ - srcinfo_source = check_output("makepkg", "--printsrcinfo", cwd=path) - srcinfo, errors = parse_srcinfo(srcinfo_source) - if errors: - raise PackageInfoError(errors) - return set(srcinfo.get("arch", [])) + pkgbuild = Pkgbuild.from_file(path / "PKGBUILD") + return set(pkgbuild.get("arch", [])) def _package_list_property(self, extractor: Callable[[PackageDescription], list[str]]) -> list[str]: """ diff --git a/src/ahriman/models/pkgbuild.py b/src/ahriman/models/pkgbuild.py new file mode 100644 index 00000000..72e658c0 --- /dev/null +++ b/src/ahriman/models/pkgbuild.py @@ -0,0 +1,298 @@ +# +# Copyright (c) 2021-2024 ahriman team. +# +# This file is part of ahriman +# (see https://github.com/arcan1s/ahriman). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+#
+import re
+import shlex
+
+from collections.abc import Generator, Iterator, Mapping
+from dataclasses import dataclass
+from enum import StrEnum
+from io import StringIO
+from pathlib import Path
+from typing import IO, Self, TypeVar, cast
+
+from ahriman.models.pkgbuild_patch import PkgbuildPatch
+
+
+T = TypeVar("T", str, list[str])
+U = TypeVar("U", str, list[str], None)
+
+
+class PkgbuildToken(StrEnum):
+    """
+    well-known tokens dictionary
+
+    Attributes:
+        ArrayStarts(PkgbuildToken): (class attribute) array starts token
+        ArrayEnds(PkgbuildToken): (class attribute) array ends token
+        FunctionDeclaration(PkgbuildToken): (class attribute) function declaration token
+        FunctionStarts(PkgbuildToken): (class attribute) function starts token
+        FunctionEnds(PkgbuildToken): (class attribute) function ends token
+    """
+
+    ArrayStarts = "("
+    ArrayEnds = ")"
+
+    FunctionDeclaration = "()"
+    FunctionStarts = "{"
+    FunctionEnds = "}"
+
+
+@dataclass(frozen=True)
+class Pkgbuild(Mapping[str, str | list[str]]):
+    """
+    simple pkgbuild reader implementation in pure python, because others suck
+
+    Attributes:
+        fields(dict[str, PkgbuildPatch]): PKGBUILD fields
+    """
+
+    fields: dict[str, PkgbuildPatch]
+
+    _ARRAY_ASSIGNMENT_REGEX = re.compile(r"^(?P<key>\w+)=$")
+    _STRING_ASSIGNMENT_REGEX = re.compile(r"^(?P<key>\w+)=(?P<value>.+)$")
+    # in addition functions can have dash to usual assignment
+    _FUNCTION_DECLARATION_REGEX = re.compile(r"^(?P<key>[\w-]+)$")
+
+    @property
+    def variables(self) -> dict[str, str]:
+        """
+        list of variables defined and (maybe) used in this PKGBUILD
+
+        Returns:
+            dict[str, str]: map of variable name to its value. The value will be included here in case it is presented
+            in the internal dictionary, it is not a function and the value has string type
+        """
+        return {
+            key: value.value
+            for key, value in self.fields.items()
+            if not value.is_function and isinstance(value.value, str)
+        }
+
+    @classmethod
+    def from_file(cls, path: Path) -> Self:
+        """
+        parse PKGBUILD from the file
+
+        Args:
+            path(Path): path to the PKGBUILD file
+
+        Returns:
+            Self: constructed instance of self
+        """
+        with path.open() as input_file:
+            return cls.from_io(input_file)
+
+    @classmethod
+    def from_io(cls, stream: IO[str]) -> Self:
+        """
+        parse PKGBUILD from input stream
+
+        Args:
+            stream(IO[str]): input stream containing PKGBUILD content
+
+        Returns:
+            Self: constructed instance of self
+        """
+        fields = {}
+
+        parser = shlex.shlex(stream, posix=True, punctuation_chars=True)
+        while token := parser.get_token():
+            try:
+                key, value = cls._parse_token(token, parser)
+                fields[key] = value
+            except StopIteration:
+                break
+
+        return cls(fields)
+
+    @staticmethod
+    def _parse_array(parser: shlex.shlex) -> list[str]:
+        """
+        parse array from the PKGBUILD. This method will extract tokens from parser until it matches closing array,
+        modifying source parser state
+
+        Args:
+            parser(shlex.shlex): shell parser instance
+
+        Returns:
+            list[str]: extracted array elements
+
+        Raises:
+            ValueError: if array is not closed
+        """
+        def extract() -> Generator[str, None, None]:
+            while token := parser.get_token():
+                if token == PkgbuildToken.ArrayEnds:
+                    break
+                yield token
+
+            if token != PkgbuildToken.ArrayEnds:
+                raise ValueError("No closing array bracket found")
+
+        return list(extract())
+
+    @staticmethod
+    def _parse_function(parser: shlex.shlex) -> str:
+        """
+        parse function from the PKGBUILD. This method will extract tokens from parser until it matches closing function,
+        modifying source parser state.
Instead of trying to combine tokens together, it uses positions of the file + and read content again in this range + + Args: + parser(shlex.shlex): shell parser instance + + Returns: + str: function body + + Raises: + ValueError: if function body wasn't found or parser input stream doesn't support position reading + """ + io: IO[str] = parser.instream # type: ignore[assignment] + + # find start and end positions + start_position, end_position = -1, -1 + while token := parser.get_token(): + match token: + case PkgbuildToken.FunctionStarts: + start_position = io.tell() + case PkgbuildToken.FunctionEnds: + end_position = io.tell() + break + + if not 0 < start_position < end_position: + raise ValueError("Function body wasn't found") + + # read the specified interval from source stream + io.seek(start_position - 1) # start from the previous symbol ({) + content = io.read(end_position - start_position + 1) + + return content + + @staticmethod + def _parse_token(token: str, parser: shlex.shlex) -> tuple[str, PkgbuildPatch]: + """ + parse single token to the PKGBUILD field + + Args: + token(str): current token + parser(shlex.shlex): shell parser instance + + Returns: + tuple[str, PkgbuildPatch]: extracted a pair of key and its value + + Raises: + StopIteration: if iteration reaches the end of the file' + """ + # simple assignment rule + if (match := Pkgbuild._STRING_ASSIGNMENT_REGEX.match(token)) is not None: + key = match.group("key") + value = match.group("value") + return key, PkgbuildPatch(key, value) + + match parser.get_token(): + # array processing. Arrays will be sent as "key=", "(", values, ")" + case PkgbuildToken.ArrayStarts if (match := Pkgbuild._ARRAY_ASSIGNMENT_REGEX.match(token)) is not None: + key = match.group("key") + value = Pkgbuild._parse_array(parser) + return key, PkgbuildPatch(key, value) + + # functions processing. Function will be sent as "name", "()", "{", body, "}" + case PkgbuildToken.FunctionDeclaration if Pkgbuild._FUNCTION_DECLARATION_REGEX.match(token): + key = f"{token}{PkgbuildToken.FunctionDeclaration}" + value = Pkgbuild._parse_function(parser) + return token, PkgbuildPatch(key, value) # this is not mistake, assign to token without () + + # some random token received without continuation, lets guess it is empty assignment (i.e. 
key=) + case other if other is not None: + return Pkgbuild._parse_token(other, parser) + + # reached the end of the parser + case None: + raise StopIteration + + def get_as(self, key: str, return_type: type[T], **kwargs: T | U) -> T | U: + """ + type guard for getting value by key + + Args: + key(str): key name + return_type(type[T]): return type, either ``str`` or ``list[str]`` + default(U): default value to return if no key found + + Returns: + T | U: value associated with key or default value if no value found and fallback is provided + + Raises: + KeyError: if no key found and no default has been provided + """ + del return_type + + if key not in self: + if "default" in kwargs: + return kwargs["default"] + raise KeyError(key) + + return cast(T, self[key]) + + def packages(self) -> dict[str, Self]: + """ + extract properties from internal package functions + + Returns: + dict[str, Self]: map of package name to its inner properties if defined + """ + packages = [self["pkgname"]] if isinstance(self["pkgname"], str) else self["pkgname"] + + def io(package_name: str) -> IO[str]: + # try to read package specific function and fallback to default otherwise + content = self.get_as(f"package_{package_name}", str, default=None) or self.get_as("package", str) + return StringIO(content) + + return {package: self.from_io(io(package)) for package in packages} + + def __getitem__(self, key: str) -> str | list[str]: + """ + get the field of the PKGBUILD + + Args: + key(str): key name + + Returns: + str | list[str]: value by the key + """ + return self.fields[key].substitute(self.variables) + + def __iter__(self) -> Iterator[str]: + """ + iterate over the fields + + Returns: + Iterator[str]: keys iterator + """ + return iter(self.fields) + + def __len__(self) -> int: + """ + get length of the mapping + + Returns: + int: amount of the fields in this PKGBUILD + """ + return len(self.fields) diff --git a/src/ahriman/models/pkgbuild_patch.py b/src/ahriman/models/pkgbuild_patch.py index 808ac8cb..cdb3c407 100644 --- a/src/ahriman/models/pkgbuild_patch.py +++ b/src/ahriman/models/pkgbuild_patch.py @@ -21,6 +21,7 @@ import shlex from dataclasses import dataclass, fields from pathlib import Path +from string import Template from typing import Any, Generator, Self from ahriman.core.utils import dataclass_view, filter_json @@ -167,6 +168,20 @@ class PkgbuildPatch: return f"{self.key} {self.value}" # no quoting enabled here return f"""{self.key}={PkgbuildPatch.quote(self.value)}""" + def substitute(self, variables: dict[str, str]) -> str | list[str]: + """ + substitute variables into the value + + Args: + variables(dict[str, str]): map of variables available for usage + + Returns: + str | list[str]: substituted value. 
All unknown variables will remain the same + """ + if isinstance(self.value, str): + return Template(self.value).safe_substitute(variables) + return [Template(value).safe_substitute(variables) for value in self.value] + def view(self) -> dict[str, Any]: """ generate json patch view From e7eccca3422d37f336b1b28fdc6f0abb048c3c35 Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Wed, 11 Sep 2024 02:57:37 +0300 Subject: [PATCH 03/10] completely remove makepkg calls --- src/ahriman/core/build_tools/task.py | 22 ++++++++ src/ahriman/core/repository/package_info.py | 2 +- .../core/repository/repository_properties.py | 3 - src/ahriman/core/repository/update_handler.py | 9 +-- src/ahriman/models/package.py | 55 ++++++++++--------- src/ahriman/models/pkgbuild.py | 16 +++--- src/ahriman/models/pkgbuild_patch.py | 3 +- 7 files changed, 63 insertions(+), 47 deletions(-) diff --git a/src/ahriman/core/build_tools/task.py b/src/ahriman/core/build_tools/task.py index ab6816e5..d410618f 100644 --- a/src/ahriman/core/build_tools/task.py +++ b/src/ahriman/core/build_tools/task.py @@ -159,3 +159,25 @@ class Task(LazyLogging): patch.write(sources_dir / "PKGBUILD") return last_commit_sha + + def setup(self, sources_dir: Path) -> None: + """ + setup chroot environment without building package itself. This function, in particular, useful in case if it is + required to refresh pkgver to the actual value without package building + + Args: + sources_dir(Path): path to where sources are + """ + command = [self.build_command, "-r", str(self.paths.chroot)] + command.extend(self.archbuild_flags) + command.extend(["--"] + self.makechrootpkg_flags) + command.extend(["--"] + self.makepkg_flags + ["--nobuild"]) + self.logger.info("using %s for %s", command, self.package.base) + + check_output( + *command, + exception=BuildError.from_process(self.package.base), + cwd=sources_dir, + logger=self.logger, + user=self.uid, + ) diff --git a/src/ahriman/core/repository/package_info.py b/src/ahriman/core/repository/package_info.py index 1ab0b3f0..2382d53c 100644 --- a/src/ahriman/core/repository/package_info.py +++ b/src/ahriman/core/repository/package_info.py @@ -58,7 +58,7 @@ class PackageInfo(RepositoryProperties): # force version to max of them self.logger.warning("version of %s differs, found %s and %s", current.base, current.version, local.version) - if current.is_outdated(local, self.paths, calculate_version=False): + if current.is_outdated(local, self.configuration, calculate_version=False): current.version = local.version current.packages.update(local.packages) except Exception: diff --git a/src/ahriman/core/repository/repository_properties.py b/src/ahriman/core/repository/repository_properties.py index 322db191..6190846f 100644 --- a/src/ahriman/core/repository/repository_properties.py +++ b/src/ahriman/core/repository/repository_properties.py @@ -51,7 +51,6 @@ class RepositoryProperties(EventLogger, LazyLogging): scan_paths(ScanPaths): scan paths for the implicit dependencies sign(GPG): GPG wrapper instance triggers(TriggerLoader): triggers holder - vcs_allowed_age(int): maximal age of the VCS packages before they will be checked """ def __init__(self, repository_id: RepositoryId, configuration: Configuration, database: SQLite, *, report: bool, @@ -70,8 +69,6 @@ class RepositoryProperties(EventLogger, LazyLogging): self.configuration = configuration self.database = database - self.vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0) - self.paths: RepositoryPaths = 
configuration.repository_paths # additional workaround for pycharm typing self.ignore_list = configuration.getlist("build", "ignore_packages", fallback=[]) diff --git a/src/ahriman/core/repository/update_handler.py b/src/ahriman/core/repository/update_handler.py index 0c6ee228..20a93511 100644 --- a/src/ahriman/core/repository/update_handler.py +++ b/src/ahriman/core/repository/update_handler.py @@ -67,10 +67,7 @@ class UpdateHandler(PackageInfo, Cleaner): try: remote = load_remote(local) - if local.is_outdated( - remote, self.paths, - vcs_allowed_age=self.vcs_allowed_age, - calculate_version=vcs): + if local.is_outdated(remote, self.configuration, calculate_version=vcs): self.reporter.set_pending(local.base) self.event(local.base, EventType.PackageOutdated, "Remote version is newer than local") result.append(remote) @@ -154,9 +151,7 @@ class UpdateHandler(PackageInfo, Cleaner): if local is None: continue # we don't add packages automatically - if local.is_outdated(remote, self.paths, - vcs_allowed_age=self.vcs_allowed_age, - calculate_version=vcs): + if local.is_outdated(remote, self.configuration, calculate_version=vcs): self.reporter.set_pending(local.base) self.event(local.base, EventType.PackageOutdated, "Locally pulled sources are outdated") result.append(remote) diff --git a/src/ahriman/models/package.py b/src/ahriman/models/package.py index cf4c0649..e9dd4a4b 100644 --- a/src/ahriman/models/package.py +++ b/src/ahriman/models/package.py @@ -26,20 +26,18 @@ from collections.abc import Callable, Generator, Iterable from dataclasses import dataclass from pathlib import Path from pyalpm import vercmp # type: ignore[import-not-found] -from srcinfo.parse import parse_srcinfo # type: ignore[import-untyped] from typing import Any, Self from urllib.parse import urlparse from ahriman.core.alpm.pacman import Pacman from ahriman.core.alpm.remote import AUR, Official, OfficialSyncdb -from ahriman.core.exceptions import PackageInfoError +from ahriman.core.configuration import Configuration from ahriman.core.log import LazyLogging -from ahriman.core.utils import check_output, dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow +from ahriman.core.utils import dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow from ahriman.models.package_description import PackageDescription from ahriman.models.package_source import PackageSource from ahriman.models.pkgbuild import Pkgbuild from ahriman.models.remote_source import RemoteSource -from ahriman.models.repository_paths import RepositoryPaths @dataclass(kw_only=True) @@ -415,39 +413,43 @@ class Package(LazyLogging): return sorted(set(generator())) - def actual_version(self, paths: RepositoryPaths) -> str: + def actual_version(self, configuration: Configuration) -> str: """ additional method to handle VCS package versions Args: - paths(RepositoryPaths): repository paths instance + configuration(Configuration): configuration instance Returns: str: package version if package is not VCS and current version according to VCS otherwise - - Raises: - PackageInfoError: if there are parsing errors """ if not self.is_vcs: return self.version - from ahriman.core.build_tools.sources import Sources + from ahriman.core.build_tools.task import Task - Sources.load(paths.cache_for(self.base), self, [], paths) + _, repository_id = configuration.check_loaded() + paths = configuration.repository_paths + task = Task(self, configuration, repository_id.architecture, paths) try: - # update pkgver first - check_output("makepkg", 
"--nodeps", "--nobuild", cwd=paths.cache_for(self.base), logger=self.logger) - # generate new .SRCINFO and put it to parser - srcinfo_source = check_output("makepkg", "--printsrcinfo", - cwd=paths.cache_for(self.base), logger=self.logger) - srcinfo, errors = parse_srcinfo(srcinfo_source) - if errors: - raise PackageInfoError(errors) + # create fresh chroot environment, fetch sources and - automagically - update PKGBUILD + task.init(paths.cache_for(self.base), [], None) + task.setup(paths.cache_for(self.base)) - return full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"]) + pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD") + + return full_version( + pkgbuild.get_as("epoch", str, default=None), + pkgbuild.get_as("pkgver", str), + pkgbuild.get_as("pkgrel", str), + ) except Exception: - self.logger.exception("cannot determine version of VCS package, make sure that VCS tools are installed") + self.logger.exception("cannot determine version of VCS package") + finally: + # clear log files generated by devtools + for log_file in paths.cache_for(self.base).glob("*.log"): + log_file.unlink() return self.version @@ -502,26 +504,25 @@ class Package(LazyLogging): if package.build_date is not None ) - def is_outdated(self, remote: Package, paths: RepositoryPaths, *, - vcs_allowed_age: float | int = 0, + def is_outdated(self, remote: Package, configuration: Configuration, *, calculate_version: bool = True) -> bool: """ check if package is out-of-dated Args: remote(Package): package properties from remote source - paths(RepositoryPaths): repository paths instance. Required for VCS packages cache - vcs_allowed_age(float | int, optional): max age of the built packages before they will be - forced to calculate actual version (Default value = 0) + configuration(Configuration): configuration instance calculate_version(bool, optional): expand version to actual value (by calculating git versions) (Default value = True) Returns: bool: ``True`` if the package is out-of-dated and ``False`` otherwise """ + vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0) min_vcs_build_date = utcnow().timestamp() - vcs_allowed_age + if calculate_version and not self.is_newer_than(min_vcs_build_date): - remote_version = remote.actual_version(paths) + remote_version = remote.actual_version(configuration) else: remote_version = remote.version diff --git a/src/ahriman/models/pkgbuild.py b/src/ahriman/models/pkgbuild.py index 72e658c0..da45a96b 100644 --- a/src/ahriman/models/pkgbuild.py +++ b/src/ahriman/models/pkgbuild.py @@ -65,10 +65,10 @@ class Pkgbuild(Mapping[str, str | list[str]]): fields: dict[str, PkgbuildPatch] - _ARRAY_ASSIGNMENT_REGEX = re.compile(r"^(?P\w+)=$") - _STRING_ASSIGNMENT_REGEX = re.compile(r"^(?P\w+)=(?P.+)$") - # in addition functions can have dash to usual assignment - _FUNCTION_DECLARATION_REGEX = re.compile(r"^(?P[\w-]+)$") + _ARRAY_ASSIGNMENT = re.compile(r"^(?P\w+)=$") + _STRING_ASSIGNMENT = re.compile(r"^(?P\w+)=(?P.+)$") + # in addition, functions can have dash to usual assignment + _FUNCTION_DECLARATION = re.compile(r"^(?P[\w-]+)$") @property def variables(self) -> dict[str, str]: @@ -201,20 +201,20 @@ class Pkgbuild(Mapping[str, str | list[str]]): StopIteration: if iteration reaches the end of the file' """ # simple assignment rule - if (match := Pkgbuild._STRING_ASSIGNMENT_REGEX.match(token)) is not None: + if (match := Pkgbuild._STRING_ASSIGNMENT.match(token)) is not None: key = match.group("key") value = match.group("value") return 
key, PkgbuildPatch(key, value) match parser.get_token(): # array processing. Arrays will be sent as "key=", "(", values, ")" - case PkgbuildToken.ArrayStarts if (match := Pkgbuild._ARRAY_ASSIGNMENT_REGEX.match(token)) is not None: + case PkgbuildToken.ArrayStarts if (match := Pkgbuild._ARRAY_ASSIGNMENT.match(token)) is not None: key = match.group("key") value = Pkgbuild._parse_array(parser) return key, PkgbuildPatch(key, value) # functions processing. Function will be sent as "name", "()", "{", body, "}" - case PkgbuildToken.FunctionDeclaration if Pkgbuild._FUNCTION_DECLARATION_REGEX.match(token): + case PkgbuildToken.FunctionDeclaration if Pkgbuild._FUNCTION_DECLARATION.match(token): key = f"{token}{PkgbuildToken.FunctionDeclaration}" value = Pkgbuild._parse_function(parser) return token, PkgbuildPatch(key, value) # this is not mistake, assign to token without () @@ -234,7 +234,7 @@ class Pkgbuild(Mapping[str, str | list[str]]): Args: key(str): key name return_type(type[T]): return type, either ``str`` or ``list[str]`` - default(U): default value to return if no key found + default(U, optional): default value to return if no key found Returns: T | U: value associated with key or default value if no value found and fallback is provided diff --git a/src/ahriman/models/pkgbuild_patch.py b/src/ahriman/models/pkgbuild_patch.py index cdb3c407..b4efe3a2 100644 --- a/src/ahriman/models/pkgbuild_patch.py +++ b/src/ahriman/models/pkgbuild_patch.py @@ -176,7 +176,8 @@ class PkgbuildPatch: variables(dict[str, str]): map of variables available for usage Returns: - str | list[str]: substituted value. All unknown variables will remain the same + str | list[str]: substituted value. All unknown variables will remain as links to their values. + This function doesn't support recursive substitution """ if isinstance(self.value, str): return Template(self.value).safe_substitute(variables) From 07eb930bd1d310670e65c5aba9c51f538036be69 Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Wed, 11 Sep 2024 03:14:23 +0300 Subject: [PATCH 04/10] simplify typed get --- src/ahriman/models/package.py | 14 +++++++------- src/ahriman/models/pkgbuild.py | 7 ++----- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/src/ahriman/models/package.py b/src/ahriman/models/package.py index e9dd4a4b..20f20f77 100644 --- a/src/ahriman/models/package.py +++ b/src/ahriman/models/package.py @@ -267,9 +267,9 @@ class Package(LazyLogging): for package, properties in pkgbuild.packages().items() } version = full_version( - pkgbuild.get_as("epoch", str, default=None), - pkgbuild.get_as("pkgver", str), - pkgbuild.get_as("pkgrel", str), + pkgbuild.get_as("epoch", default=None), + pkgbuild.get_as("pkgver"), + pkgbuild.get_as("pkgrel"), ) remote = RemoteSource( @@ -281,7 +281,7 @@ class Package(LazyLogging): ) return cls( - base=pkgbuild.get_as("pkgbase", str), + base=pkgbuild.get_as("pkgbase"), version=version, remote=remote, packages=packages, @@ -440,9 +440,9 @@ class Package(LazyLogging): pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD") return full_version( - pkgbuild.get_as("epoch", str, default=None), - pkgbuild.get_as("pkgver", str), - pkgbuild.get_as("pkgrel", str), + pkgbuild.get_as("epoch", default=None), + pkgbuild.get_as("pkgver"), + pkgbuild.get_as("pkgrel"), ) except Exception: self.logger.exception("cannot determine version of VCS package") diff --git a/src/ahriman/models/pkgbuild.py b/src/ahriman/models/pkgbuild.py index da45a96b..9fffbce1 100644 --- a/src/ahriman/models/pkgbuild.py +++ 
b/src/ahriman/models/pkgbuild.py @@ -227,13 +227,12 @@ class Pkgbuild(Mapping[str, str | list[str]]): case None: raise StopIteration - def get_as(self, key: str, return_type: type[T], **kwargs: T | U) -> T | U: + def get_as(self, key: str, **kwargs: T | U) -> T | U: """ type guard for getting value by key Args: key(str): key name - return_type(type[T]): return type, either ``str`` or ``list[str]`` default(U, optional): default value to return if no key found Returns: @@ -242,8 +241,6 @@ class Pkgbuild(Mapping[str, str | list[str]]): Raises: KeyError: if no key found and no default has been provided """ - del return_type - if key not in self: if "default" in kwargs: return kwargs["default"] @@ -262,7 +259,7 @@ class Pkgbuild(Mapping[str, str | list[str]]): def io(package_name: str) -> IO[str]: # try to read package specific function and fallback to default otherwise - content = self.get_as(f"package_{package_name}", str, default=None) or self.get_as("package", str) + content = self.get_as(f"package_{package_name}", default=None) or self.get_as("package") return StringIO(content) return {package: self.from_io(io(package)) for package in packages} From d9a2045d3286de3935c4e1378b1147964807daa0 Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Thu, 12 Sep 2024 03:26:38 +0300 Subject: [PATCH 05/10] try to improve parser --- src/ahriman/core/build_tools/task.py | 27 ++++----------------------- src/ahriman/models/package.py | 2 +- src/ahriman/models/pkgbuild.py | 20 ++++++++++++++++---- 3 files changed, 21 insertions(+), 28 deletions(-) diff --git a/src/ahriman/core/build_tools/task.py b/src/ahriman/core/build_tools/task.py index d410618f..2c61a613 100644 --- a/src/ahriman/core/build_tools/task.py +++ b/src/ahriman/core/build_tools/task.py @@ -98,12 +98,13 @@ class Task(LazyLogging): if self.include_debug_packages or not package.name.startswith(debug_package_prefix) ] - def build(self, sources_dir: Path, **kwargs: str | None) -> list[Path]: + def build(self, sources_dir: Path, *, dry_run: bool = False, **kwargs: str | None) -> list[Path]: """ run package build Args: sources_dir(Path): path to where sources are + dry_run(bool, optional): do not perform build itself (Default value = False) **kwargs(str | None): environment variables to be passed to build processes Returns: @@ -113,6 +114,8 @@ class Task(LazyLogging): command.extend(self.archbuild_flags) command.extend(["--"] + self.makechrootpkg_flags) command.extend(["--"] + self.makepkg_flags) + if dry_run: + command.extend(["--nobuild"]) self.logger.info("using %s for %s", command, self.package.base) environment: dict[str, str] = { @@ -159,25 +162,3 @@ class Task(LazyLogging): patch.write(sources_dir / "PKGBUILD") return last_commit_sha - - def setup(self, sources_dir: Path) -> None: - """ - setup chroot environment without building package itself. 
This function, in particular, useful in case if it is - required to refresh pkgver to the actual value without package building - - Args: - sources_dir(Path): path to where sources are - """ - command = [self.build_command, "-r", str(self.paths.chroot)] - command.extend(self.archbuild_flags) - command.extend(["--"] + self.makechrootpkg_flags) - command.extend(["--"] + self.makepkg_flags + ["--nobuild"]) - self.logger.info("using %s for %s", command, self.package.base) - - check_output( - *command, - exception=BuildError.from_process(self.package.base), - cwd=sources_dir, - logger=self.logger, - user=self.uid, - ) diff --git a/src/ahriman/models/package.py b/src/ahriman/models/package.py index 20f20f77..e07e52bb 100644 --- a/src/ahriman/models/package.py +++ b/src/ahriman/models/package.py @@ -435,7 +435,7 @@ class Package(LazyLogging): try: # create fresh chroot environment, fetch sources and - automagically - update PKGBUILD task.init(paths.cache_for(self.base), [], None) - task.setup(paths.cache_for(self.base)) + task.build(paths.cache_for(self.base), dry_run=False) pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD") diff --git a/src/ahriman/models/pkgbuild.py b/src/ahriman/models/pkgbuild.py index 9fffbce1..f8f103db 100644 --- a/src/ahriman/models/pkgbuild.py +++ b/src/ahriman/models/pkgbuild.py @@ -39,11 +39,11 @@ class PkgbuildToken(StrEnum): well-known tokens dictionary Attributes: - ArrayStarts(PkgbuildToken): (class attribute) array starts token ArrayEnds(PkgbuildToken): (class attribute) array ends token + ArrayStarts(PkgbuildToken): (class attribute) array starts token FunctionDeclaration(PkgbuildToken): (class attribute) function declaration token - FunctionStarts(PkgbuildToken): (class attribute) function starts token FunctionEnds(PkgbuildToken): (class attribute) function ends token + FunctionStarts(PkgbuildToken): (class attribute) function starts token """ ArrayStarts = "(" @@ -113,6 +113,10 @@ class Pkgbuild(Mapping[str, str | list[str]]): fields = {} parser = shlex.shlex(stream, posix=True, punctuation_chars=True) + # ignore substitution and extend bash symbols + parser.wordchars += "${}#:+" + # in case of default behaviour, it will ignore, for example, segment part of url outside of quotes + parser.commenters = "" while token := parser.get_token(): try: key, value = cls._parse_token(token, parser) @@ -180,7 +184,7 @@ class Pkgbuild(Mapping[str, str | list[str]]): raise ValueError("Function body wasn't found") # read the specified interval from source stream - io.seek(start_position - 1) # start from the previous symbol ({) + io.seek(start_position - 1) # start from the previous symbol ("{") content = io.read(end_position - start_position + 1) return content @@ -198,7 +202,7 @@ class Pkgbuild(Mapping[str, str | list[str]]): tuple[str, PkgbuildPatch]: extracted a pair of key and its value Raises: - StopIteration: if iteration reaches the end of the file' + StopIteration: if iteration reaches the end of the file """ # simple assignment rule if (match := Pkgbuild._STRING_ASSIGNMENT.match(token)) is not None: @@ -219,6 +223,14 @@ class Pkgbuild(Mapping[str, str | list[str]]): value = Pkgbuild._parse_function(parser) return token, PkgbuildPatch(key, value) # this is not mistake, assign to token without () + # special function case, where "(" and ")" are separated tokens, e.g. 
"pkgver ( )" + case PkgbuildToken.ArrayStarts if Pkgbuild._FUNCTION_DECLARATION.match(token): + next_token = parser.get_token() + if next_token == PkgbuildToken.ArrayEnds: # replace closing bracket with "()" + next_token = PkgbuildToken.FunctionDeclaration + parser.push_token(next_token) # type: ignore[arg-type] + return Pkgbuild._parse_token(token, parser) + # some random token received without continuation, lets guess it is empty assignment (i.e. key=) case other if other is not None: return Pkgbuild._parse_token(other, parser) From e64ada0067c5e4511f96e385dbb839c666bf93a5 Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Fri, 13 Sep 2024 01:48:38 +0300 Subject: [PATCH 06/10] docs and recipes updatte --- .github/workflows/setup.sh | 2 -- Dockerfile | 4 ---- docs/faq/general.rst | 6 +----- package/archlinux/PKGBUILD | 8 ++------ 4 files changed, 3 insertions(+), 17 deletions(-) diff --git a/.github/workflows/setup.sh b/.github/workflows/setup.sh index 5ed7afcb..54f6b13e 100755 --- a/.github/workflows/setup.sh +++ b/.github/workflows/setup.sh @@ -15,8 +15,6 @@ pacman -Sy --noconfirm devtools git pyalpm python-inflection python-passlib pyth pacman -Sy --noconfirm --asdeps base-devel python-build python-flit python-installer python-tox python-wheel # optional dependencies if [[ -z $MINIMAL_INSTALL ]]; then - # VCS support - pacman -Sy --noconfirm breezy darcs mercurial subversion # web server pacman -Sy --noconfirm python-aioauth-client python-aiohttp python-aiohttp-apispec-git python-aiohttp-cors python-aiohttp-jinja2 python-aiohttp-security python-aiohttp-session python-cryptography python-jinja # additional features diff --git a/Dockerfile b/Dockerfile index 5a887293..18fbd7f4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,7 +31,6 @@ RUN useradd -m -d "/home/build" -s "/usr/bin/nologin" build && \ echo "build ALL=(ALL) NOPASSWD: ALL" > "/etc/sudoers.d/build" COPY "docker/install-aur-package.sh" "/usr/local/bin/install-aur-package" ## install package dependencies -## darcs is not installed by reasons, because it requires a lot haskell packages which dramatically increase image size RUN pacman -Sy --noconfirm --asdeps \ devtools \ git \ @@ -50,9 +49,7 @@ RUN pacman -Sy --noconfirm --asdeps \ python-wheel \ && \ pacman -Sy --noconfirm --asdeps \ - breezy \ git \ - mercurial \ python-aiohttp \ python-boto3 \ python-cerberus \ @@ -61,7 +58,6 @@ RUN pacman -Sy --noconfirm --asdeps \ python-matplotlib \ python-systemd \ rsync \ - subversion \ && \ runuser -u build -- install-aur-package \ python-aioauth-client \ diff --git a/docs/faq/general.rst b/docs/faq/general.rst index 8ac04879..c7f00682 100644 --- a/docs/faq/general.rst +++ b/docs/faq/general.rst @@ -265,11 +265,7 @@ TL;DR How to update VCS packages ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Normally the service handles VCS packages correctly, however it requires additional dependencies: - -.. code-block:: shell - - pacman -S breezy darcs mercurial subversion +Normally the service handles VCS packages correctly. The version is updated in clean chroot, no additional actions are required. 
How to review changes before build ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/package/archlinux/PKGBUILD b/package/archlinux/PKGBUILD index 59b4b36f..31b7ca90 100644 --- a/package/archlinux/PKGBUILD +++ b/package/archlinux/PKGBUILD @@ -9,10 +9,7 @@ url="https://github.com/arcan1s/ahriman" license=('GPL3') depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-inflection' 'python-passlib' 'python-pyelftools' 'python-requests' 'python-srcinfo') makedepends=('python-build' 'python-flit' 'python-installer' 'python-wheel') -optdepends=('breezy: -bzr packages support' - 'darcs: -darcs packages support' - 'mercurial: -hg packages support' - 'python-aioauth-client: web server with OAuth2 authorization' +optdepends=('python-aioauth-client: web server with OAuth2 authorization' 'python-aiohttp: web server' 'python-aiohttp-apispec>=3.0.0: web server' 'python-aiohttp-cors: web server' @@ -26,8 +23,7 @@ optdepends=('breezy: -bzr packages support' 'python-requests-unixsocket2: client report to web server by unix socket' 'python-jinja: html report generation' 'python-systemd: journal support' - 'rsync: sync by using rsync' - 'subversion: -svn packages support') + 'rsync: sync by using rsync') source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver.tar.gz" 'ahriman.sysusers' 'ahriman.tmpfiles') From 28bc6edd666fc2becc163fcf61a91bc151e3a856 Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Fri, 13 Sep 2024 13:43:16 +0300 Subject: [PATCH 07/10] never raise keyerror instead return empty string --- src/ahriman/core/status/watcher.py | 2 +- src/ahriman/models/package.py | 12 +---- src/ahriman/models/pkgbuild.py | 78 +++++++++++++++--------------- 3 files changed, 41 insertions(+), 51 deletions(-) diff --git a/src/ahriman/core/status/watcher.py b/src/ahriman/core/status/watcher.py index fa838710..0c0bc1d5 100644 --- a/src/ahriman/core/status/watcher.py +++ b/src/ahriman/core/status/watcher.py @@ -199,7 +199,7 @@ class Watcher(LazyLogging): proxy methods for reporter client Args: - item(str): property name: + item(str): property name Returns: Any: attribute by its name diff --git a/src/ahriman/models/package.py b/src/ahriman/models/package.py index e07e52bb..bc479bf5 100644 --- a/src/ahriman/models/package.py +++ b/src/ahriman/models/package.py @@ -266,11 +266,7 @@ class Package(LazyLogging): ) for package, properties in pkgbuild.packages().items() } - version = full_version( - pkgbuild.get_as("epoch", default=None), - pkgbuild.get_as("pkgver"), - pkgbuild.get_as("pkgrel"), - ) + version = full_version(pkgbuild.epoch, pkgbuild.pkgver, pkgbuild.pkgrel) remote = RemoteSource( source=PackageSource.Local, @@ -439,11 +435,7 @@ class Package(LazyLogging): pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD") - return full_version( - pkgbuild.get_as("epoch", default=None), - pkgbuild.get_as("pkgver"), - pkgbuild.get_as("pkgrel"), - ) + return full_version(pkgbuild.epoch, pkgbuild.pkgver, pkgbuild.pkgrel) except Exception: self.logger.exception("cannot determine version of VCS package") finally: diff --git a/src/ahriman/models/pkgbuild.py b/src/ahriman/models/pkgbuild.py index f8f103db..7f36609e 100644 --- a/src/ahriman/models/pkgbuild.py +++ b/src/ahriman/models/pkgbuild.py @@ -25,15 +25,11 @@ from dataclasses import dataclass from enum import StrEnum from io import StringIO from pathlib import Path -from typing import IO, Self, TypeVar, cast +from typing import Any, IO, Self from ahriman.models.pkgbuild_patch import PkgbuildPatch -T = TypeVar("T", str, list[str]) 
-U = TypeVar("U", str, list[str], None) - - class PkgbuildToken(StrEnum): """ well-known tokens dictionary @@ -119,12 +115,12 @@ class Pkgbuild(Mapping[str, str | list[str]]): parser.commenters = "" while token := parser.get_token(): try: - key, value = cls._parse_token(token, parser) - fields[key] = value + patch = cls._parse_token(token, parser) + fields[patch.key] = patch except StopIteration: break - return cls(fields) + return cls({key: value for key, value in fields.items() if key}) @staticmethod def _parse_array(parser: shlex.shlex) -> list[str]: @@ -175,7 +171,7 @@ class Pkgbuild(Mapping[str, str | list[str]]): while token := parser.get_token(): match token: case PkgbuildToken.FunctionStarts: - start_position = io.tell() + start_position = io.tell() - 1 case PkgbuildToken.FunctionEnds: end_position = io.tell() break @@ -184,13 +180,13 @@ class Pkgbuild(Mapping[str, str | list[str]]): raise ValueError("Function body wasn't found") # read the specified interval from source stream - io.seek(start_position - 1) # start from the previous symbol ("{") - content = io.read(end_position - start_position + 1) + io.seek(start_position - 1) # start from the previous symbol + content = io.read(end_position - start_position) return content @staticmethod - def _parse_token(token: str, parser: shlex.shlex) -> tuple[str, PkgbuildPatch]: + def _parse_token(token: str, parser: shlex.shlex) -> PkgbuildPatch: """ parse single token to the PKGBUILD field @@ -199,7 +195,7 @@ class Pkgbuild(Mapping[str, str | list[str]]): parser(shlex.shlex): shell parser instance Returns: - tuple[str, PkgbuildPatch]: extracted a pair of key and its value + PkgbuildPatch: extracted a PKGBUILD node Raises: StopIteration: if iteration reaches the end of the file @@ -208,20 +204,20 @@ class Pkgbuild(Mapping[str, str | list[str]]): if (match := Pkgbuild._STRING_ASSIGNMENT.match(token)) is not None: key = match.group("key") value = match.group("value") - return key, PkgbuildPatch(key, value) + return PkgbuildPatch(key, value) match parser.get_token(): # array processing. Arrays will be sent as "key=", "(", values, ")" case PkgbuildToken.ArrayStarts if (match := Pkgbuild._ARRAY_ASSIGNMENT.match(token)) is not None: key = match.group("key") value = Pkgbuild._parse_array(parser) - return key, PkgbuildPatch(key, value) + return PkgbuildPatch(key, value) # functions processing. Function will be sent as "name", "()", "{", body, "}" case PkgbuildToken.FunctionDeclaration if Pkgbuild._FUNCTION_DECLARATION.match(token): key = f"{token}{PkgbuildToken.FunctionDeclaration}" value = Pkgbuild._parse_function(parser) - return token, PkgbuildPatch(key, value) # this is not mistake, assign to token without () + return PkgbuildPatch(key, value) # this is not mistake, assign to token without () # special function case, where "(" and ")" are separated tokens, e.g. 
"pkgver ( )" case PkgbuildToken.ArrayStarts if Pkgbuild._FUNCTION_DECLARATION.match(token): @@ -239,27 +235,6 @@ class Pkgbuild(Mapping[str, str | list[str]]): case None: raise StopIteration - def get_as(self, key: str, **kwargs: T | U) -> T | U: - """ - type guard for getting value by key - - Args: - key(str): key name - default(U, optional): default value to return if no key found - - Returns: - T | U: value associated with key or default value if no value found and fallback is provided - - Raises: - KeyError: if no key found and no default has been provided - """ - if key not in self: - if "default" in kwargs: - return kwargs["default"] - raise KeyError(key) - - return cast(T, self[key]) - def packages(self) -> dict[str, Self]: """ extract properties from internal package functions @@ -271,14 +246,29 @@ class Pkgbuild(Mapping[str, str | list[str]]): def io(package_name: str) -> IO[str]: # try to read package specific function and fallback to default otherwise - content = self.get_as(f"package_{package_name}", default=None) or self.get_as("package") + # content = self.get_as(f"package_{package_name}") or self.get_as("package") + content = getattr(self, f"package_{package_name}") or self.package return StringIO(content) return {package: self.from_io(io(package)) for package in packages} + def __getattr__(self, item: str) -> Any: + """ + proxy method for PKGBUILD properties + + Args: + item(str): property name + + Returns: + Any: attribute by its name + """ + return self[item] + def __getitem__(self, key: str) -> str | list[str]: """ - get the field of the PKGBUILD + get the field of the PKGBUILD. This method tries to get exact key value if possible; if none found, it tries to + fetch function with the same name. And, finally, it returns empty value if nothing found, so this function never + raises an ``KeyError``.exception`` Args: key(str): key name @@ -286,7 +276,15 @@ class Pkgbuild(Mapping[str, str | list[str]]): Returns: str | list[str]: value by the key """ - return self.fields[key].substitute(self.variables) + value = self.fields.get(key) + # if the key wasn't found and user didn't ask for function explicitly, we can try to get by function name + if value is None and not key.endswith(PkgbuildToken.FunctionDeclaration): + value = self.fields.get(f"{key}{PkgbuildToken.FunctionDeclaration}") + # if we still didn't find anything, we fall back to empty value (just like shell) + if value is None: + value = PkgbuildPatch(key, "") + + return value.substitute(self.variables) def __iter__(self) -> Iterator[str]: """ From cf8915a457dd54a7c715548e4160e831969aac7d Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Fri, 13 Sep 2024 18:10:58 +0300 Subject: [PATCH 08/10] udpate tests --- .github/workflows/setup.sh | 2 +- Dockerfile | 1 - package/archlinux/PKGBUILD | 2 +- pyproject.toml | 1 - src/ahriman/models/package.py | 6 +- src/ahriman/models/pkgbuild.py | 9 +- tests/ahriman/core/build_tools/test_task.py | 128 +++++---- .../core/repository/test_update_handler.py | 12 +- tests/ahriman/core/test_utils.py | 10 +- tests/ahriman/core/upload/test_http_upload.py | 4 +- tests/ahriman/core/upload/test_s3.py | 4 +- tests/ahriman/models/test_package.py | 175 +++++------- tests/ahriman/models/test_pkgbuild_patch.py | 8 + .../models/package_ahriman_pkgbuild | 55 ++++ .../models/package_ahriman_srcinfo | 45 --- .../models/package_gcc10_pkgbuild | 270 ++++++++++++++++++ .../models/package_gcc10_srcinfo | 57 ---- .../package_jellyfin-ffmpeg5-bin_srcinfo | 28 -- .../package_jellyfin-ffmpeg6-bin_pkgbuild | 31 
++ .../models/package_tpacpi-bat-git_pkgbuild | 30 ++ .../models/package_tpacpi-bat-git_srcinfo | 17 -- .../testresources/models/package_yay_pkgbuild | 37 +++ .../testresources/models/package_yay_srcinfo | 21 -- 23 files changed, 595 insertions(+), 358 deletions(-) create mode 100644 tests/testresources/models/package_ahriman_pkgbuild delete mode 100644 tests/testresources/models/package_ahriman_srcinfo create mode 100644 tests/testresources/models/package_gcc10_pkgbuild delete mode 100644 tests/testresources/models/package_gcc10_srcinfo delete mode 100644 tests/testresources/models/package_jellyfin-ffmpeg5-bin_srcinfo create mode 100644 tests/testresources/models/package_jellyfin-ffmpeg6-bin_pkgbuild create mode 100644 tests/testresources/models/package_tpacpi-bat-git_pkgbuild delete mode 100644 tests/testresources/models/package_tpacpi-bat-git_srcinfo create mode 100644 tests/testresources/models/package_yay_pkgbuild delete mode 100644 tests/testresources/models/package_yay_srcinfo diff --git a/.github/workflows/setup.sh b/.github/workflows/setup.sh index 54f6b13e..b52bd430 100755 --- a/.github/workflows/setup.sh +++ b/.github/workflows/setup.sh @@ -10,7 +10,7 @@ echo -e '[arcanisrepo]\nServer = https://repo.arcanis.me/$arch\nSigLevel = Never # refresh the image pacman -Syu --noconfirm # main dependencies -pacman -Sy --noconfirm devtools git pyalpm python-inflection python-passlib python-pyelftools python-requests python-srcinfo python-systemd sudo +pacman -Sy --noconfirm devtools git pyalpm python-inflection python-passlib python-pyelftools python-requests python-systemd sudo # make dependencies pacman -Sy --noconfirm --asdeps base-devel python-build python-flit python-installer python-tox python-wheel # optional dependencies diff --git a/Dockerfile b/Dockerfile index 18fbd7f4..d00f0c63 100644 --- a/Dockerfile +++ b/Dockerfile @@ -39,7 +39,6 @@ RUN pacman -Sy --noconfirm --asdeps \ python-passlib \ python-pyelftools \ python-requests \ - python-srcinfo \ && \ pacman -Sy --noconfirm --asdeps \ base-devel \ diff --git a/package/archlinux/PKGBUILD b/package/archlinux/PKGBUILD index 31b7ca90..f73feb53 100644 --- a/package/archlinux/PKGBUILD +++ b/package/archlinux/PKGBUILD @@ -7,7 +7,7 @@ pkgdesc="ArcH linux ReposItory MANager" arch=('any') url="https://github.com/arcan1s/ahriman" license=('GPL3') -depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-inflection' 'python-passlib' 'python-pyelftools' 'python-requests' 'python-srcinfo') +depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-inflection' 'python-passlib' 'python-pyelftools' 'python-requests') makedepends=('python-build' 'python-flit' 'python-installer' 'python-wheel') optdepends=('python-aioauth-client: web server with OAuth2 authorization' 'python-aiohttp: web server' diff --git a/pyproject.toml b/pyproject.toml index e85e1865..c72d137c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,6 @@ dependencies = [ "passlib", "pyelftools", "requests", - "srcinfo", ] dynamic = ["version"] diff --git a/src/ahriman/models/package.py b/src/ahriman/models/package.py index bc479bf5..23e5f93c 100644 --- a/src/ahriman/models/package.py +++ b/src/ahriman/models/package.py @@ -277,7 +277,7 @@ class Package(LazyLogging): ) return cls( - base=pkgbuild.get_as("pkgbase"), + base=pkgbuild.pkgbase, version=version, remote=remote, packages=packages, @@ -372,7 +372,7 @@ class Package(LazyLogging): yield Path(source) - if isinstance(install := pkgbuild.get("install"), str): # well, in reality it is either None or str + if install := 
pkgbuild.get("install"): yield Path(install) @staticmethod @@ -431,7 +431,7 @@ class Package(LazyLogging): try: # create fresh chroot environment, fetch sources and - automagically - update PKGBUILD task.init(paths.cache_for(self.base), [], None) - task.build(paths.cache_for(self.base), dry_run=False) + task.build(paths.cache_for(self.base), dry_run=True) pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD") diff --git a/src/ahriman/models/pkgbuild.py b/src/ahriman/models/pkgbuild.py index 7f36609e..211c4799 100644 --- a/src/ahriman/models/pkgbuild.py +++ b/src/ahriman/models/pkgbuild.py @@ -120,6 +120,12 @@ class Pkgbuild(Mapping[str, str | list[str]]): except StopIteration: break + # pkgbase is optional field, the pkgname must be used instead if not set + # however, pkgname is not presented is "package()" functions which we are parsing here too, + # thus, in our terms, it is optional too + if "pkgbase" not in fields: + fields["pkgbase"] = fields.get("pkgname") + return cls({key: value for key, value in fields.items() if key}) @staticmethod @@ -281,8 +287,9 @@ class Pkgbuild(Mapping[str, str | list[str]]): if value is None and not key.endswith(PkgbuildToken.FunctionDeclaration): value = self.fields.get(f"{key}{PkgbuildToken.FunctionDeclaration}") # if we still didn't find anything, we fall back to empty value (just like shell) + # to avoid recursion here, we can just drop from the method if value is None: - value = PkgbuildPatch(key, "") + return "" return value.substitute(self.variables) diff --git a/tests/ahriman/core/build_tools/test_task.py b/tests/ahriman/core/build_tools/test_task.py index 399baeac..2433733f 100644 --- a/tests/ahriman/core/build_tools/test_task.py +++ b/tests/ahriman/core/build_tools/test_task.py @@ -2,37 +2,65 @@ import pytest from pathlib import Path from pytest_mock import MockerFixture -from unittest.mock import call as MockCall from ahriman.core.build_tools.task import Task from ahriman.models.pkgbuild_patch import PkgbuildPatch +def test_package_archives(task_ahriman: Task, mocker: MockerFixture) -> None: + """ + must correctly return list of new files + """ + mocker.patch("pathlib.Path.iterdir", return_value=[ + Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"), + Path(f"{task_ahriman.package.base}-debug-{task_ahriman.package.version}-any.pkg.tar.xz"), + Path("source.pkg.tar.xz"), + Path("randomfile"), + Path("namcap.log"), + ]) + assert task_ahriman._package_archives(Path("local"), [Path("source.pkg.tar.xz")]) == [ + Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"), + Path(f"{task_ahriman.package.base}-debug-{task_ahriman.package.version}-any.pkg.tar.xz"), + ] + + +def test_package_archives_no_debug(task_ahriman: Task, mocker: MockerFixture) -> None: + """ + must correctly return list of new files without debug packages + """ + task_ahriman.include_debug_packages = False + mocker.patch("pathlib.Path.iterdir", return_value=[ + Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"), + Path(f"{task_ahriman.package.base}-debug-{task_ahriman.package.version}-any.pkg.tar.xz"), + Path("source.pkg.tar.xz"), + Path("randomfile"), + Path("namcap.log"), + ]) + assert task_ahriman._package_archives(Path("local"), [Path("source.pkg.tar.xz")]) == [ + Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"), + ] + + def test_build(task_ahriman: Task, mocker: MockerFixture) -> None: """ must build package """ local = Path("local") + 
mocker.patch("pathlib.Path.iterdir", return_value=["file"]) check_output_mock = mocker.patch("ahriman.core.build_tools.task.check_output") + archives_mock = mocker.patch("ahriman.core.build_tools.task.Task._package_archives", + return_value=[task_ahriman.package.base]) - task_ahriman.build(local) - check_output_mock.assert_has_calls([ - MockCall( - "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck", - exception=pytest.helpers.anyvar(int), - cwd=local, - logger=task_ahriman.logger, - user=task_ahriman.uid, - environment={}, - ), - MockCall( - "makepkg", "--packagelist", - exception=pytest.helpers.anyvar(int), - cwd=local, - logger=task_ahriman.logger, - environment={}, - ), - ]) + assert task_ahriman.build(local) == [task_ahriman.package.base] + check_output_mock.assert_called_once_with( + "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck", + exception=pytest.helpers.anyvar(int), + cwd=local, + logger=task_ahriman.logger, + user=task_ahriman.uid, + environment={}, + ) + archives_mock.assert_called_once_with(local, ["file"]) def test_build_environment(task_ahriman: Task, mocker: MockerFixture) -> None: @@ -40,55 +68,41 @@ def test_build_environment(task_ahriman: Task, mocker: MockerFixture) -> None: must build package with environment variables set """ local = Path("local") + mocker.patch("pathlib.Path.iterdir", return_value=["file"]) + mocker.patch("ahriman.core.build_tools.task.Task._package_archives", return_value=[task_ahriman.package.base]) check_output_mock = mocker.patch("ahriman.core.build_tools.task.check_output") + environment = {"variable": "value"} task_ahriman.build(local, **environment, empty=None) - check_output_mock.assert_has_calls([ - MockCall( - "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck", - exception=pytest.helpers.anyvar(int), - cwd=local, - logger=task_ahriman.logger, - user=task_ahriman.uid, - environment=environment, - ), - MockCall( - "makepkg", "--packagelist", - exception=pytest.helpers.anyvar(int), - cwd=local, - logger=task_ahriman.logger, - environment=environment, - ), - ]) + check_output_mock.assert_called_once_with( + "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck", + exception=pytest.helpers.anyvar(int), + cwd=local, + logger=task_ahriman.logger, + user=task_ahriman.uid, + environment=environment, + ) -def test_build_no_debug(task_ahriman: Task, mocker: MockerFixture) -> None: +def test_build_dry_run(task_ahriman: Task, mocker: MockerFixture) -> None: """ - must filter debug packages from result + must run devtools in dry-run mode """ local = Path("local") + mocker.patch("pathlib.Path.iterdir", return_value=["file"]) + mocker.patch("ahriman.core.build_tools.task.Task._package_archives", return_value=[task_ahriman.package.base]) check_output_mock = mocker.patch("ahriman.core.build_tools.task.check_output") - task_ahriman.include_debug_packages = False - task_ahriman.build(local) - check_output_mock.assert_has_calls([ - MockCall( - "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck", - exception=pytest.helpers.anyvar(int), - cwd=local, - logger=task_ahriman.logger, - user=task_ahriman.uid, - environment={}, - ), - MockCall( - "makepkg", "--packagelist", "OPTIONS=(!debug)", - exception=pytest.helpers.anyvar(int), - cwd=local, - logger=task_ahriman.logger, - environment={}, - ), - ]) + assert task_ahriman.build(local, dry_run=True) == [task_ahriman.package.base] + 
check_output_mock.assert_called_once_with( + "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck", "--nobuild", + exception=pytest.helpers.anyvar(int), + cwd=local, + logger=task_ahriman.logger, + user=task_ahriman.uid, + environment={}, + ) def test_init(task_ahriman: Task, mocker: MockerFixture) -> None: diff --git a/tests/ahriman/core/repository/test_update_handler.py b/tests/ahriman/core/repository/test_update_handler.py index e82b5806..ce0b62d5 100644 --- a/tests/ahriman/core/repository/test_update_handler.py +++ b/tests/ahriman/core/repository/test_update_handler.py @@ -31,8 +31,7 @@ def test_updates_aur(update_handler: UpdateHandler, package_ahriman: Package, event_mock.assert_called_once_with(package_ahriman.base, EventType.PackageOutdated, pytest.helpers.anyvar(str, True)) package_is_outdated_mock.assert_called_once_with( - package_ahriman, update_handler.paths, - vcs_allowed_age=update_handler.vcs_allowed_age, + package_ahriman, update_handler.configuration, calculate_version=True) @@ -119,8 +118,7 @@ def test_updates_aur_ignore_vcs(update_handler: UpdateHandler, package_ahriman: assert not update_handler.updates_aur([], vcs=False) package_is_outdated_mock.assert_called_once_with( - package_ahriman, update_handler.paths, - vcs_allowed_age=update_handler.vcs_allowed_age, + package_ahriman, update_handler.configuration, calculate_version=False) @@ -227,8 +225,7 @@ def test_updates_local(update_handler: UpdateHandler, package_ahriman: Package, event_mock.assert_called_once_with(package_ahriman.base, EventType.PackageOutdated, pytest.helpers.anyvar(str, True)) package_is_outdated_mock.assert_called_once_with( - package_ahriman, update_handler.paths, - vcs_allowed_age=update_handler.vcs_allowed_age, + package_ahriman, update_handler.configuration, calculate_version=True) @@ -245,8 +242,7 @@ def test_updates_local_ignore_vcs(update_handler: UpdateHandler, package_ahriman assert not update_handler.updates_local(vcs=False) package_is_outdated_mock.assert_called_once_with( - package_ahriman, update_handler.paths, - vcs_allowed_age=update_handler.vcs_allowed_age, + package_ahriman, update_handler.configuration, calculate_version=False) diff --git a/tests/ahriman/core/test_utils.py b/tests/ahriman/core/test_utils.py index d825a81d..c57dca55 100644 --- a/tests/ahriman/core/test_utils.py +++ b/tests/ahriman/core/test_utils.py @@ -468,11 +468,11 @@ def test_walk(resource_path_root: Path) -> None: resource_path_root / "models" / "package_ahriman_aur", resource_path_root / "models" / "package_akonadi_aur", resource_path_root / "models" / "package_ahriman_files", - resource_path_root / "models" / "package_ahriman_srcinfo", - resource_path_root / "models" / "package_gcc10_srcinfo", - resource_path_root / "models" / "package_jellyfin-ffmpeg5-bin_srcinfo", - resource_path_root / "models" / "package_tpacpi-bat-git_srcinfo", - resource_path_root / "models" / "package_yay_srcinfo", + resource_path_root / "models" / "package_ahriman_pkgbuild", + resource_path_root / "models" / "package_gcc10_pkgbuild", + resource_path_root / "models" / "package_jellyfin-ffmpeg6-bin_pkgbuild", + resource_path_root / "models" / "package_tpacpi-bat-git_pkgbuild", + resource_path_root / "models" / "package_yay_pkgbuild", resource_path_root / "web" / "templates" / "build-status" / "alerts.jinja2", resource_path_root / "web" / "templates" / "build-status" / "key-import-modal.jinja2", resource_path_root / "web" / "templates" / "build-status" / "login-modal.jinja2", diff --git 
a/tests/ahriman/core/upload/test_http_upload.py b/tests/ahriman/core/upload/test_http_upload.py index 9eae379c..b57b0fa4 100644 --- a/tests/ahriman/core/upload/test_http_upload.py +++ b/tests/ahriman/core/upload/test_http_upload.py @@ -15,8 +15,8 @@ def test_calculate_hash_small(resource_path_root: Path) -> None: """ must calculate checksum for path which is single chunk """ - path = resource_path_root / "models" / "package_ahriman_srcinfo" - assert HttpUpload.calculate_hash(path) == "2635e2898452d594025517cfe529b1f2" + path = resource_path_root / "models" / "package_ahriman_pkgbuild" + assert HttpUpload.calculate_hash(path) == "7136fc388980dc043f9f869d57c5ce0c" def test_get_body_get_hashes() -> None: diff --git a/tests/ahriman/core/upload/test_s3.py b/tests/ahriman/core/upload/test_s3.py index 8ca3ecf2..62ba45e6 100644 --- a/tests/ahriman/core/upload/test_s3.py +++ b/tests/ahriman/core/upload/test_s3.py @@ -49,8 +49,8 @@ def test_calculate_etag_small(resource_path_root: Path) -> None: """ must calculate checksum for path which is single chunk """ - path = resource_path_root / "models" / "package_ahriman_srcinfo" - assert S3.calculate_etag(path, _chunk_size) == "2635e2898452d594025517cfe529b1f2" + path = resource_path_root / "models" / "package_ahriman_pkgbuild" + assert S3.calculate_etag(path, _chunk_size) == "7136fc388980dc043f9f869d57c5ce0c" def test_files_remove(s3_remote_objects: list[Any]) -> None: diff --git a/tests/ahriman/models/test_package.py b/tests/ahriman/models/test_package.py index 31027eaa..f4fca4af 100644 --- a/tests/ahriman/models/test_package.py +++ b/tests/ahriman/models/test_package.py @@ -1,17 +1,16 @@ -import pytest - from pathlib import Path + from pytest_mock import MockerFixture -from srcinfo.parse import parse_srcinfo from unittest.mock import MagicMock from ahriman.core.alpm.pacman import Pacman -from ahriman.core.exceptions import PackageInfoError +from ahriman.core.configuration import Configuration from ahriman.core.utils import utcnow from ahriman.models.aur_package import AURPackage from ahriman.models.package import Package from ahriman.models.package_description import PackageDescription -from ahriman.models.repository_paths import RepositoryPaths +from ahriman.models.pkgbuild import Pkgbuild +from ahriman.models.pkgbuild_patch import PkgbuildPatch def test_depends(package_python_schedule: Package) -> None: @@ -52,9 +51,8 @@ def test_depends_build_with_version_and_overlap(mocker: MockerFixture, resource_ """ must load correct list of dependencies with version """ - - srcinfo = (resource_path_root / "models" / "package_gcc10_srcinfo").read_text() - mocker.patch("ahriman.models.package.check_output", return_value=srcinfo) + pkgbuild = resource_path_root / "models" / "package_gcc10_pkgbuild" + mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild)) package_gcc10 = Package.from_build(Path("local"), "x86_64", None) assert package_gcc10.depends_build == { @@ -179,10 +177,10 @@ def test_from_aur(package_ahriman: Package, aur_package_ahriman: AURPackage, moc def test_from_build(package_ahriman: Package, mocker: MockerFixture, resource_path_root: Path) -> None: """ - must construct package from srcinfo + must construct package from PKGBUILD """ - srcinfo = (resource_path_root / "models" / "package_ahriman_srcinfo").read_text() - mocker.patch("ahriman.models.package.check_output", return_value=srcinfo) + pkgbuild = resource_path_root / "models" / "package_ahriman_pkgbuild" + 
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild)) package = Package.from_build(Path("path"), "x86_64", "packager") assert package_ahriman.packages.keys() == package.packages.keys() @@ -193,15 +191,15 @@ def test_from_build(package_ahriman: Package, mocker: MockerFixture, resource_pa def test_from_build_multiple_packages(mocker: MockerFixture, resource_path_root: Path) -> None: """ - must construct package from srcinfo with dependencies per-package overrides + must construct package from PKGBUILD with dependencies per-package overrides """ - srcinfo = (resource_path_root / "models" / "package_gcc10_srcinfo").read_text() - mocker.patch("ahriman.models.package.check_output", return_value=srcinfo) + pkgbuild = resource_path_root / "models" / "package_gcc10_pkgbuild" + mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild)) package = Package.from_build(Path("path"), "x86_64", None) assert package.packages == { "gcc10": PackageDescription( - depends=["gcc10-libs=10.3.0-2", "binutils>=2.28", "libmpc", "zstd"], + depends=["gcc10-libs=10.5.0-2", "binutils>=2.28", "libmpc", "zstd"], make_depends=["binutils", "doxygen", "git", "libmpc", "python"], opt_depends=[], check_depends=["dejagnu", "inetutils"], @@ -213,7 +211,7 @@ def test_from_build_multiple_packages(mocker: MockerFixture, resource_path_root: check_depends=["dejagnu", "inetutils"], ), "gcc10-fortran": PackageDescription( - depends=["gcc10=10.3.0-2"], + depends=["gcc10=10.5.0-2"], make_depends=["binutils", "doxygen", "git", "libmpc", "python"], opt_depends=[], check_depends=["dejagnu", "inetutils"], @@ -225,12 +223,12 @@ def test_from_build_architecture(mocker: MockerFixture, resource_path_root: Path """ must construct package with architecture specific depends list """ - srcinfo = (resource_path_root / "models" / "package_jellyfin-ffmpeg5-bin_srcinfo").read_text() - mocker.patch("ahriman.models.package.check_output", return_value=srcinfo) + pkgbuild = resource_path_root / "models" / "package_jellyfin-ffmpeg6-bin_pkgbuild" + mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild)) package = Package.from_build(Path("path"), "x86_64", None) assert package.packages == { - "jellyfin-ffmpeg5-bin": PackageDescription( + "jellyfin-ffmpeg6-bin": PackageDescription( depends=["glibc"], make_depends=[], opt_depends=[ @@ -249,17 +247,6 @@ def test_from_build_architecture(mocker: MockerFixture, resource_path_root: Path } -def test_from_build_failed(mocker: MockerFixture) -> None: - """ - must raise exception if there are errors during srcinfo load - """ - mocker.patch("ahriman.models.package.check_output", return_value="") - mocker.patch("ahriman.models.package.parse_srcinfo", return_value=({"packages": {}}, ["an error"])) - - with pytest.raises(PackageInfoError): - Package.from_build(Path("path"), "x86_64", None) - - def test_from_json_view_1(package_ahriman: Package) -> None: """ must construct same object from json @@ -299,11 +286,10 @@ def test_local_files(mocker: MockerFixture, resource_path_root: Path) -> None: """ must extract local file sources """ - srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text() - parsed_srcinfo, _ = parse_srcinfo(srcinfo) - parsed_srcinfo["source"] = ["local-file.tar.gz"] - mocker.patch("ahriman.models.package.parse_srcinfo", return_value=(parsed_srcinfo, [])) - mocker.patch("ahriman.models.package.check_output", return_value=srcinfo) + pkgbuild = 
resource_path_root / "models" / "package_yay_pkgbuild" + parsed_pkgbuild = Pkgbuild.from_file(pkgbuild) + parsed_pkgbuild.fields["source"] = PkgbuildPatch("source", ["local-file.tar.gz"]) + mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=parsed_pkgbuild) mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"]) assert list(Package.local_files(Path("path"))) == [Path("local-file.tar.gz")] @@ -311,35 +297,23 @@ def test_local_files(mocker: MockerFixture, resource_path_root: Path) -> None: def test_local_files_empty(mocker: MockerFixture, resource_path_root: Path) -> None: """ - must extract empty local files list when there is no local files + must extract empty local files list when there are no local files """ - srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text() - mocker.patch("ahriman.models.package.check_output", return_value=srcinfo) + pkgbuild = resource_path_root / "models" / "package_yay_pkgbuild" + mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild)) mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"]) assert not list(Package.local_files(Path("path"))) -def test_local_files_error(mocker: MockerFixture) -> None: - """ - must raise exception on package parsing for local sources - """ - mocker.patch("ahriman.models.package.check_output", return_value="") - mocker.patch("ahriman.models.package.parse_srcinfo", return_value=({"packages": {}}, ["an error"])) - - with pytest.raises(PackageInfoError): - list(Package.local_files(Path("path"))) - - def test_local_files_schema(mocker: MockerFixture, resource_path_root: Path) -> None: """ must skip local file source when file schema is used """ - srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text() - parsed_srcinfo, _ = parse_srcinfo(srcinfo) - parsed_srcinfo["source"] = ["file:///local-file.tar.gz"] - mocker.patch("ahriman.models.package.parse_srcinfo", return_value=(parsed_srcinfo, [])) - mocker.patch("ahriman.models.package.check_output", return_value="") + pkgbuild = resource_path_root / "models" / "package_yay_pkgbuild" + parsed_pkgbuild = Pkgbuild.from_file(pkgbuild) + parsed_pkgbuild.fields["source"] = PkgbuildPatch("source", ["file:///local-file.tar.gz"]) + mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=parsed_pkgbuild) mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"]) assert not list(Package.local_files(Path("path"))) @@ -349,11 +323,10 @@ def test_local_files_with_install(mocker: MockerFixture, resource_path_root: Pat """ must extract local file sources with install file """ - srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text() - parsed_srcinfo, _ = parse_srcinfo(srcinfo) - parsed_srcinfo["install"] = "install" - mocker.patch("ahriman.models.package.parse_srcinfo", return_value=(parsed_srcinfo, [])) - mocker.patch("ahriman.models.package.check_output", return_value="") + pkgbuild = resource_path_root / "models" / "package_yay_pkgbuild" + parsed_pkgbuild = Pkgbuild.from_file(pkgbuild) + parsed_pkgbuild.fields["install"] = PkgbuildPatch("install", "install") + mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=parsed_pkgbuild) mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"]) assert list(Package.local_files(Path("path"))) == [Path("install")] @@ -363,64 +336,49 @@ def 
test_supported_architectures(mocker: MockerFixture, resource_path_root: Path """ must generate list of available architectures """ - srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text() - mocker.patch("ahriman.models.package.check_output", return_value=srcinfo) + pkgbuild = resource_path_root / "models" / "package_yay_pkgbuild" + mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild)) assert Package.supported_architectures(Path("path")) == \ - {"i686", "pentium4", "x86_64", "arm", "armv7h", "armv6h", "aarch64"} + {"i686", "pentium4", "x86_64", "arm", "armv7h", "armv6h", "aarch64", "riscv64"} -def test_supported_architectures_failed(mocker: MockerFixture) -> None: - """ - must raise exception if there are errors during srcinfo load for architectures - """ - mocker.patch("ahriman.models.package.check_output", return_value="") - mocker.patch("ahriman.models.package.parse_srcinfo", return_value=({"packages": {}}, ["an error"])) - - with pytest.raises(PackageInfoError): - Package.supported_architectures(Path("path")) - - -def test_actual_version(package_ahriman: Package, repository_paths: RepositoryPaths) -> None: +def test_actual_version(package_ahriman: Package, configuration: Configuration) -> None: """ must return same actual_version as version is """ - assert package_ahriman.actual_version(repository_paths) == package_ahriman.version + assert package_ahriman.actual_version(configuration) == package_ahriman.version -def test_actual_version_vcs(package_tpacpi_bat_git: Package, repository_paths: RepositoryPaths, +def test_actual_version_vcs(package_tpacpi_bat_git: Package, configuration: Configuration, mocker: MockerFixture, resource_path_root: Path) -> None: """ must return valid actual_version for VCS package """ - srcinfo = (resource_path_root / "models" / "package_tpacpi-bat-git_srcinfo").read_text() - mocker.patch("ahriman.models.package.check_output", return_value=srcinfo) - mocker.patch("ahriman.core.build_tools.sources.Sources.load") + pkgbuild = resource_path_root / "models" / "package_tpacpi-bat-git_pkgbuild" + mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild)) + mocker.patch("pathlib.Path.glob", return_value=[Path("local")]) + init_mock = mocker.patch("ahriman.core.build_tools.task.Task.init") + build_mock = mocker.patch("ahriman.core.build_tools.task.Task.build") + unlink_mock = mocker.patch("pathlib.Path.unlink") - assert package_tpacpi_bat_git.actual_version(repository_paths) == "3.1.r13.g4959b52-1" + assert package_tpacpi_bat_git.actual_version(configuration) == "3.1.r13.g4959b52-1" + init_mock.assert_called_once_with(configuration.repository_paths.cache_for(package_tpacpi_bat_git.base), [], None) + build_mock.assert_called_once_with(configuration.repository_paths.cache_for(package_tpacpi_bat_git.base), + dry_run=True) + unlink_mock.assert_called_once_with() -def test_actual_version_srcinfo_failed(package_tpacpi_bat_git: Package, repository_paths: RepositoryPaths, - mocker: MockerFixture) -> None: +def test_actual_version_failed(package_tpacpi_bat_git: Package, configuration: Configuration, + mocker: MockerFixture) -> None: """ must return same version in case if exception occurred """ - mocker.patch("ahriman.models.package.check_output", side_effect=Exception()) - mocker.patch("ahriman.core.build_tools.sources.Sources.load") + mocker.patch("ahriman.core.build_tools.task.Task.init", side_effect=Exception()) + mocker.patch("pathlib.Path.glob", 
return_value=[Path("local")]) + unlink_mock = mocker.patch("pathlib.Path.unlink") - assert package_tpacpi_bat_git.actual_version(repository_paths) == package_tpacpi_bat_git.version - - -def test_actual_version_vcs_failed(package_tpacpi_bat_git: Package, repository_paths: RepositoryPaths, - mocker: MockerFixture) -> None: - """ - must return same version in case if there are errors during parse - """ - mocker.patch("pathlib.Path.read_text", return_value="") - mocker.patch("ahriman.models.package.parse_srcinfo", return_value=({"packages": {}}, ["an error"])) - mocker.patch("ahriman.models.package.check_output") - mocker.patch("ahriman.core.build_tools.sources.Sources.load") - - assert package_tpacpi_bat_git.actual_version(repository_paths) == package_tpacpi_bat_git.version + assert package_tpacpi_bat_git.actual_version(configuration) == package_tpacpi_bat_git.version + unlink_mock.assert_called_once_with() def test_full_depends(package_ahriman: Package, package_python_schedule: Package, pyalpm_package_ahriman: MagicMock, @@ -461,17 +419,17 @@ def test_is_newer_than(package_ahriman: Package, package_python_schedule: Packag assert not package_python_schedule.is_newer_than(min_date) -def test_is_outdated_false(package_ahriman: Package, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None: +def test_is_outdated_false(package_ahriman: Package, configuration: Configuration, mocker: MockerFixture) -> None: """ must be not outdated for the same package """ actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version", return_value=package_ahriman.version) - assert not package_ahriman.is_outdated(package_ahriman, repository_paths) - actual_version_mock.assert_called_once_with(repository_paths) + assert not package_ahriman.is_outdated(package_ahriman, configuration) + actual_version_mock.assert_called_once_with(configuration) -def test_is_outdated_true(package_ahriman: Package, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None: +def test_is_outdated_true(package_ahriman: Package, configuration: Configuration, mocker: MockerFixture) -> None: """ must be outdated for the new version """ @@ -479,27 +437,28 @@ def test_is_outdated_true(package_ahriman: Package, repository_paths: Repository other.version = other.version.replace("-1", "-2") actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version", return_value=other.version) - assert package_ahriman.is_outdated(other, repository_paths) - actual_version_mock.assert_called_once_with(repository_paths) + assert package_ahriman.is_outdated(other, configuration) + actual_version_mock.assert_called_once_with(configuration) -def test_is_outdated_no_version_calculation(package_ahriman: Package, repository_paths: RepositoryPaths, +def test_is_outdated_no_version_calculation(package_ahriman: Package, configuration: Configuration, mocker: MockerFixture) -> None: """ must not call actual version if calculation is disabled """ actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version") - assert not package_ahriman.is_outdated(package_ahriman, repository_paths, calculate_version=False) + assert not package_ahriman.is_outdated(package_ahriman, configuration, calculate_version=False) actual_version_mock.assert_not_called() -def test_is_outdated_fresh_package(package_ahriman: Package, repository_paths: RepositoryPaths, +def test_is_outdated_fresh_package(package_ahriman: Package, configuration: Configuration, mocker: MockerFixture) -> None: """ must not call actual version if 
package is never than specified time """ + configuration.set_option("build", "vcs_allowed_age", str(int(utcnow().timestamp()))) actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version") - assert not package_ahriman.is_outdated(package_ahriman, repository_paths, vcs_allowed_age=utcnow().timestamp()) + assert not package_ahriman.is_outdated(package_ahriman, configuration) actual_version_mock.assert_not_called() diff --git a/tests/ahriman/models/test_pkgbuild_patch.py b/tests/ahriman/models/test_pkgbuild_patch.py index 70d95ed3..5764ac68 100644 --- a/tests/ahriman/models/test_pkgbuild_patch.py +++ b/tests/ahriman/models/test_pkgbuild_patch.py @@ -132,6 +132,14 @@ def test_serialize_list() -> None: assert PkgbuildPatch("key", ["val'ue", "val\"ue2"]).serialize() == """key=('val'"'"'ue' 'val"ue2')""" +def test_substitute() -> None: + """ + must correctly substitute variables + """ + assert PkgbuildPatch("key", "$env $value").substitute({"env": "variable"}) == "variable $value" + assert PkgbuildPatch("key", ["$env $value"]).substitute({"env": "variable"}) == ["variable $value"] + + def test_write(mocker: MockerFixture) -> None: """ must write serialized value to the file diff --git a/tests/testresources/models/package_ahriman_pkgbuild b/tests/testresources/models/package_ahriman_pkgbuild new file mode 100644 index 00000000..79348362 --- /dev/null +++ b/tests/testresources/models/package_ahriman_pkgbuild @@ -0,0 +1,55 @@ +# Maintainer: Evgeniy Alekseev + +pkgname='ahriman' +pkgver=2.6.0 +pkgrel=1 +pkgdesc="ArcH linux ReposItory MANager" +arch=('any') +url="https://github.com/arcan1s/ahriman" +license=('GPL3') +depends=('devtools' 'git' 'pyalpm' 'python-cerberus' 'python-inflection' 'python-passlib' 'python-requests' 'python-setuptools' 'python-srcinfo') +makedepends=('python-build' 'python-installer' 'python-wheel') +optdepends=('breezy: -bzr packages support' + 'darcs: -darcs packages support' + 'mercurial: -hg packages support' + 'python-aioauth-client: web server with OAuth2 authorization' + 'python-aiohttp: web server' + 'python-aiohttp-debugtoolbar: web server with enabled debug panel' + 'python-aiohttp-jinja2: web server' + 'python-aiohttp-security: web server with authorization' + 'python-aiohttp-session: web server with authorization' + 'python-boto3: sync to s3' + 'python-cryptography: web server with authorization' + 'python-requests-unixsocket: client report to web server by unix socket' + 'python-jinja: html report generation' + 'rsync: sync by using rsync' + 'subversion: -svn packages support') +source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver-src.tar.xz" + 'ahriman.sysusers' + 'ahriman.tmpfiles') +backup=('etc/ahriman.ini' + 'etc/ahriman.ini.d/logging.ini') + +build() { + cd "$pkgname" + + python -m build --wheel --no-isolation +} + +package() { + cd "$pkgname" + + python -m installer --destdir="$pkgdir" "dist/$pkgname-$pkgver-py3-none-any.whl" + + # python-installer actually thinks that you cannot just copy files to root + # thus we need to copy them manually + install -Dm644 "$pkgdir/usr/share/$pkgname/settings/ahriman.ini" "$pkgdir/etc/ahriman.ini" + install -Dm644 "$pkgdir/usr/share/$pkgname/settings/ahriman.ini.d/logging.ini" "$pkgdir/etc/ahriman.ini.d/logging.ini" + + install -Dm644 "$srcdir/$pkgname.sysusers" "$pkgdir/usr/lib/sysusers.d/$pkgname.conf" + install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf" +} + 
+sha512sums=('ec1f64e463455761d72be7f7b8b51b3b4424685c96a2d5eee6afa1c93780c8d7f8a39487a2f2f3bd83d2b58a93279e1392a965a4b905795e58ca686fb21123a1' + '53d37efec812afebf86281716259f9ea78a307b83897166c72777251c3eebcb587ecee375d907514781fb2a5c808cbb24ef9f3f244f12740155d0603bf213131' + '62b2eccc352d33853ef243c9cddd63663014aa97b87242f1b5bc5099a7dbd69ff3821f24ffc58e1b7f2387bd4e9e9712cc4c67f661b1724ad99cdf09b3717794') diff --git a/tests/testresources/models/package_ahriman_srcinfo b/tests/testresources/models/package_ahriman_srcinfo deleted file mode 100644 index 214a5491..00000000 --- a/tests/testresources/models/package_ahriman_srcinfo +++ /dev/null @@ -1,45 +0,0 @@ -pkgbase = ahriman - pkgdesc = ArcH linux ReposItory MANager - pkgver = 2.6.0 - pkgrel = 1 - url = https://github.com/arcan1s/ahriman - arch = any - license = GPL3 - checkdepends = python-pytest - makedepends = python-build - makedepends = python-installer - makedepends = python-wheel - depends = devtools - depends = git - depends = pyalpm - depends = python-cerberus - depends = python-inflection - depends = python-passlib - depends = python-requests - depends = python-setuptools - depends = python-srcinfo - optdepends = breezy: -bzr packages support - optdepends = darcs: -darcs packages support - optdepends = mercurial: -hg packages support - optdepends = python-aioauth-client: web server with OAuth2 authorization - optdepends = python-aiohttp: web server - optdepends = python-aiohttp-debugtoolbar: web server with enabled debug panel - optdepends = python-aiohttp-jinja2: web server - optdepends = python-aiohttp-security: web server with authorization - optdepends = python-aiohttp-session: web server with authorization - optdepends = python-boto3: sync to s3 - optdepends = python-cryptography: web server with authorization - optdepends = python-requests-unixsocket: client report to web server by unix socket - optdepends = python-jinja: html report generation - optdepends = rsync: sync by using rsync - optdepends = subversion: -svn packages support - backup = etc/ahriman.ini - backup = etc/ahriman.ini.d/logging.ini - source = https://github.com/arcan1s/ahriman/releases/download/2.6.0/ahriman-2.6.0-src.tar.xz - source = ahriman.sysusers - source = ahriman.tmpfiles - sha512sums = ec1f64e463455761d72be7f7b8b51b3b4424685c96a2d5eee6afa1c93780c8d7f8a39487a2f2f3bd83d2b58a93279e1392a965a4b905795e58ca686fb21123a1 - sha512sums = 53d37efec812afebf86281716259f9ea78a307b83897166c72777251c3eebcb587ecee375d907514781fb2a5c808cbb24ef9f3f244f12740155d0603bf213131 - sha512sums = 62b2eccc352d33853ef243c9cddd63663014aa97b87242f1b5bc5099a7dbd69ff3821f24ffc58e1b7f2387bd4e9e9712cc4c67f661b1724ad99cdf09b3717794 - -pkgname = ahriman \ No newline at end of file diff --git a/tests/testresources/models/package_gcc10_pkgbuild b/tests/testresources/models/package_gcc10_pkgbuild new file mode 100644 index 00000000..47294a4e --- /dev/null +++ b/tests/testresources/models/package_gcc10_pkgbuild @@ -0,0 +1,270 @@ +# Maintainer: Chris Severance aur.severach aATt spamgourmet dott com +# Contributor: Jonathon Fernyhough +# Contributor: Giancarlo Razzolini +# Contributor: Frederik Schwan +# Contributor: Bartłomiej Piotrowski +# Contributor: Allan McRae +# Contributor: Daniel Kozak + +set -u +pkgbase='gcc10' +pkgname=("${pkgbase}"{,-libs,-fortran}) +pkgver='10.5.0' +_majorver="${pkgver%%.*}" +_islver='0.24' +pkgrel='2' +pkgdesc='The GNU Compiler Collection (10.x.x)' +arch=('x86_64') +url='https://gcc.gnu.org' +license=('GPL-3.0-or-later' 'LGPL-3.0+' 'GFDL-1.3' 
'LicenseRef-custom') +makedepends=('binutils' 'doxygen' 'git' 'libmpc' 'python') +checkdepends=('dejagnu' 'inetutils') +options=('!emptydirs' '!lto' '!buildflags') +source=( + "https://sourceware.org/pub/gcc/releases/gcc-${pkgver}/gcc-${pkgver}.tar.xz"{,.sig} + "https://sourceware.org/pub/gcc/infrastructure/isl-${_islver}.tar.bz2" + 'c89' + 'c99' +) +validpgpkeys=( + 'F3691687D867B81B51CE07D9BBE43771487328A9' # bpiotrowski@archlinux.org + '86CFFCA918CF3AF47147588051E8B148A9999C34' # evangelos@foutrelis.com + '13975A70E63C361C73AE69EF6EEB81F8981C74C7' # richard.guenther@gmail.com + 'D3A93CAD751C2AF4F8C7AD516C35B99309B5FA62' # Jakub Jelinek +) +md5sums=('c7d1958570fbd1cd859b015774b9987a' + 'SKIP' + 'dd2f7b78e118c25bd96134a52aae7f4d' + 'd5fd2672deb5f97a2c4bdab486470abe' + 'd99ba9f4bd860e274f17040ee51cd1bf') +b2sums=('9b71761f4015649514677784443886e59733ac3845f7dfaa4343f46327d36c08c403c444b9e492b870ac0b3f2e3568f972b7700a0ef05a497fb4066079b3143b' + 'SKIP' + '88a178dad5fe9c33be5ec5fe4ac9abc0e075a86cff9184f75cedb7c47de67ce3be273bd0db72286ba0382f4016e9d74855ead798ad7bccb015b853931731828e' + 'a76d19c7830b0a141302890522086fc1548c177611501caac7e66d576e541b64ca3f6e977de715268a9872dfdd6368a011b92e01f7944ec0088f899ac0d2a2a5' + '02b655b5668f7dea51c3b3e4ff46d5a4aee5a04ed5e26b98a6470f39c2e98ddc0519bffeeedd982c31ef3c171457e4d1beaff32767d1aedd9346837aac4ec3ee') + +_CHOST="${CHOST:=}" # https://bbs.archlinux.org/viewtopic.php?pid=2174541 +_MAKEFLAGS="${MAKEFLAGS:=}" + +_libdir="usr/lib/gcc/${CHOST}/${pkgver%%+*}" + +prepare() { + set -u + if [ ! -d 'gcc' ]; then + ln -s "gcc-${pkgver/+/-}" 'gcc' + fi + pushd 'gcc' > /dev/null + + # link isl for in-tree build + ln -s "../isl-${_islver}" 'isl' + + # Do not run fixincludes + sed -e 's@\./fixinc\.sh@-c true@' -i 'gcc/Makefile.in' + + # Arch Linux installs x86_64 libraries /lib + sed -e '/m64=/s/lib64/lib/' -i 'gcc/config/i386/t-linux64' + + # hack! - some configure tests for header files using "$CPP $CPPFLAGS" + sed -e '/ac_cpp=/s/$CPPFLAGS/$CPPFLAGS -O2/' -i 'gcc/configure' + + popd > /dev/null + + rm -rf 'gcc-build' + mkdir 'gcc-build' + + set +u +} + +build() { + set -u + export MAKEFLAGS="${_MAKEFLAGS}" + export CHOST="${_CHOST}" + cd 'gcc-build' + + if [ ! -s 'Makefile' ]; then + # The following options are one per line, mostly sorted so they are easy to diff compare to other gcc packages. 
+ local _conf=( + --build="${CHOST}" + --disable-libssp + --disable-libstdcxx-pch + --disable-libunwind-exceptions + --disable-multilib + --disable-werror + --enable-__cxa_atexit + --enable-cet='auto' + --enable-checking='release' + --enable-clocale='gnu' + --enable-default-pie + --enable-default-ssp + --enable-gnu-indirect-function + --enable-gnu-unique-object + --enable-languages='c,c++,fortran,lto' + --enable-linker-build-id + --enable-lto + --enable-plugin + --enable-shared + --enable-threads='posix' + --enable-version-specific-runtime-libs + --infodir='/usr/share/info' + --libdir='/usr/lib' + --libexecdir='/usr/lib' + --mandir='/usr/share/man' + --program-suffix="-${_majorver}" + --with-bugurl='https://bugs.archlinux.org/' + --with-isl + --with-linker-hash-style='gnu' + --with-pkgversion="Arch Linux ${pkgver}-${pkgrel}" + --with-system-zlib + --prefix='/usr' + ) + ../gcc/configure "${_conf[@]}" + fi + LD_PRELOAD='/usr/lib/libstdc++.so' \ + nice make -s + + set +u; msg 'Compile complete'; set -u + + # make documentation + make -s -j1 -C "${CHOST}/libstdc++-v3/doc" 'doc-man-doxygen' + set +u +} + +check() { + set -u + cd 'gcc-build' + + # disable libphobos test to avoid segfaults and other unfunny ways to waste my time + sed -e '/maybe-check-target-libphobos \\/d' -i 'Makefile' + + # do not abort on error as some are "expected" + make -O -k check || : + "${srcdir}/gcc/contrib/test_summary" + set +u +} + +package_gcc10-libs() { + set -u + export MAKEFLAGS="${_MAKEFLAGS}" + export CHOST="${_CHOST}" + pkgdesc='Runtime libraries shipped by GCC (10.x.x)' + depends=('glibc>=2.27') + options=('!emptydirs' '!strip') + provides=('libgfortran.so' 'libubsan.so' 'libasan.so' 'libtsan.so' 'liblsan.so') + + cd 'gcc-build' + LD_PRELOAD='/usr/lib/libstdc++.so' \ + make -C "${CHOST}/libgcc" DESTDIR="${pkgdir}" install-shared + mv "${pkgdir}/${_libdir}"/../lib/* "${pkgdir}/${_libdir}" + rmdir "${pkgdir}/${_libdir}/../lib" + rm -f "${pkgdir}/${_libdir}/libgcc_eh.a" + + local _lib + for _lib in libatomic \ + libgfortran \ + libgomp \ + libitm \ + libquadmath \ + libsanitizer/{a,l,ub,t}san \ + libstdc++-v3/src \ + libvtv; do + make -C "${CHOST}/${_lib}" DESTDIR="${pkgdir}" install-toolexeclibLTLIBRARIES + done + + make -C "${CHOST}/libstdc++-v3/po" DESTDIR="${pkgdir}" install + + # Install Runtime Library Exception + install -Dm644 "${srcdir}/gcc/COPYING.RUNTIME" \ + "${pkgdir}/usr/share/licenses/${pkgname}/RUNTIME.LIBRARY.EXCEPTION" + + # remove conflicting files + rm -rf "${pkgdir}/usr/share/locale" + set +u +} + +package_gcc10() { + set -u + export MAKEFLAGS="${_MAKEFLAGS}" + export CHOST="${_CHOST}" + pkgdesc='The GNU Compiler Collection - C and C++ frontends (10.x.x)' + depends=("${pkgbase}-libs=${pkgver}-${pkgrel}" 'binutils>=2.28' 'libmpc' 'zstd') + options=('!emptydirs' 'staticlibs') + + cd 'gcc-build' + + make -C 'gcc' DESTDIR="${pkgdir}" install-driver install-cpp install-gcc-ar \ + c++.install-common install-headers install-plugin install-lto-wrapper + + install -m755 -t "${pkgdir}/${_libdir}/" gcc/{cc1,cc1plus,collect2,lto1,gcov{,-tool}} + + make -C "${CHOST}/libgcc" DESTDIR="${pkgdir}" install + rm -rf "${pkgdir}/${_libdir}/../lib" + + make -C "${CHOST}/libstdc++-v3/src" DESTDIR="${pkgdir}" install + make -C "${CHOST}/libstdc++-v3/include" DESTDIR="${pkgdir}" install + make -C "${CHOST}/libstdc++-v3/libsupc++" DESTDIR="${pkgdir}" install + make -C "${CHOST}/libstdc++-v3/python" DESTDIR="${pkgdir}" install + rm -f "${pkgdir}/${_libdir}"/libstdc++.so* + + make DESTDIR="${pkgdir}" 
install-fixincludes + make -C 'gcc' DESTDIR="${pkgdir}" install-mkheaders + + make -C 'lto-plugin' DESTDIR="${pkgdir}" install + install -dm755 "${pkgdir}/${_libdir}/bfd-plugins/" + ln -s "/${_libdir}/liblto_plugin.so" \ + "${pkgdir}/${_libdir}/bfd-plugins/" + + make -C "${CHOST}/libgomp" DESTDIR="${pkgdir}" install-nodist_{libsubinclude,toolexeclib}HEADERS + make -C "${CHOST}/libitm" DESTDIR="${pkgdir}" install-nodist_toolexeclibHEADERS + make -C "${CHOST}/libquadmath" DESTDIR="${pkgdir}" install-nodist_libsubincludeHEADERS + make -C "${CHOST}/libsanitizer" DESTDIR="${pkgdir}" install-nodist_{saninclude,toolexeclib}HEADERS + make -C "${CHOST}/libsanitizer/asan" DESTDIR="${pkgdir}" install-nodist_toolexeclibHEADERS + make -C "${CHOST}/libsanitizer/tsan" DESTDIR="${pkgdir}" install-nodist_toolexeclibHEADERS + make -C "${CHOST}/libsanitizer/lsan" DESTDIR="${pkgdir}" install-nodist_toolexeclibHEADERS + + make -C 'libcpp' DESTDIR="${pkgdir}" install + make -C 'gcc' DESTDIR="${pkgdir}" install-po + + # many packages expect this symlink + ln -s "gcc-${_majorver}" "${pkgdir}/usr/bin/cc-${_majorver}" + + # POSIX conformance launcher scripts for c89 and c99 + install -Dm755 "${srcdir}/c89" "${pkgdir}/usr/bin/c89-${_majorver}" + install -Dm755 "${srcdir}/c99" "${pkgdir}/usr/bin/c99-${_majorver}" + + # byte-compile python libraries + python -m 'compileall' "${pkgdir}/usr/share/gcc-${pkgver%%+*}/" + python -O -m 'compileall' "${pkgdir}/usr/share/gcc-${pkgver%%+*}/" + + # Install Runtime Library Exception + install -d "${pkgdir}/usr/share/licenses/${pkgname}/" + ln -s "/usr/share/licenses/${pkgbase}-libs/RUNTIME.LIBRARY.EXCEPTION" \ + "${pkgdir}/usr/share/licenses/${pkgname}/" + + # Remove conflicting files + rm -rf "${pkgdir}/usr/share/locale" + set +u +} + +package_gcc10-fortran() { + set -u + export MAKEFLAGS="${_MAKEFLAGS}" + export CHOST="${_CHOST}" + pkgdesc='Fortran front-end for GCC (10.x.x)' + depends=("${pkgbase}=${pkgver}-${pkgrel}") + + cd 'gcc-build' + make -C "${CHOST}/libgfortran" DESTDIR="${pkgdir}" install-cafexeclibLTLIBRARIES \ + install-{toolexeclibDATA,nodist_fincludeHEADERS,gfor_cHEADERS} + make -C "${CHOST}/libgomp" DESTDIR="${pkgdir}" install-nodist_fincludeHEADERS + make -C 'gcc' DESTDIR="${pkgdir}" fortran.install-common + install -Dm755 'gcc/f951' "${pkgdir}/${_libdir}/f951" + + ln -s "gfortran-${_majorver}" "${pkgdir}/usr/bin/f95-${_majorver}" + + # Install Runtime Library Exception + install -d "${pkgdir}/usr/share/licenses/${pkgname}/" + ln -s "/usr/share/licenses/${pkgbase}-libs/RUNTIME.LIBRARY.EXCEPTION" \ + "${pkgdir}/usr/share/licenses/${pkgname}/" + set +u +} +set +u \ No newline at end of file diff --git a/tests/testresources/models/package_gcc10_srcinfo b/tests/testresources/models/package_gcc10_srcinfo deleted file mode 100644 index a3a4ce65..00000000 --- a/tests/testresources/models/package_gcc10_srcinfo +++ /dev/null @@ -1,57 +0,0 @@ -pkgbase = gcc10 - pkgdesc = The GNU Compiler Collection (10.x.x) - pkgver = 10.3.0 - pkgrel = 2 - url = https://gcc.gnu.org - arch = x86_64 - license = GPL - license = LGPL - license = FDL - license = custom - checkdepends = dejagnu - checkdepends = inetutils - makedepends = binutils - makedepends = doxygen - makedepends = git - makedepends = libmpc - makedepends = python - options = !emptydirs - options = !lto - source = https://sourceware.org/pub/gcc/releases/gcc-10.3.0/gcc-10.3.0.tar.xz - source = https://sourceware.org/pub/gcc/releases/gcc-10.3.0/gcc-10.3.0.tar.xz.sig - source = 
https://mirror.sobukus.de/files/src/isl/isl-0.24.tar.xz - source = c89 - source = c99 - validpgpkeys = F3691687D867B81B51CE07D9BBE43771487328A9 - validpgpkeys = 86CFFCA918CF3AF47147588051E8B148A9999C34 - validpgpkeys = 13975A70E63C361C73AE69EF6EEB81F8981C74C7 - validpgpkeys = D3A93CAD751C2AF4F8C7AD516C35B99309B5FA62 - b2sums = ac7898f5eb8a7c5f151a526d1bb38913a68b50a65e4d010ac09fa20b6c801c671c790d780f23ccb8e4ecdfc686f4aa588082ccc9eb5c80c7b0e30788f824c1eb - b2sums = SKIP - b2sums = 39cbfd18ad05778e3a5a44429261b45e4abc3efe7730ee890674d968890fe5e52c73bc1f8d271c7c3bc72d5754e3f7fcb209bd139e823d19cb9ea4ce1440164d - b2sums = a76d19c7830b0a141302890522086fc1548c177611501caac7e66d576e541b64ca3f6e977de715268a9872dfdd6368a011b92e01f7944ec0088f899ac0d2a2a5 - b2sums = 02b655b5668f7dea51c3b3e4ff46d5a4aee5a04ed5e26b98a6470f39c2e98ddc0519bffeeedd982c31ef3c171457e4d1beaff32767d1aedd9346837aac4ec3ee - -pkgname = gcc10 - pkgdesc = The GNU Compiler Collection - C and C++ frontends (10.x.x) - depends = gcc10-libs=10.3.0-2 - depends = binutils>=2.28 - depends = libmpc - depends = zstd - options = !emptydirs - options = staticlibs - -pkgname = gcc10-libs - pkgdesc = Runtime libraries shipped by GCC (10.x.x) - depends = glibc>=2.27 - provides = libgfortran.so - provides = libubsan.so - provides = libasan.so - provides = libtsan.so - provides = liblsan.so - options = !emptydirs - options = !strip - -pkgname = gcc10-fortran - pkgdesc = Fortran front-end for GCC (10.x.x) - depends = gcc10=10.3.0-2 diff --git a/tests/testresources/models/package_jellyfin-ffmpeg5-bin_srcinfo b/tests/testresources/models/package_jellyfin-ffmpeg5-bin_srcinfo deleted file mode 100644 index 6e8ec5b0..00000000 --- a/tests/testresources/models/package_jellyfin-ffmpeg5-bin_srcinfo +++ /dev/null @@ -1,28 +0,0 @@ -pkgbase = jellyfin-ffmpeg5-bin - pkgdesc = FFmpeg5 binary version for Jellyfin - pkgver = 5.1.2 - pkgrel = 7 - url = https://github.com/jellyfin/jellyfin-ffmpeg - arch = x86_64 - arch = aarch64 - license = GPL3 - optdepends = intel-media-driver: for Intel VAAPI support (Broadwell and newer) - optdepends = intel-media-sdk: for Intel Quick Sync Video - optdepends = onevpl-intel-gpu: for Intel Quick Sync Video (12th Gen and newer) - optdepends = intel-compute-runtime: for Intel OpenCL runtime based Tonemapping - optdepends = libva-intel-driver: for Intel legacy VAAPI support (10th Gen and older) - optdepends = libva-mesa-driver: for AMD VAAPI support - optdepends = nvidia-utils: for Nvidia NVDEC/NVENC support - optdepends = opencl-amd: for AMD OpenCL runtime based Tonemapping - optdepends = vulkan-radeon: for AMD RADV Vulkan support - optdepends = vulkan-intel: for Intel ANV Vulkan support - conflicts = jellyfin-ffmpeg - conflicts = jellyfin-ffmpeg5 - source_x86_64 = https://repo.jellyfin.org/releases/ffmpeg/5.1.2-7/jellyfin-ffmpeg_5.1.2-7_portable_linux64-gpl.tar.xz - depends_x86_64 = glibc>=2.23 - sha256sums_x86_64 = 78420fd1edbaf24a07e92938878d8582d895e009cae02c8e9d5be3f26de905e3 - source_aarch64 = https://repo.jellyfin.org/releases/ffmpeg/5.1.2-7/jellyfin-ffmpeg_5.1.2-7_portable_linuxarm64-gpl.tar.xz - depends_aarch64 = glibc>=2.27 - sha256sums_aarch64 = 8ac4066981f203c2b442754eaf7286b4e481df9692d0ff8910a824d89c831df0 - -pkgname = jellyfin-ffmpeg5-bin \ No newline at end of file diff --git a/tests/testresources/models/package_jellyfin-ffmpeg6-bin_pkgbuild b/tests/testresources/models/package_jellyfin-ffmpeg6-bin_pkgbuild new file mode 100644 index 00000000..e1da7ab5 --- /dev/null +++ 
b/tests/testresources/models/package_jellyfin-ffmpeg6-bin_pkgbuild @@ -0,0 +1,31 @@ +# Maintainer : nyanmisaka + +pkgname=jellyfin-ffmpeg6-bin +pkgver=6.0 +pkgrel=6 +pkgdesc='FFmpeg6 binary version for Jellyfin' +arch=('x86_64' 'aarch64') +url='https://github.com/jellyfin/jellyfin-ffmpeg' +license=('GPL3') +depends_x86_64=('glibc>=2.23') +depends_aarch64=('glibc>=2.27') +optdepends=('intel-media-driver: for Intel VAAPI support (Broadwell and newer)' + 'intel-media-sdk: for Intel Quick Sync Video' + 'onevpl-intel-gpu: for Intel Quick Sync Video (12th Gen and newer)' + 'intel-compute-runtime: for Intel OpenCL runtime based Tonemapping' + 'libva-intel-driver: for Intel legacy VAAPI support (10th Gen and older)' + 'libva-mesa-driver: for AMD VAAPI support' + 'nvidia-utils: for Nvidia NVDEC/NVENC support' + 'opencl-amd: for AMD OpenCL runtime based Tonemapping' + 'vulkan-radeon: for AMD RADV Vulkan support' + 'vulkan-intel: for Intel ANV Vulkan support') +conflicts=('jellyfin-ffmpeg' 'jellyfin-ffmpeg5' 'jellyfin-ffmpeg5-bin' 'jellyfin-ffmpeg6') +source_x86_64=("https://repo.jellyfin.org/releases/ffmpeg/${pkgver}-${pkgrel}/jellyfin-ffmpeg_${pkgver}-${pkgrel}_portable_linux64-gpl.tar.xz") +source_aarch64=("https://repo.jellyfin.org/releases/ffmpeg/${pkgver}-${pkgrel}/jellyfin-ffmpeg_${pkgver}-${pkgrel}_portable_linuxarm64-gpl.tar.xz") +sha256sums_x86_64=('32cbe40942d26072faa1182835ccc89029883766de11778c731b529aa632ff37') +sha256sums_aarch64=('22b8f2a3c92c6b1c9e6830a6631f08f3f0a7ae80739ace71ad30704a28045184') + +package() { + install -Dm 755 ffmpeg ${pkgdir}/usr/lib/jellyfin-ffmpeg/ffmpeg + install -Dm 755 ffprobe ${pkgdir}/usr/lib/jellyfin-ffmpeg/ffprobe +} diff --git a/tests/testresources/models/package_tpacpi-bat-git_pkgbuild b/tests/testresources/models/package_tpacpi-bat-git_pkgbuild new file mode 100644 index 00000000..95b9926c --- /dev/null +++ b/tests/testresources/models/package_tpacpi-bat-git_pkgbuild @@ -0,0 +1,30 @@ +# Maintainer: Frederik Schwan +# Contributor: Lucky + +pkgname=tpacpi-bat-git +pkgver=3.1.r13.g4959b52 +pkgrel=1 +pkgdesc='A Perl script with ACPI calls for recent ThinkPads which are not supported by tp_smapi' +arch=('any') +url='https://github.com/teleshoes/tpacpi-bat' +license=('GPL3') +depends=('perl' 'acpi_call') +makedepends=('git') +provides=('tpacpi-bat') +conflicts=('tpacpi-bat') +backup=('etc/conf.d/tpacpi') +source=('git+https://github.com/teleshoes/tpacpi-bat.git') +b2sums=('SKIP') + +pkgver() { + cd ${pkgname/-git/} + echo $(git describe --tags | sed 's/^v//;s/\([^-]*-g\)/r\1/;s/-/./g') +} + +package() { + cd ${pkgname/-git/} + + install -Dm755 tpacpi-bat "${pkgdir}"/usr/bin/tpacpi-bat + install -Dm644 examples/systemd_dynamic_threshold/tpacpi.service "${pkgdir}"/usr/lib/systemd/system/tpacpi-bat.service + install -Dm644 examples/systemd_dynamic_threshold/tpacpi.conf.d "${pkgdir}"/etc/conf.d/tpacpi +} diff --git a/tests/testresources/models/package_tpacpi-bat-git_srcinfo b/tests/testresources/models/package_tpacpi-bat-git_srcinfo deleted file mode 100644 index 58fddf92..00000000 --- a/tests/testresources/models/package_tpacpi-bat-git_srcinfo +++ /dev/null @@ -1,17 +0,0 @@ -pkgbase = tpacpi-bat-git - pkgdesc = A Perl script with ACPI calls for recent ThinkPads which are not supported by tp_smapi - pkgver = 3.1.r13.g4959b52 - pkgrel = 1 - url = https://github.com/teleshoes/tpacpi-bat - arch = any - license = GPL3 - makedepends = git - depends = perl - depends = acpi_call - provides = tpacpi-bat - conflicts = tpacpi-bat - backup = etc/conf.d/tpacpi - source = 
git+https://github.com/teleshoes/tpacpi-bat.git - b2sums = SKIP - -pkgname = tpacpi-bat-git diff --git a/tests/testresources/models/package_yay_pkgbuild b/tests/testresources/models/package_yay_pkgbuild new file mode 100644 index 00000000..e5a97d90 --- /dev/null +++ b/tests/testresources/models/package_yay_pkgbuild @@ -0,0 +1,37 @@ +# Maintainer: Jguer +pkgname=yay +pkgver=12.3.5 +pkgrel=1 +pkgdesc="Yet another yogurt. Pacman wrapper and AUR helper written in go." +arch=('i686' 'pentium4' 'x86_64' 'arm' 'armv7h' 'armv6h' 'aarch64' 'riscv64') +url="https://github.com/Jguer/yay" +options=(!lto) +license=('GPL-3.0-or-later') +depends=( + 'pacman>6.1' + 'git' +) +optdepends=( + 'sudo: privilege elevation' + 'doas: privilege elevation' +) +makedepends=('go>=1.21') +source=("${pkgname}-${pkgver}.tar.gz::https://github.com/Jguer/yay/archive/v${pkgver}.tar.gz") +sha256sums=('2fb6121a6eb4c5e6afaf22212b2ed15022500a4bc34bb3dc0f9782c1d43c3962') + +build() { + export GOPATH="$srcdir"/gopath + export CGO_CPPFLAGS="${CPPFLAGS}" + export CGO_CFLAGS="${CFLAGS}" + export CGO_CXXFLAGS="${CXXFLAGS}" + export CGO_LDFLAGS="${LDFLAGS}" + export CGO_ENABLED=1 + + cd "$srcdir/$pkgname-$pkgver" + make VERSION=$pkgver DESTDIR="$pkgdir" PREFIX="/usr" build +} + +package() { + cd "$srcdir/$pkgname-$pkgver" + make VERSION=$pkgver DESTDIR="$pkgdir" PREFIX="/usr" install +} diff --git a/tests/testresources/models/package_yay_srcinfo b/tests/testresources/models/package_yay_srcinfo deleted file mode 100644 index 9d87b70b..00000000 --- a/tests/testresources/models/package_yay_srcinfo +++ /dev/null @@ -1,21 +0,0 @@ -pkgbase = yay - pkgdesc = Yet another yogurt. Pacman wrapper and AUR helper written in go. - pkgver = 10.2.0 - pkgrel = 1 - url = https://github.com/Jguer/yay - arch = i686 - arch = pentium4 - arch = x86_64 - arch = arm - arch = armv7h - arch = armv6h - arch = aarch64 - license = GPL3 - makedepends = go - depends = pacman>5 - depends = git - optdepends = sudo - source = yay-10.2.0.tar.gz::https://github.com/Jguer/yay/archive/v10.2.0.tar.gz - sha256sums = 755d049ec09cc20bdcbb004b12ab4e35ba3bb94a7dce9dfa544d24f87deda8aa - -pkgname = yay From 88f81d5d58068b5314993d8702ca3aa819ee297c Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Fri, 13 Sep 2024 23:42:44 +0300 Subject: [PATCH 09/10] add support of array expansion --- src/ahriman/core/alpm/pkgbuild_parser.py | 253 ++++++++++++++++++ src/ahriman/models/package.py | 8 +- src/ahriman/models/pkgbuild.py | 207 ++------------ .../handlers/test_handler_versions.py | 4 +- .../ahriman/core/alpm/test_pkgbuild_parser.py | 0 tests/ahriman/models/test_pkgbuild.py | 0 6 files changed, 281 insertions(+), 191 deletions(-) create mode 100644 src/ahriman/core/alpm/pkgbuild_parser.py create mode 100644 tests/ahriman/core/alpm/test_pkgbuild_parser.py create mode 100644 tests/ahriman/models/test_pkgbuild.py diff --git a/src/ahriman/core/alpm/pkgbuild_parser.py b/src/ahriman/core/alpm/pkgbuild_parser.py new file mode 100644 index 00000000..8e357f70 --- /dev/null +++ b/src/ahriman/core/alpm/pkgbuild_parser.py @@ -0,0 +1,253 @@ +# +# Copyright (c) 2021-2024 ahriman team. +# +# This file is part of ahriman +# (see https://github.com/arcan1s/ahriman). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +import itertools +import re +import shlex + +from collections.abc import Generator +from enum import StrEnum +from typing import IO + +from ahriman.models.pkgbuild_patch import PkgbuildPatch + + +class PkgbuildToken(StrEnum): + """ + well-known tokens dictionary + + Attributes: + ArrayEnds(PkgbuildToken): (class attribute) array ends token + ArrayStarts(PkgbuildToken): (class attribute) array starts token + Comma(PkgbuildToken): (class attribute) comma token + Comment(PkgbuildToken): (class attribute) comment token + FunctionDeclaration(PkgbuildToken): (class attribute) function declaration token + FunctionEnds(PkgbuildToken): (class attribute) function ends token + FunctionStarts(PkgbuildToken): (class attribute) function starts token + """ + + ArrayStarts = "(" + ArrayEnds = ")" + + Comma = "," + + Comment = "#" + + FunctionDeclaration = "()" + FunctionStarts = "{" + FunctionEnds = "}" + + +class PkgbuildParser(shlex.shlex): + """ + simple pkgbuild reader implementation in pure python, because others suck + """ + + _ARRAY_ASSIGNMENT = re.compile(r"^(?P\w+)=$") + # in addition to usual assignment, functions can have dash + _FUNCTION_DECLARATION = re.compile(r"^(?P[\w-]+)$") + _STRING_ASSIGNMENT = re.compile(r"^(?P\w+)=(?P.+)$") + + def __init__(self, stream: IO[str]) -> None: + """ + default constructor + + Args: + stream(IO[str]): input stream containing PKGBUILD content + """ + shlex.shlex.__init__(self, stream, posix=True, punctuation_chars=True) + self._io = stream # direct access without type casting + + # ignore substitution and extend bash symbols + self.wordchars += "${}#:+-@" + # in case of default behaviour, it will ignore, for example, segment part of url outside of quotes + self.commenters = "" + + @staticmethod + def _expand_array(array: list[str]) -> list[str]: + """ + bash array expansion simulator. 
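No shell is involved; the expansion is reproduced in pure Python over the tokens emitted by the lexer.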
It takes raw parsed array and tries to expand constructions like + ``(first prefix-{mid1,mid2}-suffix last)`` into ``(first, prefix-mid1-suffix prefix-mid2-suffix last)`` + + Args: + array(list[str]): input array + + Returns: + list[str]: either source array or expanded array if possible + + Raises: + ValueError: if there are errors in parser + """ + # we are using comma as marker for expansion (if any) + if PkgbuildToken.Comma not in array: + return array + # again sanity check, for expansion there are at least 3 elements (first, last and comma) + if len(array) < 3: + return array + + result = [] + buffer, prefix = [], None + + for index, (first, second) in enumerate(itertools.pairwise(array)): + match (first, second): + # in this case we check if expansion should be started + # this condition matches "prefix{first", "," + case (_, PkgbuildToken.Comma) if PkgbuildToken.FunctionStarts in first: + prefix, part = first.rsplit(PkgbuildToken.FunctionStarts, maxsplit=1) + buffer.append(f"{prefix}{part}") + + # the last element case, it matches either ",", "last}" or ",", "last}suffix" + # in case if there is suffix, it must be appended to all list elements + case (PkgbuildToken.Comma, _) if prefix is not None and PkgbuildToken.FunctionEnds in second: + part, suffix = second.rsplit(PkgbuildToken.FunctionEnds, maxsplit=1) + buffer.append(f"{prefix}{part}") + result.extend([f"{part}{suffix}" for part in buffer]) + # reset state + buffer, prefix = [], None + + # we have already prefix string, so we are in progress of expansion + # we always operate the last element, so this matches ",", "next" + case (PkgbuildToken.Comma, _) if prefix is not None: + buffer.append(f"{prefix}{second}") + + # exactly first element of the list + case (_, _) if prefix is None and index == 0: + result.append(first) + + # any next normal element + case (_, _) if prefix is None: + result.append(second) + + # small sanity check + if prefix is not None: + raise ValueError(f"Could not expand `{array}` as array") + + return result + + def _parse_array(self) -> list[str]: + """ + parse array from the PKGBUILD. This method will extract tokens from parser until it matches closing array, + modifying source parser state + + Returns: + list[str]: extracted arrays elements + + Raises: + ValueError: if array is not closed + """ + def extract() -> Generator[str, None, None]: + while token := self.get_token(): + if token == PkgbuildToken.ArrayEnds: + break + if token == PkgbuildToken.Comment: + self.instream.readline() + continue + yield token + + if token != PkgbuildToken.ArrayEnds: + raise ValueError("No closing array bracket found") + + return self._expand_array(list(extract())) + + def _parse_function(self) -> str: + """ + parse function from the PKGBUILD. This method will extract tokens from parser until it matches closing function, + modifying source parser state. 
Instead of trying to combine tokens together, it uses positions of the file + and read content again in this range + + Returns: + str: function body + + Raises: + ValueError: if function body wasn't found or parser input stream doesn't support position reading + """ + # find start and end positions + start_position, end_position = -1, -1 + while token := self.get_token(): + match token: + case PkgbuildToken.FunctionStarts: + start_position = self._io.tell() - 1 + case PkgbuildToken.FunctionEnds: + end_position = self._io.tell() + break + + if not 0 < start_position < end_position: + raise ValueError("Function body wasn't found") + + # read the specified interval from source stream + self._io.seek(start_position - 1) # start from the previous symbol + content = self._io.read(end_position - start_position) + + return content + + def _parse_token(self, token: str) -> Generator[PkgbuildPatch, None, None]: + """ + parse single token to the PKGBUILD field + + Args: + token(str): current token + + Yields: + PkgbuildPatch: extracted a PKGBUILD node + """ + # simple assignment rule + if (match := self._STRING_ASSIGNMENT.match(token)) is not None: + key = match.group("key") + value = match.group("value") + yield PkgbuildPatch(key, value) + return + + if token == PkgbuildToken.Comment: + self.instream.readline() + return + + match self.get_token(): + # array processing. Arrays will be sent as "key=", "(", values, ")" + case PkgbuildToken.ArrayStarts if (match := self._ARRAY_ASSIGNMENT.match(token)) is not None: + key = match.group("key") + value = self._parse_array() + yield PkgbuildPatch(key, value) + + # functions processing. Function will be sent as "name", "()", "{", body, "}" + case PkgbuildToken.FunctionDeclaration if self._FUNCTION_DECLARATION.match(token): + key = f"{token}{PkgbuildToken.FunctionDeclaration}" + value = self._parse_function() + yield PkgbuildPatch(key, value) # this is not mistake, assign to token without () + + # special function case, where "(" and ")" are separated tokens, e.g. "pkgver ( )" + case PkgbuildToken.ArrayStarts if self._FUNCTION_DECLARATION.match(token): + next_token = self.get_token() + if next_token == PkgbuildToken.ArrayEnds: # replace closing bracket with "()" + next_token = PkgbuildToken.FunctionDeclaration + self.push_token(next_token) # type: ignore[arg-type] + yield from self._parse_token(token) + + # some random token received without continuation, lets guess it is empty assignment (i.e. 
key=) + case other if other is not None: + yield from self._parse_token(other) + + def parse(self) -> Generator[PkgbuildPatch, None, None]: + """ + parse source stream and yield parsed entries + + Yields: + PkgbuildPatch: extracted a PKGBUILD node + """ + for token in self: + yield from self._parse_token(token) diff --git a/src/ahriman/models/package.py b/src/ahriman/models/package.py index 23e5f93c..ad20f4ab 100644 --- a/src/ahriman/models/package.py +++ b/src/ahriman/models/package.py @@ -266,7 +266,7 @@ class Package(LazyLogging): ) for package, properties in pkgbuild.packages().items() } - version = full_version(pkgbuild.epoch, pkgbuild.pkgver, pkgbuild.pkgrel) + version = full_version(pkgbuild.get("epoch"), pkgbuild["pkgver"], pkgbuild["pkgrel"]) remote = RemoteSource( source=PackageSource.Local, @@ -277,7 +277,7 @@ class Package(LazyLogging): ) return cls( - base=pkgbuild.pkgbase, + base=pkgbuild["pkgbase"], version=version, remote=remote, packages=packages, @@ -372,7 +372,7 @@ class Package(LazyLogging): yield Path(source) - if install := pkgbuild.get("install"): + if (install := pkgbuild.get("install")) is not None: yield Path(install) @staticmethod @@ -435,7 +435,7 @@ class Package(LazyLogging): pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD") - return full_version(pkgbuild.epoch, pkgbuild.pkgver, pkgbuild.pkgrel) + return full_version(pkgbuild.get("epoch"), pkgbuild["pkgver"], pkgbuild["pkgrel"]) except Exception: self.logger.exception("cannot determine version of VCS package") finally: diff --git a/src/ahriman/models/pkgbuild.py b/src/ahriman/models/pkgbuild.py index 211c4799..72cd6e32 100644 --- a/src/ahriman/models/pkgbuild.py +++ b/src/ahriman/models/pkgbuild.py @@ -17,43 +17,20 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
# -import re -import shlex - -from collections.abc import Generator, Iterator, Mapping +from collections.abc import Iterator, Mapping from dataclasses import dataclass -from enum import StrEnum from io import StringIO from pathlib import Path from typing import Any, IO, Self +from ahriman.core.alpm.pkgbuild_parser import PkgbuildParser, PkgbuildToken from ahriman.models.pkgbuild_patch import PkgbuildPatch -class PkgbuildToken(StrEnum): - """ - well-known tokens dictionary - - Attributes: - ArrayEnds(PkgbuildToken): (class attribute) array ends token - ArrayStarts(PkgbuildToken): (class attribute) array starts token - FunctionDeclaration(PkgbuildToken): (class attribute) function declaration token - FunctionEnds(PkgbuildToken): (class attribute) function ends token - FunctionStarts(PkgbuildToken): (class attribute) function starts token - """ - - ArrayStarts = "(" - ArrayEnds = ")" - - FunctionDeclaration = "()" - FunctionStarts = "{" - FunctionEnds = "}" - - @dataclass(frozen=True) -class Pkgbuild(Mapping[str, str | list[str]]): +class Pkgbuild(Mapping[str, Any]): """ - simple pkgbuild reader implementation in pure python, because others sucks + model and proxy for PKGBUILD properties Attributes: fields(dict[str, PkgbuildPatch]): PKGBUILD fields @@ -61,11 +38,6 @@ class Pkgbuild(Mapping[str, str | list[str]]): fields: dict[str, PkgbuildPatch] - _ARRAY_ASSIGNMENT = re.compile(r"^(?P\w+)=$") - _STRING_ASSIGNMENT = re.compile(r"^(?P\w+)=(?P.+)$") - # in addition, functions can have dash to usual assignment - _FUNCTION_DECLARATION = re.compile(r"^(?P[\w-]+)$") - @property def variables(self) -> dict[str, str]: """ @@ -106,141 +78,17 @@ class Pkgbuild(Mapping[str, str | list[str]]): Returns: Self: constructed instance of self """ - fields = {} - - parser = shlex.shlex(stream, posix=True, punctuation_chars=True) - # ignore substitution and extend bash symbols - parser.wordchars += "${}#:+" - # in case of default behaviour, it will ignore, for example, segment part of url outside of quotes - parser.commenters = "" - while token := parser.get_token(): - try: - patch = cls._parse_token(token, parser) - fields[patch.key] = patch - except StopIteration: - break + parser = PkgbuildParser(stream) + fields = {patch.key: patch for patch in parser.parse()} # pkgbase is optional field, the pkgname must be used instead if not set # however, pkgname is not presented is "package()" functions which we are parsing here too, # thus, in our terms, it is optional too - if "pkgbase" not in fields: - fields["pkgbase"] = fields.get("pkgname") + if "pkgbase" not in fields and "pkgname" in fields: + fields["pkgbase"] = fields["pkgname"] return cls({key: value for key, value in fields.items() if key}) - @staticmethod - def _parse_array(parser: shlex.shlex) -> list[str]: - """ - parse array from the PKGBUILD. This method will extract tokens from parser until it matches closing array, - modifying source parser state - - Args: - parser(shlex.shlex): shell parser instance - - Returns: - list[str]: extracted arrays elements - - Raises: - ValueError: if array is not closed - """ - def extract() -> Generator[str, None, None]: - while token := parser.get_token(): - if token == PkgbuildToken.ArrayEnds: - break - yield token - - if token != PkgbuildToken.ArrayEnds: - raise ValueError("No closing array bracket found") - - return list(extract()) - - @staticmethod - def _parse_function(parser: shlex.shlex) -> str: - """ - parse function from the PKGBUILD. 
This method will extract tokens from parser until it matches closing function, - modifying source parser state. Instead of trying to combine tokens together, it uses positions of the file - and read content again in this range - - Args: - parser(shlex.shlex): shell parser instance - - Returns: - str: function body - - Raises: - ValueError: if function body wasn't found or parser input stream doesn't support position reading - """ - io: IO[str] = parser.instream # type: ignore[assignment] - - # find start and end positions - start_position, end_position = -1, -1 - while token := parser.get_token(): - match token: - case PkgbuildToken.FunctionStarts: - start_position = io.tell() - 1 - case PkgbuildToken.FunctionEnds: - end_position = io.tell() - break - - if not 0 < start_position < end_position: - raise ValueError("Function body wasn't found") - - # read the specified interval from source stream - io.seek(start_position - 1) # start from the previous symbol - content = io.read(end_position - start_position) - - return content - - @staticmethod - def _parse_token(token: str, parser: shlex.shlex) -> PkgbuildPatch: - """ - parse single token to the PKGBUILD field - - Args: - token(str): current token - parser(shlex.shlex): shell parser instance - - Returns: - PkgbuildPatch: extracted a PKGBUILD node - - Raises: - StopIteration: if iteration reaches the end of the file - """ - # simple assignment rule - if (match := Pkgbuild._STRING_ASSIGNMENT.match(token)) is not None: - key = match.group("key") - value = match.group("value") - return PkgbuildPatch(key, value) - - match parser.get_token(): - # array processing. Arrays will be sent as "key=", "(", values, ")" - case PkgbuildToken.ArrayStarts if (match := Pkgbuild._ARRAY_ASSIGNMENT.match(token)) is not None: - key = match.group("key") - value = Pkgbuild._parse_array(parser) - return PkgbuildPatch(key, value) - - # functions processing. Function will be sent as "name", "()", "{", body, "}" - case PkgbuildToken.FunctionDeclaration if Pkgbuild._FUNCTION_DECLARATION.match(token): - key = f"{token}{PkgbuildToken.FunctionDeclaration}" - value = Pkgbuild._parse_function(parser) - return PkgbuildPatch(key, value) # this is not mistake, assign to token without () - - # special function case, where "(" and ")" are separated tokens, e.g. "pkgver ( )" - case PkgbuildToken.ArrayStarts if Pkgbuild._FUNCTION_DECLARATION.match(token): - next_token = parser.get_token() - if next_token == PkgbuildToken.ArrayEnds: # replace closing bracket with "()" - next_token = PkgbuildToken.FunctionDeclaration - parser.push_token(next_token) # type: ignore[arg-type] - return Pkgbuild._parse_token(token, parser) - - # some random token received without continuation, lets guess it is empty assignment (i.e. 
key=) - case other if other is not None: - return Pkgbuild._parse_token(other, parser) - - # reached the end of the parser - case None: - raise StopIteration - def packages(self) -> dict[str, Self]: """ extract properties from internal package functions @@ -252,44 +100,33 @@ class Pkgbuild(Mapping[str, str | list[str]]): def io(package_name: str) -> IO[str]: # try to read package specific function and fallback to default otherwise - # content = self.get_as(f"package_{package_name}") or self.get_as("package") - content = getattr(self, f"package_{package_name}") or self.package + content = self.get(f"package_{package_name}") or self["package"] return StringIO(content) return {package: self.from_io(io(package)) for package in packages} - def __getattr__(self, item: str) -> Any: - """ - proxy method for PKGBUILD properties - - Args: - item(str): property name - - Returns: - Any: attribute by its name - """ - return self[item] - - def __getitem__(self, key: str) -> str | list[str]: + def __getitem__(self, item: str) -> Any: """ get the field of the PKGBUILD. This method tries to get exact key value if possible; if none found, it tries to - fetch function with the same name. And, finally, it returns empty value if nothing found, so this function never - raises an ``KeyError``.exception`` + fetch function with the same name Args: - key(str): key name + item(str): key name Returns: - str | list[str]: value by the key + Any: substituted value by the key + + Raises: + KeyError: if key doesn't exist """ - value = self.fields.get(key) + value = self.fields.get(item) # if the key wasn't found and user didn't ask for function explicitly, we can try to get by function name - if value is None and not key.endswith(PkgbuildToken.FunctionDeclaration): - value = self.fields.get(f"{key}{PkgbuildToken.FunctionDeclaration}") - # if we still didn't find anything, we fall back to empty value (just like shell) - # to avoid recursion here, we can just drop from the method + if value is None and not item.endswith(PkgbuildToken.FunctionDeclaration): + value = self.fields.get(f"{item}{PkgbuildToken.FunctionDeclaration}") + + # if we still didn't find anything, we can just raise the exception if value is None: - return "" + raise KeyError(item) return value.substitute(self.variables) diff --git a/tests/ahriman/application/handlers/test_handler_versions.py b/tests/ahriman/application/handlers/test_handler_versions.py index 73602a42..be64b9c2 100644 --- a/tests/ahriman/application/handlers/test_handler_versions.py +++ b/tests/ahriman/application/handlers/test_handler_versions.py @@ -28,9 +28,9 @@ def test_package_dependencies() -> None: """ must extract package dependencies """ - packages = dict(Versions.package_dependencies("srcinfo")) + packages = dict(Versions.package_dependencies("requests")) assert packages - assert packages.get("parse") is not None + assert packages.get("urllib3") is not None def test_package_dependencies_missing() -> None: diff --git a/tests/ahriman/core/alpm/test_pkgbuild_parser.py b/tests/ahriman/core/alpm/test_pkgbuild_parser.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/ahriman/models/test_pkgbuild.py b/tests/ahriman/models/test_pkgbuild.py new file mode 100644 index 00000000..e69de29b From 287ac30f58dbc360136b77ed0007454c5f255a93 Mon Sep 17 00:00:00 2001 From: Evgenii Alekseev Date: Sat, 14 Sep 2024 01:34:10 +0300 Subject: [PATCH 10/10] docs update --- docs/ahriman.core.alpm.rst | 8 ++++++++ docs/ahriman.models.rst | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git 
a/docs/ahriman.core.alpm.rst b/docs/ahriman.core.alpm.rst index 1b218d86..ec47db9a 100644 --- a/docs/ahriman.core.alpm.rst +++ b/docs/ahriman.core.alpm.rst @@ -28,6 +28,14 @@ ahriman.core.alpm.pacman\_database module :no-undoc-members: :show-inheritance: +ahriman.core.alpm.pkgbuild\_parser module +----------------------------------------- + +.. automodule:: ahriman.core.alpm.pkgbuild_parser + :members: + :no-undoc-members: + :show-inheritance: + ahriman.core.alpm.repo module ----------------------------- diff --git a/docs/ahriman.models.rst b/docs/ahriman.models.rst index a1af1af1..e671b36f 100644 --- a/docs/ahriman.models.rst +++ b/docs/ahriman.models.rst @@ -172,6 +172,14 @@ ahriman.models.pacman\_synchronization module :no-undoc-members: :show-inheritance: +ahriman.models.pkgbuild module +------------------------------ + +.. automodule:: ahriman.models.pkgbuild + :members: + :no-undoc-members: + :show-inheritance: + ahriman.models.pkgbuild\_patch module -------------------------------------
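
A quick usage sketch of the API introduced by this series (reviewer note, not part of any patch; the PKGBUILD fragment and the expected outputs are illustrative assumptions, not taken from the changes themselves). It exercises PkgbuildParser indirectly through the Pkgbuild model, the same way package.py consumes it:

    from io import StringIO

    from ahriman.models.pkgbuild import Pkgbuild

    # hypothetical PKGBUILD fragment; keys and values are made up for the example
    stream = StringIO("""
    pkgname=yay
    pkgver=12.3.5
    pkgrel=1
    depends=('pacman>6.1' 'git')
    source=("${pkgname}-${pkgver}.tar.gz")
    """)

    # PkgbuildParser tokenizes the stream, Pkgbuild keeps the resulting PkgbuildPatch fields
    pkgbuild = Pkgbuild.from_io(stream)

    print(pkgbuild["pkgver"])     # expected: 12.3.5 (plain string assignment)
    print(pkgbuild["depends"])    # expected: ['pacman>6.1', 'git'] (array assignment)
    print(pkgbuild["source"])     # expected: ['yay-12.3.5.tar.gz'], assuming ${...} is substituted on access
    print(pkgbuild.get("epoch"))  # expected: None; optional keys go through Mapping.get, as package.py now does

Per the _expand_array docstring, arrays written as (first prefix-{mid1,mid2}-suffix last) should come back expanded to first, prefix-mid1-suffix, prefix-mid2-suffix, last. Note also that missing keys now raise KeyError instead of returning an empty string, which is why callers such as package.py switched to pkgbuild.get("epoch").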