Compare commits

...

13 Commits

117 changed files with 1607 additions and 678 deletions

View File

@ -10,13 +10,11 @@ echo -e '[arcanisrepo]\nServer = https://repo.arcanis.me/$arch\nSigLevel = Never
# refresh the image
pacman -Syu --noconfirm
# main dependencies
pacman -Sy --noconfirm devtools git pyalpm python-inflection python-passlib python-pyelftools python-requests python-srcinfo python-systemd sudo
pacman -Sy --noconfirm devtools git pyalpm python-inflection python-passlib python-pyelftools python-requests python-systemd sudo
# make dependencies
pacman -Sy --noconfirm --asdeps base-devel python-build python-flit python-installer python-tox python-wheel
# optional dependencies
if [[ -z $MINIMAL_INSTALL ]]; then
# VCS support
pacman -Sy --noconfirm breezy darcs mercurial subversion
# web server
pacman -Sy --noconfirm python-aioauth-client python-aiohttp python-aiohttp-apispec-git python-aiohttp-cors python-aiohttp-jinja2 python-aiohttp-security python-aiohttp-session python-cryptography python-jinja
# additional features

View File

@ -84,8 +84,6 @@ Again, the most checks can be performed by `tox` command, though some additional
def __init__(self, *args: Any, **kwargs: Any) -> None:
"""
default constructor
Args:
*args(Any): positional arguments
**kwargs(Any): keyword arguments
@ -93,6 +91,8 @@ Again, the most checks can be performed by `tox` command, though some additional
self.instance_attribute = ""
```
Note the missing comment for the `__init__` method, which is a special case.
* Type annotations are a must, even for local functions. The function arguments `self` (for instance methods) and `cls` (for class methods) should not be annotated.
* For collection types, built-in classes must be used if possible (e.g. `dict` instead of `typing.Dict`, `tuple` instead of `typing.Tuple`). If a built-in type is not available but `collections.abc` provides an interface, it must be used (e.g. `collections.abc.Awaitable` instead of `typing.Awaitable`, `collections.abc.Iterable` instead of `typing.Iterable`). For unions, the bar operator (`|`) must be used (e.g. `float | int` instead of `typing.Union[float, int]`); this also applies to `typing.Optional` (e.g. `str | None` instead of `Optional[str]`).
* A `classmethod` should (almost) always return `Self`. In case of a mypy warning (e.g. if there is a branch in which the function doesn't return an instance of `cls`), consider using `staticmethod` instead. These conventions are illustrated in the sketch below.
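A minimal sketch illustrating these conventions; the class and its fields are made up for illustration and are not taken from the codebase:

```python
from collections.abc import Iterable
from typing import Any, Self


class Counter:
    """
    hypothetical example class which follows the guidelines above
    """

    def __init__(self, initial: int = 0) -> None:
        """
        Args:
            initial(int, optional): initial counter value (Default value = 0)
        """
        self.value = initial

    @classmethod
    def from_values(cls, values: Iterable[int]) -> Self:
        # collections.abc.Iterable instead of typing.Iterable, Self as the return type, no annotation for cls
        return cls(sum(values))

    def describe(self, label: str | None = None) -> dict[str, Any]:
        # built-in dict and the bar operator instead of typing.Dict and typing.Optional
        return {"label": label or "counter", "value": self.value}
```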

View File

@ -31,7 +31,6 @@ RUN useradd -m -d "/home/build" -s "/usr/bin/nologin" build && \
echo "build ALL=(ALL) NOPASSWD: ALL" > "/etc/sudoers.d/build"
COPY "docker/install-aur-package.sh" "/usr/local/bin/install-aur-package"
## install package dependencies
## darcs is intentionally not installed, because it requires a lot of haskell packages which dramatically increase the image size
RUN pacman -Sy --noconfirm --asdeps \
devtools \
git \
@ -40,7 +39,6 @@ RUN pacman -Sy --noconfirm --asdeps \
python-passlib \
python-pyelftools \
python-requests \
python-srcinfo \
&& \
pacman -Sy --noconfirm --asdeps \
base-devel \
@ -50,9 +48,7 @@ RUN pacman -Sy --noconfirm --asdeps \
python-wheel \
&& \
pacman -Sy --noconfirm --asdeps \
breezy \
git \
mercurial \
python-aiohttp \
python-boto3 \
python-cerberus \
@ -61,7 +57,6 @@ RUN pacman -Sy --noconfirm --asdeps \
python-matplotlib \
python-systemd \
rsync \
subversion \
&& \
runuser -u build -- install-aur-package \
python-aioauth-client \

View File

@ -28,6 +28,14 @@ ahriman.core.alpm.pacman\_database module
:no-undoc-members:
:show-inheritance:
ahriman.core.alpm.pkgbuild\_parser module
-----------------------------------------
.. automodule:: ahriman.core.alpm.pkgbuild_parser
:members:
:no-undoc-members:
:show-inheritance:
ahriman.core.alpm.repo module
-----------------------------

View File

@ -172,6 +172,14 @@ ahriman.models.pacman\_synchronization module
:no-undoc-members:
:show-inheritance:
ahriman.models.pkgbuild module
------------------------------
.. automodule:: ahriman.models.pkgbuild
:members:
:no-undoc-members:
:show-inheritance:
ahriman.models.pkgbuild\_patch module
-------------------------------------

View File

@ -265,11 +265,7 @@ TL;DR
How to update VCS packages
^^^^^^^^^^^^^^^^^^^^^^^^^^
Normally the service handles VCS packages correctly, however it requires additional dependencies:
.. code-block:: shell
pacman -S breezy darcs mercurial subversion
Normally the service handles VCS packages correctly. The version is updated in a clean chroot; no additional actions are required.
How to review changes before build
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@ -7,12 +7,9 @@ pkgdesc="ArcH linux ReposItory MANager"
arch=('any')
url="https://github.com/arcan1s/ahriman"
license=('GPL3')
depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-inflection' 'python-passlib' 'python-pyelftools' 'python-requests' 'python-srcinfo')
depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-inflection' 'python-passlib' 'python-pyelftools' 'python-requests')
makedepends=('python-build' 'python-flit' 'python-installer' 'python-wheel')
optdepends=('breezy: -bzr packages support'
'darcs: -darcs packages support'
'mercurial: -hg packages support'
'python-aioauth-client: web server with OAuth2 authorization'
optdepends=('python-aioauth-client: web server with OAuth2 authorization'
'python-aiohttp: web server'
'python-aiohttp-apispec>=3.0.0: web server'
'python-aiohttp-cors: web server'
@ -26,8 +23,7 @@ optdepends=('breezy: -bzr packages support'
'python-requests-unixsocket2: client report to web server by unix socket'
'python-jinja: html report generation'
'python-systemd: journal support'
'rsync: sync by using rsync'
'subversion: -svn packages support')
'rsync: sync by using rsync')
source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver.tar.gz"
'ahriman.sysusers'
'ahriman.tmpfiles')

View File

@ -21,7 +21,6 @@ dependencies = [
"passlib",
"pyelftools",
"requests",
"srcinfo",
]
dynamic = ["version"]

View File

@ -40,8 +40,6 @@ class ApplicationProperties(LazyLogging):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, *, report: bool,
refresh_pacman_database: PacmanSynchronization = PacmanSynchronization.Disabled) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -49,8 +49,6 @@ class UpdatesIterator(Iterator[list[str] | None]):
def __init__(self, application: Application, interval: int) -> None:
"""
default constructor
Args:
application(Application): application instance
interval(int): predefined interval for updates

View File

@ -37,8 +37,6 @@ class LocalUpdater(Updater):
def __init__(self, repository: Repository) -> None:
"""
default constructor
Args:
repository(Repository): repository instance
"""

View File

@ -43,8 +43,6 @@ class RemoteUpdater(Updater):
def __init__(self, workers: list[Worker], repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
workers(list[Worker]): worker identifiers
repository_id(RepositoryId): repository unique identifier

View File

@ -66,8 +66,6 @@ class Lock(LazyLogging):
def __init__(self, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
args(argparse.Namespace): command line args
repository_id(RepositoryId): repository unique identifier

View File

@ -33,9 +33,7 @@ class _Context:
"""
def __init__(self) -> None:
"""
default constructor. Must not be used directly
"""
""""""
self._content: dict[str, Any] = {}
def get(self, key: ContextKey[T] | type[T]) -> T:

View File

@ -49,8 +49,6 @@ class Pacman(LazyLogging):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, *,
refresh_database: PacmanSynchronization) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -45,8 +45,6 @@ class PacmanDatabase(SyncHttpClient):
def __init__(self, database: DB, configuration: Configuration) -> None:
"""
default constructor
Args:
database(DB): pyalpm database object
configuration(Configuration): configuration instance

View File

@ -0,0 +1,290 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import itertools
import re
import shlex
from collections.abc import Generator
from enum import StrEnum
from typing import IO
from ahriman.core.exceptions import PkgbuildParserError
from ahriman.models.pkgbuild_patch import PkgbuildPatch
class PkgbuildToken(StrEnum):
"""
well-known tokens dictionary
Attributes:
ArrayEnds(PkgbuildToken): (class attribute) array ends token
ArrayStarts(PkgbuildToken): (class attribute) array starts token
Comma(PkgbuildToken): (class attribute) comma token
Comment(PkgbuildToken): (class attribute) comment token
FunctionDeclaration(PkgbuildToken): (class attribute) function declaration token
FunctionEnds(PkgbuildToken): (class attribute) function ends token
FunctionStarts(PkgbuildToken): (class attribute) function starts token
"""
ArrayStarts = "("
ArrayEnds = ")"
Comma = ","
Comment = "#"
FunctionDeclaration = "()"
FunctionStarts = "{"
FunctionEnds = "}"
class PkgbuildParser(shlex.shlex):
"""
simple pkgbuild reader implementation in pure python, because others suck.
What is it:
#. Simple PKGBUILD parser written in python.
#. No shell execution, so it is free from random shell attacks.
#. Able to parse simple constructions (assignments, comments, functions, arrays).
What it is not:
#. Fully functional shell parser.
#. Shell executor.
#. No parameter expansion.
For more details on what it supports, please consult the test cases.
Examples:
This class is heavily based on :mod:`shlex` parser, but instead of strings operates with the
:class:`ahriman.models.pkgbuild_patch.PkgbuildPatch` objects. The main way to use it is to call :func:`parse()`
function and collect parsed objects, e.g.::
>>> parser = PkgbuildParser(StringIO("input string"))
>>> for patch in parser.parse():
>>> print(f"{patch.key} = {patch.value}")
It doesn't store the state of the fields (but operates with the :mod:`shlex` parser state), so no shell
post-processing is performed (e.g. variable substitution).
"""
_ARRAY_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=$")
# in addition to usual assignment, functions can have dash
_FUNCTION_DECLARATION = re.compile(r"^(?P<key>[\w-]+)$")
_STRING_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=(?P<value>.+)$")
def __init__(self, stream: IO[str]) -> None:
"""
Args:
stream(IO[str]): input stream containing PKGBUILD content
"""
shlex.shlex.__init__(self, stream, posix=True, punctuation_chars=True)
self._io = stream # direct access without type casting
# ignore substitution and extend bash symbols
self.wordchars += "${}#:+-@"
# in case of default behaviour, it will ignore, for example, segment part of url outside of quotes
self.commenters = ""
@staticmethod
def _expand_array(array: list[str]) -> list[str]:
"""
bash array expansion simulator. It takes a raw array and tries to expand constructions like
``(first prefix-{mid1,mid2}-suffix last)`` into ``(first prefix-mid1-suffix prefix-mid2-suffix last)``
Args:
array(list[str]): input array
Returns:
list[str]: either source array or expanded array if possible
Raises:
PkgbuildParserError: if there are errors in parser
"""
# we are using comma as marker for expansion (if any)
if PkgbuildToken.Comma not in array:
return array
# again sanity check, for expansion there are at least 3 elements (first, last and comma)
if len(array) < 3:
return array
result = []
buffer, prefix = [], None
for index, (first, second) in enumerate(itertools.pairwise(array)):
match (first, second):
# in this case we check if expansion should be started
# this condition matches "prefix{first", ","
case (_, PkgbuildToken.Comma) if PkgbuildToken.FunctionStarts in first:
prefix, part = first.rsplit(PkgbuildToken.FunctionStarts, maxsplit=1)
buffer.append(f"{prefix}{part}")
# the last element case, it matches either ",", "last}" or ",", "last}suffix"
# if there is a suffix, it must be appended to all list elements
case (PkgbuildToken.Comma, _) if prefix is not None and PkgbuildToken.FunctionEnds in second:
part, suffix = second.rsplit(PkgbuildToken.FunctionEnds, maxsplit=1)
buffer.append(f"{prefix}{part}")
result.extend([f"{part}{suffix}" for part in buffer])
# reset state
buffer, prefix = [], None
# we have already prefix string, so we are in progress of expansion
# we always operate the last element, so this matches ",", "next"
case (PkgbuildToken.Comma, _) if prefix is not None:
buffer.append(f"{prefix}{second}")
# exactly first element of the list
case (_, _) if prefix is None and index == 0:
result.append(first)
# any next normal element
case (_, _) if prefix is None:
result.append(second)
# small sanity check
if prefix is not None:
raise PkgbuildParserError("error in array expansion", array)
return result
def _parse_array(self) -> list[str]:
"""
parse array from the PKGBUILD. This method will extract tokens from parser until it matches closing array,
modifying source parser state
Returns:
list[str]: extracted arrays elements
Raises:
PkgbuildParserError: if array is not closed
"""
def extract() -> Generator[str, None, None]:
while token := self.get_token():
if token == PkgbuildToken.ArrayEnds:
break
if token == PkgbuildToken.Comment:
self.instream.readline()
continue
yield token
if token != PkgbuildToken.ArrayEnds:
raise PkgbuildParserError("no closing array bracket found")
return self._expand_array(list(extract()))
def _parse_function(self) -> str:
"""
parse function from the PKGBUILD. This method will extract tokens from parser until it matches closing function,
modifying source parser state. Instead of trying to combine tokens together, it uses positions of the file
and reads content again in this range
Returns:
str: function body
Raises:
PkgbuildParserError: if function body wasn't found or parser input stream doesn't support position reading
"""
# find start and end positions
start_position = end_position = -1
counter = 0 # simple processing of the inner "{" and "}"
while token := self.get_token():
match token:
case PkgbuildToken.FunctionStarts:
if counter == 0:
start_position = self._io.tell() - 1
counter += 1
case PkgbuildToken.FunctionEnds:
end_position = self._io.tell()
counter -= 1
if counter == 0:
break
if not 0 < start_position < end_position:
raise PkgbuildParserError("function body wasn't found")
# read the specified interval from source stream
self._io.seek(start_position - 1) # start from the previous symbol
content = self._io.read(end_position - start_position)
# special case of the end of file
if self.state == self.eof: # type: ignore[attr-defined]
content += self._io.read()
# reset position (because the last position was before the next token starts)
self._io.seek(end_position)
return content
def _parse_token(self, token: str) -> Generator[PkgbuildPatch, None, None]:
"""
parse single token to the PKGBUILD field
Args:
token(str): current token
Yields:
PkgbuildPatch: extracted PKGBUILD node
"""
# simple assignment rule
if (match := self._STRING_ASSIGNMENT.match(token)) is not None:
key = match.group("key")
value = match.group("value")
yield PkgbuildPatch(key, value)
return
if token == PkgbuildToken.Comment:
self.instream.readline()
return
match self.get_token():
# array processing. Arrays will be sent as "key=", "(", values, ")"
case PkgbuildToken.ArrayStarts if (match := self._ARRAY_ASSIGNMENT.match(token)) is not None:
key = match.group("key")
value = self._parse_array()
yield PkgbuildPatch(key, value)
# functions processing. Function will be sent as "name", "()", "{", body, "}"
case PkgbuildToken.FunctionDeclaration if self._FUNCTION_DECLARATION.match(token):
key = f"{token}{PkgbuildToken.FunctionDeclaration}"
value = self._parse_function()
yield PkgbuildPatch(key, value) # this is not mistake, assign to token without ()
# special function case, where "(" and ")" are separated tokens, e.g. "pkgver ( )"
case PkgbuildToken.ArrayStarts if self._FUNCTION_DECLARATION.match(token):
next_token = self.get_token()
if next_token == PkgbuildToken.ArrayEnds: # replace closing bracket with "()"
next_token = PkgbuildToken.FunctionDeclaration
self.push_token(next_token) # type: ignore[arg-type]
yield from self._parse_token(token)
# some random token received without continuation, let's assume it is an empty assignment (i.e. key=)
case other if other is not None:
yield from self._parse_token(other)
def parse(self) -> Generator[PkgbuildPatch, None, None]:
"""
parse source stream and yield parsed entries
Yields:
PkgbuildPatch: extracted PKGBUILD node
"""
for token in self:
yield from self._parse_token(token)
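A hedged usage sketch of the new parser, based on the class docstring above; the PKGBUILD content is invented for illustration and also exercises the brace expansion handled by `_expand_array`:

```python
from io import StringIO

from ahriman.core.alpm.pkgbuild_parser import PkgbuildParser

# hypothetical PKGBUILD fragment, not taken from any real package
content = """
pkgbase=demo
pkgver=1.0.0
depends=(python python-{requests,aiohttp})

package() {
  echo demo
}
"""

parser = PkgbuildParser(StringIO(content))
for patch in parser.parse():
    # arrays are expanded, e.g. python-{requests,aiohttp} becomes python-requests python-aiohttp
    print(patch.key, patch.value)
```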

View File

@ -38,8 +38,6 @@ class Repo(LazyLogging):
def __init__(self, name: str, paths: RepositoryPaths, sign_args: list[str]) -> None:
"""
default constructor
Args:
name(str): repository name
paths(RepositoryPaths): repository paths instance

View File

@ -38,8 +38,6 @@ class Auth(LazyLogging):
def __init__(self, configuration: Configuration, provider: AuthSettings = AuthSettings.Disabled) -> None:
"""
default constructor
Args:
configuration(Configuration): configuration instance
provider(AuthSettings, optional): authorization type definition (Default value = AuthSettings.Disabled)

View File

@ -37,8 +37,6 @@ class Mapping(Auth):
def __init__(self, configuration: Configuration, database: SQLite,
provider: AuthSettings = AuthSettings.Configuration) -> None:
"""
default constructor
Args:
configuration(Configuration): configuration instance
database(SQLite): database instance

View File

@ -43,8 +43,6 @@ class OAuth(Mapping):
def __init__(self, configuration: Configuration, database: SQLite,
provider: AuthSettings = AuthSettings.OAuth) -> None:
"""
default constructor
Args:
configuration(Configuration): configuration instance
database(SQLite): database instance

View File

@ -41,8 +41,6 @@ class PAM(Mapping):
def __init__(self, configuration: Configuration, database: SQLite,
provider: AuthSettings = AuthSettings.PAM) -> None:
"""
default constructor
Args:
configuration(Configuration): configuration instance
database(SQLite): database instance

View File

@ -45,8 +45,6 @@ class PackageArchive:
def __init__(self, root: Path, package: Package, pacman: Pacman, scan_paths: ScanPaths) -> None:
"""
default constructor
Args:
root(Path): path to root filesystem
package(Package): package descriptor

View File

@ -17,13 +17,14 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from collections.abc import Generator
from pathlib import Path
from ahriman.core.build_tools.sources import Sources
from ahriman.core.configuration import Configuration
from ahriman.core.exceptions import BuildError
from ahriman.core.log import LazyLogging
from ahriman.core.utils import check_output
from ahriman.core.utils import check_output, package_like
from ahriman.models.package import Package
from ahriman.models.pkgbuild_patch import PkgbuildPatch
from ahriman.models.repository_paths import RepositoryPaths
@ -48,8 +49,6 @@ class Task(LazyLogging):
def __init__(self, package: Package, configuration: Configuration, architecture: str,
paths: RepositoryPaths) -> None:
"""
default constructor
Args:
package(Package): package definitions
configuration(Configuration): configuration instance
@ -67,12 +66,43 @@ class Task(LazyLogging):
self.makepkg_flags = configuration.getlist("build", "makepkg_flags", fallback=[])
self.makechrootpkg_flags = configuration.getlist("build", "makechrootpkg_flags", fallback=[])
def build(self, sources_dir: Path, **kwargs: str | None) -> list[Path]:
def _package_archives(self, sources_dir: Path, source_files: list[Path]) -> list[Path]:
"""
extract package archives from the directory
Args:
sources_dir(Path): path to where sources are
source_files(list[Path]): list of files which were initially in the directory
Returns:
list[Path]: list of file paths which look like freshly generated archives
"""
def files() -> Generator[Path, None, None]:
for filepath in sources_dir.iterdir():
if filepath in source_files:
continue # skip files which were already there
if filepath.suffix == ".log":
continue # skip log files
if not package_like(filepath):
continue # path doesn't look like a package
yield filepath
# debug packages are always formed as package.base-debug
# see /usr/share/makepkg/util/pkgbuild.sh for more details
debug_package_prefix = f"{self.package.base}-debug-"
return [
package
for package in files()
if self.include_debug_packages or not package.name.startswith(debug_package_prefix)
]
def build(self, sources_dir: Path, *, dry_run: bool = False, **kwargs: str | None) -> list[Path]:
"""
run package build
Args:
sources_dir(Path): path to where sources are
dry_run(bool, optional): do not perform build itself (Default value = False)
**kwargs(str | None): environment variables to be passed to build processes
Returns:
@ -82,6 +112,8 @@ class Task(LazyLogging):
command.extend(self.archbuild_flags)
command.extend(["--"] + self.makechrootpkg_flags)
command.extend(["--"] + self.makepkg_flags)
if dry_run:
command.extend(["--nobuild"])
self.logger.info("using %s for %s", command, self.package.base)
environment: dict[str, str] = {
@ -91,6 +123,7 @@ class Task(LazyLogging):
}
self.logger.info("using environment variables %s", environment)
source_files = list(sources_dir.iterdir())
check_output(
*command,
exception=BuildError.from_process(self.package.base),
@ -100,20 +133,7 @@ class Task(LazyLogging):
environment=environment,
)
package_list_command = ["makepkg", "--packagelist"]
if not self.include_debug_packages:
package_list_command.append("OPTIONS=(!debug)") # disable debug flag manually
packages = check_output(
*package_list_command,
exception=BuildError.from_process(self.package.base),
cwd=sources_dir,
logger=self.logger,
environment=environment,
).splitlines()
# some dirty magic here
# the filter is applied in order to make sure that result will only contain packages which were actually built
# e.g. in some cases packagelist command produces debug packages which were not actually built
return list(filter(lambda path: path.is_file(), map(Path, packages)))
return self._package_archives(sources_dir, source_files)
def init(self, sources_dir: Path, patches: list[PkgbuildPatch], local_version: str | None) -> str | None:
"""

View File

@ -71,8 +71,6 @@ class Configuration(configparser.RawConfigParser):
def __init__(self, allow_no_value: bool = False) -> None:
"""
default constructor. In the most cases must not be called directly
Args:
allow_no_value(bool, optional): copies :class:`configparser.RawConfigParser` behaviour. In case if it is set
to ``True``, the keys without values will be allowed (Default value = False)

View File

@ -41,8 +41,6 @@ class Validator(RootValidator):
def __init__(self, *args: Any, **kwargs: Any) -> None:
"""
default constructor
Args:
configuration(Configuration): configuration instance used for extraction
*args(Any): positional arguments to be passed to base validator

View File

@ -41,8 +41,6 @@ class Migrations(LazyLogging):
def __init__(self, connection: Connection, configuration: Configuration) -> None:
"""
default constructor
Args:
connection(Connection): database connection
configuration(Configuration): configuration instance

View File

@ -41,8 +41,6 @@ class Operations(LazyLogging):
def __init__(self, path: Path, repository_id: RepositoryId, repository_paths: RepositoryPaths) -> None:
"""
default constructor
Args:
path(Path): path to the database file
repository_id(RepositoryId): repository unique identifier

View File

@ -59,8 +59,6 @@ class DistributedSystem(Trigger, WebClient):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -34,8 +34,6 @@ class WorkerTrigger(DistributedSystem):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -36,8 +36,6 @@ class WorkersCache(LazyLogging):
def __init__(self, configuration: Configuration) -> None:
"""
default constructor
Args:
configuration(Configuration): configuration instance
"""

View File

@ -33,8 +33,6 @@ class BuildError(RuntimeError):
def __init__(self, package_base: str, stderr: str | None = None) -> None:
"""
default constructor
Args:
package_base(str): package base raised exception
stderr(str | None, optional): stderr of the process if available (Default value = None)
@ -67,8 +65,6 @@ class CalledProcessError(subprocess.CalledProcessError):
def __init__(self, status_code: int, process: list[str], stderr: str) -> None:
"""
default constructor
Args:
status_code(int): process return code
process(list[str]): process argument list
@ -94,9 +90,7 @@ class DuplicateRunError(RuntimeError):
"""
def __init__(self) -> None:
"""
default constructor
"""
""""""
RuntimeError.__init__(
self, "Another application instance is run. This error can be suppressed by using --force flag.")
@ -119,9 +113,7 @@ class GitRemoteError(RuntimeError):
"""
def __init__(self) -> None:
"""
default constructor
"""
""""""
RuntimeError.__init__(self, "Git remote failed")
@ -132,8 +124,6 @@ class InitializeError(RuntimeError):
def __init__(self, details: str) -> None:
"""
default constructor
Args:
details(str): details of the exception
"""
@ -147,8 +137,6 @@ class MigrationError(RuntimeError):
def __init__(self, details: str) -> None:
"""
default constructor
Args:
details(str): error details
"""
@ -162,8 +150,6 @@ class MissingArchitectureError(ValueError):
def __init__(self, command: str) -> None:
"""
default constructor
Args:
command(str): command name which throws exception
"""
@ -177,8 +163,6 @@ class MultipleArchitecturesError(ValueError):
def __init__(self, command: str, repositories: list[RepositoryId] | None = None) -> None:
"""
default constructor
Args:
command(str): command name which throws exception
repositories(list[RepositoryId] | None, optional): found repository list (Default value = None)
@ -196,8 +180,6 @@ class OptionError(ValueError):
def __init__(self, value: Any) -> None:
"""
default constructor
Args:
value(Any): option value
"""
@ -211,8 +193,6 @@ class PackageInfoError(RuntimeError):
def __init__(self, details: Any) -> None:
"""
default constructor
Args:
details(Any): error details
"""
@ -226,14 +206,29 @@ class PacmanError(RuntimeError):
def __init__(self, details: Any) -> None:
"""
default constructor
Args:
details(Any): error details
"""
RuntimeError.__init__(self, f"Could not perform operation with pacman: `{details}`")
class PkgbuildParserError(ValueError):
"""
exception raises in case of PKGBUILD parser errors
"""
def __init__(self, reason: str, source: Any = None) -> None:
"""
Args:
reason(str): parser error reason
source(Any, optional): source line if available (Default value = None)
"""
message = f"Could not parse PKGBUILD: {reason}"
if source is not None:
message += f", source: `{source}`"
ValueError.__init__(self, message)
class PathError(ValueError):
"""
exception which will be raised on path which is not belong to root directory
@ -241,8 +236,6 @@ class PathError(ValueError):
def __init__(self, path: Path, root: Path) -> None:
"""
default constructor
Args:
path(Path): path which raised an exception
root(Path): repository root (i.e. ahriman home)
@ -257,8 +250,6 @@ class PasswordError(ValueError):
def __init__(self, details: Any) -> None:
"""
default constructor
Args:
details(Any): error details
"""
@ -272,8 +263,6 @@ class PartitionError(RuntimeError):
def __init__(self, count: int) -> None:
"""
default constructor
Args:
count(int): count of partitions
"""
@ -286,9 +275,7 @@ class PkgbuildGeneratorError(RuntimeError):
"""
def __init__(self) -> None:
"""
default constructor
"""
""""""
RuntimeError.__init__(self, "Could not generate package")
@ -298,9 +285,7 @@ class ReportError(RuntimeError):
"""
def __init__(self) -> None:
"""
default constructor
"""
""""""
RuntimeError.__init__(self, "Report failed")
@ -310,9 +295,7 @@ class SynchronizationError(RuntimeError):
"""
def __init__(self) -> None:
"""
default constructor
"""
""""""
RuntimeError.__init__(self, "Sync failed")
@ -323,8 +306,6 @@ class UnknownPackageError(ValueError):
def __init__(self, package_base: str) -> None:
"""
default constructor
Args:
package_base(str): package base name
"""
@ -338,8 +319,6 @@ class UnsafeRunError(RuntimeError):
def __init__(self, current_uid: int, root_uid: int) -> None:
"""
default constructor
Args:
current_uid(int): current user ID
root_uid(int): ID of the owner of root directory

View File

@ -33,8 +33,6 @@ class AurPrinter(StringPrinter):
def __init__(self, package: AURPackage) -> None:
"""
default constructor
Args:
package(AURPackage): AUR package description
"""

View File

@ -28,8 +28,6 @@ class BuildPrinter(StringPrinter):
def __init__(self, package: Package, is_success: bool, use_utf: bool) -> None:
"""
default constructor
Args:
package(Package): built package
is_success(bool): ``True`` in case if build has success status and ``False`` otherwise

View File

@ -32,8 +32,6 @@ class ChangesPrinter(Printer):
def __init__(self, changes: Changes) -> None:
"""
default constructor
Args:
changes(Changes): package changes
"""

View File

@ -33,8 +33,6 @@ class ConfigurationPathsPrinter(StringPrinter):
def __init__(self, root: Path, includes: list[Path]) -> None:
"""
default constructor
Args:
root(Path): path to root configuration file
includes(list[Path]): list of include files

View File

@ -42,8 +42,6 @@ class ConfigurationPrinter(StringPrinter):
def __init__(self, section: str, values: dict[str, str]) -> None:
"""
default constructor
Args:
section(str): section name
values(dict[str, str]): configuration values dictionary

View File

@ -34,8 +34,6 @@ class EventStatsPrinter(StringPrinter):
def __init__(self, event_type: str, events: list[float | int]) -> None:
"""
default constructor
Args:
event_type(str): event type used for this statistics
events(list[float | int]): event values to build statistics

View File

@ -34,8 +34,6 @@ class PackagePrinter(StringPrinter):
def __init__(self, package: Package, status: BuildStatus) -> None:
"""
default constructor
Args:
package(Package): package description
status(BuildStatus): build status

View File

@ -33,8 +33,6 @@ class PackageStatsPrinter(StringPrinter):
def __init__(self, events: dict[str, int]) -> None:
"""
default constructor
Args:
events(dict[str, int]): map of package to its event frequency
"""

View File

@ -32,8 +32,6 @@ class PatchPrinter(StringPrinter):
def __init__(self, package_base: str, patches: list[PkgbuildPatch]) -> None:
"""
default constructor
Args:
package_base(str): package base
patches(list[PkgbuildPatch]): PKGBUILD patch object

View File

@ -32,8 +32,6 @@ class RepositoryPrinter(StringPrinter):
def __init__(self, repository_id: RepositoryId) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
"""

View File

@ -28,8 +28,6 @@ class StatusPrinter(StringPrinter):
def __init__(self, status: BuildStatus) -> None:
"""
default constructor
Args:
status(BuildStatus): build status
"""

View File

@ -30,8 +30,6 @@ class StringPrinter(Printer):
def __init__(self, content: str) -> None:
"""
default constructor
Args:
content(str): any content string
"""

View File

@ -32,8 +32,6 @@ class TreePrinter(StringPrinter):
def __init__(self, level: int, packages: list[Package]) -> None:
"""
default constructor
Args:
level(int): dependencies tree level
packages(list[Package]): packages which belong to this level

View File

@ -34,8 +34,6 @@ class UpdatePrinter(StringPrinter):
def __init__(self, remote: Package, local_version: str | None) -> None:
"""
default constructor
Args:
remote(Package): remote (new) package object
local_version(str | None): local version of the package if any

View File

@ -32,8 +32,6 @@ class UserPrinter(StringPrinter):
def __init__(self, user: User) -> None:
"""
default constructor
Args:
user(User): user to print
"""

View File

@ -35,8 +35,6 @@ class ValidationPrinter(StringPrinter):
def __init__(self, node: str, errors: list[str | dict[str, Any]]) -> None:
"""
default constructor
Args:
node(str): root level name
errors(list[str | dict[str, Any]]): validation errors

View File

@ -31,8 +31,6 @@ class VersionPrinter(StringPrinter):
def __init__(self, title: str, packages: dict[str, str]) -> None:
"""
default constructor
Args:
title(str): title of the message
packages(dict[str, str]): map of package name to its version

View File

@ -45,8 +45,6 @@ class RemotePull(LazyLogging):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -64,8 +64,6 @@ class RemotePullTrigger(Trigger):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -46,8 +46,6 @@ class RemotePush(LazyLogging):
def __init__(self, reporter: Client, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
reporter(Client): reporter client
configuration(Configuration): configuration instance

View File

@ -76,8 +76,6 @@ class RemotePushTrigger(Trigger):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -44,8 +44,6 @@ class SyncHttpClient(LazyLogging):
def __init__(self, configuration: Configuration | None = None, section: str | None = None, *,
suppress_errors: bool = False) -> None:
"""
default constructor
Args:
configuration(Configuration | None, optional): configuration instance (Default value = None)
section(str | None, optional): settings section name (Default value = None)

View File

@ -40,8 +40,6 @@ class HttpLogHandler(logging.Handler):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, *,
report: bool, suppress_errors: bool) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -31,8 +31,6 @@ class _JournalHandler(NullHandler):
def __init__(self, *args: Any, **kwargs: Any) -> None:
"""
default constructor
Args:
*args(Any): positional arguments
**kwargs(Any): keyword arguments

View File

@ -35,8 +35,6 @@ class Console(Report):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -51,8 +51,6 @@ class Email(Report, JinjaTemplate):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -36,8 +36,6 @@ class HTML(Report, JinjaTemplate):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -73,8 +73,6 @@ class JinjaTemplate:
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -42,8 +42,6 @@ class RemoteCall(Report):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -56,8 +56,6 @@ class Report(LazyLogging):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -384,8 +384,6 @@ class ReportTrigger(Trigger):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -46,8 +46,6 @@ class RSS(Report, JinjaTemplate):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -44,8 +44,6 @@ class Telegram(Report, JinjaTemplate, SyncHttpClient):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -58,7 +58,7 @@ class PackageInfo(RepositoryProperties):
# force version to max of them
self.logger.warning("version of %s differs, found %s and %s",
current.base, current.version, local.version)
if current.is_outdated(local, self.paths, calculate_version=False):
if current.is_outdated(local, self.configuration, calculate_version=False):
current.version = local.version
current.packages.update(local.packages)
except Exception:

View File

@ -51,14 +51,11 @@ class RepositoryProperties(EventLogger, LazyLogging):
scan_paths(ScanPaths): scan paths for the implicit dependencies
sign(GPG): GPG wrapper instance
triggers(TriggerLoader): triggers holder
vcs_allowed_age(int): maximal age of the VCS packages before they will be checked
"""
def __init__(self, repository_id: RepositoryId, configuration: Configuration, database: SQLite, *, report: bool,
refresh_pacman_database: PacmanSynchronization) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance
@ -70,8 +67,6 @@ class RepositoryProperties(EventLogger, LazyLogging):
self.configuration = configuration
self.database = database
self.vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0)
self.paths: RepositoryPaths = configuration.repository_paths # additional workaround for pycharm typing
self.ignore_list = configuration.getlist("build", "ignore_packages", fallback=[])

View File

@ -67,10 +67,7 @@ class UpdateHandler(PackageInfo, Cleaner):
try:
remote = load_remote(local)
if local.is_outdated(
remote, self.paths,
vcs_allowed_age=self.vcs_allowed_age,
calculate_version=vcs):
if local.is_outdated(remote, self.configuration, calculate_version=vcs):
self.reporter.set_pending(local.base)
self.event(local.base, EventType.PackageOutdated, "Remote version is newer than local")
result.append(remote)
@ -154,9 +151,7 @@ class UpdateHandler(PackageInfo, Cleaner):
if local is None:
continue # we don't add packages automatically
if local.is_outdated(remote, self.paths,
vcs_allowed_age=self.vcs_allowed_age,
calculate_version=vcs):
if local.is_outdated(remote, self.configuration, calculate_version=vcs):
self.reporter.set_pending(local.base)
self.event(local.base, EventType.PackageOutdated, "Locally pulled sources are outdated")
result.append(remote)

View File

@ -38,8 +38,6 @@ class GPG(SyncHttpClient):
def __init__(self, configuration: Configuration) -> None:
"""
default constructor
Args:
configuration(Configuration): configuration instance
"""

View File

@ -46,8 +46,6 @@ class Spawn(Thread, LazyLogging):
def __init__(self, args_parser: argparse.ArgumentParser, command_arguments: list[str]) -> None:
"""
default constructor
Args:
args_parser(argparse.ArgumentParser): command line parser for the application
command_arguments(list[str]): base command line arguments

View File

@ -40,8 +40,6 @@ class LocalClient(Client):
def __init__(self, repository_id: RepositoryId, database: SQLite) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
database(SQLite): database instance:

View File

@ -44,8 +44,6 @@ class Watcher(LazyLogging):
def __init__(self, client: Client) -> None:
"""
default constructor
Args:
client(Client): reporter instance
"""
@ -199,7 +197,7 @@ class Watcher(LazyLogging):
proxy methods for reporter client
Args:
item(str): property name:
item(str): property name
Returns:
Any: attribute by its name

View File

@ -45,8 +45,6 @@ class WebClient(Client, SyncAhrimanClient):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -106,8 +106,6 @@ class KeyringTrigger(Trigger):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -93,8 +93,6 @@ class MirrorlistTrigger(Trigger):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -40,8 +40,6 @@ class PackageCreator:
def __init__(self, configuration: Configuration, generator: PkgbuildGenerator) -> None:
"""
default constructor
Args:
configuration(Configuration): configuration instance
generator(PkgbuildGenerator): PKGBUILD generator instance

View File

@ -47,8 +47,6 @@ class KeyringGenerator(PkgbuildGenerator):
def __init__(self, database: SQLite, sign: GPG, repository_id: RepositoryId,
configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
database(SQLite): database instance
sign(GPG): GPG wrapper instance

View File

@ -41,8 +41,6 @@ class MirrorlistGenerator(PkgbuildGenerator):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -38,8 +38,6 @@ class Leaf:
def __init__(self, package: Package) -> None:
"""
default constructor
Args:
package(Package): package properties
"""
@ -122,8 +120,6 @@ class Tree:
def __init__(self, leaves: list[Leaf]) -> None:
"""
default constructor
Args:
leaves(list[Leaf]): leaves to build the tree
"""

View File

@ -61,8 +61,6 @@ class Trigger(LazyLogging):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -59,9 +59,7 @@ class TriggerLoader(LazyLogging):
"""
def __init__(self) -> None:
"""
default constructor
"""
""""""
self._on_stop_requested = False
self.triggers: list[Trigger] = []

View File

@ -44,8 +44,6 @@ class GitHub(Upload, HttpUpload):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -42,8 +42,6 @@ class RemoteService(Upload, HttpUpload):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -37,8 +37,6 @@ class Rsync(Upload):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -43,8 +43,6 @@ class S3(Upload):
def __init__(self, repository_id: RepositoryId, configuration: Configuration, section: str) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -57,8 +57,6 @@ class Upload(LazyLogging):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -163,8 +163,6 @@ class UploadTrigger(Trigger):
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance

View File

@ -27,7 +27,7 @@ import re
import selectors
import subprocess
from collections.abc import Callable, Generator, Iterable
from collections.abc import Callable, Generator, Iterable, Mapping
from dataclasses import asdict
from enum import Enum
from pathlib import Path
@ -407,7 +407,7 @@ def safe_filename(source: str) -> str:
return re.sub(r"[^A-Za-z\d\-._~:\[\]@]", "-", source)
def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any], *,
def srcinfo_property(key: str, srcinfo: Mapping[str, Any], package_srcinfo: Mapping[str, Any], *,
default: Any = None) -> Any:
"""
extract property from SRCINFO. This method extracts property from package if this property is presented in
@ -416,8 +416,8 @@ def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[st
Args:
key(str): key to extract
srcinfo(dict[str, Any]): root structure of SRCINFO
package_srcinfo(dict[str, Any]): package specific SRCINFO
srcinfo(Mapping[str, Any]): root structure of SRCINFO
package_srcinfo(Mapping[str, Any]): package specific SRCINFO
default(Any, optional): the default value for the specified key (Default value = None)
Returns:
@ -426,7 +426,7 @@ def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[st
return package_srcinfo.get(key) or srcinfo.get(key) or default
def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any], *,
def srcinfo_property_list(key: str, srcinfo: Mapping[str, Any], package_srcinfo: Mapping[str, Any], *,
architecture: str | None = None) -> list[Any]:
"""
extract list property from SRCINFO. Unlike :func:`srcinfo_property()` it supposes that default return value is
@ -435,8 +435,8 @@ def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: di
Args:
key(str): key to extract
srcinfo(dict[str, Any]): root structure of SRCINFO
package_srcinfo(dict[str, Any]): package specific SRCINFO
srcinfo(Mapping[str, Any]): root structure of SRCINFO
package_srcinfo(Mapping[str, Any]): package specific SRCINFO
architecture(str | None, optional): package architecture if set (Default value = None)
Returns:

View File

@ -55,8 +55,6 @@ class Event:
def __init__(self, event: str | EventType, object_id: str, message: str | None = None, created: int | None = None,
**kwargs: Any):
"""
default constructor
Args:
event(str | EventType): event type
object_id(str): object identifier

View File

@ -44,9 +44,7 @@ class MetricsTimer:
"""
def __init__(self) -> None:
"""
default constructor
"""
""""""
self.start_time: float | None = None
@property

View File

@ -26,19 +26,18 @@ from collections.abc import Callable, Generator, Iterable
from dataclasses import dataclass
from pathlib import Path
from pyalpm import vercmp # type: ignore[import-not-found]
from srcinfo.parse import parse_srcinfo # type: ignore[import-untyped]
from typing import Any, Self
from urllib.parse import urlparse
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.alpm.remote import AUR, Official, OfficialSyncdb
from ahriman.core.exceptions import PackageInfoError
from ahriman.core.configuration import Configuration
from ahriman.core.log import LazyLogging
from ahriman.core.utils import check_output, dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow
from ahriman.core.utils import dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow
from ahriman.models.package_description import PackageDescription
from ahriman.models.package_source import PackageSource
from ahriman.models.pkgbuild import Pkgbuild
from ahriman.models.remote_source import RemoteSource
from ahriman.models.repository_paths import RepositoryPaths
@dataclass(kw_only=True)
@ -255,25 +254,19 @@ class Package(LazyLogging):
Returns:
Self: package properties
Raises:
PackageInfoError: if there are parsing errors
"""
srcinfo_source = check_output("makepkg", "--printsrcinfo", cwd=path)
srcinfo, errors = parse_srcinfo(srcinfo_source)
if errors:
raise PackageInfoError(errors)
pkgbuild = Pkgbuild.from_file(path / "PKGBUILD")
packages = {
package: PackageDescription(
depends=srcinfo_property_list("depends", srcinfo, properties, architecture=architecture),
make_depends=srcinfo_property_list("makedepends", srcinfo, properties, architecture=architecture),
opt_depends=srcinfo_property_list("optdepends", srcinfo, properties, architecture=architecture),
check_depends=srcinfo_property_list("checkdepends", srcinfo, properties, architecture=architecture),
depends=srcinfo_property_list("depends", pkgbuild, properties, architecture=architecture),
make_depends=srcinfo_property_list("makedepends", pkgbuild, properties, architecture=architecture),
opt_depends=srcinfo_property_list("optdepends", pkgbuild, properties, architecture=architecture),
check_depends=srcinfo_property_list("checkdepends", pkgbuild, properties, architecture=architecture),
)
for package, properties in srcinfo["packages"].items()
for package, properties in pkgbuild.packages().items()
}
version = full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"])
version = full_version(pkgbuild.get("epoch"), pkgbuild["pkgver"], pkgbuild["pkgrel"])
remote = RemoteSource(
source=PackageSource.Local,
@ -284,7 +277,7 @@ class Package(LazyLogging):
)
return cls(
base=srcinfo["pkgbase"],
base=pkgbuild["pkgbase"],
version=version,
remote=remote,
packages=packages,
@ -363,16 +356,12 @@ class Package(LazyLogging):
Raises:
PackageInfoError: if there are parsing errors
"""
srcinfo_source = check_output("makepkg", "--printsrcinfo", cwd=path)
srcinfo, errors = parse_srcinfo(srcinfo_source)
if errors:
raise PackageInfoError(errors)
pkgbuild = Pkgbuild.from_file(path / "PKGBUILD")
# we could use arch property, but for consistency it is better to call special method
architectures = Package.supported_architectures(path)
for architecture in architectures:
for source in srcinfo_property_list("source", srcinfo, {}, architecture=architecture):
for source in srcinfo_property_list("source", pkgbuild, {}, architecture=architecture):
if "::" in source:
_, source = source.split("::", 1) # in case if filename is specified, remove it
@ -383,7 +372,7 @@ class Package(LazyLogging):
yield Path(source)
if (install := srcinfo.get("install", None)) is not None:
if (install := pkgbuild.get("install")) is not None:
yield Path(install)
@staticmethod
@ -396,15 +385,9 @@ class Package(LazyLogging):
Returns:
set[str]: list of package supported architectures
Raises:
PackageInfoError: if there are parsing errors
"""
srcinfo_source = check_output("makepkg", "--printsrcinfo", cwd=path)
srcinfo, errors = parse_srcinfo(srcinfo_source)
if errors:
raise PackageInfoError(errors)
return set(srcinfo.get("arch", []))
pkgbuild = Pkgbuild.from_file(path / "PKGBUILD")
return set(pkgbuild.get("arch", []))
def _package_list_property(self, extractor: Callable[[PackageDescription], list[str]]) -> list[str]:
"""
@ -426,39 +409,39 @@ class Package(LazyLogging):
return sorted(set(generator()))
def actual_version(self, paths: RepositoryPaths) -> str:
def actual_version(self, configuration: Configuration) -> str:
"""
additional method to handle VCS package versions
Args:
paths(RepositoryPaths): repository paths instance
configuration(Configuration): configuration instance
Returns:
str: package version if package is not VCS and current version according to VCS otherwise
Raises:
PackageInfoError: if there are parsing errors
"""
if not self.is_vcs:
return self.version
from ahriman.core.build_tools.sources import Sources
from ahriman.core.build_tools.task import Task
Sources.load(paths.cache_for(self.base), self, [], paths)
_, repository_id = configuration.check_loaded()
paths = configuration.repository_paths
task = Task(self, configuration, repository_id.architecture, paths)
try:
# update pkgver first
check_output("makepkg", "--nodeps", "--nobuild", cwd=paths.cache_for(self.base), logger=self.logger)
# generate new .SRCINFO and put it to parser
srcinfo_source = check_output("makepkg", "--printsrcinfo",
cwd=paths.cache_for(self.base), logger=self.logger)
srcinfo, errors = parse_srcinfo(srcinfo_source)
if errors:
raise PackageInfoError(errors)
# create fresh chroot environment, fetch sources and - automagically - update PKGBUILD
task.init(paths.cache_for(self.base), [], None)
task.build(paths.cache_for(self.base), dry_run=True)
return full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"])
pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD")
return full_version(pkgbuild.get("epoch"), pkgbuild["pkgver"], pkgbuild["pkgrel"])
except Exception:
self.logger.exception("cannot determine version of VCS package, make sure that VCS tools are installed")
self.logger.exception("cannot determine version of VCS package")
finally:
# clear log files generated by devtools
for log_file in paths.cache_for(self.base).glob("*.log"):
log_file.unlink()
return self.version
@ -513,26 +496,25 @@ class Package(LazyLogging):
if package.build_date is not None
)
def is_outdated(self, remote: Package, paths: RepositoryPaths, *,
vcs_allowed_age: float | int = 0,
def is_outdated(self, remote: Package, configuration: Configuration, *,
calculate_version: bool = True) -> bool:
"""
check if the package is out-of-date
Args:
remote(Package): package properties from remote source
paths(RepositoryPaths): repository paths instance. Required for VCS packages cache
vcs_allowed_age(float | int, optional): max age of the built packages before they will be
forced to calculate actual version (Default value = 0)
configuration(Configuration): configuration instance
calculate_version(bool, optional): expand version to actual value (by calculating git versions)
(Default value = True)
Returns:
bool: ``True`` if the package is out-of-date and ``False`` otherwise
"""
vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0)
min_vcs_build_date = utcnow().timestamp() - vcs_allowed_age
if calculate_version and not self.is_newer_than(min_vcs_build_date):
remote_version = remote.actual_version(paths)
remote_version = remote.actual_version(configuration)
else:
remote_version = remote.version

View File

@ -0,0 +1,149 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from collections.abc import Iterator, Mapping
from dataclasses import dataclass
from io import StringIO
from pathlib import Path
from typing import Any, IO, Self
from ahriman.core.alpm.pkgbuild_parser import PkgbuildParser, PkgbuildToken
from ahriman.models.pkgbuild_patch import PkgbuildPatch
@dataclass(frozen=True)
class Pkgbuild(Mapping[str, Any]):
"""
model and proxy for PKGBUILD properties
Attributes:
fields(dict[str, PkgbuildPatch]): PKGBUILD fields
"""
fields: dict[str, PkgbuildPatch]
@property
def variables(self) -> dict[str, str]:
"""
list of variables defined and (maybe) used in this PKGBUILD
Returns:
dict[str, str]: map of variable name to its value. A value is only included here if it is present
in the internal dictionary, it is not a function and its value is a string
"""
return {
key: value.value
for key, value in self.fields.items()
if not value.is_function and isinstance(value.value, str)
}
@classmethod
def from_file(cls, path: Path) -> Self:
"""
parse PKGBUILD from the file
Args:
path(Path): path to the PKGBUILD file
Returns:
Self: constructed instance of self
"""
with path.open() as input_file:
return cls.from_io(input_file)
@classmethod
def from_io(cls, stream: IO[str]) -> Self:
"""
parse PKGBUILD from input stream
Args:
stream(IO[str]): input stream containing PKGBUILD content
Returns:
Self: constructed instance of self
"""
parser = PkgbuildParser(stream)
fields = {patch.key: patch for patch in parser.parse()}
# pkgbase is an optional field; pkgname must be used instead if it is not set
# however, pkgname is not present in "package()" functions, which we also parse here,
# thus, in our terms, it is optional too
if "pkgbase" not in fields and "pkgname" in fields:
fields["pkgbase"] = fields["pkgname"]
return cls({key: value for key, value in fields.items() if key})
def packages(self) -> dict[str, Self]:
"""
extract properties from internal package functions
Returns:
dict[str, Self]: map of package name to its inner properties if defined
"""
packages = [self["pkgname"]] if isinstance(self["pkgname"], str) else self["pkgname"]
def io(package_name: str) -> IO[str]:
# try to read the package-specific function and fall back to the default otherwise
content = self.get(f"package_{package_name}") or self["package"]
return StringIO(content)
return {package: self.from_io(io(package)) for package in packages}
def __getitem__(self, item: str) -> Any:
"""
get a field of the PKGBUILD. This method tries to get the exact key value if possible; if none is found,
it tries to fetch the function with the same name
Args:
item(str): key name
Returns:
Any: substituted value by the key
Raises:
KeyError: if key doesn't exist
"""
value = self.fields.get(item)
# if the key wasn't found and user didn't ask for function explicitly, we can try to get by function name
if value is None and not item.endswith(PkgbuildToken.FunctionDeclaration):
value = self.fields.get(f"{item}{PkgbuildToken.FunctionDeclaration}")
# if we still didn't find anything, we can just raise the exception
if value is None:
raise KeyError(item)
return value.substitute(self.variables)
def __iter__(self) -> Iterator[str]:
"""
iterate over the fields
Returns:
Iterator[str]: keys iterator
"""
return iter(self.fields)
def __len__(self) -> int:
"""
get length of the mapping
Returns:
int: amount of the fields in this PKGBUILD
"""
return len(self.fields)
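A short usage sketch for the new `Pkgbuild` mapping (the module path `ahriman.models.pkgbuild` and the file location are assumptions based on the surrounding resources; the field accesses are illustrative only):

```python
from pathlib import Path

from ahriman.models.pkgbuild import Pkgbuild  # assumed module path

pkgbuild = Pkgbuild.from_file(Path("PKGBUILD"))  # any local PKGBUILD file
pkgver = pkgbuild["pkgver"]                      # plain field access with variable substitution applied
package_body = pkgbuild["package"]               # falls back to the "package()" function if no plain field exists
for name, properties in pkgbuild.packages().items():
    # per-package properties parsed from package_<name>() (or the default package()) functions
    print(name, properties.variables)
```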

View File

@@ -21,6 +21,7 @@ import shlex
from dataclasses import dataclass, fields
from pathlib import Path
from string import Template
from typing import Any, Generator, Self
from ahriman.core.utils import dataclass_view, filter_json
@@ -167,6 +168,21 @@ class PkgbuildPatch:
return f"{self.key} {self.value}" # no quoting enabled here
return f"""{self.key}={PkgbuildPatch.quote(self.value)}"""
def substitute(self, variables: dict[str, str]) -> str | list[str]:
"""
substitute variables into the value
Args:
variables(dict[str, str]): map of variables available for usage
Returns:
str | list[str]: substituted value. Any variables which are not defined in ``variables`` are left
untouched. This function doesn't support recursive substitution
"""
if isinstance(self.value, str):
return Template(self.value).safe_substitute(variables)
return [Template(value).safe_substitute(variables) for value in self.value]
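A brief illustration of the substitution semantics (the patch value is made up): known variables are expanded via `string.Template.safe_substitute`, while unknown placeholders are kept verbatim instead of raising.

```python
from ahriman.models.pkgbuild_patch import PkgbuildPatch

patch = PkgbuildPatch("source", ["$pkgname-$pkgver.tar.gz"])
sources = patch.substitute({"pkgname": "demo"})  # == ["demo-$pkgver.tar.gz"]; $pkgver is unknown and left as is
```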
def view(self) -> dict[str, Any]:
"""
generate json patch view

View File

@@ -43,8 +43,6 @@ class Result:
def __init__(self, *, added: Iterable[Package] | None = None, updated: Iterable[Package] | None = None,
removed: Iterable[Package] | None = None, failed: Iterable[Package] | None = None) -> None:
"""
default constructor
Args:
added(Iterable[Package] | None, optional): initial list of successfully added packages
(Default value = None)

View File

@@ -47,8 +47,6 @@ class _AuthorizationPolicy(aiohttp_security.AbstractAuthorizationPolicy):
def __init__(self, validator: Auth) -> None:
"""
default constructor
Args:
validator(Auth): authorization module instance
"""

View File

@@ -28,9 +28,9 @@ def test_package_dependencies() -> None:
"""
must extract package dependencies
"""
packages = dict(Versions.package_dependencies("srcinfo"))
packages = dict(Versions.package_dependencies("requests"))
assert packages
assert packages.get("parse") is not None
assert packages.get("urllib3") is not None
def test_package_dependencies_missing() -> None:

View File

@@ -0,0 +1,206 @@
import pytest
from io import StringIO
from pathlib import Path
from ahriman.core.alpm.pkgbuild_parser import PkgbuildParser
from ahriman.core.exceptions import PkgbuildParserError
from ahriman.models.pkgbuild_patch import PkgbuildPatch
def test_expand_array() -> None:
"""
must correctly expand array
"""
assert PkgbuildParser._expand_array(["${pkgbase}{", ",", "-libs", ",", "-fortran}"]) == [
"${pkgbase}", "${pkgbase}-libs", "${pkgbase}-fortran"
]
assert PkgbuildParser._expand_array(["first", "prefix{1", ",", "2", ",", "3}suffix", "last"]) == [
"first", "prefix1suffix", "prefix2suffix", "prefix3suffix", "last"
]
def test_expand_array_no_comma() -> None:
"""
must skip array expansion if there is no comma
"""
assert PkgbuildParser._expand_array(["${pkgbase}{", "-libs", "-fortran}"]) == ["${pkgbase}{", "-libs", "-fortran}"]
def test_expand_array_short() -> None:
"""
must skip array expansion if the array is too short
"""
assert PkgbuildParser._expand_array(["${pkgbase}{", ","]) == ["${pkgbase}{", ","]
def test_expand_array_exception() -> None:
"""
must raise exception if there is unclosed element
"""
with pytest.raises(PkgbuildParserError):
assert PkgbuildParser._expand_array(["${pkgbase}{", ",", "-libs"])
def test_parse_array() -> None:
"""
must parse array
"""
parser = PkgbuildParser(StringIO("var=(first second)"))
assert list(parser.parse()) == [PkgbuildPatch("var", ["first", "second"])]
def test_parse_array_comment() -> None:
"""
must parse array with comments inside
"""
parser = PkgbuildParser(StringIO("""validpgpkeys=(
'F3691687D867B81B51CE07D9BBE43771487328A9' # bpiotrowski@archlinux.org
'86CFFCA918CF3AF47147588051E8B148A9999C34' # evangelos@foutrelis.com
'13975A70E63C361C73AE69EF6EEB81F8981C74C7' # richard.guenther@gmail.com
'D3A93CAD751C2AF4F8C7AD516C35B99309B5FA62' # Jakub Jelinek <jakub@redhat.com>
)"""))
assert list(parser.parse()) == [PkgbuildPatch("validpgpkeys", [
"F3691687D867B81B51CE07D9BBE43771487328A9",
"86CFFCA918CF3AF47147588051E8B148A9999C34",
"13975A70E63C361C73AE69EF6EEB81F8981C74C7",
"D3A93CAD751C2AF4F8C7AD516C35B99309B5FA62",
])]
def test_parse_array_exception() -> None:
"""
must raise exception if there is no closing bracket
"""
parser = PkgbuildParser(StringIO("var=(first second"))
with pytest.raises(PkgbuildParserError):
assert list(parser.parse())
def test_parse_function() -> None:
"""
must parse function
"""
parser = PkgbuildParser(StringIO("var() { echo hello world } "))
assert list(parser.parse()) == [PkgbuildPatch("var()", "{ echo hello world }")]
def test_parse_function_eof() -> None:
"""
must parse function with "}" at the end of the file
"""
parser = PkgbuildParser(StringIO("var() { echo hello world }"))
assert list(parser.parse()) == [PkgbuildPatch("var()", "{ echo hello world }")]
def test_parse_function_spaces() -> None:
"""
must parse function with spaces in declaration
"""
parser = PkgbuildParser(StringIO("var ( ) { echo hello world } "))
assert list(parser.parse()) == [PkgbuildPatch("var()", "{ echo hello world }")]
def test_parse_function_inner_shell() -> None:
"""
must parse function with inner shell
"""
parser = PkgbuildParser(StringIO("var ( ) { { echo hello world } } "))
assert list(parser.parse()) == [PkgbuildPatch("var()", "{ { echo hello world } }")]
def test_parse_function_exception() -> None:
"""
must raise exception if no bracket found
"""
parser = PkgbuildParser(StringIO("var() echo hello world } "))
with pytest.raises(PkgbuildParserError):
assert list(parser.parse())
parser = PkgbuildParser(StringIO("var() { echo hello world"))
with pytest.raises(PkgbuildParserError):
assert list(parser.parse())
def test_parse_token_assignment() -> None:
"""
must parse simple assignment
"""
parser = PkgbuildParser(StringIO())
assert next(parser._parse_token("var=value")) == PkgbuildPatch("var", "value")
assert next(parser._parse_token("var=$value")) == PkgbuildPatch("var", "$value")
assert next(parser._parse_token("var=${value}")) == PkgbuildPatch("var", "${value}")
assert next(parser._parse_token("var=${value/-/_}")) == PkgbuildPatch("var", "${value/-/_}")
def test_parse_token_comment() -> None:
"""
must correctly parse comment
"""
parser = PkgbuildParser(StringIO("""first=1 # comment
# comment line
second=2
#third=3
"""))
assert list(parser.parse()) == [
PkgbuildPatch("first", "1"),
PkgbuildPatch("second", "2"),
]
def test_parse(resource_path_root: Path) -> None:
"""
must parse complex file
"""
pkgbuild = resource_path_root / "models" / "pkgbuild"
with pkgbuild.open() as content:
parser = PkgbuildParser(content)
assert list(parser.parse()) == [
PkgbuildPatch("var", "value"),
PkgbuildPatch("var", "value"),
PkgbuildPatch("var", "value with space"),
PkgbuildPatch("var", "value"),
PkgbuildPatch("var", "$ref"),
PkgbuildPatch("var", "${ref}"),
PkgbuildPatch("var", "$ref value"),
PkgbuildPatch("var", "${ref}value"),
PkgbuildPatch("var", "${ref/-/_}"),
PkgbuildPatch("var", "${ref##.*}"),
PkgbuildPatch("var", "${ref%%.*}"),
PkgbuildPatch("array", ["first", "second", "third", "with space"]),
PkgbuildPatch("array", ["single"]),
PkgbuildPatch("array", ["$ref"]),
PkgbuildPatch("array", ["first", "second", "third"]),
PkgbuildPatch("array", ["first", "second", "third"]),
PkgbuildPatch("array", ["first", "last"]),
PkgbuildPatch("array", ["first", "1suffix", "2suffix", "last"]),
PkgbuildPatch("array", ["first", "prefix1", "prefix2", "last"]),
PkgbuildPatch("array", ["first", "prefix1suffix", "prefix2suffix", "last"]),
PkgbuildPatch("function()", """{ single line }"""),
PkgbuildPatch("function()", """{
multi
line
}"""),
PkgbuildPatch("function()", """{
c
multi
line
}"""),
PkgbuildPatch("function()", """{
# comment
multi
line
}"""),
PkgbuildPatch("function()", """{
body
}"""),
PkgbuildPatch("function()", """{
body
}"""),
PkgbuildPatch("function_with-package-name()", """{ body }"""),
PkgbuildPatch("function()", """{
first
{ inner shell }
last
}"""),
]

View File

@@ -2,37 +2,65 @@ import pytest
from pathlib import Path
from pytest_mock import MockerFixture
from unittest.mock import call as MockCall
from ahriman.core.build_tools.task import Task
from ahriman.models.pkgbuild_patch import PkgbuildPatch
def test_package_archives(task_ahriman: Task, mocker: MockerFixture) -> None:
"""
must correctly return list of new files
"""
mocker.patch("pathlib.Path.iterdir", return_value=[
Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"),
Path(f"{task_ahriman.package.base}-debug-{task_ahriman.package.version}-any.pkg.tar.xz"),
Path("source.pkg.tar.xz"),
Path("randomfile"),
Path("namcap.log"),
])
assert task_ahriman._package_archives(Path("local"), [Path("source.pkg.tar.xz")]) == [
Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"),
Path(f"{task_ahriman.package.base}-debug-{task_ahriman.package.version}-any.pkg.tar.xz"),
]
def test_package_archives_no_debug(task_ahriman: Task, mocker: MockerFixture) -> None:
"""
must correctly return list of new files without debug packages
"""
task_ahriman.include_debug_packages = False
mocker.patch("pathlib.Path.iterdir", return_value=[
Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"),
Path(f"{task_ahriman.package.base}-debug-{task_ahriman.package.version}-any.pkg.tar.xz"),
Path("source.pkg.tar.xz"),
Path("randomfile"),
Path("namcap.log"),
])
assert task_ahriman._package_archives(Path("local"), [Path("source.pkg.tar.xz")]) == [
Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"),
]
def test_build(task_ahriman: Task, mocker: MockerFixture) -> None:
"""
must build package
"""
local = Path("local")
mocker.patch("pathlib.Path.iterdir", return_value=["file"])
check_output_mock = mocker.patch("ahriman.core.build_tools.task.check_output")
archives_mock = mocker.patch("ahriman.core.build_tools.task.Task._package_archives",
return_value=[task_ahriman.package.base])
task_ahriman.build(local)
check_output_mock.assert_has_calls([
MockCall(
"extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck",
exception=pytest.helpers.anyvar(int),
cwd=local,
logger=task_ahriman.logger,
user=task_ahriman.uid,
environment={},
),
MockCall(
"makepkg", "--packagelist",
exception=pytest.helpers.anyvar(int),
cwd=local,
logger=task_ahriman.logger,
environment={},
),
])
assert task_ahriman.build(local) == [task_ahriman.package.base]
check_output_mock.assert_called_once_with(
"extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck",
exception=pytest.helpers.anyvar(int),
cwd=local,
logger=task_ahriman.logger,
user=task_ahriman.uid,
environment={},
)
archives_mock.assert_called_once_with(local, ["file"])
def test_build_environment(task_ahriman: Task, mocker: MockerFixture) -> None:
@@ -40,55 +68,41 @@ def test_build_environment(task_ahriman: Task, mocker: MockerFixture) -> None:
must build package with environment variables set
"""
local = Path("local")
mocker.patch("pathlib.Path.iterdir", return_value=["file"])
mocker.patch("ahriman.core.build_tools.task.Task._package_archives", return_value=[task_ahriman.package.base])
check_output_mock = mocker.patch("ahriman.core.build_tools.task.check_output")
environment = {"variable": "value"}
task_ahriman.build(local, **environment, empty=None)
check_output_mock.assert_has_calls([
MockCall(
"extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck",
exception=pytest.helpers.anyvar(int),
cwd=local,
logger=task_ahriman.logger,
user=task_ahriman.uid,
environment=environment,
),
MockCall(
"makepkg", "--packagelist",
exception=pytest.helpers.anyvar(int),
cwd=local,
logger=task_ahriman.logger,
environment=environment,
),
])
check_output_mock.assert_called_once_with(
"extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck",
exception=pytest.helpers.anyvar(int),
cwd=local,
logger=task_ahriman.logger,
user=task_ahriman.uid,
environment=environment,
)
def test_build_no_debug(task_ahriman: Task, mocker: MockerFixture) -> None:
def test_build_dry_run(task_ahriman: Task, mocker: MockerFixture) -> None:
"""
must filter debug packages from result
must run devtools in dry-run mode
"""
local = Path("local")
mocker.patch("pathlib.Path.iterdir", return_value=["file"])
mocker.patch("ahriman.core.build_tools.task.Task._package_archives", return_value=[task_ahriman.package.base])
check_output_mock = mocker.patch("ahriman.core.build_tools.task.check_output")
task_ahriman.include_debug_packages = False
task_ahriman.build(local)
check_output_mock.assert_has_calls([
MockCall(
"extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck",
exception=pytest.helpers.anyvar(int),
cwd=local,
logger=task_ahriman.logger,
user=task_ahriman.uid,
environment={},
),
MockCall(
"makepkg", "--packagelist", "OPTIONS=(!debug)",
exception=pytest.helpers.anyvar(int),
cwd=local,
logger=task_ahriman.logger,
environment={},
),
])
assert task_ahriman.build(local, dry_run=True) == [task_ahriman.package.base]
check_output_mock.assert_called_once_with(
"extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck", "--nobuild",
exception=pytest.helpers.anyvar(int),
cwd=local,
logger=task_ahriman.logger,
user=task_ahriman.uid,
environment={},
)
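Taken together, these tests describe the reworked `Task.build()` contract: a single devtools invocation, an optional `dry_run=True` flag which appends `--nobuild`, and a return value holding the newly built package archives (with debug packages filtered out when `include_debug_packages` is disabled). A hedged caller-side sketch, with the helper name and directory purely illustrative:

```python
from pathlib import Path

from ahriman.core.build_tools.task import Task


def run_build(task: Task, sources_dir: Path) -> list[Path]:
    # hypothetical helper wrapping the new contract; pass dry_run=True to only run the
    # --nobuild pass (used for VCS version detection) instead of a full build
    return task.build(sources_dir)
```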
def test_init(task_ahriman: Task, mocker: MockerFixture) -> None:

View File

@@ -31,8 +31,7 @@ def test_updates_aur(update_handler: UpdateHandler, package_ahriman: Package,
event_mock.assert_called_once_with(package_ahriman.base, EventType.PackageOutdated,
pytest.helpers.anyvar(str, True))
package_is_outdated_mock.assert_called_once_with(
package_ahriman, update_handler.paths,
vcs_allowed_age=update_handler.vcs_allowed_age,
package_ahriman, update_handler.configuration,
calculate_version=True)
@@ -119,8 +118,7 @@ def test_updates_aur_ignore_vcs(update_handler: UpdateHandler, package_ahriman:
assert not update_handler.updates_aur([], vcs=False)
package_is_outdated_mock.assert_called_once_with(
package_ahriman, update_handler.paths,
vcs_allowed_age=update_handler.vcs_allowed_age,
package_ahriman, update_handler.configuration,
calculate_version=False)
@@ -227,8 +225,7 @@ def test_updates_local(update_handler: UpdateHandler, package_ahriman: Package,
event_mock.assert_called_once_with(package_ahriman.base, EventType.PackageOutdated,
pytest.helpers.anyvar(str, True))
package_is_outdated_mock.assert_called_once_with(
package_ahriman, update_handler.paths,
vcs_allowed_age=update_handler.vcs_allowed_age,
package_ahriman, update_handler.configuration,
calculate_version=True)
@@ -245,8 +242,7 @@ def test_updates_local_ignore_vcs(update_handler: UpdateHandler, package_ahriman
assert not update_handler.updates_local(vcs=False)
package_is_outdated_mock.assert_called_once_with(
package_ahriman, update_handler.paths,
vcs_allowed_age=update_handler.vcs_allowed_age,
package_ahriman, update_handler.configuration,
calculate_version=False)

View File

@@ -468,11 +468,12 @@ def test_walk(resource_path_root: Path) -> None:
resource_path_root / "models" / "package_ahriman_aur",
resource_path_root / "models" / "package_akonadi_aur",
resource_path_root / "models" / "package_ahriman_files",
resource_path_root / "models" / "package_ahriman_srcinfo",
resource_path_root / "models" / "package_gcc10_srcinfo",
resource_path_root / "models" / "package_jellyfin-ffmpeg5-bin_srcinfo",
resource_path_root / "models" / "package_tpacpi-bat-git_srcinfo",
resource_path_root / "models" / "package_yay_srcinfo",
resource_path_root / "models" / "package_ahriman_pkgbuild",
resource_path_root / "models" / "package_gcc10_pkgbuild",
resource_path_root / "models" / "package_jellyfin-ffmpeg6-bin_pkgbuild",
resource_path_root / "models" / "package_tpacpi-bat-git_pkgbuild",
resource_path_root / "models" / "package_yay_pkgbuild",
resource_path_root / "models" / "pkgbuild",
resource_path_root / "web" / "templates" / "build-status" / "alerts.jinja2",
resource_path_root / "web" / "templates" / "build-status" / "key-import-modal.jinja2",
resource_path_root / "web" / "templates" / "build-status" / "login-modal.jinja2",

Some files were not shown because too many files have changed in this diff.