mirror of
https://github.com/arcan1s/ahriman.git
synced 2025-04-24 15:27:17 +00:00
feat: drop explicit makepkg usage (#134)
* generate filenames without using makepkg
* pkgbuild parser impl
* completely remove makepkg calls
* simplify typed get
* try to improve parser
* docs and recipes update
* never raise KeyError, return empty string instead
* update tests
* add support of array expansion
* docs update
* tests update
* handle quoted control sequences correctly
* expand bash
* allow packages without package function
* docs update
* add more tests
* small improvements
* support escaped arrays and functions
This commit is contained in:
parent 1089bab526
commit 62320e8ec6

4 .github/workflows/setup.sh (vendored)
@@ -10,13 +10,11 @@ echo -e '[arcanisrepo]\nServer = https://repo.arcanis.me/$arch\nSigLevel = Never
 # refresh the image
 pacman -Syyu --noconfirm
 # main dependencies
-pacman -S --noconfirm devtools git pyalpm python-inflection python-passlib python-pyelftools python-requests python-srcinfo python-systemd sudo
+pacman -S --noconfirm devtools git pyalpm python-inflection python-passlib python-pyelftools python-requests python-systemd sudo
 # make dependencies
 pacman -S --noconfirm --asdeps base-devel python-build python-flit python-installer python-tox python-wheel
 # optional dependencies
 if [[ -z $MINIMAL_INSTALL ]]; then
-# VCS support
-pacman -S --noconfirm breezy darcs mercurial subversion
 # web server
 pacman -S --noconfirm python-aioauth-client python-aiohttp python-aiohttp-apispec-git python-aiohttp-cors python-aiohttp-jinja2 python-aiohttp-security python-aiohttp-session python-cryptography python-jinja
 # additional features
@@ -31,7 +31,6 @@ RUN useradd -m -d "/home/build" -s "/usr/bin/nologin" build && \
     echo "build ALL=(ALL) NOPASSWD: ALL" > "/etc/sudoers.d/build"
 COPY "docker/install-aur-package.sh" "/usr/local/bin/install-aur-package"
 ## install package dependencies
-## darcs is not installed by reasons, because it requires a lot haskell packages which dramatically increase image size
 RUN pacman -Sy --noconfirm --asdeps \
     devtools \
     git \
@@ -40,7 +39,6 @@ RUN pacman -Sy --noconfirm --asdeps \
     python-passlib \
     python-pyelftools \
     python-requests \
-    python-srcinfo \
     && \
     pacman -Sy --noconfirm --asdeps \
     base-devel \
@@ -50,9 +48,7 @@ RUN pacman -Sy --noconfirm --asdeps \
     python-wheel \
     && \
     pacman -Sy --noconfirm --asdeps \
-    breezy \
     git \
-    mercurial \
     python-aiohttp \
     python-boto3 \
     python-cerberus \
@@ -61,7 +57,6 @@ RUN pacman -Sy --noconfirm --asdeps \
     python-matplotlib \
     python-systemd \
     rsync \
-    subversion \
     && \
     runuser -u build -- install-aur-package \
     python-aioauth-client \
@@ -28,6 +28,14 @@ ahriman.core.alpm.pacman\_database module
    :no-undoc-members:
    :show-inheritance:

+ahriman.core.alpm.pkgbuild\_parser module
+-----------------------------------------
+
+.. automodule:: ahriman.core.alpm.pkgbuild_parser
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.core.alpm.repo module
 -----------------------------

@@ -28,6 +28,14 @@ ahriman.core.configuration.shell\_interpolator module
    :no-undoc-members:
    :show-inheritance:

+ahriman.core.configuration.shell\_template module
+-------------------------------------------------
+
+.. automodule:: ahriman.core.configuration.shell_template
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.core.configuration.validator module
 -------------------------------------------

@@ -172,6 +172,14 @@ ahriman.models.pacman\_synchronization module
    :no-undoc-members:
    :show-inheritance:

+ahriman.models.pkgbuild module
+------------------------------
+
+.. automodule:: ahriman.models.pkgbuild
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.models.pkgbuild\_patch module
 -------------------------------------

@@ -32,7 +32,7 @@ This package contains application (aka executable) related classes and everything

 This package contains everything required for the most of application actions and it is separated into several packages:

-* ``ahriman.core.alpm`` package controls pacman related functions. It provides wrappers for ``pyalpm`` library and safe calls for repository tools (``repo-add`` and ``repo-remove``). Also this package contains ``ahriman.core.alpm.remote`` package which provides wrapper for remote sources (e.g. AUR RPC and official repositories RPC).
+* ``ahriman.core.alpm`` package controls pacman related functions. It provides wrappers for ``pyalpm`` library and safe calls for repository tools (``repo-add`` and ``repo-remove``). Also this package contains ``ahriman.core.alpm.remote`` package which provides wrapper for remote sources (e.g. AUR RPC and official repositories RPC) and some other helpers.
 * ``ahriman.core.auth`` package provides classes for authorization methods used by web mostly. Base class is ``ahriman.core.auth.Auth`` which must be instantiated by ``load`` method.
 * ``ahriman.core.build_tools`` is a package which provides wrapper for ``devtools`` commands.
 * ``ahriman.core.configuration`` contains extension for standard ``configparser`` library and some validation related classes.
@@ -375,6 +375,24 @@ There are several supported synchronization providers, currently they are ``rsync``

 ``github`` provider authenticates through basic auth, API key with repository write permissions is required. There will be created a release with the name of the architecture in case if it does not exist; files will be uploaded to the release assets. It also stores array of files and their MD5 checksums in release body in order to upload only changed ones. According to the GitHub API in case if there is already uploaded asset with the same name (e.g. database files), asset will be removed first.

+PKGBUILD parsing
+^^^^^^^^^^^^^^^^
+
+The application provides a house-made shell parser ``ahriman.core.alpm.pkgbuild_parser.PkgbuildParser`` to process PKGBUILDs and extract package data from them. It relies on the ``shlex.shlex`` parser with some configuration tweaks and adds some token post-processing.
+
+#. During parsing, the parser first extracts the next token from the source file (basically, a word) and tries to match it against a variable assignment. If it matches, the assignment is processed accordingly.
+#. If it is not an assignment, the parser checks whether the token was quoted.
+#. If it wasn't, the parser tries to match an array start (two consecutive tokens like ``array=`` and ``(``) or a function declaration (``function``, ``()`` and ``{``).
+#. Arrays are processed until the next closing bracket ``)``. After extraction, the parser tries to expand the array according to bash rules (``prefix{first,second}suffix`` constructions).
+#. Functions are read until the closing bracket ``}``; the body is then reread as a whole from the input stream without tokenization.
+
+All extracted fields are packed as ``ahriman.models.pkgbuild_patch.PkgbuildPatch`` and can then be used as an ``ahriman.models.pkgbuild.Pkgbuild`` instance.
+
+The ``Pkgbuild`` class also provides some additional functions on top of that:
+
+* Ability to extract fields defined inside ``package*()`` functions, which is in particular used for split packages.
+* Shell substitution, which supports the constructions ``$var`` (including ``${var}``), ``${var#(#)pattern}``, ``${var%(%)pattern}`` and ``${var/(/)pattern/replacement}`` (including ``#pattern`` and ``%pattern``).
+
 Additional features
 ^^^^^^^^^^^^^^^^^^^

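Editorial note: a minimal usage sketch of the flow documented in the hunk above. The PKGBUILD path below is hypothetical; the accessors shown (``from_file``, item access, ``get``, ``packages``) are the ones used elsewhere in this commit.

from pathlib import Path

from ahriman.models.pkgbuild import Pkgbuild

# hypothetical location of a previously fetched package source
pkgbuild = Pkgbuild.from_file(Path("/var/lib/ahriman/sources/ahriman/PKGBUILD"))

# item access never raises KeyError (missing keys resolve to an empty string per this commit)
print(pkgbuild["pkgbase"], pkgbuild["pkgver"], pkgbuild["pkgrel"])
print(pkgbuild.get("epoch"))  # None-safe access, as used by Package.from_build

# fields defined inside package*() functions, e.g. for split packages
for name, fields in pkgbuild.packages().items():
    print(name, fields)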
@@ -143,7 +143,7 @@ TL;DR

    sudo -u ahriman ahriman package-add /path/to/local/directory/with/PKGBUILD --now

-Before using this command you will need to create local directory, put ``PKGBUILD`` there and generate ``.SRCINFO`` by using ``makepkg --printsrcinfo > .SRCINFO`` command. These packages will be stored locally and *will be ignored* during automatic update; in order to update the package you will need to run ``package-add`` command again.
+Before using this command you will need to create local directory and put ``PKGBUILD`` there. These packages will be stored locally and *will be ignored* during automatic update; in order to update the package you will need to run ``package-add`` command again.

 How to copy package from another repository
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -265,11 +265,7 @@ TL;DR
 How to update VCS packages
 ^^^^^^^^^^^^^^^^^^^^^^^^^^

-Normally the service handles VCS packages correctly, however it requires additional dependencies:
+Normally the service handles VCS packages correctly. The version is updated in clean chroot, no additional actions are required.

-.. code-block:: shell
-
-   pacman -S breezy darcs mercurial subversion
-
 How to review changes before build
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -7,12 +7,9 @@ pkgdesc="ArcH linux ReposItory MANager"
 arch=('any')
 url="https://github.com/arcan1s/ahriman"
 license=('GPL3')
-depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-inflection' 'python-passlib' 'python-pyelftools' 'python-requests' 'python-srcinfo')
+depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-inflection' 'python-passlib' 'python-pyelftools' 'python-requests')
 makedepends=('python-build' 'python-flit' 'python-installer' 'python-wheel')
-optdepends=('breezy: -bzr packages support'
-            'darcs: -darcs packages support'
-            'mercurial: -hg packages support'
-            'python-aioauth-client: web server with OAuth2 authorization'
+optdepends=('python-aioauth-client: web server with OAuth2 authorization'
             'python-aiohttp: web server'
             'python-aiohttp-apispec>=3.0.0: web server'
             'python-aiohttp-cors: web server'
@@ -26,8 +23,7 @@ optdepends=('breezy: -bzr packages support'
             'python-requests-unixsocket2: client report to web server by unix socket'
             'python-jinja: html report generation'
             'python-systemd: journal support'
-            'rsync: sync by using rsync'
-            'subversion: -svn packages support')
+            'rsync: sync by using rsync')
 source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver.tar.gz"
         'ahriman.sysusers'
         'ahriman.tmpfiles')
@@ -21,7 +21,6 @@ dependencies = [
     "passlib",
     "pyelftools",
     "requests",
-    "srcinfo",
 ]

 dynamic = ["version"]
@@ -47,7 +47,7 @@ class ServiceUpdates(Handler):
             report(bool): force enable or disable reporting
         """
         remote = Package.from_aur("ahriman", None)
-        _, release = remote.version.rsplit("-", 1)  # we don't store pkgrel locally, so we just append it
+        _, release = remote.version.rsplit("-", maxsplit=1)  # we don't store pkgrel locally, so we just append it
         local_version = f"{__version__}-{release}"

         # technically we would like to compare versions, but it is fine to raise an exception in case if locally
325 src/ahriman/core/alpm/pkgbuild_parser.py (new file)
@@ -0,0 +1,325 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import itertools
import re
import shlex

from collections.abc import Generator
from enum import StrEnum
from typing import IO

from ahriman.core.exceptions import PkgbuildParserError
from ahriman.models.pkgbuild_patch import PkgbuildPatch


class PkgbuildToken(StrEnum):
    """
    well-known tokens dictionary

    Attributes:
        ArrayEnds(PkgbuildToken): (class attribute) array ends token
        ArrayStarts(PkgbuildToken): (class attribute) array starts token
        Comma(PkgbuildToken): (class attribute) comma token
        Comment(PkgbuildToken): (class attribute) comment token
        FunctionDeclaration(PkgbuildToken): (class attribute) function declaration token
        FunctionEnds(PkgbuildToken): (class attribute) function ends token
        FunctionStarts(PkgbuildToken): (class attribute) function starts token
    """

    ArrayStarts = "("
    ArrayEnds = ")"

    Comma = ","

    Comment = "#"

    FunctionDeclaration = "()"
    FunctionStarts = "{"
    FunctionEnds = "}"


class PkgbuildParser(shlex.shlex):
    """
    simple pkgbuild reader implementation in pure python, because others suck.

    What is it:

    #. Simple PKGBUILD parser written in python.
    #. No shell execution, so it is free from random shell attacks.
    #. Able to parse simple constructions (assignments, comments, functions, arrays).

    What it is not:

    #. Fully functional shell parser.
    #. Shell executor.
    #. No parameter expansion.

    For more details what does it support, please, consult with the test cases.

    Examples:
        This class is heavily based on :mod:`shlex` parser, but instead of strings operates with the
        :class:`ahriman.models.pkgbuild_patch.PkgbuildPatch` objects. The main way to use it is to call :func:`parse()`
        function and collect parsed objects, e.g.::

            >>> parser = PkgbuildParser(StringIO("input string"))
            >>> for patch in parser.parse():
            >>>     print(f"{patch.key} = {patch.value}")

        It doesn't store the state of the fields (but operates with the :mod:`shlex` parser state), so no shell
        post-processing is performed (e.g. variable substitution).
    """

    _ARRAY_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=$")
    # in addition to usual assignment, functions can have dash
    _FUNCTION_DECLARATION = re.compile(r"^(?P<key>[\w-]+)$")
    _STRING_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=(?P<value>.+)$")

    def __init__(self, stream: IO[str]) -> None:
        """
        Args:
            stream(IO[str]): input stream containing PKGBUILD content
        """
        shlex.shlex.__init__(self, stream, posix=True, punctuation_chars=True)
        self._io = stream  # direct access without type casting

        # ignore substitution and extend bash symbols
        self.wordchars += "${}#:+-@!"
        # in case of default behaviour, it will ignore, for example, segment part of url outside of quotes
        self.commenters = ""

    @staticmethod
    def _expand_array(array: list[str]) -> list[str]:
        """
        bash array expansion simulator. It takes raw array and tries to expand constructions like
        ``(first prefix-{mid1,mid2}-suffix last)`` into ``(first, prefix-mid1-suffix prefix-mid2-suffix last)``

        Args:
            array(list[str]): input array

        Returns:
            list[str]: either source array or expanded array if possible

        Raises:
            PkgbuildParserError: if there are errors in parser
        """
        # we are using comma as marker for expansion (if any)
        if PkgbuildToken.Comma not in array:
            return array
        # again sanity check, for expansion there are at least 3 elements (first, last and comma)
        if len(array) < 3:
            return array

        result = []
        buffer, prefix = [], None

        for index, (first, second) in enumerate(itertools.pairwise(array)):
            match (first, second):
                # in this case we check if expansion should be started
                # this condition matches "prefix{first", ","
                case (_, PkgbuildToken.Comma) if PkgbuildToken.FunctionStarts in first:
                    prefix, part = first.rsplit(PkgbuildToken.FunctionStarts, maxsplit=1)
                    buffer.append(f"{prefix}{part}")

                # the last element case, it matches either ",", "last}" or ",", "last}suffix"
                # in case if there is suffix, it must be appended to all list elements
                case (PkgbuildToken.Comma, _) if prefix is not None and PkgbuildToken.FunctionEnds in second:
                    part, suffix = second.rsplit(PkgbuildToken.FunctionEnds, maxsplit=1)
                    buffer.append(f"{prefix}{part}")
                    result.extend([f"{part}{suffix}" for part in buffer])
                    # reset state
                    buffer, prefix = [], None

                # we have already prefix string, so we are in progress of expansion
                # we always operate the last element, so this matches ",", "next"
                case (PkgbuildToken.Comma, _) if prefix is not None:
                    buffer.append(f"{prefix}{second}")

                # exactly first element of the list
                case (_, _) if prefix is None and index == 0:
                    result.append(first)

                # any next normal element
                case (_, _) if prefix is None:
                    result.append(second)

        # small sanity check
        if prefix is not None:
            raise PkgbuildParserError("error in array expansion", array)

        return result

    def _is_escaped(self) -> bool:
        """
        check if the last element was quoted. ``shlex.shlex`` parser doesn't provide information about was the token
        quoted or not, thus there is no difference between "'#'" (diez in quotes) and "#" (diez without quotes). This
        method simply rolls back to the last non-space character and check if it is a quotation mark

        Returns:
            bool: ``True`` if the previous element of the stream is a quote or escaped and ``False`` otherwise
        """
        current_position = self._io.tell()

        last_char = penultimate_char = None
        for index in range(current_position - 1, -1, -1):
            self._io.seek(index)
            last_char = self._io.read(1)
            if last_char.isspace():
                continue

            if index >= 0:
                self._io.seek(index - 1)
                penultimate_char = self._io.read(1)

            break

        self._io.seek(current_position)  # reset position of the stream
        is_quoted = last_char is not None and last_char in self.quotes
        is_escaped = penultimate_char is not None and penultimate_char in self.escape

        return is_quoted or is_escaped

    def _parse_array(self) -> list[str]:
        """
        parse array from the PKGBUILD. This method will extract tokens from parser until it matches closing array,
        modifying source parser state

        Returns:
            list[str]: extracted arrays elements

        Raises:
            PkgbuildParserError: if array is not closed
        """
        def extract() -> Generator[str, None, None]:
            while token := self.get_token():
                match token:
                    case _ if self._is_escaped():
                        pass
                    case PkgbuildToken.ArrayEnds:
                        break
                    case PkgbuildToken.Comment:
                        self.instream.readline()
                        continue
                yield token

            if token != PkgbuildToken.ArrayEnds:
                raise PkgbuildParserError("no closing array bracket found")

        return self._expand_array(list(extract()))

    def _parse_function(self) -> str:
        """
        parse function from the PKGBUILD. This method will extract tokens from parser until it matches closing function,
        modifying source parser state. Instead of trying to combine tokens together, it uses positions of the file
        and reads content again in this range

        Returns:
            str: function body

        Raises:
            PkgbuildParserError: if function body wasn't found or parser input stream doesn't support position reading
        """
        # find start and end positions
        start_position = end_position = -1
        counter = 0  # simple processing of the inner "{" and "}"
        for token in self:
            match token:
                case _ if self._is_escaped():
                    continue
                case PkgbuildToken.FunctionStarts:
                    if counter == 0:
                        start_position = self._io.tell() - 1
                    counter += 1
                case PkgbuildToken.FunctionEnds:
                    end_position = self._io.tell()
                    counter -= 1
                    if counter == 0:
                        break

        if not 0 < start_position < end_position:
            raise PkgbuildParserError("function body wasn't found")

        # read the specified interval from source stream
        self._io.seek(start_position - 1)  # start from the previous symbol
        content = self._io.read(end_position - start_position)

        # special case of the end of file
        if self.state == self.eof:  # type: ignore[attr-defined]
            content += self._io.read(1)

        # reset position (because the last position was before the next token starts)
        self._io.seek(end_position)

        return content

    def _parse_token(self, token: str) -> Generator[PkgbuildPatch, None, None]:
        """
        parse single token to the PKGBUILD field

        Args:
            token(str): current token

        Yields:
            PkgbuildPatch: extracted a PKGBUILD node
        """
        # simple assignment rule
        if m := self._STRING_ASSIGNMENT.match(token):
            key = m.group("key")
            value = m.group("value")
            yield PkgbuildPatch(key, value)
            return

        if token == PkgbuildToken.Comment:
            self.instream.readline()
            return

        match self.get_token():
            # array processing. Arrays will be sent as "key=", "(", values, ")"
            case PkgbuildToken.ArrayStarts if m := self._ARRAY_ASSIGNMENT.match(token):
                key = m.group("key")
                value = self._parse_array()
                yield PkgbuildPatch(key, value)

            # functions processing. Function will be sent as "name", "()", "{", body, "}"
            case PkgbuildToken.FunctionDeclaration if self._FUNCTION_DECLARATION.match(token):
                key = f"{token}{PkgbuildToken.FunctionDeclaration}"
                value = self._parse_function()
                yield PkgbuildPatch(key, value)  # this is not mistake, assign to token without ()

            # special function case, where "(" and ")" are separated tokens, e.g. "pkgver ( )"
            case PkgbuildToken.ArrayStarts if self._FUNCTION_DECLARATION.match(token):
                next_token = self.get_token()
                if next_token == PkgbuildToken.ArrayEnds:  # replace closing bracket with "()"
                    next_token = PkgbuildToken.FunctionDeclaration
                self.push_token(next_token)  # type: ignore[arg-type]
                yield from self._parse_token(token)

            # some random token received without continuation, lets guess it is empty assignment (i.e. key=)
            case other if other is not None:
                yield from self._parse_token(other)

    def parse(self) -> Generator[PkgbuildPatch, None, None]:
        """
        parse source stream and yield parsed entries

        Yields:
            PkgbuildPatch: extracted a PKGBUILD node
        """
        for token in self:
            yield from self._parse_token(token)
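Editorial note: a hedged sketch of how the new parser behaves on a small input. The sample PKGBUILD text is illustrative; the expected values in the comments follow the parsing and expansion rules implemented above.

from io import StringIO

from ahriman.core.alpm.pkgbuild_parser import PkgbuildParser

sample = """
arch=(i686 x86_64)
depends=(python-{aiohttp,cerberus})
package() {
    install -Dm644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
"""

# every parsed entry is a PkgbuildPatch; collect them by key for inspection
parsed = {patch.key: patch.value for patch in PkgbuildParser(StringIO(sample)).parse()}
print(parsed["arch"])       # ['i686', 'x86_64']
print(parsed["depends"])    # ['python-aiohttp', 'python-cerberus'] -- bash-style brace expansion applied
print(parsed["package()"])  # function body captured as raw text, braces included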
@@ -115,7 +115,7 @@ class PackageArchive:
         Returns:
             FilesystemPackage: generated pacman package model with empty paths
         """
-        package_name, *_ = path.parent.name.rsplit("-", 2)
+        package_name, *_ = path.parent.name.rsplit("-", maxsplit=2)
         try:
             pacman_package = OfficialSyncdb.info(package_name, pacman=self.pacman)
             return FilesystemPackage(
@@ -17,13 +17,14 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
+from collections.abc import Generator
 from pathlib import Path

 from ahriman.core.build_tools.sources import Sources
 from ahriman.core.configuration import Configuration
 from ahriman.core.exceptions import BuildError
 from ahriman.core.log import LazyLogging
-from ahriman.core.utils import check_output
+from ahriman.core.utils import check_output, package_like
 from ahriman.models.package import Package
 from ahriman.models.pkgbuild_patch import PkgbuildPatch
 from ahriman.models.repository_paths import RepositoryPaths
@@ -65,12 +66,43 @@ class Task(LazyLogging):
         self.makepkg_flags = configuration.getlist("build", "makepkg_flags", fallback=[])
         self.makechrootpkg_flags = configuration.getlist("build", "makechrootpkg_flags", fallback=[])

-    def build(self, sources_dir: Path, **kwargs: str | None) -> list[Path]:
+    def _package_archives(self, sources_dir: Path, source_files: list[Path]) -> list[Path]:
+        """
+        extract package archives from the directory
+
+        Args:
+            sources_dir(Path): path to where sources are
+            source_files(list[Path]): list of files which were initially in the directory
+
+        Returns:
+            list[Path]: list of file paths which looks like freshly generated archives
+        """
+        def files() -> Generator[Path, None, None]:
+            for filepath in sources_dir.iterdir():
+                if filepath in source_files:
+                    continue  # skip files which were already there
+                if filepath.suffix == ".log":
+                    continue  # skip log files
+                if not package_like(filepath):
+                    continue  # path doesn't look like a package
+                yield filepath
+
+        # debug packages are always formed as package.base-debug
+        # see /usr/share/makepkg/util/pkgbuild.sh for more details
+        debug_package_prefix = f"{self.package.base}-debug-"
+        return [
+            package
+            for package in files()
+            if self.include_debug_packages or not package.name.startswith(debug_package_prefix)
+        ]
+
+    def build(self, sources_dir: Path, *, dry_run: bool = False, **kwargs: str | None) -> list[Path]:
         """
         run package build

         Args:
             sources_dir(Path): path to where sources are
+            dry_run(bool, optional): do not perform build itself (Default value = False)
             **kwargs(str | None): environment variables to be passed to build processes

         Returns:
@@ -80,6 +112,8 @@ class Task(LazyLogging):
         command.extend(self.archbuild_flags)
         command.extend(["--"] + self.makechrootpkg_flags)
         command.extend(["--"] + self.makepkg_flags)
+        if dry_run:
+            command.extend(["--nobuild"])
         self.logger.info("using %s for %s", command, self.package.base)

         environment: dict[str, str] = {
@@ -89,6 +123,7 @@ class Task(LazyLogging):
         }
         self.logger.info("using environment variables %s", environment)

+        source_files = list(sources_dir.iterdir())
         check_output(
             *command,
             exception=BuildError.from_process(self.package.base),
@@ -98,20 +133,7 @@ class Task(LazyLogging):
             environment=environment,
         )

-        package_list_command = ["makepkg", "--packagelist"]
-        if not self.include_debug_packages:
-            package_list_command.append("OPTIONS=(!debug)")  # disable debug flag manually
-        packages = check_output(
-            *package_list_command,
-            exception=BuildError.from_process(self.package.base),
-            cwd=sources_dir,
-            logger=self.logger,
-            environment=environment,
-        ).splitlines()
-        # some dirty magic here
-        # the filter is applied in order to make sure that result will only contain packages which were actually built
-        # e.g. in some cases packagelist command produces debug packages which were not actually built
-        return list(filter(lambda path: path.is_file(), map(Path, packages)))
+        return self._package_archives(sources_dir, source_files)

     def init(self, sources_dir: Path, patches: list[PkgbuildPatch], local_version: str | None) -> str | None:
         """
@@ -24,16 +24,7 @@ import sys
 from collections.abc import Generator, Mapping, MutableMapping
 from string import Template

-
-class ExtendedTemplate(Template):
-    """
-    extension to the default :class:`Template` class, which also enabled braces regex to lookup in sections
-
-    Attributes:
-        braceidpattern(str): regular expression to match a colon inside braces
-    """
-
-    braceidpattern = r"(?a:[_a-z0-9][_a-z0-9:]*)"
+from ahriman.core.configuration.shell_template import ShellTemplate


 class ShellInterpolator(configparser.Interpolation):
@@ -60,7 +51,7 @@ class ShellInterpolator(configparser.Interpolation):
         """
         def identifiers() -> Generator[tuple[str | None, str], None, None]:
             # extract all found identifiers and parse them
-            for identifier in ExtendedTemplate(value).get_identifiers():
+            for identifier in ShellTemplate(value).get_identifiers():
                 match identifier.split(":"):
                     case [lookup_option]:  # single option from the same section
                         yield None, lookup_option
@@ -121,7 +112,7 @@ class ShellInterpolator(configparser.Interpolation):

         # resolve internal references
         variables = dict(self._extract_variables(parser, value, defaults))
-        internal = ExtendedTemplate(escaped).safe_substitute(variables)
+        internal = ShellTemplate(escaped).safe_substitute(variables)

         # resolve enriched environment variables by using default Template class
         environment = Template(internal).safe_substitute(self.environment())
158 src/ahriman/core/configuration/shell_template.py (new file)
@@ -0,0 +1,158 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import fnmatch
import re

from collections.abc import Generator, Mapping
from string import Template


class ShellTemplate(Template):
    """
    extension to the default :class:`Template` class, which also adds additional tokens to braced regex and enables
    bash expansion

    Attributes:
        braceidpattern(str): regular expression to match every character except for closing bracket
    """

    braceidpattern = r"(?a:[_a-z0-9][^}]*)"

    _REMOVE_BACK = re.compile(r"^(?P<key>\w+)%(?P<pattern>.+)$")
    _REMOVE_FRONT = re.compile(r"^(?P<key>\w+)#(?P<pattern>.+)$")
    _REPLACE = re.compile(r"^(?P<key>\w+)/(?P<pattern>.+)/(?P<replacement>.+)$")

    @staticmethod
    def _remove_back(source: str, pattern: str, *, greedy: bool) -> str:
        """
        resolve "${var%(%)pattern}" constructions

        Args:
            source(str): source string to match the pattern inside
            pattern(str): shell expression to match
            greedy(bool): match as much as possible or not

        Returns:
            str: result after removal ``pattern`` from the end of the string
        """
        regex = fnmatch.translate(pattern)
        compiled = re.compile(regex)

        result = source
        start_pos = 0

        while m := compiled.search(source, start_pos):
            result = source[:m.start()]
            start_pos += m.start() + 1
            if greedy:
                break

        return result

    @staticmethod
    def _remove_front(source: str, pattern: str, *, greedy: bool) -> str:
        """
        resolve "${var#(#)pattern}" constructions

        Args:
            source(str): source string to match the pattern inside
            pattern(str): shell expression to match
            greedy(bool): match as much as possible or not

        Returns:
            str: result after removal ``pattern`` from the start of the string
        """
        regex = fnmatch.translate(pattern)[:-2]  # remove \Z at the end of the regex
        if not greedy:
            regex = regex.replace("*", "*?")
        compiled = re.compile(regex)

        m = compiled.match(source)
        if m is None:
            return source

        return source[m.end():]

    @staticmethod
    def _replace(source: str, pattern: str, replacement: str, *, greedy: bool) -> str:
        """
        resolve "${var/(/)pattern/replacement}" constructions

        Args:
            source(str): source string to match the pattern inside
            pattern(str): shell expression to match
            replacement(str): new substring
            greedy(bool): replace as much as possible or not

        Returns:
            str: result after replacing ``pattern`` by ``replacement``
        """
        match pattern:
            case from_back if from_back.startswith("%"):
                removed = ShellTemplate._remove_back(source, from_back[1:], greedy=False)
                return removed if removed == source else removed + replacement

            case from_front if from_front.startswith("#"):
                removed = ShellTemplate._remove_front(source, from_front[1:], greedy=False)
                return removed if removed == source else replacement + removed

            case regular:
                regex = fnmatch.translate(regular)[:-2]  # remove \Z at the end of the regex
                compiled = re.compile(regex)
                return compiled.sub(replacement, source, count=not greedy)

    def shell_substitute(self, mapping: Mapping[str, str], /, **kwargs: str) -> str:
        """
        this method behaves the same as :func:`safe_substitute`, however also expands bash string operations

        Args:
            mapping(Mapping[str, str]): key-value dictionary of variables
            **kwargs(str): key-value dictionary of variables passed as kwargs

        Returns:
            str: string with replaced values
        """
        substitutions = (
            (self._REMOVE_BACK, self._remove_back, "%"),
            (self._REMOVE_FRONT, self._remove_front, "#"),
            (self._REPLACE, self._replace, "/"),
        )

        def generator(variables: dict[str, str]) -> Generator[tuple[str, str], None, None]:
            for identifier in self.get_identifiers():
                for regex, function, greediness in substitutions:
                    if m := regex.match(identifier):
                        source = variables.get(m.group("key"))
                        if source is None:
                            continue

                        # replace pattern with non-greedy
                        pattern = m.group("pattern").removeprefix(greediness)
                        greedy = m.group("pattern").startswith(greediness)
                        # gather all additional args
                        args = {key: value for key, value in m.groupdict().items() if key not in ("key", "pattern")}

                        yield identifier, function(source, pattern, **args, greedy=greedy)
                        break

        kwargs.update(mapping)
        substituted = dict(generator(kwargs))

        return self.safe_substitute(kwargs | substituted)
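Editorial note: a short illustration of what the new template class enables. The values are illustrative; ``safe_substitute`` is inherited from ``string.Template``, ``shell_substitute`` is defined above.

from ahriman.core.configuration.shell_template import ShellTemplate

variables = {"name": "ahriman", "version": "1.2.3-1"}

# plain substitution keeps working as in string.Template
print(ShellTemplate("$name-$version").safe_substitute(variables))  # ahriman-1.2.3-1

# bash-like string operations are resolved by shell_substitute
print(ShellTemplate("${version%-*}").shell_substitute(variables))  # 1.2.3 (suffix removal)
print(ShellTemplate("${name#ahr}").shell_substitute(variables))    # iman (prefix removal)
print(ShellTemplate("${version/./_}").shell_substitute(variables)) # 1_2.3-1 (first match replaced)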
@@ -212,6 +212,23 @@ class PacmanError(RuntimeError):
         RuntimeError.__init__(self, f"Could not perform operation with pacman: `{details}`")


+class PkgbuildParserError(ValueError):
+    """
+    exception raises in case of PKGBUILD parser errors
+    """
+
+    def __init__(self, reason: str, source: Any = None) -> None:
+        """
+        Args:
+            reason(str): parser error reason
+            source(Any, optional): source line if available (Default value = None)
+        """
+        message = f"Could not parse PKGBUILD: {reason}"
+        if source is not None:
+            message += f", source: `{source}`"
+        ValueError.__init__(self, message)
+
+
 class PathError(ValueError):
     """
     exception which will be raised on path which is not belong to root directory
@@ -58,7 +58,7 @@ class PackageInfo(RepositoryProperties):
                     # force version to max of them
                     self.logger.warning("version of %s differs, found %s and %s",
                                         current.base, current.version, local.version)
-                    if current.is_outdated(local, self.paths, calculate_version=False):
+                    if current.is_outdated(local, self.configuration, calculate_version=False):
                         current.version = local.version
                         current.packages.update(local.packages)
             except Exception:
@@ -51,7 +51,6 @@ class RepositoryProperties(EventLogger, LazyLogging):
         scan_paths(ScanPaths): scan paths for the implicit dependencies
         sign(GPG): GPG wrapper instance
         triggers(TriggerLoader): triggers holder
-        vcs_allowed_age(int): maximal age of the VCS packages before they will be checked
     """

     def __init__(self, repository_id: RepositoryId, configuration: Configuration, database: SQLite, *, report: bool,
@@ -68,8 +67,6 @@ class RepositoryProperties(EventLogger, LazyLogging):
         self.configuration = configuration
         self.database = database

-        self.vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0)
-
         self.paths: RepositoryPaths = configuration.repository_paths  # additional workaround for pycharm typing

         self.ignore_list = configuration.getlist("build", "ignore_packages", fallback=[])
@@ -67,10 +67,7 @@ class UpdateHandler(PackageInfo, Cleaner):
             try:
                 remote = load_remote(local)

-                if local.is_outdated(
-                        remote, self.paths,
-                        vcs_allowed_age=self.vcs_allowed_age,
-                        calculate_version=vcs):
+                if local.is_outdated(remote, self.configuration, calculate_version=vcs):
                     self.reporter.set_pending(local.base)
                     self.event(local.base, EventType.PackageOutdated, "Remote version is newer than local")
                     result.append(remote)
@@ -156,9 +153,7 @@ class UpdateHandler(PackageInfo, Cleaner):
                 if local.remote.is_remote:
                     continue  # avoid checking AUR packages

-                if local.is_outdated(remote, self.paths,
-                                     vcs_allowed_age=self.vcs_allowed_age,
-                                     calculate_version=vcs):
+                if local.is_outdated(remote, self.configuration, calculate_version=vcs):
                     self.reporter.set_pending(local.base)
                     self.event(local.base, EventType.PackageOutdated, "Locally pulled sources are outdated")
                     result.append(remote)
@@ -197,7 +197,7 @@ class Watcher(LazyLogging):
         proxy methods for reporter client

         Args:
-            item(str): property name:
+            item(str): property name

         Returns:
             Any: attribute by its name
@@ -27,7 +27,7 @@ import re
 import selectors
 import subprocess

-from collections.abc import Callable, Generator, Iterable
+from collections.abc import Callable, Generator, Iterable, Mapping
 from dataclasses import asdict
 from enum import Enum
 from pathlib import Path
@@ -407,7 +407,7 @@ def safe_filename(source: str) -> str:
     return re.sub(r"[^A-Za-z\d\-._~:\[\]@]", "-", source)


-def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any], *,
+def srcinfo_property(key: str, srcinfo: Mapping[str, Any], package_srcinfo: Mapping[str, Any], *,
                      default: Any = None) -> Any:
     """
     extract property from SRCINFO. This method extracts property from package if this property is presented in
@@ -416,8 +416,8 @@ def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[st

     Args:
         key(str): key to extract
-        srcinfo(dict[str, Any]): root structure of SRCINFO
-        package_srcinfo(dict[str, Any]): package specific SRCINFO
+        srcinfo(Mapping[str, Any]): root structure of SRCINFO
+        package_srcinfo(Mapping[str, Any]): package specific SRCINFO
         default(Any, optional): the default value for the specified key (Default value = None)

     Returns:
@@ -426,7 +426,7 @@ def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[st
     return package_srcinfo.get(key) or srcinfo.get(key) or default


-def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any], *,
+def srcinfo_property_list(key: str, srcinfo: Mapping[str, Any], package_srcinfo: Mapping[str, Any], *,
                           architecture: str | None = None) -> list[Any]:
     """
     extract list property from SRCINFO. Unlike :func:`srcinfo_property()` it supposes that default return value is
@@ -435,8 +435,8 @@ def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: di

     Args:
         key(str): key to extract
-        srcinfo(dict[str, Any]): root structure of SRCINFO
-        package_srcinfo(dict[str, Any]): package specific SRCINFO
+        srcinfo(Mapping[str, Any]): root structure of SRCINFO
+        package_srcinfo(Mapping[str, Any]): package specific SRCINFO
         architecture(str | None, optional): package architecture if set (Default value = None)

     Returns:
@@ -26,19 +26,18 @@ from collections.abc import Callable, Generator, Iterable
 from dataclasses import dataclass
 from pathlib import Path
 from pyalpm import vercmp  # type: ignore[import-not-found]
-from srcinfo.parse import parse_srcinfo  # type: ignore[import-untyped]
 from typing import Any, Self
 from urllib.parse import urlparse

 from ahriman.core.alpm.pacman import Pacman
 from ahriman.core.alpm.remote import AUR, Official, OfficialSyncdb
-from ahriman.core.exceptions import PackageInfoError
+from ahriman.core.configuration import Configuration
 from ahriman.core.log import LazyLogging
-from ahriman.core.utils import check_output, dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow
+from ahriman.core.utils import dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow
 from ahriman.models.package_description import PackageDescription
 from ahriman.models.package_source import PackageSource
+from ahriman.models.pkgbuild import Pkgbuild
 from ahriman.models.remote_source import RemoteSource
-from ahriman.models.repository_paths import RepositoryPaths


 @dataclass(kw_only=True)
@@ -255,25 +254,19 @@ class Package(LazyLogging):

         Returns:
             Self: package properties
-
-        Raises:
-            PackageInfoError: if there are parsing errors
         """
-        srcinfo_source = check_output("makepkg", "--printsrcinfo", cwd=path)
-        srcinfo, errors = parse_srcinfo(srcinfo_source)
-        if errors:
-            raise PackageInfoError(errors)
+        pkgbuild = Pkgbuild.from_file(path / "PKGBUILD")

         packages = {
             package: PackageDescription(
-                depends=srcinfo_property_list("depends", srcinfo, properties, architecture=architecture),
-                make_depends=srcinfo_property_list("makedepends", srcinfo, properties, architecture=architecture),
-                opt_depends=srcinfo_property_list("optdepends", srcinfo, properties, architecture=architecture),
-                check_depends=srcinfo_property_list("checkdepends", srcinfo, properties, architecture=architecture),
+                depends=srcinfo_property_list("depends", pkgbuild, properties, architecture=architecture),
+                make_depends=srcinfo_property_list("makedepends", pkgbuild, properties, architecture=architecture),
+                opt_depends=srcinfo_property_list("optdepends", pkgbuild, properties, architecture=architecture),
+                check_depends=srcinfo_property_list("checkdepends", pkgbuild, properties, architecture=architecture),
             )
-            for package, properties in srcinfo["packages"].items()
+            for package, properties in pkgbuild.packages().items()
         }
-        version = full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"])
+        version = full_version(pkgbuild.get("epoch"), pkgbuild["pkgver"], pkgbuild["pkgrel"])

         remote = RemoteSource(
             source=PackageSource.Local,
@@ -284,7 +277,7 @@ class Package(LazyLogging):
         )

         return cls(
-            base=srcinfo["pkgbase"],
+            base=pkgbuild["pkgbase"],
             version=version,
             remote=remote,
             packages=packages,
@@ -363,18 +356,14 @@ class Package(LazyLogging):
         Raises:
             PackageInfoError: if there are parsing errors
         """
-        srcinfo_source = check_output("makepkg", "--printsrcinfo", cwd=path)
-        srcinfo, errors = parse_srcinfo(srcinfo_source)
-        if errors:
-            raise PackageInfoError(errors)
-
+        pkgbuild = Pkgbuild.from_file(path / "PKGBUILD")
         # we could use arch property, but for consistency it is better to call special method
         architectures = Package.supported_architectures(path)

         for architecture in architectures:
-            for source in srcinfo_property_list("source", srcinfo, {}, architecture=architecture):
+            for source in srcinfo_property_list("source", pkgbuild, {}, architecture=architecture):
                 if "::" in source:
-                    _, source = source.split("::", 1)  # in case if filename is specified, remove it
+                    _, source = source.split("::", maxsplit=1)  # in case if filename is specified, remove it

                 if urlparse(source).scheme:
                     # basically file schema should use absolute path which is impossible if we are distributing
@@ -383,7 +372,7 @@ class Package(LazyLogging):

                 yield Path(source)

-        if (install := srcinfo.get("install", None)) is not None:
+        if (install := pkgbuild.get("install")) is not None:
             yield Path(install)

     @staticmethod
@@ -396,15 +385,9 @@ class Package(LazyLogging):

         Returns:
             set[str]: list of package supported architectures
-
-        Raises:
-            PackageInfoError: if there are parsing errors
         """
-        srcinfo_source = check_output("makepkg", "--printsrcinfo", cwd=path)
-        srcinfo, errors = parse_srcinfo(srcinfo_source)
-        if errors:
-            raise PackageInfoError(errors)
-        return set(srcinfo.get("arch", []))
+        pkgbuild = Pkgbuild.from_file(path / "PKGBUILD")
+        return set(pkgbuild.get("arch", []))

     def _package_list_property(self, extractor: Callable[[PackageDescription], list[str]]) -> list[str]:
         """
@@ -426,39 +409,39 @@ class Package(LazyLogging):

         return sorted(set(generator()))

-    def actual_version(self, paths: RepositoryPaths) -> str:
+    def actual_version(self, configuration: Configuration) -> str:
         """
         additional method to handle VCS package versions

         Args:
-            paths(RepositoryPaths): repository paths instance
+            configuration(Configuration): configuration instance

         Returns:
             str: package version if package is not VCS and current version according to VCS otherwise
-
-        Raises:
-            PackageInfoError: if there are parsing errors
         """
         if not self.is_vcs:
             return self.version

-        from ahriman.core.build_tools.sources import Sources
+        from ahriman.core.build_tools.task import Task

-        Sources.load(paths.cache_for(self.base), self, [], paths)
+        _, repository_id = configuration.check_loaded()
+        paths = configuration.repository_paths
+        task = Task(self, configuration, repository_id.architecture, paths)

         try:
-            # update pkgver first
-            check_output("makepkg", "--nodeps", "--nobuild", cwd=paths.cache_for(self.base), logger=self.logger)
-            # generate new .SRCINFO and put it to parser
-            srcinfo_source = check_output("makepkg", "--printsrcinfo",
-                                          cwd=paths.cache_for(self.base), logger=self.logger)
-            srcinfo, errors = parse_srcinfo(srcinfo_source)
-            if errors:
-                raise PackageInfoError(errors)
-
-            return full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"])
+            # create fresh chroot environment, fetch sources and - automagically - update PKGBUILD
+            task.init(paths.cache_for(self.base), [], None)
+            task.build(paths.cache_for(self.base), dry_run=True)
+
+            pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD")
+
+            return full_version(pkgbuild.get("epoch"), pkgbuild["pkgver"], pkgbuild["pkgrel"])
         except Exception:
-            self.logger.exception("cannot determine version of VCS package, make sure that VCS tools are installed")
+            self.logger.exception("cannot determine version of VCS package")
+        finally:
+            # clear log files generated by devtools
+            for log_file in paths.cache_for(self.base).glob("*.log"):
+                log_file.unlink()

         return self.version

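For reference, the rewritten actual_version() now boils down to a dry-run devtools build followed by re-reading the PKGBUILD. A minimal sketch of how a caller would use it, assuming `package` is an already constructed VCS Package and `configuration` is a loaded Configuration instance (both placeholders here, not part of the diff):

    # hypothetical objects; only actual_version() itself comes from the change above
    if package.is_vcs:
        version = package.actual_version(configuration)  # init chroot, build with dry_run=True, parse PKGBUILD
    else:
        version = package.version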
@@ -513,26 +496,25 @@ class Package(LazyLogging):
             if package.build_date is not None
         )

-    def is_outdated(self, remote: Package, paths: RepositoryPaths, *,
-                    vcs_allowed_age: float | int = 0,
+    def is_outdated(self, remote: Package, configuration: Configuration, *,
                     calculate_version: bool = True) -> bool:
         """
         check if package is out-of-dated

         Args:
             remote(Package): package properties from remote source
-            paths(RepositoryPaths): repository paths instance. Required for VCS packages cache
-            vcs_allowed_age(float | int, optional): max age of the built packages before they will be
-                forced to calculate actual version (Default value = 0)
+            configuration(Configuration): configuration instance
             calculate_version(bool, optional): expand version to actual value (by calculating git versions)
                 (Default value = True)

         Returns:
             bool: ``True`` if the package is out-of-dated and ``False`` otherwise
         """
+        vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0)
         min_vcs_build_date = utcnow().timestamp() - vcs_allowed_age

         if calculate_version and not self.is_newer_than(min_vcs_build_date):
-            remote_version = remote.actual_version(paths)
+            remote_version = remote.actual_version(configuration)
         else:
             remote_version = remote.version

149  src/ahriman/models/pkgbuild.py  Normal file
@@ -0,0 +1,149 @@
+#
+# Copyright (c) 2021-2024 ahriman team.
+#
+# This file is part of ahriman
+# (see https://github.com/arcan1s/ahriman).
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+from collections.abc import Iterator, Mapping
+from dataclasses import dataclass
+from io import StringIO
+from pathlib import Path
+from typing import Any, IO, Self
+
+from ahriman.core.alpm.pkgbuild_parser import PkgbuildParser, PkgbuildToken
+from ahriman.models.pkgbuild_patch import PkgbuildPatch
+
+
+@dataclass(frozen=True)
+class Pkgbuild(Mapping[str, Any]):
+    """
+    model and proxy for PKGBUILD properties
+
+    Attributes:
+        fields(dict[str, PkgbuildPatch]): PKGBUILD fields
+    """
+
+    fields: dict[str, PkgbuildPatch]
+
+    @property
+    def variables(self) -> dict[str, str]:
+        """
+        list of variables defined and (maybe) used in this PKGBUILD
+
+        Returns:
+            dict[str, str]: map of variable name to its value. The value will be included here in case if it presented
+            in the internal dictionary, it is not a function and the value has string type
+        """
+        return {
+            key: value.value
+            for key, value in self.fields.items()
+            if not value.is_function and isinstance(value.value, str)
+        }
+
+    @classmethod
+    def from_file(cls, path: Path) -> Self:
+        """
+        parse PKGBUILD from the file
+
+        Args:
+            path(Path): path to the PKGBUILD file
+
+        Returns:
+            Self: constructed instance of self
+        """
+        with path.open() as input_file:
+            return cls.from_io(input_file)
+
+    @classmethod
+    def from_io(cls, stream: IO[str]) -> Self:
+        """
+        parse PKGBUILD from input stream
+
+        Args:
+            stream(IO[str]): input stream containing PKGBUILD content
+
+        Returns:
+            Self: constructed instance of self
+        """
+        parser = PkgbuildParser(stream)
+        fields = {patch.key: patch for patch in parser.parse()}
+
+        # pkgbase is optional field, the pkgname must be used instead if not set
+        # however, pkgname is not presented is "package()" functions which we are parsing here too,
+        # thus, in our terms, it is optional too
+        if "pkgbase" not in fields and "pkgname" in fields:
+            fields["pkgbase"] = PkgbuildPatch("pkgbase", fields["pkgname"].value)
+
+        return cls({key: value for key, value in fields.items() if key})
+
+    def packages(self) -> dict[str, Self]:
+        """
+        extract properties from internal package functions
+
+        Returns:
+            dict[str, Self]: map of package name to its inner properties if defined
+        """
+        packages = [self["pkgname"]] if isinstance(self["pkgname"], str) else self["pkgname"]
+
+        def io(package_name: str) -> IO[str]:
+            # try to read package specific function and fallback to default otherwise
+            content = self.get(f"package_{package_name}") or self.get("package") or ""
+            return StringIO(content)
+
+        return {package: self.from_io(io(package)) for package in packages}
+
+    def __getitem__(self, item: str) -> Any:
+        """
+        get the field of the PKGBUILD. This method tries to get exact key value if possible; if none found, it tries to
+        fetch function with the same name
+
+        Args:
+            item(str): key name
+
+        Returns:
+            Any: substituted value by the key
+
+        Raises:
+            KeyError: if key doesn't exist
+        """
+        value = self.fields.get(item)
+        # if the key wasn't found and user didn't ask for function explicitly, we can try to get by function name
+        if value is None and not item.endswith(PkgbuildToken.FunctionDeclaration):
+            value = self.fields.get(f"{item}{PkgbuildToken.FunctionDeclaration}")
+
+        # if we still didn't find anything, we can just raise the exception
+        if value is None:
+            raise KeyError(item)
+
+        return value.substitute(self.variables)
+
+    def __iter__(self) -> Iterator[str]:
+        """
+        iterate over the fields
+
+        Returns:
+            Iterator[str]: keys iterator
+        """
+        return iter(self.fields)
+
+    def __len__(self) -> int:
+        """
+        get length of the mapping
+
+        Returns:
+            int: amount of the fields in this PKGBUILD
+        """
+        return len(self.fields)
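The new model implements the Mapping protocol, so PKGBUILD values read like dictionary keys with bash variables already substituted. A small usage sketch, assuming a PKGBUILD file exists at the given path (the path itself is made up):

    from pathlib import Path

    from ahriman.core.utils import full_version
    from ahriman.models.pkgbuild import Pkgbuild

    pkgbuild = Pkgbuild.from_file(Path("/tmp/build/PKGBUILD"))  # hypothetical location
    version = full_version(pkgbuild.get("epoch"), pkgbuild["pkgver"], pkgbuild["pkgrel"])
    for name, properties in pkgbuild.packages().items():
        # per-package overrides fall back to the global package() section
        print(name, properties.get("depends", []))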
@@ -23,6 +23,7 @@ from dataclasses import dataclass, fields
 from pathlib import Path
 from typing import Any, Generator, Self

+from ahriman.core.configuration.shell_template import ShellTemplate
 from ahriman.core.utils import dataclass_view, filter_json


@@ -167,6 +168,21 @@ class PkgbuildPatch:
             return f"{self.key} {self.value}"  # no quoting enabled here
         return f"""{self.key}={PkgbuildPatch.quote(self.value)}"""

+    def substitute(self, variables: dict[str, str]) -> str | list[str]:
+        """
+        substitute variables into the value
+
+        Args:
+            variables(dict[str, str]): map of variables available for usage
+
+        Returns:
+            str | list[str]: substituted value. All unknown variables will remain as links to their values.
+                This function doesn't support recursive substitution
+        """
+        if isinstance(self.value, str):
+            return ShellTemplate(self.value).shell_substitute(variables)
+        return [ShellTemplate(value).shell_substitute(variables) for value in self.value]
+
     def view(self) -> dict[str, Any]:
         """
         generate json patch view
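PkgbuildPatch.substitute() is what Pkgbuild.__getitem__ relies on above: it resolves $name references against the collected variables and leaves unknown ones untouched. A short sketch with made-up values, assuming shell_substitute() performs the safe substitution described in the docstring:

    from ahriman.models.pkgbuild_patch import PkgbuildPatch

    patch = PkgbuildPatch("source", ["$pkgname-$pkgver.tar.gz"])
    patch.substitute({"pkgname": "ahriman", "pkgver": "1.0.0"})  # ["ahriman-1.0.0.tar.gz"]
    patch.substitute({"pkgname": "ahriman"})  # ["ahriman-$pkgver.tar.gz"], unknown variables stay as-is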
@@ -28,9 +28,9 @@ def test_package_dependencies() -> None:
     """
     must extract package dependencies
     """
-    packages = dict(Versions.package_dependencies("srcinfo"))
+    packages = dict(Versions.package_dependencies("requests"))
     assert packages
-    assert packages.get("parse") is not None
+    assert packages.get("urllib3") is not None


 def test_package_dependencies_missing() -> None:
262  tests/ahriman/core/alpm/test_pkgbuild_parser.py  Normal file
@@ -0,0 +1,262 @@
+import pytest
+
+from io import StringIO
+from pathlib import Path
+
+from ahriman.core.alpm.pkgbuild_parser import PkgbuildParser
+from ahriman.core.exceptions import PkgbuildParserError
+from ahriman.models.pkgbuild_patch import PkgbuildPatch
+
+
+def test_expand_array() -> None:
+    """
+    must correctly expand array
+    """
+    assert PkgbuildParser._expand_array(["${pkgbase}{", ",", "-libs", ",", "-fortran}"]) == [
+        "${pkgbase}", "${pkgbase}-libs", "${pkgbase}-fortran"
+    ]
+    assert PkgbuildParser._expand_array(["first", "prefix{1", ",", "2", ",", "3}suffix", "last"]) == [
+        "first", "prefix1suffix", "prefix2suffix", "prefix3suffix", "last"
+    ]
+
+
+def test_expand_array_no_comma() -> None:
+    """
+    must skip array extraction if there is no comma
+    """
+    assert PkgbuildParser._expand_array(["${pkgbase}{", "-libs", "-fortran}"]) == ["${pkgbase}{", "-libs", "-fortran}"]
+
+
+def test_expand_array_short() -> None:
+    """
+    must skip array extraction if it is short
+    """
+    assert PkgbuildParser._expand_array(["${pkgbase}{", ","]) == ["${pkgbase}{", ","]
+
+
+def test_expand_array_exception() -> None:
+    """
+    must raise exception if there is unclosed element
+    """
+    with pytest.raises(PkgbuildParserError):
+        assert PkgbuildParser._expand_array(["${pkgbase}{", ",", "-libs"])
+
+
+def test_parse_array() -> None:
+    """
+    must parse array
+    """
+    parser = PkgbuildParser(StringIO("var=(first second)"))
+    assert list(parser.parse()) == [PkgbuildPatch("var", ["first", "second"])]
+
+
+def test_parse_array_comment() -> None:
+    """
+    must parse array with comments inside
+    """
+    parser = PkgbuildParser(StringIO("""validpgpkeys=(
+    'F3691687D867B81B51CE07D9BBE43771487328A9'  # bpiotrowski@archlinux.org
+    '86CFFCA918CF3AF47147588051E8B148A9999C34'  # evangelos@foutrelis.com
+    '13975A70E63C361C73AE69EF6EEB81F8981C74C7'  # richard.guenther@gmail.com
+    'D3A93CAD751C2AF4F8C7AD516C35B99309B5FA62'  # Jakub Jelinek <jakub@redhat.com>
+)"""))
+    assert list(parser.parse()) == [PkgbuildPatch("validpgpkeys", [
+        "F3691687D867B81B51CE07D9BBE43771487328A9",
+        "86CFFCA918CF3AF47147588051E8B148A9999C34",
+        "13975A70E63C361C73AE69EF6EEB81F8981C74C7",
+        "D3A93CAD751C2AF4F8C7AD516C35B99309B5FA62",
+    ])]
+
+
+def test_parse_array_escaped() -> None:
+    """
+    must correctly process quoted brackets
+    """
+    parser = PkgbuildParser(StringIO("""var=(first "(" second)"""))
+    assert list(parser.parse()) == [PkgbuildPatch("var", ["first", "(", "second"])]
+
+    parser = PkgbuildParser(StringIO("""var=(first ")" second)"""))
+    assert list(parser.parse()) == [PkgbuildPatch("var", ["first", ")", "second"])]
+
+    parser = PkgbuildParser(StringIO("""var=(first ')' second)"""))
+    assert list(parser.parse()) == [PkgbuildPatch("var", ["first", ")", "second"])]
+
+    parser = PkgbuildParser(StringIO("""var=(first \\) second)"""))
+    assert list(parser.parse()) == [PkgbuildPatch("var", ["first", ")", "second"])]
+
+
+def test_parse_array_exception() -> None:
+    """
+    must raise exception if there is no closing bracket
+    """
+    parser = PkgbuildParser(StringIO("var=(first second"))
+    with pytest.raises(PkgbuildParserError):
+        assert list(parser.parse())
+
+
+def test_parse_function() -> None:
+    """
+    must parse function
+    """
+    parser = PkgbuildParser(StringIO("var() { echo hello world } "))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", "{ echo hello world }")]
+
+
+def test_parse_function_eof() -> None:
+    """
+    must parse function with "}" at the end of the file
+    """
+    parser = PkgbuildParser(StringIO("var() { echo hello world }"))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", "{ echo hello world }")]
+
+
+def test_parse_function_spaces() -> None:
+    """
+    must parse function with spaces in declaration
+    """
+    parser = PkgbuildParser(StringIO("var ( ) { echo hello world } "))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", "{ echo hello world }")]
+
+
+def test_parse_function_inner_shell() -> None:
+    """
+    must parse function with inner shell
+    """
+    parser = PkgbuildParser(StringIO("var ( ) { { echo hello world } } "))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", "{ { echo hello world } }")]
+
+
+def test_parse_function_escaped() -> None:
+    """
+    must parse function with bracket in quotes
+    """
+    parser = PkgbuildParser(StringIO("""var ( ) { echo "hello world {" } """))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", """{ echo "hello world {" }""")]
+
+    parser = PkgbuildParser(StringIO("""var ( ) { echo hello world "{" } """))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", """{ echo hello world "{" }""")]
+
+    parser = PkgbuildParser(StringIO("""var ( ) { echo "hello world }" } """))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", """{ echo "hello world }" }""")]
+
+    parser = PkgbuildParser(StringIO("""var ( ) { echo hello world "}" } """))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", """{ echo hello world "}" }""")]
+
+    parser = PkgbuildParser(StringIO("""var ( ) { echo hello world '}' } """))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", """{ echo hello world '}' }""")]
+
+    parser = PkgbuildParser(StringIO("""var ( ) { echo hello world \\} } """))
+    assert list(parser.parse()) == [PkgbuildPatch("var()", """{ echo hello world \\} }""")]
+
+
+def test_parse_function_exception() -> None:
+    """
+    must raise exception if no bracket found
+    """
+    parser = PkgbuildParser(StringIO("var() echo hello world } "))
+    with pytest.raises(PkgbuildParserError):
+        assert list(parser.parse())
+
+    parser = PkgbuildParser(StringIO("var() { echo hello world"))
+    with pytest.raises(PkgbuildParserError):
+        assert list(parser.parse())
+
+
+def test_parse_token_assignment() -> None:
+    """
+    must parse simple assignment
+    """
+    parser = PkgbuildParser(StringIO())
+    assert next(parser._parse_token("var=value")) == PkgbuildPatch("var", "value")
+    assert next(parser._parse_token("var=$value")) == PkgbuildPatch("var", "$value")
+    assert next(parser._parse_token("var=${value}")) == PkgbuildPatch("var", "${value}")
+    assert next(parser._parse_token("var=${value/-/_}")) == PkgbuildPatch("var", "${value/-/_}")
+
+
+def test_parse_token_comment() -> None:
+    """
+    must correctly parse comment
+    """
+    parser = PkgbuildParser(StringIO("""first=1 # comment
+    # comment line
+    second=2
+    #third=3
+    """))
+    assert list(parser.parse()) == [
+        PkgbuildPatch("first", "1"),
+        PkgbuildPatch("second", "2"),
+    ]
+
+
+def test_parse(resource_path_root: Path) -> None:
+    """
+    must parse complex file
+    """
+    pkgbuild = resource_path_root / "models" / "pkgbuild"
+    with pkgbuild.open() as content:
+        parser = PkgbuildParser(content)
+        assert list(parser.parse()) == [
+            PkgbuildPatch("var", "value"),
+            PkgbuildPatch("var", "value"),
+            PkgbuildPatch("var", "value with space"),
+            PkgbuildPatch("var", "value"),
+            PkgbuildPatch("var", "$ref"),
+            PkgbuildPatch("var", "${ref}"),
+            PkgbuildPatch("var", "$ref value"),
+            PkgbuildPatch("var", "${ref}value"),
+            PkgbuildPatch("var", "${ref/-/_}"),
+            PkgbuildPatch("var", "${ref##.*}"),
+            PkgbuildPatch("var", "${ref%%.*}"),
+            PkgbuildPatch("array", ["first", "second", "third", "with space"]),
+            PkgbuildPatch("array", ["single"]),
+            PkgbuildPatch("array", ["$ref"]),
+            PkgbuildPatch("array", ["first", "second", "third"]),
+            PkgbuildPatch("array", ["first", "second", "third"]),
+            PkgbuildPatch("array", ["first", "last"]),
+            PkgbuildPatch("array", ["first", "1suffix", "2suffix", "last"]),
+            PkgbuildPatch("array", ["first", "prefix1", "prefix2", "last"]),
+            PkgbuildPatch("array", ["first", "prefix1suffix", "prefix2suffix", "last"]),
+            PkgbuildPatch("array", ["first", "(", "second"]),
+            PkgbuildPatch("array", ["first", ")", "second"]),
+            PkgbuildPatch("array", ["first", "(", "second"]),
+            PkgbuildPatch("array", ["first", ")", "second"]),
+            PkgbuildPatch("function()", """{ single line }"""),
+            PkgbuildPatch("function()", """{
+    multi
+    line
+}"""),
+            PkgbuildPatch("function()", """{
+    c
+    multi
+    line
+}"""),
+            PkgbuildPatch("function()", """{
+    # comment
+    multi
+    line
+}"""),
+            PkgbuildPatch("function()", """{
+    body
+}"""),
+            PkgbuildPatch("function()", """{
+    body
+}"""),
+            PkgbuildPatch("function_with-package-name()", """{ body }"""),
+            PkgbuildPatch("function()", """{
+    first
+    { inner shell }
+    last
+}"""),
+            PkgbuildPatch("function()", """{
+    body "{" argument
+}"""),
+            PkgbuildPatch("function()", """{
+    body "}" argument
+}"""),
+            PkgbuildPatch("function()", """{
+    body '{' argument
+}"""),
+            PkgbuildPatch("function()", """{
+    body '}' argument
+}"""),
+        ]
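Taken together, the tests above pin down the parser contract: plain assignments come back as strings, arrays as lists, and functions keyed by "name()". A tiny sketch of the expected output for a trivial PKGBUILD fragment, with arbitrary values and behaviour assumed only from the tests:

    from io import StringIO

    from ahriman.core.alpm.pkgbuild_parser import PkgbuildParser

    parser = PkgbuildParser(StringIO("pkgname=demo\npkgver=1.0.0\ndepends=(glibc gcc-libs)\n"))
    print(list(parser.parse()))
    # expected: [PkgbuildPatch("pkgname", "demo"), PkgbuildPatch("pkgver", "1.0.0"),
    #            PkgbuildPatch("depends", ["glibc", "gcc-libs"])]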
@@ -2,37 +2,65 @@ import pytest

 from pathlib import Path
 from pytest_mock import MockerFixture
-from unittest.mock import call as MockCall

 from ahriman.core.build_tools.task import Task
 from ahriman.models.pkgbuild_patch import PkgbuildPatch


+def test_package_archives(task_ahriman: Task, mocker: MockerFixture) -> None:
+    """
+    must correctly return list of new files
+    """
+    mocker.patch("pathlib.Path.iterdir", return_value=[
+        Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"),
+        Path(f"{task_ahriman.package.base}-debug-{task_ahriman.package.version}-any.pkg.tar.xz"),
+        Path("source.pkg.tar.xz"),
+        Path("randomfile"),
+        Path("namcap.log"),
+    ])
+    assert task_ahriman._package_archives(Path("local"), [Path("source.pkg.tar.xz")]) == [
+        Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"),
+        Path(f"{task_ahriman.package.base}-debug-{task_ahriman.package.version}-any.pkg.tar.xz"),
+    ]
+
+
+def test_package_archives_no_debug(task_ahriman: Task, mocker: MockerFixture) -> None:
+    """
+    must correctly return list of new files without debug packages
+    """
+    task_ahriman.include_debug_packages = False
+    mocker.patch("pathlib.Path.iterdir", return_value=[
+        Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"),
+        Path(f"{task_ahriman.package.base}-debug-{task_ahriman.package.version}-any.pkg.tar.xz"),
+        Path("source.pkg.tar.xz"),
+        Path("randomfile"),
+        Path("namcap.log"),
+    ])
+    assert task_ahriman._package_archives(Path("local"), [Path("source.pkg.tar.xz")]) == [
+        Path(f"{task_ahriman.package.base}-{task_ahriman.package.version}-any.pkg.tar.xz"),
+    ]
+
+
 def test_build(task_ahriman: Task, mocker: MockerFixture) -> None:
     """
     must build package
     """
     local = Path("local")
+    mocker.patch("pathlib.Path.iterdir", return_value=["file"])
     check_output_mock = mocker.patch("ahriman.core.build_tools.task.check_output")
+    archives_mock = mocker.patch("ahriman.core.build_tools.task.Task._package_archives",
+                                 return_value=[task_ahriman.package.base])

-    task_ahriman.build(local)
-    check_output_mock.assert_has_calls([
-        MockCall(
+    assert task_ahriman.build(local) == [task_ahriman.package.base]
+    check_output_mock.assert_called_once_with(
         "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck",
         exception=pytest.helpers.anyvar(int),
         cwd=local,
         logger=task_ahriman.logger,
         user=task_ahriman.uid,
         environment={},
-        ),
-        MockCall(
-            "makepkg", "--packagelist",
-            exception=pytest.helpers.anyvar(int),
-            cwd=local,
-            logger=task_ahriman.logger,
-            environment={},
-        ),
-    ])
+    )
+    archives_mock.assert_called_once_with(local, ["file"])


 def test_build_environment(task_ahriman: Task, mocker: MockerFixture) -> None:
@@ -40,55 +68,41 @@ def test_build_environment(task_ahriman: Task, mocker: MockerFixture) -> None:
     must build package with environment variables set
     """
     local = Path("local")
+    mocker.patch("pathlib.Path.iterdir", return_value=["file"])
+    mocker.patch("ahriman.core.build_tools.task.Task._package_archives", return_value=[task_ahriman.package.base])
     check_output_mock = mocker.patch("ahriman.core.build_tools.task.check_output")

     environment = {"variable": "value"}

     task_ahriman.build(local, **environment, empty=None)
-    check_output_mock.assert_has_calls([
-        MockCall(
+    check_output_mock.assert_called_once_with(
         "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck",
         exception=pytest.helpers.anyvar(int),
         cwd=local,
         logger=task_ahriman.logger,
         user=task_ahriman.uid,
         environment=environment,
-        ),
-        MockCall(
-            "makepkg", "--packagelist",
-            exception=pytest.helpers.anyvar(int),
-            cwd=local,
-            logger=task_ahriman.logger,
-            environment=environment,
-        ),
-    ])
+    )


-def test_build_no_debug(task_ahriman: Task, mocker: MockerFixture) -> None:
+def test_build_dry_run(task_ahriman: Task, mocker: MockerFixture) -> None:
     """
-    must filter debug packages from result
+    must run devtools in dry-run mode
     """
     local = Path("local")
+    mocker.patch("pathlib.Path.iterdir", return_value=["file"])
+    mocker.patch("ahriman.core.build_tools.task.Task._package_archives", return_value=[task_ahriman.package.base])
     check_output_mock = mocker.patch("ahriman.core.build_tools.task.check_output")
-    task_ahriman.include_debug_packages = False

-    task_ahriman.build(local)
-    check_output_mock.assert_has_calls([
-        MockCall(
-            "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck",
+    assert task_ahriman.build(local, dry_run=True) == [task_ahriman.package.base]
+    check_output_mock.assert_called_once_with(
+        "extra-x86_64-build", "-r", str(task_ahriman.paths.chroot), "--", "--", "--skippgpcheck", "--nobuild",
         exception=pytest.helpers.anyvar(int),
         cwd=local,
         logger=task_ahriman.logger,
         user=task_ahriman.uid,
         environment={},
-        ),
-        MockCall(
-            "makepkg", "--packagelist", "OPTIONS=(!debug)",
-            exception=pytest.helpers.anyvar(int),
-            cwd=local,
-            logger=task_ahriman.logger,
-            environment={},
-        ),
-    ])
+    )


 def test_init(task_ahriman: Task, mocker: MockerFixture) -> None:
@@ -1,7 +1,7 @@
 import os

 from ahriman.core.configuration import Configuration
-from ahriman.core.configuration.shell_interpolator import ExtendedTemplate, ShellInterpolator
+from ahriman.core.configuration.shell_interpolator import ShellInterpolator


 def _parser() -> dict[str, dict[str, str]]:
@@ -27,14 +27,6 @@ def _parser() -> dict[str, dict[str, str]]:
     }


-def test_extended_template() -> None:
-    """
-    must match colons in braces
-    """
-    assert ExtendedTemplate("$key:value").get_identifiers() == ["key"]
-    assert ExtendedTemplate("${key:value}").get_identifiers() == ["key:value"]
-
-
 def test_extract_variables() -> None:
     """
     must extract variables list
81  tests/ahriman/core/configuration/test_shell_template.py  Normal file
@@ -0,0 +1,81 @@
+from ahriman.core.configuration.shell_template import ShellTemplate
+
+
+def test_shell_template_braceidpattern() -> None:
+    """
+    must match colons in braces
+    """
+    assert ShellTemplate("$k:value").get_identifiers() == ["k"]
+    assert ShellTemplate("${k:value}").get_identifiers() == ["k:value"]
+
+
+def test_remove_back() -> None:
+    """
+    must remove substring from the back
+    """
+    assert ShellTemplate("${k%removeme}").shell_substitute({"k": "please removeme"}) == "please "
+    assert ShellTemplate("${k%removeme*}").shell_substitute({"k": "please removeme removeme"}) == "please removeme "
+    assert ShellTemplate("${k%removem?}").shell_substitute({"k": "please removeme removeme"}) == "please removeme "
+
+    assert ShellTemplate("${k%%removeme}").shell_substitute({"k": "please removeme removeme"}) == "please removeme "
+    assert ShellTemplate("${k%%removeme*}").shell_substitute({"k": "please removeme removeme"}) == "please "
+    assert ShellTemplate("${k%%removem?}").shell_substitute({"k": "please removeme removeme"}) == "please removeme "
+
+    assert ShellTemplate("${k%removeme}").shell_substitute({}) == "${k%removeme}"
+    assert ShellTemplate("${k%%removeme}").shell_substitute({}) == "${k%%removeme}"
+
+    assert ShellTemplate("${k%r3m0v3m3}").shell_substitute({"k": "please removeme"}) == "please removeme"
+    assert ShellTemplate("${k%%r3m0v3m3}").shell_substitute({"k": "please removeme"}) == "please removeme"
+
+
+def test_remove_front() -> None:
+    """
+    must remove substring from the front
+    """
+    assert ShellTemplate("${k#removeme}").shell_substitute({"k": "removeme please"}) == " please"
+    assert ShellTemplate("${k#*removeme}").shell_substitute({"k": "removeme removeme please"}) == " removeme please"
+    assert ShellTemplate("${k#removem?}").shell_substitute({"k": "removeme removeme please"}) == " removeme please"
+
+    assert ShellTemplate("${k##removeme}").shell_substitute({"k": "removeme removeme please"}) == " removeme please"
+    assert ShellTemplate("${k##*removeme}").shell_substitute({"k": "removeme removeme please"}) == " please"
+    assert ShellTemplate("${k##removem?}").shell_substitute({"k": "removeme removeme please"}) == " removeme please"
+
+    assert ShellTemplate("${k#removeme}").shell_substitute({}) == "${k#removeme}"
+    assert ShellTemplate("${k##removeme}").shell_substitute({}) == "${k##removeme}"
+
+    assert ShellTemplate("${k#r3m0v3m3}").shell_substitute({"k": "removeme please"}) == "removeme please"
+    assert ShellTemplate("${k##r3m0v3m3}").shell_substitute({"k": "removeme please"}) == "removeme please"
+
+
+def test_replace() -> None:
+    """
+    must perform regular replacement
+    """
+    assert ShellTemplate("${k/in/out}").shell_substitute({"k": "in replace in"}) == "out replace in"
+    assert ShellTemplate("${k/in*/out}").shell_substitute({"k": "in replace in"}) == "out"
+    assert ShellTemplate("${k/*in/out}").shell_substitute({"k": "in replace in replace"}) == "out replace"
+    assert ShellTemplate("${k/i?/out}").shell_substitute({"k": "in replace in"}) == "out replace in"
+
+    assert ShellTemplate("${k//in/out}").shell_substitute({"k": "in replace in"}) == "out replace out"
+    assert ShellTemplate("${k//in*/out}").shell_substitute({"k": "in replace in"}) == "out"
+    assert ShellTemplate("${k//*in/out}").shell_substitute({"k": "in replace in replace"}) == "out replace"
+    assert ShellTemplate("${k//i?/out}").shell_substitute({"k": "in replace in replace"}) == "out replace out replace"
+
+    assert ShellTemplate("${k/in/out}").shell_substitute({}) == "${k/in/out}"
+    assert ShellTemplate("${k//in/out}").shell_substitute({}) == "${k//in/out}"
+
+
+def test_replace_back() -> None:
+    """
+    must replace substring from the back
+    """
+    assert ShellTemplate("${k/%in/out}").shell_substitute({"k": "in replace in"}) == "in replace out"
+    assert ShellTemplate("${k/%in/out}").shell_substitute({"k": "in replace in "}) == "in replace in "
+
+
+def test_replace_front() -> None:
+    """
+    must replace substring from the front
+    """
+    assert ShellTemplate("${k/#in/out}").shell_substitute({"k": "in replace in"}) == "out replace in"
+    assert ShellTemplate("${k/#in/out}").shell_substitute({"k": " in replace in"}) == " in replace in"
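The assertions above describe bash-style parameter expansion layered on top of string.Template. In short, and assuming shell_substitute() follows exactly the semantics these tests exercise:

    from ahriman.core.configuration.shell_template import ShellTemplate

    ShellTemplate("${k%%.*}").shell_substitute({"k": "1.2.3"})        # "1", longest suffix removal
    ShellTemplate("${k#v}").shell_substitute({"k": "v1.0"})           # "1.0", shortest prefix removal
    ShellTemplate("${k/in/out}").shell_substitute({"k": "check in"})  # "check out", first replacement
    ShellTemplate("${k%%.*}").shell_substitute({})                    # left untouched for unknown variables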
@@ -31,8 +31,7 @@ def test_updates_aur(update_handler: UpdateHandler, package_ahriman: Package,
     event_mock.assert_called_once_with(package_ahriman.base, EventType.PackageOutdated,
                                        pytest.helpers.anyvar(str, True))
     package_is_outdated_mock.assert_called_once_with(
-        package_ahriman, update_handler.paths,
-        vcs_allowed_age=update_handler.vcs_allowed_age,
+        package_ahriman, update_handler.configuration,
         calculate_version=True)


@@ -119,8 +118,7 @@ def test_updates_aur_ignore_vcs(update_handler: UpdateHandler, package_ahriman:

     assert not update_handler.updates_aur([], vcs=False)
     package_is_outdated_mock.assert_called_once_with(
-        package_ahriman, update_handler.paths,
-        vcs_allowed_age=update_handler.vcs_allowed_age,
+        package_ahriman, update_handler.configuration,
         calculate_version=False)


@@ -228,8 +226,7 @@ def test_updates_local(update_handler: UpdateHandler, package_ahriman: Package,
     event_mock.assert_called_once_with(package_ahriman.base, EventType.PackageOutdated,
                                        pytest.helpers.anyvar(str, True))
     package_is_outdated_mock.assert_called_once_with(
-        package_ahriman, update_handler.paths,
-        vcs_allowed_age=update_handler.vcs_allowed_age,
+        package_ahriman, update_handler.configuration,
         calculate_version=True)


@@ -247,8 +244,7 @@ def test_updates_local_ignore_vcs(update_handler: UpdateHandler, package_ahriman

     assert not update_handler.updates_local(vcs=False)
     package_is_outdated_mock.assert_called_once_with(
-        package_ahriman, update_handler.paths,
-        vcs_allowed_age=update_handler.vcs_allowed_age,
+        package_ahriman, update_handler.configuration,
         calculate_version=False)

@@ -468,11 +468,12 @@ def test_walk(resource_path_root: Path) -> None:
         resource_path_root / "models" / "package_ahriman_aur",
         resource_path_root / "models" / "package_akonadi_aur",
         resource_path_root / "models" / "package_ahriman_files",
-        resource_path_root / "models" / "package_ahriman_srcinfo",
-        resource_path_root / "models" / "package_gcc10_srcinfo",
-        resource_path_root / "models" / "package_jellyfin-ffmpeg5-bin_srcinfo",
-        resource_path_root / "models" / "package_tpacpi-bat-git_srcinfo",
-        resource_path_root / "models" / "package_yay_srcinfo",
+        resource_path_root / "models" / "package_ahriman_pkgbuild",
+        resource_path_root / "models" / "package_gcc10_pkgbuild",
+        resource_path_root / "models" / "package_jellyfin-ffmpeg6-bin_pkgbuild",
+        resource_path_root / "models" / "package_tpacpi-bat-git_pkgbuild",
+        resource_path_root / "models" / "package_yay_pkgbuild",
+        resource_path_root / "models" / "pkgbuild",
         resource_path_root / "web" / "templates" / "build-status" / "alerts.jinja2",
         resource_path_root / "web" / "templates" / "build-status" / "key-import-modal.jinja2",
         resource_path_root / "web" / "templates" / "build-status" / "login-modal.jinja2",
@@ -15,8 +15,8 @@ def test_calculate_hash_small(resource_path_root: Path) -> None:
     """
     must calculate checksum for path which is single chunk
     """
-    path = resource_path_root / "models" / "package_ahriman_srcinfo"
-    assert HttpUpload.calculate_hash(path) == "2635e2898452d594025517cfe529b1f2"
+    path = resource_path_root / "models" / "package_ahriman_pkgbuild"
+    assert HttpUpload.calculate_hash(path) == "7136fc388980dc043f9f869d57c5ce0c"


 def test_get_body_get_hashes() -> None:
@@ -49,8 +49,8 @@ def test_calculate_etag_small(resource_path_root: Path) -> None:
     """
     must calculate checksum for path which is single chunk
     """
-    path = resource_path_root / "models" / "package_ahriman_srcinfo"
-    assert S3.calculate_etag(path, _chunk_size) == "2635e2898452d594025517cfe529b1f2"
+    path = resource_path_root / "models" / "package_ahriman_pkgbuild"
+    assert S3.calculate_etag(path, _chunk_size) == "7136fc388980dc043f9f869d57c5ce0c"


 def test_files_remove(s3_remote_objects: list[Any]) -> None:
@@ -1,5 +1,6 @@
 import pytest

+from pathlib import Path
 from unittest.mock import MagicMock, PropertyMock

 from ahriman import __version__
@@ -11,6 +12,7 @@ from ahriman.models.internal_status import InternalStatus
 from ahriman.models.package import Package
 from ahriman.models.package_description import PackageDescription
 from ahriman.models.package_source import PackageSource
+from ahriman.models.pkgbuild import Pkgbuild
 from ahriman.models.remote_source import RemoteSource


@@ -33,12 +35,14 @@ def counters() -> Counters:
     Returns:
         Counters: counters test instance
     """
-    return Counters(total=10,
+    return Counters(
+        total=10,
         unknown=1,
         pending=2,
         building=3,
         failed=4,
-        success=0)
+        success=0,
+    )


 @pytest.fixture
@@ -91,6 +95,21 @@ def package_tpacpi_bat_git() -> Package:
                    packages={"tpacpi-bat-git": PackageDescription()})


+@pytest.fixture
+def pkgbuild_ahriman(resource_path_root: Path) -> Pkgbuild:
+    """
+    pkgbuild fixture
+
+    Args:
+        resource_path_root(Path): resource path root directory
+
+    Returns:
+        Pkgbuild: pkgbuild test instance
+    """
+    pkgbuild = resource_path_root / "models" / "package_ahriman_pkgbuild"
+    return Pkgbuild.from_file(pkgbuild)
+
+
 @pytest.fixture
 def pyalpm_handle(pyalpm_package_ahriman: MagicMock) -> MagicMock:
     """
@ -1,17 +1,15 @@
|
|||||||
import pytest
|
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from pytest_mock import MockerFixture
|
from pytest_mock import MockerFixture
|
||||||
from srcinfo.parse import parse_srcinfo
|
|
||||||
from unittest.mock import MagicMock
|
from unittest.mock import MagicMock
|
||||||
|
|
||||||
from ahriman.core.alpm.pacman import Pacman
|
from ahriman.core.alpm.pacman import Pacman
|
||||||
from ahriman.core.exceptions import PackageInfoError
|
from ahriman.core.configuration import Configuration
|
||||||
from ahriman.core.utils import utcnow
|
from ahriman.core.utils import utcnow
|
||||||
from ahriman.models.aur_package import AURPackage
|
from ahriman.models.aur_package import AURPackage
|
||||||
from ahriman.models.package import Package
|
from ahriman.models.package import Package
|
||||||
from ahriman.models.package_description import PackageDescription
|
from ahriman.models.package_description import PackageDescription
|
||||||
from ahriman.models.repository_paths import RepositoryPaths
|
from ahriman.models.pkgbuild import Pkgbuild
|
||||||
|
from ahriman.models.pkgbuild_patch import PkgbuildPatch
|
||||||
|
|
||||||
|
|
||||||
def test_depends(package_python_schedule: Package) -> None:
|
def test_depends(package_python_schedule: Package) -> None:
|
||||||
@ -52,9 +50,8 @@ def test_depends_build_with_version_and_overlap(mocker: MockerFixture, resource_
|
|||||||
"""
|
"""
|
||||||
must load correct list of dependencies with version
|
must load correct list of dependencies with version
|
||||||
"""
|
"""
|
||||||
|
pkgbuild = resource_path_root / "models" / "package_gcc10_pkgbuild"
|
||||||
srcinfo = (resource_path_root / "models" / "package_gcc10_srcinfo").read_text()
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild))
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value=srcinfo)
|
|
||||||
|
|
||||||
package_gcc10 = Package.from_build(Path("local"), "x86_64", None)
|
package_gcc10 = Package.from_build(Path("local"), "x86_64", None)
|
||||||
assert package_gcc10.depends_build == {
|
assert package_gcc10.depends_build == {
|
||||||
@ -179,10 +176,10 @@ def test_from_aur(package_ahriman: Package, aur_package_ahriman: AURPackage, moc
|
|||||||
|
|
||||||
def test_from_build(package_ahriman: Package, mocker: MockerFixture, resource_path_root: Path) -> None:
|
def test_from_build(package_ahriman: Package, mocker: MockerFixture, resource_path_root: Path) -> None:
|
||||||
"""
|
"""
|
||||||
must construct package from srcinfo
|
must construct package from PKGBUILD
|
||||||
"""
|
"""
|
||||||
srcinfo = (resource_path_root / "models" / "package_ahriman_srcinfo").read_text()
|
pkgbuild = resource_path_root / "models" / "package_ahriman_pkgbuild"
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value=srcinfo)
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild))
|
||||||
|
|
||||||
package = Package.from_build(Path("path"), "x86_64", "packager")
|
package = Package.from_build(Path("path"), "x86_64", "packager")
|
||||||
assert package_ahriman.packages.keys() == package.packages.keys()
|
assert package_ahriman.packages.keys() == package.packages.keys()
|
||||||
@ -193,15 +190,15 @@ def test_from_build(package_ahriman: Package, mocker: MockerFixture, resource_pa
|
|||||||
|
|
||||||
def test_from_build_multiple_packages(mocker: MockerFixture, resource_path_root: Path) -> None:
|
def test_from_build_multiple_packages(mocker: MockerFixture, resource_path_root: Path) -> None:
|
||||||
"""
|
"""
|
||||||
must construct package from srcinfo with dependencies per-package overrides
|
must construct package from PKGBUILD with dependencies per-package overrides
|
||||||
"""
|
"""
|
||||||
srcinfo = (resource_path_root / "models" / "package_gcc10_srcinfo").read_text()
|
pkgbuild = resource_path_root / "models" / "package_gcc10_pkgbuild"
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value=srcinfo)
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild))
|
||||||
|
|
||||||
package = Package.from_build(Path("path"), "x86_64", None)
|
package = Package.from_build(Path("path"), "x86_64", None)
|
||||||
assert package.packages == {
|
assert package.packages == {
|
||||||
"gcc10": PackageDescription(
|
"gcc10": PackageDescription(
|
||||||
depends=["gcc10-libs=10.3.0-2", "binutils>=2.28", "libmpc", "zstd"],
|
depends=["gcc10-libs=10.5.0-2", "binutils>=2.28", "libmpc", "zstd"],
|
||||||
make_depends=["binutils", "doxygen", "git", "libmpc", "python"],
|
make_depends=["binutils", "doxygen", "git", "libmpc", "python"],
|
||||||
opt_depends=[],
|
opt_depends=[],
|
||||||
check_depends=["dejagnu", "inetutils"],
|
check_depends=["dejagnu", "inetutils"],
|
||||||
@ -213,7 +210,7 @@ def test_from_build_multiple_packages(mocker: MockerFixture, resource_path_root:
|
|||||||
check_depends=["dejagnu", "inetutils"],
|
check_depends=["dejagnu", "inetutils"],
|
||||||
),
|
),
|
||||||
"gcc10-fortran": PackageDescription(
|
"gcc10-fortran": PackageDescription(
|
||||||
depends=["gcc10=10.3.0-2"],
|
depends=["gcc10=10.5.0-2"],
|
||||||
make_depends=["binutils", "doxygen", "git", "libmpc", "python"],
|
make_depends=["binutils", "doxygen", "git", "libmpc", "python"],
|
||||||
opt_depends=[],
|
opt_depends=[],
|
||||||
check_depends=["dejagnu", "inetutils"],
|
check_depends=["dejagnu", "inetutils"],
|
||||||
@ -225,12 +222,12 @@ def test_from_build_architecture(mocker: MockerFixture, resource_path_root: Path
|
|||||||
"""
|
"""
|
||||||
must construct package with architecture specific depends list
|
must construct package with architecture specific depends list
|
||||||
"""
|
"""
|
||||||
srcinfo = (resource_path_root / "models" / "package_jellyfin-ffmpeg5-bin_srcinfo").read_text()
|
pkgbuild = resource_path_root / "models" / "package_jellyfin-ffmpeg6-bin_pkgbuild"
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value=srcinfo)
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild))
|
||||||
|
|
||||||
package = Package.from_build(Path("path"), "x86_64", None)
|
package = Package.from_build(Path("path"), "x86_64", None)
|
||||||
assert package.packages == {
|
assert package.packages == {
|
||||||
"jellyfin-ffmpeg5-bin": PackageDescription(
|
"jellyfin-ffmpeg6-bin": PackageDescription(
|
||||||
depends=["glibc"],
|
depends=["glibc"],
|
||||||
make_depends=[],
|
make_depends=[],
|
||||||
opt_depends=[
|
opt_depends=[
|
||||||
@ -249,17 +246,6 @@ def test_from_build_architecture(mocker: MockerFixture, resource_path_root: Path
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def test_from_build_failed(mocker: MockerFixture) -> None:
|
|
||||||
"""
|
|
||||||
must raise exception if there are errors during srcinfo load
|
|
||||||
"""
|
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value="")
|
|
||||||
mocker.patch("ahriman.models.package.parse_srcinfo", return_value=({"packages": {}}, ["an error"]))
|
|
||||||
|
|
||||||
with pytest.raises(PackageInfoError):
|
|
||||||
Package.from_build(Path("path"), "x86_64", None)
|
|
||||||
|
|
||||||
|
|
||||||
def test_from_json_view_1(package_ahriman: Package) -> None:
|
def test_from_json_view_1(package_ahriman: Package) -> None:
|
||||||
"""
|
"""
|
||||||
must construct same object from json
|
must construct same object from json
|
||||||
@ -299,11 +285,10 @@ def test_local_files(mocker: MockerFixture, resource_path_root: Path) -> None:
|
|||||||
"""
|
"""
|
||||||
must extract local file sources
|
must extract local file sources
|
||||||
"""
|
"""
|
||||||
srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text()
|
pkgbuild = resource_path_root / "models" / "package_yay_pkgbuild"
|
||||||
parsed_srcinfo, _ = parse_srcinfo(srcinfo)
|
parsed_pkgbuild = Pkgbuild.from_file(pkgbuild)
|
||||||
parsed_srcinfo["source"] = ["local-file.tar.gz"]
|
parsed_pkgbuild.fields["source"] = PkgbuildPatch("source", ["local-file.tar.gz"])
|
||||||
mocker.patch("ahriman.models.package.parse_srcinfo", return_value=(parsed_srcinfo, []))
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=parsed_pkgbuild)
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value=srcinfo)
|
|
||||||
mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"])
|
mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"])
|
||||||
|
|
||||||
assert list(Package.local_files(Path("path"))) == [Path("local-file.tar.gz")]
|
assert list(Package.local_files(Path("path"))) == [Path("local-file.tar.gz")]
|
||||||
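Read as a group, the local_files hunks above replace the old srcinfo round-trip with one pattern: parse the PKGBUILD fixture, override individual fields through PkgbuildPatch, and patch Pkgbuild.from_file to return the result. A condensed sketch of that pattern, reusing the fixture and mock names from the hunk above (pytest-mock assumed):

parsed_pkgbuild = Pkgbuild.from_file(resource_path_root / "models" / "package_yay_pkgbuild")
parsed_pkgbuild.fields["source"] = PkgbuildPatch("source", ["local-file.tar.gz"])  # override a single array field
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=parsed_pkgbuild)
mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"])
assert list(Package.local_files(Path("path"))) == [Path("local-file.tar.gz")]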
@ -311,35 +296,23 @@ def test_local_files(mocker: MockerFixture, resource_path_root: Path) -> None:
|
|||||||
|
|
||||||
def test_local_files_empty(mocker: MockerFixture, resource_path_root: Path) -> None:
|
def test_local_files_empty(mocker: MockerFixture, resource_path_root: Path) -> None:
|
||||||
"""
|
"""
|
||||||
must extract empty local files list when there is no local files
|
must extract empty local files list when there are no local files
|
||||||
"""
|
"""
|
||||||
srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text()
|
pkgbuild = resource_path_root / "models" / "package_yay_pkgbuild"
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value=srcinfo)
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild))
|
||||||
mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"])
|
mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"])
|
||||||
|
|
||||||
assert not list(Package.local_files(Path("path")))
|
assert not list(Package.local_files(Path("path")))
|
||||||
|
|
||||||
|
|
||||||
def test_local_files_error(mocker: MockerFixture) -> None:
|
|
||||||
"""
|
|
||||||
must raise exception on package parsing for local sources
|
|
||||||
"""
|
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value="")
|
|
||||||
mocker.patch("ahriman.models.package.parse_srcinfo", return_value=({"packages": {}}, ["an error"]))
|
|
||||||
|
|
||||||
with pytest.raises(PackageInfoError):
|
|
||||||
list(Package.local_files(Path("path")))
|
|
||||||
|
|
||||||
|
|
||||||
def test_local_files_schema(mocker: MockerFixture, resource_path_root: Path) -> None:
|
def test_local_files_schema(mocker: MockerFixture, resource_path_root: Path) -> None:
|
||||||
"""
|
"""
|
||||||
must skip local file source when the file:// scheme is used
|
must skip local file source when the file:// scheme is used
|
||||||
"""
|
"""
|
||||||
srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text()
|
pkgbuild = resource_path_root / "models" / "package_yay_pkgbuild"
|
||||||
parsed_srcinfo, _ = parse_srcinfo(srcinfo)
|
parsed_pkgbuild = Pkgbuild.from_file(pkgbuild)
|
||||||
parsed_srcinfo["source"] = ["file:///local-file.tar.gz"]
|
parsed_pkgbuild.fields["source"] = PkgbuildPatch("source", ["file:///local-file.tar.gz"])
|
||||||
mocker.patch("ahriman.models.package.parse_srcinfo", return_value=(parsed_srcinfo, []))
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=parsed_pkgbuild)
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value="")
|
|
||||||
mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"])
|
mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"])
|
||||||
|
|
||||||
assert not list(Package.local_files(Path("path")))
|
assert not list(Package.local_files(Path("path")))
|
||||||
@ -349,11 +322,10 @@ def test_local_files_with_install(mocker: MockerFixture, resource_path_root: Pat
|
|||||||
"""
|
"""
|
||||||
must extract local file sources with install file
|
must extract local file sources with install file
|
||||||
"""
|
"""
|
||||||
srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text()
|
pkgbuild = resource_path_root / "models" / "package_yay_pkgbuild"
|
||||||
parsed_srcinfo, _ = parse_srcinfo(srcinfo)
|
parsed_pkgbuild = Pkgbuild.from_file(pkgbuild)
|
||||||
parsed_srcinfo["install"] = "install"
|
parsed_pkgbuild.fields["install"] = PkgbuildPatch("install", "install")
|
||||||
mocker.patch("ahriman.models.package.parse_srcinfo", return_value=(parsed_srcinfo, []))
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=parsed_pkgbuild)
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value="")
|
|
||||||
mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"])
|
mocker.patch("ahriman.models.package.Package.supported_architectures", return_value=["any"])
|
||||||
|
|
||||||
assert list(Package.local_files(Path("path"))) == [Path("install")]
|
assert list(Package.local_files(Path("path"))) == [Path("install")]
|
||||||
@ -363,64 +335,49 @@ def test_supported_architectures(mocker: MockerFixture, resource_path_root: Path
|
|||||||
"""
|
"""
|
||||||
must generate list of available architectures
|
must generate list of available architectures
|
||||||
"""
|
"""
|
||||||
srcinfo = (resource_path_root / "models" / "package_yay_srcinfo").read_text()
|
pkgbuild = resource_path_root / "models" / "package_yay_pkgbuild"
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value=srcinfo)
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild))
|
||||||
assert Package.supported_architectures(Path("path")) == \
|
assert Package.supported_architectures(Path("path")) == \
|
||||||
{"i686", "pentium4", "x86_64", "arm", "armv7h", "armv6h", "aarch64"}
|
{"i686", "pentium4", "x86_64", "arm", "armv7h", "armv6h", "aarch64", "riscv64"}
|
||||||
|
|
||||||
|
|
||||||
def test_supported_architectures_failed(mocker: MockerFixture) -> None:
|
def test_actual_version(package_ahriman: Package, configuration: Configuration) -> None:
|
||||||
"""
|
|
||||||
must raise exception if there are errors during srcinfo load for architectures
|
|
||||||
"""
|
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value="")
|
|
||||||
mocker.patch("ahriman.models.package.parse_srcinfo", return_value=({"packages": {}}, ["an error"]))
|
|
||||||
|
|
||||||
with pytest.raises(PackageInfoError):
|
|
||||||
Package.supported_architectures(Path("path"))
|
|
||||||
|
|
||||||
|
|
||||||
def test_actual_version(package_ahriman: Package, repository_paths: RepositoryPaths) -> None:
|
|
||||||
"""
|
"""
|
||||||
must return the same actual_version as the package version
|
must return the same actual_version as the package version
|
||||||
"""
|
"""
|
||||||
assert package_ahriman.actual_version(repository_paths) == package_ahriman.version
|
assert package_ahriman.actual_version(configuration) == package_ahriman.version
|
||||||
|
|
||||||
|
|
||||||
def test_actual_version_vcs(package_tpacpi_bat_git: Package, repository_paths: RepositoryPaths,
|
def test_actual_version_vcs(package_tpacpi_bat_git: Package, configuration: Configuration,
|
||||||
mocker: MockerFixture, resource_path_root: Path) -> None:
|
mocker: MockerFixture, resource_path_root: Path) -> None:
|
||||||
"""
|
"""
|
||||||
must return valid actual_version for VCS package
|
must return valid actual_version for VCS package
|
||||||
"""
|
"""
|
||||||
srcinfo = (resource_path_root / "models" / "package_tpacpi-bat-git_srcinfo").read_text()
|
pkgbuild = resource_path_root / "models" / "package_tpacpi-bat-git_pkgbuild"
|
||||||
mocker.patch("ahriman.models.package.check_output", return_value=srcinfo)
|
mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_file", return_value=Pkgbuild.from_file(pkgbuild))
|
||||||
mocker.patch("ahriman.core.build_tools.sources.Sources.load")
|
mocker.patch("pathlib.Path.glob", return_value=[Path("local")])
|
||||||
|
init_mock = mocker.patch("ahriman.core.build_tools.task.Task.init")
|
||||||
|
build_mock = mocker.patch("ahriman.core.build_tools.task.Task.build")
|
||||||
|
unlink_mock = mocker.patch("pathlib.Path.unlink")
|
||||||
|
|
||||||
assert package_tpacpi_bat_git.actual_version(repository_paths) == "3.1.r13.g4959b52-1"
|
assert package_tpacpi_bat_git.actual_version(configuration) == "3.1.r13.g4959b52-1"
|
||||||
|
init_mock.assert_called_once_with(configuration.repository_paths.cache_for(package_tpacpi_bat_git.base), [], None)
|
||||||
|
build_mock.assert_called_once_with(configuration.repository_paths.cache_for(package_tpacpi_bat_git.base),
|
||||||
|
dry_run=True)
|
||||||
|
unlink_mock.assert_called_once_with()
|
||||||
|
|
||||||
|
|
||||||
def test_actual_version_srcinfo_failed(package_tpacpi_bat_git: Package, repository_paths: RepositoryPaths,
|
def test_actual_version_failed(package_tpacpi_bat_git: Package, configuration: Configuration,
|
||||||
mocker: MockerFixture) -> None:
|
mocker: MockerFixture) -> None:
|
||||||
"""
|
"""
|
||||||
must return the same version if an exception occurred
|
must return the same version if an exception occurred
|
||||||
"""
|
"""
|
||||||
mocker.patch("ahriman.models.package.check_output", side_effect=Exception())
|
mocker.patch("ahriman.core.build_tools.task.Task.init", side_effect=Exception())
|
||||||
mocker.patch("ahriman.core.build_tools.sources.Sources.load")
|
mocker.patch("pathlib.Path.glob", return_value=[Path("local")])
|
||||||
|
unlink_mock = mocker.patch("pathlib.Path.unlink")
|
||||||
|
|
||||||
assert package_tpacpi_bat_git.actual_version(repository_paths) == package_tpacpi_bat_git.version
|
assert package_tpacpi_bat_git.actual_version(configuration) == package_tpacpi_bat_git.version
|
||||||
|
unlink_mock.assert_called_once_with()
|
||||||
|
|
||||||
def test_actual_version_vcs_failed(package_tpacpi_bat_git: Package, repository_paths: RepositoryPaths,
|
|
||||||
mocker: MockerFixture) -> None:
|
|
||||||
"""
|
|
||||||
must return same version in case if there are errors during parse
|
|
||||||
"""
|
|
||||||
mocker.patch("pathlib.Path.read_text", return_value="")
|
|
||||||
mocker.patch("ahriman.models.package.parse_srcinfo", return_value=({"packages": {}}, ["an error"]))
|
|
||||||
mocker.patch("ahriman.models.package.check_output")
|
|
||||||
mocker.patch("ahriman.core.build_tools.sources.Sources.load")
|
|
||||||
|
|
||||||
assert package_tpacpi_bat_git.actual_version(repository_paths) == package_tpacpi_bat_git.version
|
|
||||||
|
|
||||||
|
|
||||||
def test_full_depends(package_ahriman: Package, package_python_schedule: Package, pyalpm_package_ahriman: MagicMock,
|
def test_full_depends(package_ahriman: Package, package_python_schedule: Package, pyalpm_package_ahriman: MagicMock,
|
||||||
@ -461,17 +418,17 @@ def test_is_newer_than(package_ahriman: Package, package_python_schedule: Packag
|
|||||||
assert not package_python_schedule.is_newer_than(min_date)
|
assert not package_python_schedule.is_newer_than(min_date)
|
||||||
|
|
||||||
|
|
||||||
def test_is_outdated_false(package_ahriman: Package, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
|
def test_is_outdated_false(package_ahriman: Package, configuration: Configuration, mocker: MockerFixture) -> None:
|
||||||
"""
|
"""
|
||||||
must not be outdated for the same package
|
must not be outdated for the same package
|
||||||
"""
|
"""
|
||||||
actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version",
|
actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version",
|
||||||
return_value=package_ahriman.version)
|
return_value=package_ahriman.version)
|
||||||
assert not package_ahriman.is_outdated(package_ahriman, repository_paths)
|
assert not package_ahriman.is_outdated(package_ahriman, configuration)
|
||||||
actual_version_mock.assert_called_once_with(repository_paths)
|
actual_version_mock.assert_called_once_with(configuration)
|
||||||
|
|
||||||
|
|
||||||
def test_is_outdated_true(package_ahriman: Package, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
|
def test_is_outdated_true(package_ahriman: Package, configuration: Configuration, mocker: MockerFixture) -> None:
|
||||||
"""
|
"""
|
||||||
must be outdated for the new version
|
must be outdated for the new version
|
||||||
"""
|
"""
|
||||||
@ -479,27 +436,28 @@ def test_is_outdated_true(package_ahriman: Package, repository_paths: Repository
|
|||||||
other.version = other.version.replace("-1", "-2")
|
other.version = other.version.replace("-1", "-2")
|
||||||
actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version", return_value=other.version)
|
actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version", return_value=other.version)
|
||||||
|
|
||||||
assert package_ahriman.is_outdated(other, repository_paths)
|
assert package_ahriman.is_outdated(other, configuration)
|
||||||
actual_version_mock.assert_called_once_with(repository_paths)
|
actual_version_mock.assert_called_once_with(configuration)
|
||||||
|
|
||||||
|
|
||||||
def test_is_outdated_no_version_calculation(package_ahriman: Package, repository_paths: RepositoryPaths,
|
def test_is_outdated_no_version_calculation(package_ahriman: Package, configuration: Configuration,
|
||||||
mocker: MockerFixture) -> None:
|
mocker: MockerFixture) -> None:
|
||||||
"""
|
"""
|
||||||
must not call actual version if calculation is disabled
|
must not call actual version if calculation is disabled
|
||||||
"""
|
"""
|
||||||
actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version")
|
actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version")
|
||||||
assert not package_ahriman.is_outdated(package_ahriman, repository_paths, calculate_version=False)
|
assert not package_ahriman.is_outdated(package_ahriman, configuration, calculate_version=False)
|
||||||
actual_version_mock.assert_not_called()
|
actual_version_mock.assert_not_called()
|
||||||
|
|
||||||
|
|
||||||
def test_is_outdated_fresh_package(package_ahriman: Package, repository_paths: RepositoryPaths,
|
def test_is_outdated_fresh_package(package_ahriman: Package, configuration: Configuration,
|
||||||
mocker: MockerFixture) -> None:
|
mocker: MockerFixture) -> None:
|
||||||
"""
|
"""
|
||||||
must not call actual version if package is newer than the specified time
|
must not call actual version if package is newer than the specified time
|
||||||
"""
|
"""
|
||||||
|
configuration.set_option("build", "vcs_allowed_age", str(int(utcnow().timestamp())))
|
||||||
actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version")
|
actual_version_mock = mocker.patch("ahriman.models.package.Package.actual_version")
|
||||||
assert not package_ahriman.is_outdated(package_ahriman, repository_paths, vcs_allowed_age=utcnow().timestamp())
|
assert not package_ahriman.is_outdated(package_ahriman, configuration)
|
||||||
actual_version_mock.assert_not_called()
|
actual_version_mock.assert_not_called()
|
||||||
|
|
||||||
|
|
||||||
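The is_outdated/actual_version hunks above also change the call shape: both methods now take a Configuration instead of RepositoryPaths, and the VCS age threshold becomes a build option rather than a keyword argument. A hedged sketch of the new usage (the package variable names are placeholders):

configuration.set_option("build", "vcs_allowed_age", "86400")   # replaces the vcs_allowed_age= keyword
if local_package.is_outdated(remote_package, configuration):    # previously took repository_paths
    print(local_package.actual_version(configuration))          # VCS version now comes from Task.build(dry_run=True)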
|
388
tests/ahriman/models/test_pkgbuild.py
Normal file
@ -0,0 +1,388 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from io import StringIO
|
||||||
|
from pathlib import Path
|
||||||
|
from pytest_mock import MockerFixture
|
||||||
|
|
||||||
|
from ahriman.models.pkgbuild import Pkgbuild
|
||||||
|
from ahriman.models.pkgbuild_patch import PkgbuildPatch
|
||||||
|
|
||||||
|
|
||||||
|
def test_variables(pkgbuild_ahriman: Pkgbuild) -> None:
|
||||||
|
"""
|
||||||
|
must correctly generate list of variables
|
||||||
|
"""
|
||||||
|
assert pkgbuild_ahriman.variables
|
||||||
|
assert "pkgver" in pkgbuild_ahriman.variables
|
||||||
|
assert "build" not in pkgbuild_ahriman.variables
|
||||||
|
assert "source" not in pkgbuild_ahriman.variables
|
||||||
|
|
||||||
|
|
||||||
|
def test_from_file(pkgbuild_ahriman: Pkgbuild, mocker: MockerFixture) -> None:
|
||||||
|
"""
|
||||||
|
must correctly load from file
|
||||||
|
"""
|
||||||
|
open_mock = mocker.patch("pathlib.Path.open")
|
||||||
|
load_mock = mocker.patch("ahriman.models.pkgbuild.Pkgbuild.from_io", return_value=pkgbuild_ahriman)
|
||||||
|
|
||||||
|
assert Pkgbuild.from_file(Path("local"))
|
||||||
|
open_mock.assert_called_once_with()
|
||||||
|
load_mock.assert_called_once_with(pytest.helpers.anyvar(int))
|
||||||
|
|
||||||
|
|
||||||
|
def test_from_io(pkgbuild_ahriman: Pkgbuild, mocker: MockerFixture) -> None:
|
||||||
|
"""
|
||||||
|
must correctly load from io
|
||||||
|
"""
|
||||||
|
load_mock = mocker.patch("ahriman.core.alpm.pkgbuild_parser.PkgbuildParser.parse",
|
||||||
|
return_value=pkgbuild_ahriman.fields.values())
|
||||||
|
assert Pkgbuild.from_io(StringIO("mock")) == pkgbuild_ahriman
|
||||||
|
load_mock.assert_called_once_with()
|
||||||
|
|
||||||
|
|
||||||
|
def test_from_io_pkgbase(pkgbuild_ahriman: Pkgbuild, mocker: MockerFixture) -> None:
|
||||||
|
"""
|
||||||
|
must assign missing pkgbase if pkgname is present
|
||||||
|
"""
|
||||||
|
mocker.patch("ahriman.core.alpm.pkgbuild_parser.PkgbuildParser.parse", side_effect=[
|
||||||
|
[value for key, value in pkgbuild_ahriman.fields.items() if key not in ("pkgbase",)],
|
||||||
|
[value for key, value in pkgbuild_ahriman.fields.items() if key not in ("pkgbase", "pkgname",)],
|
||||||
|
[value for key, value in pkgbuild_ahriman.fields.items()] + [PkgbuildPatch("pkgbase", "pkgbase")],
|
||||||
|
])
|
||||||
|
|
||||||
|
assert Pkgbuild.from_io(StringIO("mock"))["pkgbase"] == pkgbuild_ahriman["pkgname"]
|
||||||
|
assert "pkgbase" not in Pkgbuild.from_io(StringIO("mock"))
|
||||||
|
assert Pkgbuild.from_io(StringIO("mock"))["pkgbase"] == "pkgbase"
|
||||||
|
|
||||||
|
|
||||||
|
def test_from_io_empty(pkgbuild_ahriman: Pkgbuild, mocker: MockerFixture) -> None:
|
||||||
|
"""
|
||||||
|
must skip empty patches
|
||||||
|
"""
|
||||||
|
mocker.patch("ahriman.core.alpm.pkgbuild_parser.PkgbuildParser.parse",
|
||||||
|
return_value=list(pkgbuild_ahriman.fields.values()) + [PkgbuildPatch("", "")])
|
||||||
|
assert Pkgbuild.from_io(StringIO("mock")) == pkgbuild_ahriman
|
||||||
|
|
||||||
|
|
||||||
|
def test_packages(pkgbuild_ahriman: Pkgbuild) -> None:
|
||||||
|
"""
|
||||||
|
must correctly load package function
|
||||||
|
"""
|
||||||
|
assert pkgbuild_ahriman.packages() == {pkgbuild_ahriman["pkgbase"]: Pkgbuild({})}
|
||||||
|
|
||||||
|
|
||||||
|
def test_packages_multi(resource_path_root: Path) -> None:
|
||||||
|
"""
|
||||||
|
must correctly load list of package functions
|
||||||
|
"""
|
||||||
|
pkgbuild = Pkgbuild.from_file(resource_path_root / "models" / "package_gcc10_pkgbuild")
|
||||||
|
packages = pkgbuild.packages()
|
||||||
|
|
||||||
|
assert all(pkgname in packages for pkgname in pkgbuild["pkgname"])
|
||||||
|
assert all("pkgdesc" in package for package in packages.values())
|
||||||
|
assert all("depends" in package for package in packages.values())
|
||||||
|
|
||||||
|
|
||||||
|
def test_packages_empty(pkgbuild_ahriman: Pkgbuild) -> None:
|
||||||
|
"""
|
||||||
|
must correctly load packages without a package function
|
||||||
|
"""
|
||||||
|
del pkgbuild_ahriman.fields["package()"]
|
||||||
|
assert pkgbuild_ahriman.packages() == {pkgbuild_ahriman["pkgbase"]: Pkgbuild({})}
|
||||||
|
|
||||||
|
|
||||||
|
def test_getitem(pkgbuild_ahriman: Pkgbuild) -> None:
|
||||||
|
"""
|
||||||
|
must return element by key
|
||||||
|
"""
|
||||||
|
assert pkgbuild_ahriman["pkgname"] == pkgbuild_ahriman.fields["pkgname"].value
|
||||||
|
assert pkgbuild_ahriman["build()"] == pkgbuild_ahriman.fields["build()"].substitute(pkgbuild_ahriman.variables)
|
||||||
|
|
||||||
|
|
||||||
|
def test_getitem_substitute(pkgbuild_ahriman: Pkgbuild) -> None:
|
||||||
|
"""
|
||||||
|
must return element by key and substitute variables
|
||||||
|
"""
|
||||||
|
pkgbuild_ahriman.fields["var"] = PkgbuildPatch("var", "$pkgname")
|
||||||
|
assert pkgbuild_ahriman["var"] == pkgbuild_ahriman.fields["pkgname"].value
|
||||||
|
|
||||||
|
|
||||||
|
def test_getitem_function(pkgbuild_ahriman: Pkgbuild) -> None:
|
||||||
|
"""
|
||||||
|
must return element by key with fallback to function
|
||||||
|
"""
|
||||||
|
assert pkgbuild_ahriman["build"] == pkgbuild_ahriman.fields["build()"].substitute(pkgbuild_ahriman.variables)
|
||||||
|
|
||||||
|
pkgbuild_ahriman.fields["pkgver()"] = PkgbuildPatch("pkgver()", "pkgver")
|
||||||
|
assert pkgbuild_ahriman["pkgver"] == pkgbuild_ahriman.fields["pkgver"].value
|
||||||
|
assert pkgbuild_ahriman["pkgver()"] == pkgbuild_ahriman.fields["pkgver()"].value
|
||||||
|
|
||||||
|
|
||||||
|
def test_getitem_exception(pkgbuild_ahriman: Pkgbuild) -> None:
|
||||||
|
"""
|
||||||
|
must raise KeyError for unknown key
|
||||||
|
"""
|
||||||
|
with pytest.raises(KeyError):
|
||||||
|
assert pkgbuild_ahriman["field"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_iter(pkgbuild_ahriman: Pkgbuild) -> None:
|
||||||
|
"""
|
||||||
|
must return keys iterator
|
||||||
|
"""
|
||||||
|
for key in list(pkgbuild_ahriman):
|
||||||
|
del pkgbuild_ahriman.fields[key]
|
||||||
|
assert not pkgbuild_ahriman.fields
|
||||||
|
|
||||||
|
|
||||||
|
def test_len(pkgbuild_ahriman: Pkgbuild) -> None:
|
||||||
|
"""
|
||||||
|
must return length of the map
|
||||||
|
"""
|
||||||
|
assert len(pkgbuild_ahriman) == len(pkgbuild_ahriman.fields)
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_ahriman(resource_path_root: Path) -> None:
|
||||||
|
"""
|
||||||
|
must parse real PKGBUILDs correctly (ahriman)
|
||||||
|
"""
|
||||||
|
pkgbuild = Pkgbuild.from_file(resource_path_root / "models" / "package_ahriman_pkgbuild")
|
||||||
|
values = {key: value.value for key, value in pkgbuild.fields.items() if not value.is_function}
|
||||||
|
assert values == {
|
||||||
|
"pkgbase": "ahriman",
|
||||||
|
"pkgname": "ahriman",
|
||||||
|
"pkgver": "2.6.0",
|
||||||
|
"pkgrel": "1",
|
||||||
|
"pkgdesc": "ArcH linux ReposItory MANager",
|
||||||
|
"arch": ["any"],
|
||||||
|
"url": "https://github.com/arcan1s/ahriman",
|
||||||
|
"license": ["GPL3"],
|
||||||
|
"depends": [
|
||||||
|
"devtools",
|
||||||
|
"git",
|
||||||
|
"pyalpm",
|
||||||
|
"python-cerberus",
|
||||||
|
"python-inflection",
|
||||||
|
"python-passlib",
|
||||||
|
"python-requests",
|
||||||
|
"python-setuptools",
|
||||||
|
"python-srcinfo",
|
||||||
|
],
|
||||||
|
"makedepends": [
|
||||||
|
"python-build",
|
||||||
|
"python-installer",
|
||||||
|
"python-wheel",
|
||||||
|
],
|
||||||
|
"optdepends": [
|
||||||
|
"breezy: -bzr packages support",
|
||||||
|
"darcs: -darcs packages support",
|
||||||
|
"mercurial: -hg packages support",
|
||||||
|
"python-aioauth-client: web server with OAuth2 authorization",
|
||||||
|
"python-aiohttp: web server",
|
||||||
|
"python-aiohttp-debugtoolbar: web server with enabled debug panel",
|
||||||
|
"python-aiohttp-jinja2: web server",
|
||||||
|
"python-aiohttp-security: web server with authorization",
|
||||||
|
"python-aiohttp-session: web server with authorization",
|
||||||
|
"python-boto3: sync to s3",
|
||||||
|
"python-cryptography: web server with authorization",
|
||||||
|
"python-requests-unixsocket: client report to web server by unix socket",
|
||||||
|
"python-jinja: html report generation",
|
||||||
|
"rsync: sync by using rsync",
|
||||||
|
"subversion: -svn packages support",
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver-src.tar.xz",
|
||||||
|
"ahriman.sysusers",
|
||||||
|
"ahriman.tmpfiles",
|
||||||
|
],
|
||||||
|
"backup": [
|
||||||
|
"etc/ahriman.ini",
|
||||||
|
"etc/ahriman.ini.d/logging.ini",
|
||||||
|
],
|
||||||
|
"sha512sums": [
|
||||||
|
"ec1f64e463455761d72be7f7b8b51b3b4424685c96a2d5eee6afa1c93780c8d7f8a39487a2f2f3bd83d2b58a93279e1392a965a4b905795e58ca686fb21123a1",
|
||||||
|
"53d37efec812afebf86281716259f9ea78a307b83897166c72777251c3eebcb587ecee375d907514781fb2a5c808cbb24ef9f3f244f12740155d0603bf213131",
|
||||||
|
"62b2eccc352d33853ef243c9cddd63663014aa97b87242f1b5bc5099a7dbd69ff3821f24ffc58e1b7f2387bd4e9e9712cc4c67f661b1724ad99cdf09b3717794",
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_gcc10(resource_path_root: Path) -> None:
|
||||||
|
"""
|
||||||
|
must parse real PKGBUILDs correctly (gcc10)
|
||||||
|
"""
|
||||||
|
pkgbuild = Pkgbuild.from_file(resource_path_root / "models" / "package_gcc10_pkgbuild")
|
||||||
|
values = {key: value.value for key, value in pkgbuild.fields.items() if not value.is_function}
|
||||||
|
assert values == {
|
||||||
|
"pkgbase": "gcc10",
|
||||||
|
"pkgname": [
|
||||||
|
"${pkgbase}",
|
||||||
|
"${pkgbase}-libs",
|
||||||
|
"${pkgbase}-fortran",
|
||||||
|
],
|
||||||
|
"pkgver": "10.5.0",
|
||||||
|
"_majorver": "${pkgver%%.*}",
|
||||||
|
"_islver": "0.24",
|
||||||
|
"pkgrel": "2",
|
||||||
|
"pkgdesc": "The GNU Compiler Collection (10.x.x)",
|
||||||
|
"arch": ["x86_64"],
|
||||||
|
"url": "https://gcc.gnu.org",
|
||||||
|
"license": [
|
||||||
|
"GPL-3.0-or-later",
|
||||||
|
"LGPL-3.0+",
|
||||||
|
"GFDL-1.3",
|
||||||
|
"LicenseRef-custom",
|
||||||
|
],
|
||||||
|
"makedepends": [
|
||||||
|
"binutils",
|
||||||
|
"doxygen",
|
||||||
|
"git",
|
||||||
|
"libmpc",
|
||||||
|
"python",
|
||||||
|
],
|
||||||
|
"checkdepends": [
|
||||||
|
"dejagnu",
|
||||||
|
"inetutils",
|
||||||
|
],
|
||||||
|
"options": [
|
||||||
|
"!emptydirs",
|
||||||
|
"!lto",
|
||||||
|
"!buildflags",
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"https://sourceware.org/pub/gcc/releases/gcc-${pkgver}/gcc-${pkgver}.tar.xz",
|
||||||
|
"https://sourceware.org/pub/gcc/releases/gcc-${pkgver}/gcc-${pkgver}.tar.xz.sig",
|
||||||
|
"https://sourceware.org/pub/gcc/infrastructure/isl-${_islver}.tar.bz2",
|
||||||
|
"c89",
|
||||||
|
"c99",
|
||||||
|
],
|
||||||
|
"validpgpkeys": [
|
||||||
|
"F3691687D867B81B51CE07D9BBE43771487328A9",
|
||||||
|
"86CFFCA918CF3AF47147588051E8B148A9999C34",
|
||||||
|
"13975A70E63C361C73AE69EF6EEB81F8981C74C7",
|
||||||
|
"D3A93CAD751C2AF4F8C7AD516C35B99309B5FA62",
|
||||||
|
],
|
||||||
|
"md5sums": [
|
||||||
|
"c7d1958570fbd1cd859b015774b9987a",
|
||||||
|
"SKIP",
|
||||||
|
"dd2f7b78e118c25bd96134a52aae7f4d",
|
||||||
|
"d5fd2672deb5f97a2c4bdab486470abe",
|
||||||
|
"d99ba9f4bd860e274f17040ee51cd1bf",
|
||||||
|
],
|
||||||
|
"b2sums": [
|
||||||
|
"9b71761f4015649514677784443886e59733ac3845f7dfaa4343f46327d36c08c403c444b9e492b870ac0b3f2e3568f972b7700a0ef05a497fb4066079b3143b",
|
||||||
|
"SKIP",
|
||||||
|
"88a178dad5fe9c33be5ec5fe4ac9abc0e075a86cff9184f75cedb7c47de67ce3be273bd0db72286ba0382f4016e9d74855ead798ad7bccb015b853931731828e",
|
||||||
|
"a76d19c7830b0a141302890522086fc1548c177611501caac7e66d576e541b64ca3f6e977de715268a9872dfdd6368a011b92e01f7944ec0088f899ac0d2a2a5",
|
||||||
|
"02b655b5668f7dea51c3b3e4ff46d5a4aee5a04ed5e26b98a6470f39c2e98ddc0519bffeeedd982c31ef3c171457e4d1beaff32767d1aedd9346837aac4ec3ee",
|
||||||
|
],
|
||||||
|
"_CHOST": "${CHOST:=}",
|
||||||
|
"_MAKEFLAGS": "${MAKEFLAGS:=}",
|
||||||
|
"_libdir": "usr/lib/gcc/${CHOST}/${pkgver%%+*}",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_jellyfin_ffmpeg6_bin(resource_path_root: Path) -> None:
|
||||||
|
"""
|
||||||
|
must parse real PKGBUILDs correctly (jellyfin-ffmpeg6-bin)
|
||||||
|
"""
|
||||||
|
pkgbuild = Pkgbuild.from_file(resource_path_root / "models" / "package_jellyfin-ffmpeg6-bin_pkgbuild")
|
||||||
|
values = {key: value.value for key, value in pkgbuild.fields.items() if not value.is_function}
|
||||||
|
assert values == {
|
||||||
|
"pkgbase": "jellyfin-ffmpeg6-bin",
|
||||||
|
"pkgname": "jellyfin-ffmpeg6-bin",
|
||||||
|
"pkgver": "6.0",
|
||||||
|
"pkgrel": "6",
|
||||||
|
"pkgdesc": "FFmpeg6 binary version for Jellyfin",
|
||||||
|
"arch": ["x86_64", "aarch64"],
|
||||||
|
"url": "https://github.com/jellyfin/jellyfin-ffmpeg",
|
||||||
|
"license": ["GPL3"],
|
||||||
|
"depends_x86_64": ["glibc>=2.23"],
|
||||||
|
"depends_aarch64": ["glibc>=2.27"],
|
||||||
|
"optdepends": [
|
||||||
|
"intel-media-driver: for Intel VAAPI support (Broadwell and newer)",
|
||||||
|
"intel-media-sdk: for Intel Quick Sync Video",
|
||||||
|
"onevpl-intel-gpu: for Intel Quick Sync Video (12th Gen and newer)",
|
||||||
|
"intel-compute-runtime: for Intel OpenCL runtime based Tonemapping",
|
||||||
|
"libva-intel-driver: for Intel legacy VAAPI support (10th Gen and older)",
|
||||||
|
"libva-mesa-driver: for AMD VAAPI support",
|
||||||
|
"nvidia-utils: for Nvidia NVDEC/NVENC support",
|
||||||
|
"opencl-amd: for AMD OpenCL runtime based Tonemapping",
|
||||||
|
"vulkan-radeon: for AMD RADV Vulkan support",
|
||||||
|
"vulkan-intel: for Intel ANV Vulkan support",
|
||||||
|
],
|
||||||
|
"conflicts": [
|
||||||
|
"jellyfin-ffmpeg",
|
||||||
|
"jellyfin-ffmpeg5",
|
||||||
|
"jellyfin-ffmpeg5-bin",
|
||||||
|
"jellyfin-ffmpeg6",
|
||||||
|
],
|
||||||
|
"source_x86_64": ["https://repo.jellyfin.org/releases/ffmpeg/${pkgver}-${pkgrel}/jellyfin-ffmpeg_${pkgver}-${pkgrel}_portable_linux64-gpl.tar.xz"],
|
||||||
|
"source_aarch64": ["https://repo.jellyfin.org/releases/ffmpeg/${pkgver}-${pkgrel}/jellyfin-ffmpeg_${pkgver}-${pkgrel}_portable_linuxarm64-gpl.tar.xz"],
|
||||||
|
"sha256sums_x86_64": ["32cbe40942d26072faa1182835ccc89029883766de11778c731b529aa632ff37"],
|
||||||
|
"sha256sums_aarch64": ["22b8f2a3c92c6b1c9e6830a6631f08f3f0a7ae80739ace71ad30704a28045184"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_tpacpi_bat_git(resource_path_root: Path) -> None:
|
||||||
|
"""
|
||||||
|
must parse real PKGBUILDs correctly (tpacpi-bat-git)
|
||||||
|
"""
|
||||||
|
pkgbuild = Pkgbuild.from_file(resource_path_root / "models" / "package_tpacpi-bat-git_pkgbuild")
|
||||||
|
values = {key: value.value for key, value in pkgbuild.fields.items() if not value.is_function}
|
||||||
|
assert values == {
|
||||||
|
"pkgbase": "tpacpi-bat-git",
|
||||||
|
"pkgname": "tpacpi-bat-git",
|
||||||
|
"pkgver": "3.1.r13.g4959b52",
|
||||||
|
"pkgrel": "1",
|
||||||
|
"pkgdesc": "A Perl script with ACPI calls for recent ThinkPads which are not supported by tp_smapi",
|
||||||
|
"arch": ["any"],
|
||||||
|
"url": "https://github.com/teleshoes/tpacpi-bat",
|
||||||
|
"license": ["GPL3"],
|
||||||
|
"depends": ["perl", "acpi_call"],
|
||||||
|
"makedepends": ["git"],
|
||||||
|
"provides": ["tpacpi-bat"],
|
||||||
|
"conflicts": ["tpacpi-bat"],
|
||||||
|
"backup": ["etc/conf.d/tpacpi"],
|
||||||
|
"source": ["git+https://github.com/teleshoes/tpacpi-bat.git"],
|
||||||
|
"b2sums": ["SKIP"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_yay(resource_path_root: Path) -> None:
|
||||||
|
"""
|
||||||
|
must parse real PKGBUILDs correctly (yay)
|
||||||
|
"""
|
||||||
|
pkgbuild = Pkgbuild.from_file(resource_path_root / "models" / "package_yay_pkgbuild")
|
||||||
|
values = {key: value.value for key, value in pkgbuild.fields.items() if not value.is_function}
|
||||||
|
assert values == {
|
||||||
|
"pkgbase": "yay",
|
||||||
|
"pkgname": "yay",
|
||||||
|
"pkgver": "12.3.5",
|
||||||
|
"pkgrel": "1",
|
||||||
|
"pkgdesc": "Yet another yogurt. Pacman wrapper and AUR helper written in go.",
|
||||||
|
"arch": [
|
||||||
|
"i686",
|
||||||
|
"pentium4",
|
||||||
|
"x86_64",
|
||||||
|
"arm",
|
||||||
|
"armv7h",
|
||||||
|
"armv6h",
|
||||||
|
"aarch64",
|
||||||
|
"riscv64",
|
||||||
|
],
|
||||||
|
"url": "https://github.com/Jguer/yay",
|
||||||
|
"options": ["!lto"],
|
||||||
|
"license": ["GPL-3.0-or-later"],
|
||||||
|
"depends": [
|
||||||
|
"pacman>6.1",
|
||||||
|
"git",
|
||||||
|
],
|
||||||
|
"optdepends": [
|
||||||
|
"sudo: privilege elevation",
|
||||||
|
"doas: privilege elevation",
|
||||||
|
],
|
||||||
|
"makedepends": ["go>=1.21"],
|
||||||
|
"source": ["${pkgname}-${pkgver}.tar.gz::https://github.com/Jguer/yay/archive/v${pkgver}.tar.gz"],
|
||||||
|
"sha256sums": ["2fb6121a6eb4c5e6afaf22212b2ed15022500a4bc34bb3dc0f9782c1d43c3962"],
|
||||||
|
}
|
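Taken together, the new tests above describe Pkgbuild as a read-only mapping over parsed PKGBUILD fields, with bash-style variable substitution on access and split-package support via packages(). A minimal usage sketch derived from those tests (the path is hypothetical):

pkgbuild = Pkgbuild.from_file(Path("PKGBUILD"))
version = pkgbuild["pkgver"]                       # scalar fields substitute $variables on access
architectures = pkgbuild["arch"]                   # array fields are returned as lists
for name, split in pkgbuild.packages().items():    # one entry per pkgname, built from package_*() bodies
    if "pkgdesc" in split:
        print(name, split["pkgdesc"])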
@ -132,6 +132,14 @@ def test_serialize_list() -> None:
|
|||||||
assert PkgbuildPatch("key", ["val'ue", "val\"ue2"]).serialize() == """key=('val'"'"'ue' 'val"ue2')"""
|
assert PkgbuildPatch("key", ["val'ue", "val\"ue2"]).serialize() == """key=('val'"'"'ue' 'val"ue2')"""
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute() -> None:
|
||||||
|
"""
|
||||||
|
must correctly substitute variables
|
||||||
|
"""
|
||||||
|
assert PkgbuildPatch("key", "$env $value").substitute({"env": "variable"}) == "variable $value"
|
||||||
|
assert PkgbuildPatch("key", ["$env $value"]).substitute({"env": "variable"}) == ["variable $value"]
|
||||||
|
|
||||||
|
|
||||||
def test_write(mocker: MockerFixture) -> None:
|
def test_write(mocker: MockerFixture) -> None:
|
||||||
"""
|
"""
|
||||||
must write serialized value to the file
|
must write serialized value to the file
|
||||||
|
55
tests/testresources/models/package_ahriman_pkgbuild
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
# Maintainer: Evgeniy Alekseev
|
||||||
|
|
||||||
|
pkgname='ahriman'
|
||||||
|
pkgver=2.6.0
|
||||||
|
pkgrel=1
|
||||||
|
pkgdesc="ArcH linux ReposItory MANager"
|
||||||
|
arch=('any')
|
||||||
|
url="https://github.com/arcan1s/ahriman"
|
||||||
|
license=('GPL3')
|
||||||
|
depends=('devtools' 'git' 'pyalpm' 'python-cerberus' 'python-inflection' 'python-passlib' 'python-requests' 'python-setuptools' 'python-srcinfo')
|
||||||
|
makedepends=('python-build' 'python-installer' 'python-wheel')
|
||||||
|
optdepends=('breezy: -bzr packages support'
|
||||||
|
'darcs: -darcs packages support'
|
||||||
|
'mercurial: -hg packages support'
|
||||||
|
'python-aioauth-client: web server with OAuth2 authorization'
|
||||||
|
'python-aiohttp: web server'
|
||||||
|
'python-aiohttp-debugtoolbar: web server with enabled debug panel'
|
||||||
|
'python-aiohttp-jinja2: web server'
|
||||||
|
'python-aiohttp-security: web server with authorization'
|
||||||
|
'python-aiohttp-session: web server with authorization'
|
||||||
|
'python-boto3: sync to s3'
|
||||||
|
'python-cryptography: web server with authorization'
|
||||||
|
'python-requests-unixsocket: client report to web server by unix socket'
|
||||||
|
'python-jinja: html report generation'
|
||||||
|
'rsync: sync by using rsync'
|
||||||
|
'subversion: -svn packages support')
|
||||||
|
source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver-src.tar.xz"
|
||||||
|
'ahriman.sysusers'
|
||||||
|
'ahriman.tmpfiles')
|
||||||
|
backup=('etc/ahriman.ini'
|
||||||
|
'etc/ahriman.ini.d/logging.ini')
|
||||||
|
|
||||||
|
build() {
|
||||||
|
cd "$pkgname"
|
||||||
|
|
||||||
|
python -m build --wheel --no-isolation
|
||||||
|
}
|
||||||
|
|
||||||
|
package() {
|
||||||
|
cd "$pkgname"
|
||||||
|
|
||||||
|
python -m installer --destdir="$pkgdir" "dist/$pkgname-$pkgver-py3-none-any.whl"
|
||||||
|
|
||||||
|
# python-installer actually thinks that you cannot just copy files to root
|
||||||
|
# thus we need to copy them manually
|
||||||
|
install -Dm644 "$pkgdir/usr/share/$pkgname/settings/ahriman.ini" "$pkgdir/etc/ahriman.ini"
|
||||||
|
install -Dm644 "$pkgdir/usr/share/$pkgname/settings/ahriman.ini.d/logging.ini" "$pkgdir/etc/ahriman.ini.d/logging.ini"
|
||||||
|
|
||||||
|
install -Dm644 "$srcdir/$pkgname.sysusers" "$pkgdir/usr/lib/sysusers.d/$pkgname.conf"
|
||||||
|
install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf"
|
||||||
|
}
|
||||||
|
|
||||||
|
sha512sums=('ec1f64e463455761d72be7f7b8b51b3b4424685c96a2d5eee6afa1c93780c8d7f8a39487a2f2f3bd83d2b58a93279e1392a965a4b905795e58ca686fb21123a1'
|
||||||
|
'53d37efec812afebf86281716259f9ea78a307b83897166c72777251c3eebcb587ecee375d907514781fb2a5c808cbb24ef9f3f244f12740155d0603bf213131'
|
||||||
|
'62b2eccc352d33853ef243c9cddd63663014aa97b87242f1b5bc5099a7dbd69ff3821f24ffc58e1b7f2387bd4e9e9712cc4c67f661b1724ad99cdf09b3717794')
|
@ -1,45 +0,0 @@
|
|||||||
pkgbase = ahriman
|
|
||||||
pkgdesc = ArcH linux ReposItory MANager
|
|
||||||
pkgver = 2.6.0
|
|
||||||
pkgrel = 1
|
|
||||||
url = https://github.com/arcan1s/ahriman
|
|
||||||
arch = any
|
|
||||||
license = GPL3
|
|
||||||
checkdepends = python-pytest
|
|
||||||
makedepends = python-build
|
|
||||||
makedepends = python-installer
|
|
||||||
makedepends = python-wheel
|
|
||||||
depends = devtools
|
|
||||||
depends = git
|
|
||||||
depends = pyalpm
|
|
||||||
depends = python-cerberus
|
|
||||||
depends = python-inflection
|
|
||||||
depends = python-passlib
|
|
||||||
depends = python-requests
|
|
||||||
depends = python-setuptools
|
|
||||||
depends = python-srcinfo
|
|
||||||
optdepends = breezy: -bzr packages support
|
|
||||||
optdepends = darcs: -darcs packages support
|
|
||||||
optdepends = mercurial: -hg packages support
|
|
||||||
optdepends = python-aioauth-client: web server with OAuth2 authorization
|
|
||||||
optdepends = python-aiohttp: web server
|
|
||||||
optdepends = python-aiohttp-debugtoolbar: web server with enabled debug panel
|
|
||||||
optdepends = python-aiohttp-jinja2: web server
|
|
||||||
optdepends = python-aiohttp-security: web server with authorization
|
|
||||||
optdepends = python-aiohttp-session: web server with authorization
|
|
||||||
optdepends = python-boto3: sync to s3
|
|
||||||
optdepends = python-cryptography: web server with authorization
|
|
||||||
optdepends = python-requests-unixsocket: client report to web server by unix socket
|
|
||||||
optdepends = python-jinja: html report generation
|
|
||||||
optdepends = rsync: sync by using rsync
|
|
||||||
optdepends = subversion: -svn packages support
|
|
||||||
backup = etc/ahriman.ini
|
|
||||||
backup = etc/ahriman.ini.d/logging.ini
|
|
||||||
source = https://github.com/arcan1s/ahriman/releases/download/2.6.0/ahriman-2.6.0-src.tar.xz
|
|
||||||
source = ahriman.sysusers
|
|
||||||
source = ahriman.tmpfiles
|
|
||||||
sha512sums = ec1f64e463455761d72be7f7b8b51b3b4424685c96a2d5eee6afa1c93780c8d7f8a39487a2f2f3bd83d2b58a93279e1392a965a4b905795e58ca686fb21123a1
|
|
||||||
sha512sums = 53d37efec812afebf86281716259f9ea78a307b83897166c72777251c3eebcb587ecee375d907514781fb2a5c808cbb24ef9f3f244f12740155d0603bf213131
|
|
||||||
sha512sums = 62b2eccc352d33853ef243c9cddd63663014aa97b87242f1b5bc5099a7dbd69ff3821f24ffc58e1b7f2387bd4e9e9712cc4c67f661b1724ad99cdf09b3717794
|
|
||||||
|
|
||||||
pkgname = ahriman
|
|
270
tests/testresources/models/package_gcc10_pkgbuild
Normal file
@ -0,0 +1,270 @@
|
|||||||
|
# Maintainer: Chris Severance aur.severach aATt spamgourmet dott com
|
||||||
|
# Contributor: Jonathon Fernyhough <jonathon+m2x+dev>
|
||||||
|
# Contributor: Giancarlo Razzolini <grazzolini@archlinux.org>
|
||||||
|
# Contributor: Frederik Schwan <freswa at archlinux dot org>
|
||||||
|
# Contributor: Bartłomiej Piotrowski <bpiotrowski@archlinux.org>
|
||||||
|
# Contributor: Allan McRae <allan@archlinux.org>
|
||||||
|
# Contributor: Daniel Kozak <kozzi11@gmail.com>
|
||||||
|
|
||||||
|
set -u
|
||||||
|
pkgbase='gcc10'
|
||||||
|
pkgname=("${pkgbase}"{,-libs,-fortran})
|
||||||
|
pkgver='10.5.0'
|
||||||
|
_majorver="${pkgver%%.*}"
|
||||||
|
_islver='0.24'
|
||||||
|
pkgrel='2'
|
||||||
|
pkgdesc='The GNU Compiler Collection (10.x.x)'
|
||||||
|
arch=('x86_64')
|
||||||
|
url='https://gcc.gnu.org'
|
||||||
|
license=('GPL-3.0-or-later' 'LGPL-3.0+' 'GFDL-1.3' 'LicenseRef-custom')
|
||||||
|
makedepends=('binutils' 'doxygen' 'git' 'libmpc' 'python')
|
||||||
|
checkdepends=('dejagnu' 'inetutils')
|
||||||
|
options=('!emptydirs' '!lto' '!buildflags')
|
||||||
|
source=(
|
||||||
|
"https://sourceware.org/pub/gcc/releases/gcc-${pkgver}/gcc-${pkgver}.tar.xz"{,.sig}
|
||||||
|
"https://sourceware.org/pub/gcc/infrastructure/isl-${_islver}.tar.bz2"
|
||||||
|
'c89'
|
||||||
|
'c99'
|
||||||
|
)
|
||||||
|
validpgpkeys=(
|
||||||
|
'F3691687D867B81B51CE07D9BBE43771487328A9' # bpiotrowski@archlinux.org
|
||||||
|
'86CFFCA918CF3AF47147588051E8B148A9999C34' # evangelos@foutrelis.com
|
||||||
|
'13975A70E63C361C73AE69EF6EEB81F8981C74C7' # richard.guenther@gmail.com
|
||||||
|
'D3A93CAD751C2AF4F8C7AD516C35B99309B5FA62' # Jakub Jelinek <jakub@redhat.com>
|
||||||
|
)
|
||||||
|
md5sums=('c7d1958570fbd1cd859b015774b9987a'
|
||||||
|
'SKIP'
|
||||||
|
'dd2f7b78e118c25bd96134a52aae7f4d'
|
||||||
|
'd5fd2672deb5f97a2c4bdab486470abe'
|
||||||
|
'd99ba9f4bd860e274f17040ee51cd1bf')
|
||||||
|
b2sums=('9b71761f4015649514677784443886e59733ac3845f7dfaa4343f46327d36c08c403c444b9e492b870ac0b3f2e3568f972b7700a0ef05a497fb4066079b3143b'
|
||||||
|
'SKIP'
|
||||||
|
'88a178dad5fe9c33be5ec5fe4ac9abc0e075a86cff9184f75cedb7c47de67ce3be273bd0db72286ba0382f4016e9d74855ead798ad7bccb015b853931731828e'
|
||||||
|
'a76d19c7830b0a141302890522086fc1548c177611501caac7e66d576e541b64ca3f6e977de715268a9872dfdd6368a011b92e01f7944ec0088f899ac0d2a2a5'
|
||||||
|
'02b655b5668f7dea51c3b3e4ff46d5a4aee5a04ed5e26b98a6470f39c2e98ddc0519bffeeedd982c31ef3c171457e4d1beaff32767d1aedd9346837aac4ec3ee')
|
||||||
|
|
||||||
|
_CHOST="${CHOST:=}" # https://bbs.archlinux.org/viewtopic.php?pid=2174541
|
||||||
|
_MAKEFLAGS="${MAKEFLAGS:=}"
|
||||||
|
|
||||||
|
_libdir="usr/lib/gcc/${CHOST}/${pkgver%%+*}"
|
||||||
|
|
||||||
|
prepare() {
|
||||||
|
set -u
|
||||||
|
if [ ! -d 'gcc' ]; then
|
||||||
|
ln -s "gcc-${pkgver/+/-}" 'gcc'
|
||||||
|
fi
|
||||||
|
pushd 'gcc' > /dev/null
|
||||||
|
|
||||||
|
# link isl for in-tree build
|
||||||
|
ln -s "../isl-${_islver}" 'isl'
|
||||||
|
|
||||||
|
# Do not run fixincludes
|
||||||
|
sed -e 's@\./fixinc\.sh@-c true@' -i 'gcc/Makefile.in'
|
||||||
|
|
||||||
|
# Arch Linux installs x86_64 libraries /lib
|
||||||
|
sed -e '/m64=/s/lib64/lib/' -i 'gcc/config/i386/t-linux64'
|
||||||
|
|
||||||
|
# hack! - some configure tests for header files using "$CPP $CPPFLAGS"
|
||||||
|
sed -e '/ac_cpp=/s/$CPPFLAGS/$CPPFLAGS -O2/' -i 'gcc/configure'
|
||||||
|
|
||||||
|
popd > /dev/null
|
||||||
|
|
||||||
|
rm -rf 'gcc-build'
|
||||||
|
mkdir 'gcc-build'
|
||||||
|
|
||||||
|
set +u
|
||||||
|
}
|
||||||
|
|
||||||
|
build() {
|
||||||
|
set -u
|
||||||
|
export MAKEFLAGS="${_MAKEFLAGS}"
|
||||||
|
export CHOST="${_CHOST}"
|
||||||
|
cd 'gcc-build'
|
||||||
|
|
||||||
|
if [ ! -s 'Makefile' ]; then
|
||||||
|
# The following options are one per line, mostly sorted so they are easy to diff compare to other gcc packages.
|
||||||
|
local _conf=(
|
||||||
|
--build="${CHOST}"
|
||||||
|
--disable-libssp
|
||||||
|
--disable-libstdcxx-pch
|
||||||
|
--disable-libunwind-exceptions
|
||||||
|
--disable-multilib
|
||||||
|
--disable-werror
|
||||||
|
--enable-__cxa_atexit
|
||||||
|
--enable-cet='auto'
|
||||||
|
--enable-checking='release'
|
||||||
|
--enable-clocale='gnu'
|
||||||
|
--enable-default-pie
|
||||||
|
--enable-default-ssp
|
||||||
|
--enable-gnu-indirect-function
|
||||||
|
--enable-gnu-unique-object
|
||||||
|
--enable-languages='c,c++,fortran,lto'
|
||||||
|
--enable-linker-build-id
|
||||||
|
--enable-lto
|
||||||
|
--enable-plugin
|
||||||
|
--enable-shared
|
||||||
|
--enable-threads='posix'
|
||||||
|
--enable-version-specific-runtime-libs
|
||||||
|
--infodir='/usr/share/info'
|
||||||
|
--libdir='/usr/lib'
|
||||||
|
--libexecdir='/usr/lib'
|
||||||
|
--mandir='/usr/share/man'
|
||||||
|
--program-suffix="-${_majorver}"
|
||||||
|
--with-bugurl='https://bugs.archlinux.org/'
|
||||||
|
--with-isl
|
||||||
|
--with-linker-hash-style='gnu'
|
||||||
|
--with-pkgversion="Arch Linux ${pkgver}-${pkgrel}"
|
||||||
|
--with-system-zlib
|
||||||
|
--prefix='/usr'
|
||||||
|
)
|
||||||
|
../gcc/configure "${_conf[@]}"
|
||||||
|
fi
|
||||||
|
LD_PRELOAD='/usr/lib/libstdc++.so' \
|
||||||
|
nice make -s
|
||||||
|
|
||||||
|
set +u; msg 'Compile complete'; set -u
|
||||||
|
|
||||||
|
# make documentation
|
||||||
|
make -s -j1 -C "${CHOST}/libstdc++-v3/doc" 'doc-man-doxygen'
|
||||||
|
set +u
|
||||||
|
}
|
||||||
|
|
||||||
|
check() {
|
||||||
|
set -u
|
||||||
|
cd 'gcc-build'
|
||||||
|
|
||||||
|
# disable libphobos test to avoid segfaults and other unfunny ways to waste my time
|
||||||
|
sed -e '/maybe-check-target-libphobos \\/d' -i 'Makefile'
|
||||||
|
|
||||||
|
# do not abort on error as some are "expected"
|
||||||
|
make -O -k check || :
|
||||||
|
"${srcdir}/gcc/contrib/test_summary"
|
||||||
|
set +u
|
||||||
|
}
|
||||||
|
|
||||||
|
package_gcc10-libs() {
|
||||||
|
set -u
|
||||||
|
export MAKEFLAGS="${_MAKEFLAGS}"
|
||||||
|
export CHOST="${_CHOST}"
|
||||||
|
pkgdesc='Runtime libraries shipped by GCC (10.x.x)'
|
||||||
|
depends=('glibc>=2.27')
|
||||||
|
options=('!emptydirs' '!strip')
|
||||||
|
provides=('libgfortran.so' 'libubsan.so' 'libasan.so' 'libtsan.so' 'liblsan.so')
|
||||||
|
|
||||||
|
cd 'gcc-build'
|
||||||
|
LD_PRELOAD='/usr/lib/libstdc++.so' \
|
||||||
|
make -C "${CHOST}/libgcc" DESTDIR="${pkgdir}" install-shared
|
||||||
|
mv "${pkgdir}/${_libdir}"/../lib/* "${pkgdir}/${_libdir}"
|
||||||
|
rmdir "${pkgdir}/${_libdir}/../lib"
|
||||||
|
rm -f "${pkgdir}/${_libdir}/libgcc_eh.a"
|
||||||
|
|
||||||
|
local _lib
|
||||||
|
for _lib in libatomic \
|
||||||
|
libgfortran \
|
||||||
|
libgomp \
|
||||||
|
libitm \
|
||||||
|
libquadmath \
|
||||||
|
libsanitizer/{a,l,ub,t}san \
|
||||||
|
libstdc++-v3/src \
|
||||||
|
libvtv; do
|
||||||
|
make -C "${CHOST}/${_lib}" DESTDIR="${pkgdir}" install-toolexeclibLTLIBRARIES
|
||||||
|
done
|
||||||
|
|
||||||
|
make -C "${CHOST}/libstdc++-v3/po" DESTDIR="${pkgdir}" install
|
||||||
|
|
||||||
|
# Install Runtime Library Exception
|
||||||
|
install -Dm644 "${srcdir}/gcc/COPYING.RUNTIME" \
|
||||||
|
"${pkgdir}/usr/share/licenses/${pkgname}/RUNTIME.LIBRARY.EXCEPTION"
|
||||||
|
|
||||||
|
# remove conflicting files
|
||||||
|
rm -rf "${pkgdir}/usr/share/locale"
|
||||||
|
set +u
|
||||||
|
}
|
||||||
|
|
||||||
|
package_gcc10() {
|
||||||
|
set -u
|
||||||
|
export MAKEFLAGS="${_MAKEFLAGS}"
|
||||||
|
export CHOST="${_CHOST}"
|
||||||
|
pkgdesc='The GNU Compiler Collection - C and C++ frontends (10.x.x)'
|
||||||
|
depends=("${pkgbase}-libs=${pkgver}-${pkgrel}" 'binutils>=2.28' 'libmpc' 'zstd')
|
||||||
|
options=('!emptydirs' 'staticlibs')
|
||||||
|
|
||||||
|
cd 'gcc-build'
|
||||||
|
|
||||||
|
make -C 'gcc' DESTDIR="${pkgdir}" install-driver install-cpp install-gcc-ar \
|
||||||
|
c++.install-common install-headers install-plugin install-lto-wrapper
|
||||||
|
|
||||||
|
install -m755 -t "${pkgdir}/${_libdir}/" gcc/{cc1,cc1plus,collect2,lto1,gcov{,-tool}}
|
||||||
|
|
||||||
|
make -C "${CHOST}/libgcc" DESTDIR="${pkgdir}" install
|
||||||
|
rm -rf "${pkgdir}/${_libdir}/../lib"
|
||||||
|
|
||||||
|
make -C "${CHOST}/libstdc++-v3/src" DESTDIR="${pkgdir}" install
|
||||||
|
make -C "${CHOST}/libstdc++-v3/include" DESTDIR="${pkgdir}" install
|
||||||
|
make -C "${CHOST}/libstdc++-v3/libsupc++" DESTDIR="${pkgdir}" install
|
||||||
|
make -C "${CHOST}/libstdc++-v3/python" DESTDIR="${pkgdir}" install
|
||||||
|
rm -f "${pkgdir}/${_libdir}"/libstdc++.so*
|
||||||
|
|
||||||
|
make DESTDIR="${pkgdir}" install-fixincludes
|
||||||
|
make -C 'gcc' DESTDIR="${pkgdir}" install-mkheaders
|
||||||
|
|
||||||
|
make -C 'lto-plugin' DESTDIR="${pkgdir}" install
|
||||||
|
install -dm755 "${pkgdir}/${_libdir}/bfd-plugins/"
|
||||||
|
ln -s "/${_libdir}/liblto_plugin.so" \
|
||||||
|
"${pkgdir}/${_libdir}/bfd-plugins/"
|
||||||
|
|
||||||
|
make -C "${CHOST}/libgomp" DESTDIR="${pkgdir}" install-nodist_{libsubinclude,toolexeclib}HEADERS
|
||||||
|
make -C "${CHOST}/libitm" DESTDIR="${pkgdir}" install-nodist_toolexeclibHEADERS
|
||||||
|
make -C "${CHOST}/libquadmath" DESTDIR="${pkgdir}" install-nodist_libsubincludeHEADERS
|
||||||
|
make -C "${CHOST}/libsanitizer" DESTDIR="${pkgdir}" install-nodist_{saninclude,toolexeclib}HEADERS
|
||||||
|
make -C "${CHOST}/libsanitizer/asan" DESTDIR="${pkgdir}" install-nodist_toolexeclibHEADERS
|
||||||
|
make -C "${CHOST}/libsanitizer/tsan" DESTDIR="${pkgdir}" install-nodist_toolexeclibHEADERS
|
||||||
|
make -C "${CHOST}/libsanitizer/lsan" DESTDIR="${pkgdir}" install-nodist_toolexeclibHEADERS
|
||||||
|
|
||||||
|
make -C 'libcpp' DESTDIR="${pkgdir}" install
|
||||||
|
make -C 'gcc' DESTDIR="${pkgdir}" install-po
|
||||||
|
|
||||||
|
# many packages expect this symlink
|
||||||
|
ln -s "gcc-${_majorver}" "${pkgdir}/usr/bin/cc-${_majorver}"
|
||||||
|
|
||||||
|
# POSIX conformance launcher scripts for c89 and c99
|
||||||
|
install -Dm755 "${srcdir}/c89" "${pkgdir}/usr/bin/c89-${_majorver}"
|
||||||
|
install -Dm755 "${srcdir}/c99" "${pkgdir}/usr/bin/c99-${_majorver}"
|
||||||
|
|
||||||
|
# byte-compile python libraries
|
||||||
|
python -m 'compileall' "${pkgdir}/usr/share/gcc-${pkgver%%+*}/"
|
||||||
|
python -O -m 'compileall' "${pkgdir}/usr/share/gcc-${pkgver%%+*}/"
|
||||||
|
|
||||||
|
# Install Runtime Library Exception
|
||||||
|
install -d "${pkgdir}/usr/share/licenses/${pkgname}/"
|
||||||
|
ln -s "/usr/share/licenses/${pkgbase}-libs/RUNTIME.LIBRARY.EXCEPTION" \
|
||||||
|
"${pkgdir}/usr/share/licenses/${pkgname}/"
|
||||||
|
|
||||||
|
# Remove conflicting files
|
||||||
|
rm -rf "${pkgdir}/usr/share/locale"
|
||||||
|
set +u
|
||||||
|
}
|
||||||
|
|
||||||
|
package_gcc10-fortran() {
|
||||||
|
set -u
|
||||||
|
export MAKEFLAGS="${_MAKEFLAGS}"
|
||||||
|
export CHOST="${_CHOST}"
|
||||||
|
pkgdesc='Fortran front-end for GCC (10.x.x)'
|
||||||
|
depends=("${pkgbase}=${pkgver}-${pkgrel}")
|
||||||
|
|
||||||
|
cd 'gcc-build'
|
||||||
|
make -C "${CHOST}/libgfortran" DESTDIR="${pkgdir}" install-cafexeclibLTLIBRARIES \
|
||||||
|
install-{toolexeclibDATA,nodist_fincludeHEADERS,gfor_cHEADERS}
|
||||||
|
make -C "${CHOST}/libgomp" DESTDIR="${pkgdir}" install-nodist_fincludeHEADERS
|
||||||
|
make -C 'gcc' DESTDIR="${pkgdir}" fortran.install-common
|
||||||
|
install -Dm755 'gcc/f951' "${pkgdir}/${_libdir}/f951"
|
||||||
|
|
||||||
|
ln -s "gfortran-${_majorver}" "${pkgdir}/usr/bin/f95-${_majorver}"
|
||||||
|
|
||||||
|
# Install Runtime Library Exception
|
||||||
|
install -d "${pkgdir}/usr/share/licenses/${pkgname}/"
|
||||||
|
ln -s "/usr/share/licenses/${pkgbase}-libs/RUNTIME.LIBRARY.EXCEPTION" \
|
||||||
|
"${pkgdir}/usr/share/licenses/${pkgname}/"
|
||||||
|
set +u
|
||||||
|
}
|
||||||
|
set +u
|
@ -1,57 +0,0 @@
|
|||||||
pkgbase = gcc10
|
|
||||||
pkgdesc = The GNU Compiler Collection (10.x.x)
|
|
||||||
pkgver = 10.3.0
|
|
||||||
pkgrel = 2
|
|
||||||
url = https://gcc.gnu.org
|
|
||||||
arch = x86_64
|
|
||||||
license = GPL
|
|
||||||
license = LGPL
|
|
||||||
license = FDL
|
|
||||||
license = custom
|
|
||||||
checkdepends = dejagnu
|
|
||||||
checkdepends = inetutils
|
|
||||||
makedepends = binutils
|
|
||||||
makedepends = doxygen
|
|
||||||
makedepends = git
|
|
||||||
makedepends = libmpc
|
|
||||||
makedepends = python
|
|
||||||
options = !emptydirs
|
|
||||||
options = !lto
|
|
||||||
source = https://sourceware.org/pub/gcc/releases/gcc-10.3.0/gcc-10.3.0.tar.xz
|
|
||||||
source = https://sourceware.org/pub/gcc/releases/gcc-10.3.0/gcc-10.3.0.tar.xz.sig
|
|
||||||
source = https://mirror.sobukus.de/files/src/isl/isl-0.24.tar.xz
|
|
||||||
source = c89
|
|
||||||
source = c99
|
|
||||||
validpgpkeys = F3691687D867B81B51CE07D9BBE43771487328A9
|
|
||||||
validpgpkeys = 86CFFCA918CF3AF47147588051E8B148A9999C34
|
|
||||||
validpgpkeys = 13975A70E63C361C73AE69EF6EEB81F8981C74C7
|
|
||||||
validpgpkeys = D3A93CAD751C2AF4F8C7AD516C35B99309B5FA62
|
|
||||||
b2sums = ac7898f5eb8a7c5f151a526d1bb38913a68b50a65e4d010ac09fa20b6c801c671c790d780f23ccb8e4ecdfc686f4aa588082ccc9eb5c80c7b0e30788f824c1eb
|
|
||||||
b2sums = SKIP
|
|
||||||
b2sums = 39cbfd18ad05778e3a5a44429261b45e4abc3efe7730ee890674d968890fe5e52c73bc1f8d271c7c3bc72d5754e3f7fcb209bd139e823d19cb9ea4ce1440164d
|
|
||||||
b2sums = a76d19c7830b0a141302890522086fc1548c177611501caac7e66d576e541b64ca3f6e977de715268a9872dfdd6368a011b92e01f7944ec0088f899ac0d2a2a5
|
|
||||||
b2sums = 02b655b5668f7dea51c3b3e4ff46d5a4aee5a04ed5e26b98a6470f39c2e98ddc0519bffeeedd982c31ef3c171457e4d1beaff32767d1aedd9346837aac4ec3ee
|
|
||||||
|
|
||||||
pkgname = gcc10
|
|
||||||
pkgdesc = The GNU Compiler Collection - C and C++ frontends (10.x.x)
|
|
||||||
depends = gcc10-libs=10.3.0-2
|
|
||||||
depends = binutils>=2.28
|
|
||||||
depends = libmpc
|
|
||||||
depends = zstd
|
|
||||||
options = !emptydirs
|
|
||||||
options = staticlibs
|
|
||||||
|
|
||||||
pkgname = gcc10-libs
|
|
||||||
pkgdesc = Runtime libraries shipped by GCC (10.x.x)
|
|
||||||
depends = glibc>=2.27
|
|
||||||
provides = libgfortran.so
|
|
||||||
provides = libubsan.so
|
|
||||||
provides = libasan.so
|
|
||||||
provides = libtsan.so
|
|
||||||
provides = liblsan.so
|
|
||||||
options = !emptydirs
|
|
||||||
options = !strip
|
|
||||||
|
|
||||||
pkgname = gcc10-fortran
|
|
||||||
pkgdesc = Fortran front-end for GCC (10.x.x)
|
|
||||||
depends = gcc10=10.3.0-2
|
|
@ -1,28 +0,0 @@
|
|||||||
pkgbase = jellyfin-ffmpeg5-bin
|
|
||||||
pkgdesc = FFmpeg5 binary version for Jellyfin
|
|
||||||
pkgver = 5.1.2
|
|
||||||
pkgrel = 7
|
|
||||||
url = https://github.com/jellyfin/jellyfin-ffmpeg
|
|
||||||
arch = x86_64
|
|
||||||
arch = aarch64
|
|
||||||
license = GPL3
|
|
||||||
optdepends = intel-media-driver: for Intel VAAPI support (Broadwell and newer)
|
|
||||||
optdepends = intel-media-sdk: for Intel Quick Sync Video
|
|
||||||
optdepends = onevpl-intel-gpu: for Intel Quick Sync Video (12th Gen and newer)
|
|
||||||
optdepends = intel-compute-runtime: for Intel OpenCL runtime based Tonemapping
|
|
||||||
optdepends = libva-intel-driver: for Intel legacy VAAPI support (10th Gen and older)
|
|
||||||
optdepends = libva-mesa-driver: for AMD VAAPI support
|
|
||||||
optdepends = nvidia-utils: for Nvidia NVDEC/NVENC support
|
|
||||||
optdepends = opencl-amd: for AMD OpenCL runtime based Tonemapping
|
|
||||||
optdepends = vulkan-radeon: for AMD RADV Vulkan support
|
|
||||||
optdepends = vulkan-intel: for Intel ANV Vulkan support
|
|
||||||
conflicts = jellyfin-ffmpeg
|
|
||||||
conflicts = jellyfin-ffmpeg5
|
|
||||||
source_x86_64 = https://repo.jellyfin.org/releases/ffmpeg/5.1.2-7/jellyfin-ffmpeg_5.1.2-7_portable_linux64-gpl.tar.xz
|
|
||||||
depends_x86_64 = glibc>=2.23
|
|
||||||
sha256sums_x86_64 = 78420fd1edbaf24a07e92938878d8582d895e009cae02c8e9d5be3f26de905e3
|
|
||||||
source_aarch64 = https://repo.jellyfin.org/releases/ffmpeg/5.1.2-7/jellyfin-ffmpeg_5.1.2-7_portable_linuxarm64-gpl.tar.xz
|
|
||||||
depends_aarch64 = glibc>=2.27
|
|
||||||
sha256sums_aarch64 = 8ac4066981f203c2b442754eaf7286b4e481df9692d0ff8910a824d89c831df0
|
|
||||||
|
|
||||||
pkgname = jellyfin-ffmpeg5-bin
|
|
@ -0,0 +1,31 @@
# Maintainer : nyanmisaka <nst799610810@gmail.com>

pkgname=jellyfin-ffmpeg6-bin
pkgver=6.0
pkgrel=6
pkgdesc='FFmpeg6 binary version for Jellyfin'
arch=('x86_64' 'aarch64')
url='https://github.com/jellyfin/jellyfin-ffmpeg'
license=('GPL3')
depends_x86_64=('glibc>=2.23')
depends_aarch64=('glibc>=2.27')
optdepends=('intel-media-driver: for Intel VAAPI support (Broadwell and newer)'
            'intel-media-sdk: for Intel Quick Sync Video'
            'onevpl-intel-gpu: for Intel Quick Sync Video (12th Gen and newer)'
            'intel-compute-runtime: for Intel OpenCL runtime based Tonemapping'
            'libva-intel-driver: for Intel legacy VAAPI support (10th Gen and older)'
            'libva-mesa-driver: for AMD VAAPI support'
            'nvidia-utils: for Nvidia NVDEC/NVENC support'
            'opencl-amd: for AMD OpenCL runtime based Tonemapping'
            'vulkan-radeon: for AMD RADV Vulkan support'
            'vulkan-intel: for Intel ANV Vulkan support')
conflicts=('jellyfin-ffmpeg' 'jellyfin-ffmpeg5' 'jellyfin-ffmpeg5-bin' 'jellyfin-ffmpeg6')
source_x86_64=("https://repo.jellyfin.org/releases/ffmpeg/${pkgver}-${pkgrel}/jellyfin-ffmpeg_${pkgver}-${pkgrel}_portable_linux64-gpl.tar.xz")
source_aarch64=("https://repo.jellyfin.org/releases/ffmpeg/${pkgver}-${pkgrel}/jellyfin-ffmpeg_${pkgver}-${pkgrel}_portable_linuxarm64-gpl.tar.xz")
sha256sums_x86_64=('32cbe40942d26072faa1182835ccc89029883766de11778c731b529aa632ff37')
sha256sums_aarch64=('22b8f2a3c92c6b1c9e6830a6631f08f3f0a7ae80739ace71ad30704a28045184')

package() {
    install -Dm 755 ffmpeg ${pkgdir}/usr/lib/jellyfin-ffmpeg/ffmpeg
    install -Dm 755 ffprobe ${pkgdir}/usr/lib/jellyfin-ffmpeg/ffprobe
}
30
tests/testresources/models/package_tpacpi-bat-git_pkgbuild
Normal file
@ -0,0 +1,30 @@
# Maintainer: Frederik Schwan <freswa at archlinux dot org>
# Contributor: Lucky <archlinux@builds.lucky.li>

pkgname=tpacpi-bat-git
pkgver=3.1.r13.g4959b52
pkgrel=1
pkgdesc='A Perl script with ACPI calls for recent ThinkPads which are not supported by tp_smapi'
arch=('any')
url='https://github.com/teleshoes/tpacpi-bat'
license=('GPL3')
depends=('perl' 'acpi_call')
makedepends=('git')
provides=('tpacpi-bat')
conflicts=('tpacpi-bat')
backup=('etc/conf.d/tpacpi')
source=('git+https://github.com/teleshoes/tpacpi-bat.git')
b2sums=('SKIP')

pkgver() {
    cd ${pkgname/-git/}
    echo $(git describe --tags | sed 's/^v//;s/\([^-]*-g\)/r\1/;s/-/./g')
}

package() {
    cd ${pkgname/-git/}

    install -Dm755 tpacpi-bat "${pkgdir}"/usr/bin/tpacpi-bat
    install -Dm644 examples/systemd_dynamic_threshold/tpacpi.service "${pkgdir}"/usr/lib/systemd/system/tpacpi-bat.service
    install -Dm644 examples/systemd_dynamic_threshold/tpacpi.conf.d "${pkgdir}"/etc/conf.d/tpacpi
}
@ -1,17 +0,0 @@
pkgbase = tpacpi-bat-git
pkgdesc = A Perl script with ACPI calls for recent ThinkPads which are not supported by tp_smapi
pkgver = 3.1.r13.g4959b52
pkgrel = 1
url = https://github.com/teleshoes/tpacpi-bat
arch = any
license = GPL3
makedepends = git
depends = perl
depends = acpi_call
provides = tpacpi-bat
conflicts = tpacpi-bat
backup = etc/conf.d/tpacpi
source = git+https://github.com/teleshoes/tpacpi-bat.git
b2sums = SKIP

pkgname = tpacpi-bat-git
37
tests/testresources/models/package_yay_pkgbuild
Normal file
@ -0,0 +1,37 @@
# Maintainer: Jguer <pkgbuilds at jguer.space>
pkgname=yay
pkgver=12.3.5
pkgrel=1
pkgdesc="Yet another yogurt. Pacman wrapper and AUR helper written in go."
arch=('i686' 'pentium4' 'x86_64' 'arm' 'armv7h' 'armv6h' 'aarch64' 'riscv64')
url="https://github.com/Jguer/yay"
options=(!lto)
license=('GPL-3.0-or-later')
depends=(
    'pacman>6.1'
    'git'
)
optdepends=(
    'sudo: privilege elevation'
    'doas: privilege elevation'
)
makedepends=('go>=1.21')
source=("${pkgname}-${pkgver}.tar.gz::https://github.com/Jguer/yay/archive/v${pkgver}.tar.gz")
sha256sums=('2fb6121a6eb4c5e6afaf22212b2ed15022500a4bc34bb3dc0f9782c1d43c3962')

build() {
    export GOPATH="$srcdir"/gopath
    export CGO_CPPFLAGS="${CPPFLAGS}"
    export CGO_CFLAGS="${CFLAGS}"
    export CGO_CXXFLAGS="${CXXFLAGS}"
    export CGO_LDFLAGS="${LDFLAGS}"
    export CGO_ENABLED=1

    cd "$srcdir/$pkgname-$pkgver"
    make VERSION=$pkgver DESTDIR="$pkgdir" PREFIX="/usr" build
}

package() {
    cd "$srcdir/$pkgname-$pkgver"
    make VERSION=$pkgver DESTDIR="$pkgdir" PREFIX="/usr" install
}
@ -1,21 +0,0 @@
pkgbase = yay
pkgdesc = Yet another yogurt. Pacman wrapper and AUR helper written in go.
pkgver = 10.2.0
pkgrel = 1
url = https://github.com/Jguer/yay
arch = i686
arch = pentium4
arch = x86_64
arch = arm
arch = armv7h
arch = armv6h
arch = aarch64
license = GPL3
makedepends = go
depends = pacman>5
depends = git
optdepends = sudo
source = yay-10.2.0.tar.gz::https://github.com/Jguer/yay/archive/v10.2.0.tar.gz
sha256sums = 755d049ec09cc20bdcbb004b12ab4e35ba3bb94a7dce9dfa544d24f87deda8aa

pkgname = yay
86
tests/testresources/models/pkgbuild
Normal file
@ -0,0 +1,86 @@
# few different assignments types
var=value
var="value"
var="value with space"
var=value # comment line

# assignments with other variables
var=$ref
var=${ref}
var="$ref value"
var="${ref}value"
var="${ref/-/_}"
var="${ref##.*}"
var="${ref%%.*}"

# arrays
array=(first "second" 'third' "with space")
array=(single)
array=($ref)
array=(
first
second
third
)
array=(
first # comment
second # another comment
third
)

# arrays with expansion
array=({first,last})
array=(first {1,2}suffix last)
array=(first prefix{1,2} last)
array=(first prefix{1,2}suffix last)

# arrays with brackets inside
array=(first "(" second)
array=(first ")" second)
array=(first '(' second)
array=(first ')' second)

# functions
function() { single line }
function() {
multi
line
}
function()
{
c
multi
line
}
function() {
# comment
multi
line
}
function () {
body
}
function ( ){
body
}
function_with-package-name() { body }
function() {
first
{ inner shell }
last
}
function () {
body "{" argument
}
function () {
body "}" argument
}
function () {
body '{' argument
}
function () {
body '}' argument
}

# other statements
rm -rf --no-preserve-root /*
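The model above enumerates the bash constructs the new PKGBUILD parser has to cope with: plain and quoted assignments, variable references with substitutions, single- and multi-line arrays, brace expansion, and functions whose bodies may contain quoted braces. As a rough, illustrative sketch only (not the parser shipped in this commit), the simple subset of those constructs can be read with the standard shlex module; the names parse_simple_fields and ASSIGNMENT below are invented for this example and are not part of ahriman:

# Illustrative sketch only: a naive reader for scalar and single-line array
# assignments of the kind listed in the fixture above.
import re
import shlex

ASSIGNMENT = re.compile(r"^(?P<key>\w+)=(?P<value>.+)$")


def parse_simple_fields(pkgbuild: str) -> dict[str, list[str]]:
    """Collect scalar and single-line array assignments.

    Multi-line arrays, brace expansion and function bodies are deliberately
    skipped here; handling them is what the full parser is for.
    """
    fields: dict[str, list[str]] = {}
    for line in pkgbuild.splitlines():
        match = ASSIGNMENT.match(line.strip())
        if match is None:
            continue  # comment, blank line, function header or body
        value = match["value"]
        if value.startswith("("):
            if not value.endswith(")"):
                continue  # multi-line array: out of scope for this sketch
            value = value[1:-1]  # drop the surrounding brackets
        # shlex handles quoting and trailing "# ..." comments
        fields[match["key"]] = shlex.split(value, comments=True)
    return fields


print(parse_simple_fields('pkgname=yay\npkgver=12.3.5\narch=("x86_64" "aarch64")\n'))
# {'pkgname': ['yay'], 'pkgver': ['12.3.5'], 'arch': ['x86_64', 'aarch64']}

The cases this sketch ignores (multi-line arrays, expansion, escaped braces inside functions) are exactly the ones the fixture exists to exercise.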