Compare commits

...

14 Commits

Author SHA1 Message Date
dbfb460557 feat: optimize archive reading
Instead of trying to load every database and look for files, this commit
introduces an optimization in which the service loads packages first,
groups them by database, and loads files later.

In some cases this significantly decreases the time needed to load files
2024-08-14 17:07:10 +03:00
f7f76c4119 fix: explicitly process list of packages
Small workaround to exclude debug packages from being processed
2024-08-14 17:07:10 +03:00
88ee300b9e fix: remove trailing slash when loading package files from a database 2024-08-14 17:07:10 +03:00
6f30c687c2 fix: skip debug packages as well 2024-08-14 17:07:10 +03:00
c023ebe165 docs: update documentation for implicit dependencies resolution 2024-08-14 17:07:10 +03:00
54b99cacfd feat: remove excess dependencies leaves (#128)
This MR improves implicit dependency processing by reducing tree leaves using the following algorithm:

* remove paths which belong to any base package
* remove packages which are (opt)dependencies of one of the packages which provide the same path. It also tries to handle circular dependencies by excluding them from being "satisfied"
* remove packages which are already satisfied by any children path
2024-08-14 17:07:10 +03:00
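For illustration only, the reduction described above could be sketched roughly as follows; the names and data structures here are assumptions for this sketch, not the actual ahriman internals:

    # hypothetical sketch of the leaf reduction; not the actual implementation
    def reduce_leaves(paths: dict[str, set[str]], base_packages: set[str],
                      depends: dict[str, set[str]]) -> dict[str, set[str]]:
        """Reduce a path -> candidate packages mapping according to the rules above."""
        result: dict[str, set[str]] = {}
        for path, packages in paths.items():
            # rule 1: drop paths which belong to any base package
            if packages & base_packages:
                continue
            # rule 2: drop candidates which are (opt)dependencies of another candidate
            # (the real implementation also excludes circular dependencies here)
            dependencies = set().union(*(depends.get(package, set()) for package in packages))
            result[path] = {package for package in packages if package not in dependencies}
        # rule 3: drop candidates which are already satisfied by any child path
        for path in list(result):
            for child in [child for child in result if child != path and child.startswith(path + "/")]:
                result[path] -= result[child]
        return result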
4f5166ff25 feat: improve lock mechanisms
* improve lock mechanisms

* use /run/ahriman for socket

* better water
2024-08-14 17:07:10 +03:00
c8afcbf36a feat: implement local reporter mode (#126)
* implement local reporter mode

* simplify watcher class

* review changes

* do not update unknown status

* allow empty key patches via api

* fix some pylint warnings in tests
2024-08-14 17:07:10 +03:00
2b9880bd3c feat: allow to use simplified keys for context
The initial implementation required an explicit context key name to be set.
Though it is still useful sometimes (e.g. if there should be two
variables with the same type), in the most common scenarios internally
only the type is required. This commit extends the set and get methods to
allow constructing ContextKey from the type directly.

It also breaks old keys, since - in order to reduce the number of possible
mistakes - internal classes use this generation method
2024-08-14 17:07:10 +03:00
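As a purely illustrative sketch (not the actual ahriman classes), constructing a context key from a type and accepting either form in typed get/set methods could look like this:

    # hypothetical sketch of a typed context; real ahriman classes differ
    from dataclasses import dataclass
    from typing import Any, Generic, TypeVar

    T = TypeVar("T")

    @dataclass(frozen=True)
    class ContextKey(Generic[T]):
        key: str
        return_type: type[T]

        @classmethod
        def from_type(cls, return_type: type[T]) -> "ContextKey[T]":
            # derive the key name from the type itself, as the commit describes
            return cls(return_type.__name__, return_type)

    class Context:
        def __init__(self) -> None:
            self._content: dict[str, Any] = {}

        def set(self, key: "ContextKey[T] | type[T]", value: T) -> None:
            key = key if isinstance(key, ContextKey) else ContextKey.from_type(key)
            self._content[key.key] = value

        def get(self, key: "ContextKey[T] | type[T]") -> T:
            key = key if isinstance(key, ContextKey) else ContextKey.from_type(key)
            return self._content[key.key]

    # usage: explicit key and plain type address the same value
    context = Context()
    context.set(str, "hello")
    assert context.get(ContextKey("str", str)) == "hello"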
3be5cdafe8 feat: add ability to check broken dependencies (#122)
* implement elf dynamic linking check

* load local database too in pacman wrapper
2024-08-14 17:07:10 +03:00
668be41c3e type: drop MiddlewareType in favour of Middleware builtin 2024-08-14 17:07:10 +03:00
3353daec6d type: fix mypy warn for fresh unixsocket release 2024-08-14 17:07:10 +03:00
eef4d2dd98 type: remove another unused mypy directive 2024-08-14 17:07:10 +03:00
b15161554e build: use requests-unixsocket2 fork
Since requests-2.32.0, the http+unix URL scheme is broken, check
https://github.com/msabramo/requests-unixsocket/issues/73 for more
details
2024-08-14 17:07:10 +03:00
158 changed files with 5835 additions and 1262 deletions

View File

@@ -10,7 +10,7 @@ echo -e '[arcanisrepo]\nServer = https://repo.arcanis.me/$arch\nSigLevel = Never
 # refresh the image
 pacman -Syu --noconfirm
 # main dependencies
-pacman -Sy --noconfirm devtools git pyalpm python-cerberus python-inflection python-passlib python-requests python-srcinfo python-systemd sudo
+pacman -Sy --noconfirm devtools git pyalpm python-cerberus python-inflection python-passlib python-pyelftools python-requests python-srcinfo python-systemd sudo
 # make dependencies
 pacman -Sy --noconfirm --asdeps base-devel python-build python-flit python-installer python-tox python-wheel
 # optional dependencies

View File

@@ -32,11 +32,11 @@ RUN useradd -m -d "/home/build" -s "/usr/bin/nologin" build && \
 COPY "docker/install-aur-package.sh" "/usr/local/bin/install-aur-package"
 ## install package dependencies
 ## darcs is not installed by reasons, because it requires a lot haskell packages which dramatically increase image size
-RUN pacman -Sy --noconfirm --asdeps devtools git pyalpm python-cerberus python-inflection python-passlib python-requests python-srcinfo && \
+RUN pacman -Sy --noconfirm --asdeps devtools git pyalpm python-cerberus python-inflection python-passlib python-pyelftools python-requests python-srcinfo && \
     pacman -Sy --noconfirm --asdeps base-devel python-build python-flit python-installer python-wheel && \
-    pacman -Sy --noconfirm --asdeps breezy git mercurial python-aiohttp python-boto3 python-cryptography python-jinja python-requests-unixsocket python-systemd rsync subversion && \
+    pacman -Sy --noconfirm --asdeps breezy git mercurial python-aiohttp python-boto3 python-cryptography python-jinja python-systemd rsync subversion && \
     runuser -u build -- install-aur-package python-aioauth-client python-webargs python-aiohttp-apispec-git python-aiohttp-cors \
-        python-aiohttp-jinja2 python-aiohttp-session python-aiohttp-security
+        python-aiohttp-jinja2 python-aiohttp-session python-aiohttp-security python-requests-unixsocket2
 ## FIXME since 1.0.4 devtools requires dbus to be run, which doesn't work now in container
 COPY "docker/systemd-nspawn.sh" "/usr/local/bin/systemd-nspawn"

View File

@@ -20,6 +20,14 @@ ahriman.core.alpm.pacman module
    :no-undoc-members:
    :show-inheritance:
 
+ahriman.core.alpm.pacman\_database module
+-----------------------------------------
+
+.. automodule:: ahriman.core.alpm.pacman_database
+   :members:
+   :no-undoc-members:
+   :show-inheritance:
+
 ahriman.core.alpm.repo module
 -----------------------------

View File

@@ -108,6 +108,14 @@ ahriman.core.database.migrations.m012\_last\_commit\_sha module
    :no-undoc-members:
    :show-inheritance:
 
+ahriman.core.database.migrations.m013\_dependencies module
+----------------------------------------------------------
+
+.. automodule:: ahriman.core.database.migrations.m013_dependencies
+   :members:
+   :no-undoc-members:
+   :show-inheritance:
+
 Module contents
 ---------------

View File

@@ -28,6 +28,14 @@ ahriman.core.database.operations.changes\_operations module
    :no-undoc-members:
    :show-inheritance:
 
+ahriman.core.database.operations.dependencies\_operations module
+----------------------------------------------------------------
+
+.. automodule:: ahriman.core.database.operations.dependencies_operations
+   :members:
+   :no-undoc-members:
+   :show-inheritance:
+
 ahriman.core.database.operations.logs\_operations module
 --------------------------------------------------------

View File

@@ -60,6 +60,14 @@ ahriman.models.counters module
    :no-undoc-members:
    :show-inheritance:
 
+ahriman.models.dependencies module
+----------------------------------
+
+.. automodule:: ahriman.models.dependencies
+   :members:
+   :no-undoc-members:
+   :show-inheritance:
+
 ahriman.models.internal\_status module
 --------------------------------------
@@ -108,6 +116,14 @@ ahriman.models.package module
    :no-undoc-members:
    :show-inheritance:
 
+ahriman.models.package\_archive module
+--------------------------------------
+
+.. automodule:: ahriman.models.package_archive
+   :members:
+   :no-undoc-members:
+   :show-inheritance:
+
 ahriman.models.package\_description module
 ------------------------------------------

View File

@@ -192,6 +192,7 @@ Idea is to add package to a build queue from which it will be handled automatica
 * If supplied argument is file, then application moves the file to the directory with built packages. Same rule applies for directory, but in this case it copies every package-like file from the specified directory.
 * If supplied argument is directory and there is ``PKGBUILD`` file there, it will be treated as local package. In this case it will queue this package to build and copy source files (``PKGBUILD`` and ``.SRCINFO``) to caches.
+* If supplied argument looks like URL (i.e. it has scheme - e.g. ``http://`` which is neither ``data`` nor ``file``), it tries to download the package from the specified remote source.
 * If supplied argument is not file then application tries to lookup for the specified name in AUR and clones it into the directory with manual updates. This scenario can also handle package dependencies which are missing in repositories.
 
 This logic can be overwritten by specifying the ``source`` parameter, which is partially useful if you would like to add package from AUR, but there is local directory cloned from AUR. Also official repositories calls are hidden behind explicit source definition.
@@ -206,10 +207,20 @@ Remove packages
 This flow removes package from filesystem, updates repository database and also runs synchronization and reporting methods.
+
+Check outdated packages
+^^^^^^^^^^^^^^^^^^^^^^^
+
+There are several ways for a package to be marked as out-of-date and hence requiring a rebuild:
+
+#. The user requested an update of the package, e.g. by calling the ``package-add`` subcommand (or ``package-update`` with arguments).
+#. The most common case: the version in AUR (or in the official repositories) is newer than the version in the repository.
+#. In addition, if the package is a VCS package (e.g. has the ``-git`` suffix) and the last update was more than the configured threshold ago, the service will also try to fetch sources and check whether the revision is newer than the built one.
+#. Finally, the service can check whether the dependencies of the package have been updated (e.g. a linked library has been renamed or a modules directory - e.g. for python and ruby packages - has been changed). If so, the package will be marked as out-of-date as well.
 
 Update packages
 ^^^^^^^^^^^^^^^
 
-This feature is divided into to the following stages: check AUR for updates and run rebuild for required packages. Whereas check does not do anything except for check itself, update flow is the following:
+This feature is divided into the following stages: check AUR for updates and run rebuild for required packages. The package update flow is the following:
 
 #. Process every built package first. Those packages are usually added manually.
 #. Run sync and report methods.
@@ -259,6 +270,24 @@ The application is able to automatically bump package release (``pkgrel``) durin
 #. If it has ``major.minor`` notation (e.g. ``1.1``), then increment last part by 1, e.g. ``1.1 -> 1.2``, ``1.0.1 -> 1.0.2``.
 #. If ``pkgrel`` is a number (e.g. ``1``), then append 1 to the end of the string, e.g. ``1 -> 1.1``.
+
+Implicit dependencies resolution
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In addition to the depends/optional/make/check dependency lists, the service also handles implicit dependencies. After a successful build, the application traverses the build tree and finds:
+
+* Libraries to which the binaries (ELF files) are linked. To do so, the ``NEEDED`` section of the ELF files is read.
+* Directories which contain files of the package, but do not belong to this package. This case covers, for example, python and ruby submodules.
+
+Having the initial dependency list, the application looks for packages which contain those paths (both files and directories) and creates the initial packages list. After that, the list is reduced in the following way:
+
+* From any leaf exclude the package itself and possible debug packages.
+* If the entry (i.e. file or directory) belongs to a package which is in the base group, it will be removed.
+* If a package is a dependency of another package which provides the same entry, the dependency will be removed.
+* After that, if a package is an *optional* dependency of another package in the remaining list, it will be removed.
+* And finally, if there is any path which is a child of the entry and contains the same package, the package will be removed from the smaller entry.
+
+All those implicit dependencies are stored in the database and extracted on each check. If any of the repository packages no longer contains an entry (e.g. the ``.so`` version has changed or the modules directory has changed), the dependent package will be marked as out-of-date.
 
 Core functions reference
 ------------------------
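The ELF part of this check can be illustrated with pyelftools, which this change adds as a dependency; the following is a minimal sketch rather than the actual ahriman implementation (real code would also need to skip non-ELF files):

    from pathlib import Path

    from elftools.elf.dynamic import DynamicSection
    from elftools.elf.elffile import ELFFile

    def needed_libraries(binary: Path) -> list[str]:
        """Return the DT_NEEDED entries (linked libraries) of an ELF file."""
        with binary.open("rb") as raw:
            elf = ELFFile(raw)
            return [
                tag.needed
                for section in elf.iter_sections()
                if isinstance(section, DynamicSection)
                for tag in section.iter_tags()
                if tag.entry.d_tag == "DT_NEEDED"
            ]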
@@ -366,7 +395,7 @@ Web application requires the following python packages to be installed:
 * Additional web features also require ``aiohttp-apispec`` (autogenerated documentation), ``aiohttp_cors`` (CORS support, required by documentation).
 * In addition, authorization feature requires ``aiohttp_security``, ``aiohttp_session`` and ``cryptography``.
 * In addition to base authorization dependencies, OAuth2 also requires ``aioauth-client`` library.
-* In addition if you would like to disable authorization for local access (recommended way in order to run the application itself with reporting support), the ``requests-unixsocket`` library is required.
+* In addition if you would like to disable authorization for local access (recommended way in order to run the application itself with reporting support), the ``requests-unixsocket2`` library is required.
 
 Middlewares
 ^^^^^^^^^^^
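The ``pkgrel`` bump rules quoted earlier in this file (increment the last part of a ``major.minor`` value, otherwise append ``.1``) can be sketched roughly as follows; this is illustrative only, not the project's actual code:

    def bump_pkgrel(pkgrel: str) -> str:
        """Bump the package release according to the rules described above."""
        if "." in pkgrel:
            # major.minor notation: increment the last part, e.g. 1.0.1 -> 1.0.2
            prefix, _, last = pkgrel.rpartition(".")
            return f"{prefix}.{int(last) + 1}"
        # plain number: append a minor part, e.g. 1 -> 1.1
        return f"{pkgrel}.1"

    assert bump_pkgrel("1.1") == "1.2"
    assert bump_pkgrel("1") == "1.1"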

View File

@@ -53,6 +53,7 @@ libalpm and AUR related configuration. Group name can refer to architecture, e.g
 * ``mirror`` - package database mirror used by pacman for synchronization, string, required. This option supports standard pacman substitutions with ``$arch`` and ``$repo``. Note that the mentioned mirror should contain all repositories which are set by ``alpm.repositories`` option.
 * ``repositories`` - list of pacman repositories, used for package search, space separated list of strings, required.
 * ``root`` - root for alpm library, string, required. In the most cases it must point to the system root.
+* ``sync_files_database`` - download files database from mirror, boolean, required.
 * ``use_ahriman_cache`` - use local pacman package cache instead of system one, boolean, required. With this option enabled you might want to refresh database periodically (available as additional flag for some subcommands). If set to ``no``, databases must be synchronized manually.
 
 ``auth`` group

View File

@@ -475,7 +475,7 @@ The following environment variables are supported:
 * ``AHRIMAN_REPOSITORY`` - repository name, default is ``aur-clone``.
 * ``AHRIMAN_REPOSITORY_SERVER`` - optional override for the repository URL. Useful if you would like to download packages from remote instead of local filesystem.
 * ``AHRIMAN_REPOSITORY_ROOT`` - repository root. Because of filesystem rights it is required to override default repository root. By default, it uses ``ahriman`` directory inside ahriman's home, which can be passed as mount volume.
-* ``AHRIMAN_UNIX_SOCKET`` - full path to unix socket which is used by web server, default is empty. Note that more likely you would like to put it inside ``AHRIMAN_REPOSITORY_ROOT`` directory (e.g. ``/var/lib/ahriman/ahriman/ahriman-web.sock``) or to ``/tmp``.
+* ``AHRIMAN_UNIX_SOCKET`` - full path to unix socket which is used by web server, default is empty. Note that more likely you would like to put it inside ``AHRIMAN_REPOSITORY_ROOT`` directory (e.g. ``/var/lib/ahriman/ahriman/ahriman-web.sock``) or to ``/run/ahriman``.
 * ``AHRIMAN_USER`` - ahriman user, usually must not be overwritten, default is ``ahriman``.
 * ``AHRIMAN_VALIDATE_CONFIGURATION`` - if set (default) validate service configuration.
 
@@ -1313,12 +1313,12 @@ How to enable basic authorization
 The ``salt`` parameter is optional, but recommended, and can be set to any (random) string.
 
 #.
-   In order to provide access for reporting from application instances you can (the recommended way) use unix sockets by the following configuration (note, that it requires ``python-requests-unixsocket`` package to be installed):
+   In order to provide access for reporting from application instances you can (the recommended way) use unix sockets by the following configuration (note, that it requires ``python-requests-unixsocket2`` package to be installed):
 
    .. code-block:: ini
 
      [web]
-     unix_socket = /var/lib/ahriman/ahriman-web.sock
+     unix_socket = /run/ahriman/ahriman-web.sock
 
 This socket path must be available for web service instance and must be available for all application instances (e.g. in case if you are using docker container - see above - you need to make sure that the socket is passed to the root filesystem).
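For reference, reporting clients talk to this socket over the ``http+unix`` scheme. A minimal sketch with the ``requests-unixsocket2`` fork follows; the endpoint path is only a placeholder, not a documented route:

    from urllib.parse import quote

    import requests_unixsocket  # provided by the requests-unixsocket2 fork (drop-in replacement)

    socket_path = "/run/ahriman/ahriman-web.sock"
    # slashes in the socket path must be percent-encoded inside the http+unix URL
    base_url = f"http+unix://{quote(socket_path, safe='')}"

    session = requests_unixsocket.Session()
    response = session.get(f"{base_url}/api/v1/info")  # example endpoint only
    response.raise_for_status()
    print(response.json())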

View File

@@ -7,7 +7,7 @@ pkgdesc="ArcH linux ReposItory MANager"
 arch=('any')
 url="https://github.com/arcan1s/ahriman"
 license=('GPL3')
-depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-cerberus' 'python-inflection' 'python-passlib' 'python-requests' 'python-srcinfo')
+depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-cerberus' 'python-inflection' 'python-passlib' 'python-pyelftools' 'python-requests' 'python-srcinfo')
 makedepends=('python-build' 'python-flit' 'python-installer' 'python-wheel')
 optdepends=('breezy: -bzr packages support'
             'darcs: -darcs packages support'
@@ -21,7 +21,7 @@ optdepends=('breezy: -bzr packages support'
             'python-aiohttp-session: web server with authorization'
             'python-boto3: sync to s3'
             'python-cryptography: web server with authorization'
-            'python-requests-unixsocket: client report to web server by unix socket'
+            'python-requests-unixsocket2: client report to web server by unix socket'
             'python-jinja: html report generation'
             'python-systemd: journal support'
             'rsync: sync by using rsync'

View File

@@ -1 +1,2 @@
 d /var/lib/ahriman 0755 ahriman ahriman
+d /run/ahriman 0755 ahriman ahriman

View File

@@ -17,6 +17,8 @@ mirror = https://geo.mirror.pkgbuild.com/$repo/os/$arch
 repositories = core extra multilib
 ; Pacman's root directory. In the most cases it must point to the system root.
 root = /
+; Sync files databases too, which is required by deep dependencies check
+sync_files_database = yes
 ; Use local packages cache. If this option is enabled, the service will be able to synchronize databases (available
 ; as additional option for some subcommands). If set to no, databases must be synchronized manually.
 use_ahriman_cache = yes

View File

@@ -21,7 +21,7 @@
 alertPlaceholder.append(wrapper);
 const toast = new bootstrap.Toast(wrapper);
-wrapper.addEventListener("hidden.bs.toast", () => {
+wrapper.addEventListener("hidden.bs.toast", _ => {
     wrapper.remove(); // bootstrap doesn't remove elements
     (action || reload)();
 });

View File

@@ -87,8 +87,8 @@
     }
 }
 
-$(() => {
-    keyImportModal.on("hidden.bs.modal", () => {
+$(_ => {
+    keyImportModal.on("hidden.bs.modal", _ => {
         keyImportBodyInput.text("");
         keyImportForm.trigger("reset");
     });

View File

@@ -53,10 +53,13 @@
     contentType: "application/json",
     success: _ => {
         loginModal.modal("hide");
-        showSuccess("Logged in", `Successfully logged in as ${username}`, () => location.href = "/");
+        showSuccess("Logged in", `Successfully logged in as ${username}`, _ => location.href = "/");
     },
     error: (jqXHR, _, errorThrown) => {
-        const message = _ => `Could not login as ${username}`;
+        const message = _ =>
+            username === "admin" && password === "admin"
+                ? "You've entered a password for user \"root\", did you make a typo in username?"
+                : `Could not login as ${username}`;
         showFailure("Login error", message, jqXHR, errorThrown);
     },
 });
@@ -75,8 +78,8 @@
     }
 }
 
-$(() => {
-    loginModal.on("hidden.bs.modal", () => {
+$(_ => {
+    loginModal.on("hidden.bs.modal", _ => {
         loginForm.trigger("reset");
     });
 });

View File

@@ -126,18 +126,18 @@
     }
 }
 
-$(() => {
-    packageAddModal.on("shown.bs.modal", () => {
+$(_ => {
+    packageAddModal.on("shown.bs.modal", _ => {
         $(`#package-add-repository-input option[value="${repository.architecture}-${repository.repository}"]`).prop("selected", true);
     });
-    packageAddModal.on("hidden.bs.modal", () => {
+    packageAddModal.on("hidden.bs.modal", _ => {
         packageAddVariablesDiv.empty();
         packageAddForm.trigger("reset");
     });
-    packageAddInput.keyup(() => {
+    packageAddInput.keyup(_ => {
         clearTimeout(packageAddInput.data("timeout"));
-        packageAddInput.data("timeout", setTimeout($.proxy(() => {
+        packageAddInput.data("timeout", setTimeout($.proxy(_ => {
             const value = packageAddInput.val();
             if (value.length >= 3) {

View File

@@ -290,8 +290,8 @@
     if (isPackageBaseSet) packageInfoModal.modal("show");
 }
 
-$(() => {
-    packageInfoModal.on("hidden.bs.modal", () => {
+$(_ => {
+    packageInfoModal.on("hidden.bs.modal", _ => {
         packageInfoAurUrl.empty();
         packageInfoDepends.empty();
         packageInfoGroups.empty();

View File

@@ -50,11 +50,11 @@
     }
 }
 
-$(() => {
-    packageRebuildModal.on("shown.bs.modal", () => {
+$(_ => {
+    packageRebuildModal.on("shown.bs.modal", _ => {
         $(`#package-rebuild-repository-input option[value="${repository.architecture}-${repository.repository}"]`).prop("selected", true);
     });
-    packageRebuildModal.on("hidden.bs.modal", () => { packageRebuildForm.trigger("reset"); });
+    packageRebuildModal.on("hidden.bs.modal", _ => { packageRebuildForm.trigger("reset"); });
 });
 </script>

View File

@@ -182,7 +182,7 @@
     return {classes: cellClass(value)};
 }
 
-$(() => {
+$(_ => {
     $("#repositories a").on("click", event => {
         const element = event.target;
         repository = {
@@ -194,7 +194,7 @@
         reload();
     });
 
-    table.on("check.bs.table uncheck.bs.table check-all.bs.table uncheck-all.bs.table", () => {
+    table.on("check.bs.table uncheck.bs.table check-all.bs.table uncheck-all.bs.table", _ => {
         packageRemoveButton.prop("disabled", !table.bootstrapTable("getSelections").length);
     });
 
     table.on("click-row.bs.table", (self, data, row, cell) => {
@@ -203,7 +203,7 @@
             table.bootstrapTable(method, {field: "id", values: [data.id]});
         } else showPackageInfo(data.id);
     });
 
-    table.on("created-controls.bs.table", () => {
+    table.on("created-controls.bs.table", _ => {
         const pickerInput = $(".bootstrap-table-filter-control-timestamp");
         pickerInput.daterangepicker({
             autoUpdateInput: false,
@@ -217,7 +217,7 @@
             table.bootstrapTable("triggerSearch");
         });
 
-        pickerInput.on("cancel.daterangepicker", () => {
+        pickerInput.on("cancel.daterangepicker", _ => {
             pickerInput.val("");
             table.bootstrapTable("triggerSearch");
         });

View File

@@ -123,8 +123,8 @@ SigLevel = Database{% if has_repo_signed %}Required{% else %}Never{% endif %} Pa
     return extractDataList(table.bootstrapTable("getData"), "licenses");
 }
 
-$(() => {
-    table.on("created-controls.bs.table", () => {
+$(_ => {
+    table.on("created-controls.bs.table", _ => {
         const pickerInput = $(".bootstrap-table-filter-control-timestamp");
         pickerInput.daterangepicker({
             autoUpdateInput: false,
@@ -138,7 +138,7 @@ SigLevel = Database{% if has_repo_signed %}Required{% else %}Never{% endif %} Pa
             table.bootstrapTable("triggerSearch");
         });
 
-        pickerInput.on("cancel.daterangepicker", () => {
+        pickerInput.on("cancel.daterangepicker", _ => {
             pickerInput.val("");
             table.bootstrapTable("triggerSearch");
         });

View File

@@ -27,12 +27,12 @@ _shtab_ahriman_patch_list_option_strings=('-h' '--help' '-e' '--exit-code' '-v'
 _shtab_ahriman_patch_remove_option_strings=('-h' '--help' '-v' '--variable')
 _shtab_ahriman_patch_set_add_option_strings=('-h' '--help' '-t' '--track')
 _shtab_ahriman_repo_backup_option_strings=('-h' '--help')
-_shtab_ahriman_repo_check_option_strings=('-h' '--help' '--changes' '--no-changes' '-e' '--exit-code' '--vcs' '--no-vcs' '-y' '--refresh')
-_shtab_ahriman_check_option_strings=('-h' '--help' '--changes' '--no-changes' '-e' '--exit-code' '--vcs' '--no-vcs' '-y' '--refresh')
+_shtab_ahriman_repo_check_option_strings=('-h' '--help' '--changes' '--no-changes' '--check-files' '--no-check-files' '-e' '--exit-code' '--vcs' '--no-vcs' '-y' '--refresh')
+_shtab_ahriman_check_option_strings=('-h' '--help' '--changes' '--no-changes' '--check-files' '--no-check-files' '-e' '--exit-code' '--vcs' '--no-vcs' '-y' '--refresh')
 _shtab_ahriman_repo_create_keyring_option_strings=('-h' '--help')
 _shtab_ahriman_repo_create_mirrorlist_option_strings=('-h' '--help')
-_shtab_ahriman_repo_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--changes' '--no-changes' '--dependencies' '--no-dependencies' '--dry-run' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '--partitions' '--no-partitions' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
-_shtab_ahriman_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--changes' '--no-changes' '--dependencies' '--no-dependencies' '--dry-run' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '--partitions' '--no-partitions' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
+_shtab_ahriman_repo_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--changes' '--no-changes' '--check-files' '--no-check-files' '--dependencies' '--no-dependencies' '--dry-run' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '--partitions' '--no-partitions' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
+_shtab_ahriman_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--changes' '--no-changes' '--check-files' '--no-check-files' '--dependencies' '--no-dependencies' '--dry-run' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '--partitions' '--no-partitions' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
 _shtab_ahriman_repo_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '--increment' '--no-increment' '-e' '--exit-code' '-s' '--status' '-u' '--username')
 _shtab_ahriman_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '--increment' '--no-increment' '-e' '--exit-code' '-s' '--status' '-u' '--username')
 _shtab_ahriman_repo_remove_unknown_option_strings=('-h' '--help' '--dry-run')
@@ -47,8 +47,8 @@ _shtab_ahriman_repo_sync_option_strings=('-h' '--help')
 _shtab_ahriman_sync_option_strings=('-h' '--help')
 _shtab_ahriman_repo_tree_option_strings=('-h' '--help' '-p' '--partitions')
 _shtab_ahriman_repo_triggers_option_strings=('-h' '--help')
-_shtab_ahriman_repo_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--changes' '--no-changes' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
-_shtab_ahriman_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--changes' '--no-changes' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
+_shtab_ahriman_repo_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--changes' '--no-changes' '--check-files' '--no-check-files' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
+_shtab_ahriman_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--changes' '--no-changes' '--check-files' '--no-check-files' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
 _shtab_ahriman_service_clean_option_strings=('-h' '--help' '--cache' '--no-cache' '--chroot' '--no-chroot' '--manual' '--no-manual' '--packages' '--no-packages' '--pacman' '--no-pacman')
 _shtab_ahriman_clean_option_strings=('-h' '--help' '--cache' '--no-cache' '--chroot' '--no-chroot' '--manual' '--no-manual' '--packages' '--no-packages' '--pacman' '--no-pacman')
 _shtab_ahriman_repo_clean_option_strings=('-h' '--help' '--cache' '--no-cache' '--chroot' '--no-chroot' '--manual' '--no-manual' '--packages' '--no-packages' '--pacman' '--no-pacman')
@@ -243,6 +243,8 @@ _shtab_ahriman_repo_check__h_nargs=0
 _shtab_ahriman_repo_check___help_nargs=0
 _shtab_ahriman_repo_check___changes_nargs=0
 _shtab_ahriman_repo_check___no_changes_nargs=0
+_shtab_ahriman_repo_check___check_files_nargs=0
+_shtab_ahriman_repo_check___no_check_files_nargs=0
 _shtab_ahriman_repo_check__e_nargs=0
 _shtab_ahriman_repo_check___exit_code_nargs=0
 _shtab_ahriman_repo_check___vcs_nargs=0
@@ -254,6 +256,8 @@ _shtab_ahriman_check__h_nargs=0
 _shtab_ahriman_check___help_nargs=0
 _shtab_ahriman_check___changes_nargs=0
 _shtab_ahriman_check___no_changes_nargs=0
+_shtab_ahriman_check___check_files_nargs=0
+_shtab_ahriman_check___no_check_files_nargs=0
 _shtab_ahriman_check__e_nargs=0
 _shtab_ahriman_check___exit_code_nargs=0
 _shtab_ahriman_check___vcs_nargs=0
@@ -270,6 +274,8 @@ _shtab_ahriman_repo_daemon___aur_nargs=0
 _shtab_ahriman_repo_daemon___no_aur_nargs=0
 _shtab_ahriman_repo_daemon___changes_nargs=0
 _shtab_ahriman_repo_daemon___no_changes_nargs=0
+_shtab_ahriman_repo_daemon___check_files_nargs=0
+_shtab_ahriman_repo_daemon___no_check_files_nargs=0
 _shtab_ahriman_repo_daemon___dependencies_nargs=0
 _shtab_ahriman_repo_daemon___no_dependencies_nargs=0
 _shtab_ahriman_repo_daemon___dry_run_nargs=0
@@ -291,6 +297,8 @@ _shtab_ahriman_daemon___aur_nargs=0
 _shtab_ahriman_daemon___no_aur_nargs=0
 _shtab_ahriman_daemon___changes_nargs=0
 _shtab_ahriman_daemon___no_changes_nargs=0
+_shtab_ahriman_daemon___check_files_nargs=0
+_shtab_ahriman_daemon___no_check_files_nargs=0
 _shtab_ahriman_daemon___dependencies_nargs=0
 _shtab_ahriman_daemon___no_dependencies_nargs=0
 _shtab_ahriman_daemon___dry_run_nargs=0
@@ -358,6 +366,8 @@ _shtab_ahriman_repo_update___aur_nargs=0
 _shtab_ahriman_repo_update___no_aur_nargs=0
 _shtab_ahriman_repo_update___changes_nargs=0
 _shtab_ahriman_repo_update___no_changes_nargs=0
+_shtab_ahriman_repo_update___check_files_nargs=0
+_shtab_ahriman_repo_update___no_check_files_nargs=0
 _shtab_ahriman_repo_update___dependencies_nargs=0
 _shtab_ahriman_repo_update___no_dependencies_nargs=0
 _shtab_ahriman_repo_update___dry_run_nargs=0
@@ -380,6 +390,8 @@ _shtab_ahriman_update___aur_nargs=0
 _shtab_ahriman_update___no_aur_nargs=0
 _shtab_ahriman_update___changes_nargs=0
 _shtab_ahriman_update___no_changes_nargs=0
+_shtab_ahriman_update___check_files_nargs=0
+_shtab_ahriman_update___no_check_files_nargs=0
 _shtab_ahriman_update___dependencies_nargs=0
 _shtab_ahriman_update___no_dependencies_nargs=0
 _shtab_ahriman_update___dry_run_nargs=0

View File

@@ -447,7 +447,9 @@ backup repository settings and database
 path of the output archive
 
 .SH COMMAND \fI\,'ahriman repo\-check'\/\fR
-usage: ahriman repo\-check [\-h] [\-\-changes | \-\-no\-changes] [\-e] [\-\-vcs | \-\-no\-vcs] [\-y] [package ...]
+usage: ahriman repo\-check [\-h] [\-\-changes | \-\-no\-changes] [\-\-check\-files | \-\-no\-check\-files] [\-e] [\-\-vcs | \-\-no\-vcs]
+                          [\-y]
+                          [package ...]
 
 check for packages updates. Same as repo\-update \-\-dry\-run \-\-no\-manual
@@ -460,6 +462,10 @@ filter check by package base
 \fB\-\-changes\fR, \fB\-\-no\-changes\fR
 calculate changes from the latest known commit if available. Only applicable in dry run mode
 
+.TP
+\fB\-\-check\-files\fR, \fB\-\-no\-check\-files\fR
+enable or disable checking of broken dependencies (e.g. dynamically linked libraries or modules directories)
+
 .TP
 \fB\-e\fR, \fB\-\-exit\-code\fR
 return non\-zero exit status if result is empty
@@ -484,9 +490,9 @@ create package which contains list of available mirrors as set by configuration.
 .SH COMMAND \fI\,'ahriman repo\-daemon'\/\fR
 usage: ahriman repo\-daemon [\-h] [\-i INTERVAL] [\-\-aur | \-\-no\-aur] [\-\-changes | \-\-no\-changes]
-                           [\-\-dependencies | \-\-no\-dependencies] [\-\-dry\-run] [\-\-increment | \-\-no\-increment]
-                           [\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual] [\-\-partitions | \-\-no\-partitions]
-                           [\-u USERNAME] [\-\-vcs | \-\-no\-vcs] [\-y]
+                           [\-\-check\-files | \-\-no\-check\-files] [\-\-dependencies | \-\-no\-dependencies] [\-\-dry\-run]
+                           [\-\-increment | \-\-no\-increment] [\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual]
+                           [\-\-partitions | \-\-no\-partitions] [\-u USERNAME] [\-\-vcs | \-\-no\-vcs] [\-y]
 
 start process which periodically will run update process
@@ -503,6 +509,10 @@ enable or disable checking for AUR updates
 \fB\-\-changes\fR, \fB\-\-no\-changes\fR
 calculate changes from the latest known commit if available. Only applicable in dry run mode
 
+.TP
+\fB\-\-check\-files\fR, \fB\-\-no\-check\-files\fR
+enable or disable checking of broken dependencies (e.g. dynamically linked libraries or modules directories)
+
 .TP
 \fB\-\-dependencies\fR, \fB\-\-no\-dependencies\fR
 process missing package dependencies
@@ -649,9 +659,9 @@ run triggers on empty build result as configured by settings
 instead of running all triggers as set by configuration, just process specified ones in order of mention
 
 .SH COMMAND \fI\,'ahriman repo\-update'\/\fR
-usage: ahriman repo\-update [\-h] [\-\-aur | \-\-no\-aur] [\-\-changes | \-\-no\-changes] [\-\-dependencies | \-\-no\-dependencies]
-                           [\-\-dry\-run] [\-e] [\-\-increment | \-\-no\-increment] [\-\-local | \-\-no\-local]
-                           [\-\-manual | \-\-no\-manual] [\-u USERNAME] [\-\-vcs | \-\-no\-vcs] [\-y]
+usage: ahriman repo\-update [\-h] [\-\-aur | \-\-no\-aur] [\-\-changes | \-\-no\-changes] [\-\-check\-files | \-\-no\-check\-files]
+                           [\-\-dependencies | \-\-no\-dependencies] [\-\-dry\-run] [\-e] [\-\-increment | \-\-no\-increment]
+                           [\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual] [\-u USERNAME] [\-\-vcs | \-\-no\-vcs] [\-y]
                            [package ...]
 
 check for packages updates and run build process if requested
@@ -669,6 +679,10 @@ enable or disable checking for AUR updates
 \fB\-\-changes\fR, \fB\-\-no\-changes\fR
 calculate changes from the latest known commit if available. Only applicable in dry run mode
 
+.TP
+\fB\-\-check\-files\fR, \fB\-\-no\-check\-files\fR
+enable or disable checking of broken dependencies (e.g. dynamically linked libraries or modules directories)
+
 .TP
 \fB\-\-dependencies\fR, \fB\-\-no\-dependencies\fR
 process missing package dependencies

View File

@@ -120,6 +120,7 @@ _shtab_ahriman_aur_search_options=(
 _shtab_ahriman_check_options=(
   "(- : *)"{-h,--help}"[show this help message and exit]"
   {--changes,--no-changes}"[calculate changes from the latest known commit if available. Only applicable in dry run mode (default\: True)]:changes:"
+  {--check-files,--no-check-files}"[enable or disable checking of broken dependencies (e.g. dynamically linked libraries or modules directories) (default\: True)]:check_files:"
   {-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
   {--vcs,--no-vcs}"[fetch actual version of VCS packages (default\: True)]:vcs:"
   "*"{-y,--refresh}"[download fresh package databases from the mirror before actions, -yy to force refresh even if up to date (default\: False)]"
@@ -153,6 +154,7 @@ _shtab_ahriman_daemon_options=(
   {-i,--interval}"[interval between runs in seconds (default\: 43200)]:interval:"
   {--aur,--no-aur}"[enable or disable checking for AUR updates (default\: True)]:aur:"
   {--changes,--no-changes}"[calculate changes from the latest known commit if available. Only applicable in dry run mode (default\: True)]:changes:"
+  {--check-files,--no-check-files}"[enable or disable checking of broken dependencies (e.g. dynamically linked libraries or modules directories) (default\: True)]:check_files:"
   {--dependencies,--no-dependencies}"[process missing package dependencies (default\: True)]:dependencies:"
   "--dry-run[just perform check for updates, same as check command (default\: False)]"
   {--increment,--no-increment}"[increment package release (pkgrel) on duplicate (default\: True)]:increment:"
@@ -322,6 +324,7 @@ _shtab_ahriman_repo_backup_options=(
 _shtab_ahriman_repo_check_options=(
   "(- : *)"{-h,--help}"[show this help message and exit]"
   {--changes,--no-changes}"[calculate changes from the latest known commit if available. Only applicable in dry run mode (default\: True)]:changes:"
+  {--check-files,--no-check-files}"[enable or disable checking of broken dependencies (e.g. dynamically linked libraries or modules directories) (default\: True)]:check_files:"
   {-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
   {--vcs,--no-vcs}"[fetch actual version of VCS packages (default\: True)]:vcs:"
   "*"{-y,--refresh}"[download fresh package databases from the mirror before actions, -yy to force refresh even if up to date (default\: False)]"
@@ -363,6 +366,7 @@ _shtab_ahriman_repo_daemon_options=(
   {-i,--interval}"[interval between runs in seconds (default\: 43200)]:interval:"
   {--aur,--no-aur}"[enable or disable checking for AUR updates (default\: True)]:aur:"
   {--changes,--no-changes}"[calculate changes from the latest known commit if available. Only applicable in dry run mode (default\: True)]:changes:"
+  {--check-files,--no-check-files}"[enable or disable checking of broken dependencies (e.g. dynamically linked libraries or modules directories) (default\: True)]:check_files:"
   {--dependencies,--no-dependencies}"[process missing package dependencies (default\: True)]:dependencies:"
   "--dry-run[just perform check for updates, same as check command (default\: False)]"
   {--increment,--no-increment}"[increment package release (pkgrel) on duplicate (default\: True)]:increment:"
@@ -460,6 +464,7 @@ _shtab_ahriman_repo_update_options=(
   "(- : *)"{-h,--help}"[show this help message and exit]"
   {--aur,--no-aur}"[enable or disable checking for AUR updates (default\: True)]:aur:"
   {--changes,--no-changes}"[calculate changes from the latest known commit if available. Only applicable in dry run mode (default\: True)]:changes:"
+  {--check-files,--no-check-files}"[enable or disable checking of broken dependencies (e.g. dynamically linked libraries or modules directories) (default\: True)]:check_files:"
   {--dependencies,--no-dependencies}"[process missing package dependencies (default\: True)]:dependencies:"
   "--dry-run[just perform check for updates, same as check command (default\: False)]"
   {-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
@@ -601,6 +606,7 @@ _shtab_ahriman_update_options=(
   "(- : *)"{-h,--help}"[show this help message and exit]"
   {--aur,--no-aur}"[enable or disable checking for AUR updates (default\: True)]:aur:"
   {--changes,--no-changes}"[calculate changes from the latest known commit if available. Only applicable in dry run mode (default\: True)]:changes:"
+  {--check-files,--no-check-files}"[enable or disable checking of broken dependencies (e.g. dynamically linked libraries or modules directories) (default\: True)]:check_files:"
   {--dependencies,--no-dependencies}"[process missing package dependencies (default\: True)]:dependencies:"
   "--dry-run[just perform check for updates, same as check command (default\: False)]"
   {-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"

View File

@@ -20,6 +20,7 @@ dependencies = [
     "cerberus",
     "inflection",
     "passlib",
+    "pyelftools",
     "requests",
     "srcinfo",
 ]
@@ -80,7 +81,7 @@ web = [
     "aiohttp_session",
     "aiohttp_security",
     "cryptography",
-    "requests-unixsocket", # required by unix socket support
+    "requests-unixsocket2", # required by unix socket support
     "setuptools", # required by aiohttp-apispec
 ]

View File

@@ -19,7 +19,6 @@
 #
 # pylint: disable=too-many-lines
 import argparse
-import tempfile
 
 from pathlib import Path
 from typing import TypeVar
@@ -73,8 +72,7 @@ def _parser() -> argparse.ArgumentParser:
     parser.add_argument("-c", "--configuration", help="configuration path", type=Path,
                         default=Path("/") / "etc" / "ahriman.ini")
     parser.add_argument("--force", help="force run, remove file lock", action="store_true")
-    parser.add_argument("-l", "--lock", help="lock file", type=Path,
-                        default=Path(tempfile.gettempdir()) / "ahriman.lock")
+    parser.add_argument("-l", "--lock", help="lock file", type=Path, default=Path("ahriman.pid"))
     parser.add_argument("--log-handler", help="explicit log handler specification. If none set, the handler will be "
                                               "guessed from environment",
                         type=LogHandler, choices=enum_values(LogHandler))
@@ -446,7 +444,7 @@ def _set_patch_list_parser(root: SubParserAction) -> argparse.ArgumentParser:
     """
     parser = root.add_parser("patch-list", help="list patch sets",
                              description="list available patches for the package", formatter_class=_formatter)
-    parser.add_argument("package", help="package base", nargs="?")
+    parser.add_argument("package", help="package base")
     parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
     parser.add_argument("-v", "--variable", help="if set, show only patches for specified PKGBUILD variables",
                         action="append")
@@ -537,6 +535,9 @@ def _set_repo_check_parser(root: SubParserAction) -> argparse.ArgumentParser:
     parser.add_argument("--changes", help="calculate changes from the latest known commit if available. "
                                           "Only applicable in dry run mode",
                         action=argparse.BooleanOptionalAction, default=True)
+    parser.add_argument("--check-files", help="enable or disable checking of broken dependencies "
+                                              "(e.g. dynamically linked libraries or modules directories)",
+                        action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
     parser.add_argument("--vcs", help="fetch actual version of VCS packages",
                         action=argparse.BooleanOptionalAction, default=True)
@@ -605,6 +606,9 @@ def _set_repo_daemon_parser(root: SubParserAction) -> argparse.ArgumentParser:
     parser.add_argument("--changes", help="calculate changes from the latest known commit if available. "
                                           "Only applicable in dry run mode",
                         action=argparse.BooleanOptionalAction, default=True)
+    parser.add_argument("--check-files", help="enable or disable checking of broken dependencies "
+                                              "(e.g. dynamically linked libraries or modules directories)",
+                        action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("--dependencies", help="process missing package dependencies",
                         action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("--dry-run", help="just perform check for updates, same as check command", action="store_true")
@@ -622,8 +626,7 @@ def _set_repo_daemon_parser(root: SubParserAction) -> argparse.ArgumentParser:
     parser.add_argument("-y", "--refresh", help="download fresh package databases from the mirror before actions, "
                                                 "-yy to force refresh even if up to date",
                         action="count", default=False)
-    parser.set_defaults(handler=handlers.Daemon, exit_code=False,
-                        lock=Path(tempfile.gettempdir()) / "ahriman-daemon.lock", package=[])
+    parser.set_defaults(handler=handlers.Daemon, exit_code=False, lock=Path("ahriman-daemon.pid"), package=[])
 
     return parser
@@ -826,6 +829,9 @@ def _set_repo_update_parser(root: SubParserAction) -> argparse.ArgumentParser:
     parser.add_argument("--changes", help="calculate changes from the latest known commit if available. "
                                           "Only applicable in dry run mode",
                         action=argparse.BooleanOptionalAction, default=True)
+    parser.add_argument("--check-files", help="enable or disable checking of broken dependencies "
+                                              "(e.g. dynamically linked libraries or modules directories)",
+                        action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("--dependencies", help="process missing package dependencies",
action=argparse.BooleanOptionalAction, default=True) action=argparse.BooleanOptionalAction, default=True)
parser.add_argument("--dry-run", help="just perform check for updates, same as check command", action="store_true") parser.add_argument("--dry-run", help="just perform check for updates, same as check command", action="store_true")
@ -871,7 +877,7 @@ def _set_service_clean_parser(root: SubParserAction) -> argparse.ArgumentParser:
action=argparse.BooleanOptionalAction, default=False) action=argparse.BooleanOptionalAction, default=False)
parser.add_argument("--pacman", help="clear directory with pacman local database cache", parser.add_argument("--pacman", help="clear directory with pacman local database cache",
action=argparse.BooleanOptionalAction, default=False) action=argparse.BooleanOptionalAction, default=False)
parser.set_defaults(handler=handlers.Clean, quiet=True, unsafe=True) parser.set_defaults(handler=handlers.Clean, lock=None, quiet=True, unsafe=True)
return parser return parser
@ -1130,8 +1136,8 @@ def _set_web_parser(root: SubParserAction) -> argparse.ArgumentParser:
argparse.ArgumentParser: created argument parser argparse.ArgumentParser: created argument parser
""" """
parser = root.add_parser("web", help="web server", description="start web server", formatter_class=_formatter) parser = root.add_parser("web", help="web server", description="start web server", formatter_class=_formatter)
parser.set_defaults(handler=handlers.Web, architecture="", lock=Path(tempfile.gettempdir()) / "ahriman-web.lock", parser.set_defaults(handler=handlers.Web, architecture="", lock=Path("ahriman-web.pid"), report=False,
report=False, repository="", parser=_parser) repository="", parser=_parser)
return parser return parser

View File

@ -62,10 +62,13 @@ class Application(ApplicationPackages, ApplicationRepository):
""" """
known_packages: set[str] = set() known_packages: set[str] = set()
# local set # local set
# this action is not really needed if ``alpm.use_ahriman_cache`` is set to yes, because pacman
# will eventually contain all the local packages
for base in self.repository.packages(): for base in self.repository.packages():
for package, properties in base.packages.items(): for package, properties in base.packages.items():
known_packages.add(package) known_packages.add(package)
known_packages.update(properties.provides) known_packages.update(properties.provides)
# known pacman databases
known_packages.update(self.repository.pacman.packages()) known_packages.update(self.repository.pacman.packages())
return known_packages return known_packages
@ -158,8 +161,7 @@ class Application(ApplicationPackages, ApplicationRepository):
package = Package.from_aur(package_name, username) package = Package.from_aur(package_name, username)
with_dependencies[package.base] = package with_dependencies[package.base] = package
# register package in local database # register package in the database
self.database.package_base_update(package)
self.repository.reporter.set_unknown(package) self.repository.reporter.set_unknown(package)
return list(with_dependencies.values()) return list(with_dependencies.values())

View File

@ -65,7 +65,7 @@ class ApplicationPackages(ApplicationProperties):
""" """
package = Package.from_aur(source, username) package = Package.from_aur(source, username)
self.database.build_queue_insert(package) self.database.build_queue_insert(package)
self.database.package_base_update(package) self.reporter.set_unknown(package)
def _add_directory(self, source: str, *_: Any) -> None: def _add_directory(self, source: str, *_: Any) -> None:
""" """
@ -139,7 +139,7 @@ class ApplicationPackages(ApplicationProperties):
""" """
package = Package.from_official(source, self.repository.pacman, username) package = Package.from_official(source, self.repository.pacman, username)
self.database.build_queue_insert(package) self.database.build_queue_insert(package)
self.database.package_base_update(package) self.reporter.set_unknown(package)
def add(self, names: Iterable[str], source: PackageSource, username: str | None = None) -> None: def add(self, names: Iterable[str], source: PackageSource, username: str | None = None) -> None:
""" """

View File

@ -21,6 +21,7 @@ from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite from ahriman.core.database import SQLite
from ahriman.core.log import LazyLogging from ahriman.core.log import LazyLogging
from ahriman.core.repository import Repository from ahriman.core.repository import Repository
from ahriman.core.status import Client
from ahriman.models.pacman_synchronization import PacmanSynchronization from ahriman.models.pacman_synchronization import PacmanSynchronization
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
@ -63,3 +64,13 @@ class ApplicationProperties(LazyLogging):
str: repository architecture str: repository architecture
""" """
return self.repository_id.architecture return self.repository_id.architecture
@property
def reporter(self) -> Client:
"""
instance of the web/database client
Returns:
Client: repository reporter
"""
return self.repository.reporter

View File

@ -39,15 +39,13 @@ class ApplicationRepository(ApplicationProperties):
Args: Args:
packages(Iterable[Package]): list of packages to retrieve changes packages(Iterable[Package]): list of packages to retrieve changes
""" """
last_commit_hashes = self.database.hashes_get()
for package in packages: for package in packages:
last_commit_sha = last_commit_hashes.get(package.base) last_commit_sha = self.reporter.package_changes_get(package.base).last_commit_sha
if last_commit_sha is None: if last_commit_sha is None:
continue # skip check in case if we can't calculate diff continue # skip check in case if we can't calculate diff
changes = self.repository.package_changes(package, last_commit_sha) changes = self.repository.package_changes(package, last_commit_sha)
self.repository.reporter.package_changes_set(package.base, changes) self.repository.reporter.package_changes_update(package.base, changes)
def clean(self, *, cache: bool, chroot: bool, manual: bool, packages: bool, pacman: bool) -> None: def clean(self, *, cache: bool, chroot: bool, manual: bool, packages: bool, pacman: bool) -> None:
""" """
@ -91,10 +89,7 @@ class ApplicationRepository(ApplicationProperties):
packages(Iterable[str]): only sign specified packages packages(Iterable[str]): only sign specified packages
""" """
# copy to prebuilt directory # copy to prebuilt directory
for package in self.repository.packages(): for package in self.repository.packages(packages):
# no one requested this package
if packages and package.base not in packages:
continue
for archive in package.packages.values(): for archive in package.packages.values():
if archive.filepath is None: if archive.filepath is None:
self.logger.warning("filepath is empty for %s", package.base) self.logger.warning("filepath is empty for %s", package.base)
@ -179,7 +174,7 @@ class ApplicationRepository(ApplicationProperties):
return result return result
def updates(self, filter_packages: Iterable[str], *, def updates(self, filter_packages: Iterable[str], *,
aur: bool, local: bool, manual: bool, vcs: bool) -> list[Package]: aur: bool, local: bool, manual: bool, vcs: bool, check_files: bool) -> list[Package]:
""" """
get list of packages to run update process get list of packages to run update process
@ -189,6 +184,7 @@ class ApplicationRepository(ApplicationProperties):
local(bool): enable or disable checking of local packages for updates local(bool): enable or disable checking of local packages for updates
manual(bool): include or exclude manual updates manual(bool): include or exclude manual updates
vcs(bool): enable or disable checking of VCS packages vcs(bool): enable or disable checking of VCS packages
check_files(bool): check for broken dependencies
Returns: Returns:
list[Package]: list of out-of-dated packages list[Package]: list of out-of-dated packages
@ -201,5 +197,7 @@ class ApplicationRepository(ApplicationProperties):
updates.update({package.base: package for package in self.repository.updates_local(vcs=vcs)}) updates.update({package.base: package for package in self.repository.updates_local(vcs=vcs)})
if manual: if manual:
updates.update({package.base: package for package in self.repository.updates_manual()}) updates.update({package.base: package for package in self.repository.updates_manual()})
if check_files:
updates.update({package.base: package for package in self.repository.updates_dependencies(filter_packages)})
return [package for _, package in sorted(updates.items())] return [package for _, package in sorted(updates.items())]
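
Since every call site now has to pass check_files explicitly, a hedged call-site sketch follows; the empty filter list and the flag values are illustrative only, and "application" stands for an already constructed Application instance:

    packages = application.updates([], aur=True, local=True, manual=True, vcs=True, check_files=True)
    # with check_files=True the result additionally includes packages whose file-level
    # dependencies (e.g. dynamically linked libraries) are reported as broken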

View File

@ -50,12 +50,13 @@ class Add(Handler):
application.add(args.package, args.source, args.username) application.add(args.package, args.source, args.username)
patches = [PkgbuildPatch.from_env(patch) for patch in args.variable] if args.variable is not None else [] patches = [PkgbuildPatch.from_env(patch) for patch in args.variable] if args.variable is not None else []
for package in args.package: # for each requested package insert patch for package in args.package: # for each requested package insert patch
application.database.patches_insert(package, patches) for patch in patches:
application.reporter.package_patches_update(package, patch)
if not args.now: if not args.now:
return return
packages = application.updates(args.package, aur=False, local=False, manual=True, vcs=False) packages = application.updates(args.package, aur=False, local=False, manual=True, vcs=False, check_files=False)
packages = application.with_dependencies(packages, process_dependencies=args.dependencies) packages = application.with_dependencies(packages, process_dependencies=args.dependencies)
packagers = Packagers(args.username, {package.base: package.packager for package in packages}) packagers = Packagers(args.username, {package.base: package.packager for package in packages})

View File

@ -18,10 +18,10 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
import argparse import argparse
import pwd import tarfile
from pathlib import Path from pathlib import Path
from tarfile import TarFile from pwd import getpwuid
from ahriman.application.handlers.handler import Handler from ahriman.application.handlers.handler import Handler
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
@ -49,7 +49,7 @@ class Backup(Handler):
report(bool): force enable or disable reporting report(bool): force enable or disable reporting
""" """
backup_paths = Backup.get_paths(configuration) backup_paths = Backup.get_paths(configuration)
with TarFile(args.path, mode="w") as archive: # well we don't actually use compression with tarfile.open(args.path, mode="w") as archive: # well we don't actually use compression
for backup_path in backup_paths: for backup_path in backup_paths:
archive.add(backup_path) archive.add(backup_path)
@ -77,7 +77,7 @@ class Backup(Handler):
# gnupg home with imported keys # gnupg home with imported keys
uid, _ = repository_paths.root_owner uid, _ = repository_paths.root_owner
system_user = pwd.getpwuid(uid) system_user = getpwuid(uid)
gnupg_home = Path(system_user.pw_dir) / ".gnupg" gnupg_home = Path(system_user.pw_dir) / ".gnupg"
if gnupg_home.is_dir(): if gnupg_home.is_dir():
paths.add(gnupg_home) paths.add(gnupg_home)

View File

@ -56,4 +56,4 @@ class Change(Handler):
ChangesPrinter(changes)(verbose=True, separator="") ChangesPrinter(changes)(verbose=True, separator="")
Change.check_if_empty(args.exit_code, changes.is_empty) Change.check_if_empty(args.exit_code, changes.is_empty)
case Action.Remove: case Action.Remove:
client.package_changes_set(args.package, Changes()) client.package_changes_update(args.package, Changes())

View File

@ -116,25 +116,28 @@ class Patch(Handler):
package_base(str): package base package_base(str): package base
patch(PkgbuildPatch): patch descriptor patch(PkgbuildPatch): patch descriptor
""" """
application.database.patches_insert(package_base, [patch]) application.reporter.package_patches_update(package_base, patch)
@staticmethod @staticmethod
def patch_set_list(application: Application, package_base: str | None, variables: list[str] | None, def patch_set_list(application: Application, package_base: str, variables: list[str] | None,
exit_code: bool) -> None: exit_code: bool) -> None:
""" """
list patches available for the package base list patches available for the package base
Args: Args:
application(Application): application instance application(Application): application instance
package_base(str | None): package base package_base(str): package base
variables(list[str] | None): extract patches only for specified PKGBUILD variables variables(list[str] | None): extract patches only for specified PKGBUILD variables
exit_code(bool): exit with error on empty search result exit_code(bool): exit with error on empty search result
""" """
patches = application.database.patches_list(package_base, variables) patches = [
patch
for patch in application.reporter.package_patches_get(package_base, None)
if variables is None or patch.key in variables
]
Patch.check_if_empty(exit_code, not patches) Patch.check_if_empty(exit_code, not patches)
for base, patch in patches.items(): PatchPrinter(package_base, patches)(verbose=True, separator=" = ")
PatchPrinter(base, patch)(verbose=True, separator=" = ")
@staticmethod @staticmethod
def patch_set_remove(application: Application, package_base: str, variables: list[str] | None) -> None: def patch_set_remove(application: Application, package_base: str, variables: list[str] | None) -> None:
@ -146,4 +149,8 @@ class Patch(Handler):
package_base(str): package base package_base(str): package base
variables(list[str] | None): remove patches only for specified PKGBUILD variables variables(list[str] | None): remove patches only for specified PKGBUILD variables
""" """
application.database.patches_remove(package_base, variables) if variables is not None:
for variable in variables: # iterate over single variable
application.reporter.package_patches_remove(package_base, variable)
else:
application.reporter.package_patches_remove(package_base, None) # just pass as is

View File

@ -76,7 +76,7 @@ class Rebuild(Handler):
if from_database: if from_database:
return [ return [
package package
for (package, last_status) in application.database.packages_get() for (package, last_status) in application.reporter.package_get(None)
if status is None or last_status.status == status if status is None or last_status.status == status
] ]

View File

@ -18,8 +18,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
import argparse import argparse
import tarfile
from tarfile import TarFile
from ahriman.application.handlers.handler import Handler from ahriman.application.handlers.handler import Handler
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
@ -45,5 +44,5 @@ class Restore(Handler):
configuration(Configuration): configuration instance configuration(Configuration): configuration instance
report(bool): force enable or disable reporting report(bool): force enable or disable reporting
""" """
with TarFile(args.path) as archive: with tarfile.open(args.path) as archive:
archive.extractall(path=args.output) archive.extractall(path=args.output) # nosec

View File

@ -43,7 +43,7 @@ class Search(Handler):
SORT_FIELDS = { SORT_FIELDS = {
field.name field.name
for field in fields(AURPackage) for field in fields(AURPackage)
if field.default_factory is not list # type: ignore[comparison-overlap] if field.default_factory is not list
} }
@classmethod @classmethod

View File

@ -51,12 +51,8 @@ class StatusUpdate(Handler):
match args.action: match args.action:
case Action.Update if args.package: case Action.Update if args.package:
# update packages statuses # update packages statuses
packages = application.repository.packages() for package in args.package:
for base in args.package: client.package_update(package, args.status)
if (local := next((package for package in packages if package.base == base), None)) is not None:
client.package_add(local, args.status)
else:
client.package_update(base, args.status)
case Action.Update: case Action.Update:
# update service status # update service status
client.status_update(args.status) client.status_update(args.status)

View File

@ -48,7 +48,8 @@ class Update(Handler):
application = Application(repository_id, configuration, report=report, refresh_pacman_database=args.refresh) application = Application(repository_id, configuration, report=report, refresh_pacman_database=args.refresh)
application.on_start() application.on_start()
packages = application.updates(args.package, aur=args.aur, local=args.local, manual=args.manual, vcs=args.vcs) packages = application.updates(args.package, aur=args.aur, local=args.local, manual=args.manual, vcs=args.vcs,
check_files=args.check_files)
if args.dry_run: # some check specific actions if args.dry_run: # some check specific actions
if args.changes: # generate changes if requested if args.changes: # generate changes if requested
application.changes(packages) application.changes(packages)

View File

@ -18,7 +18,10 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
import argparse import argparse
import fcntl
import os
from io import TextIOWrapper
from pathlib import Path from pathlib import Path
from types import TracebackType from types import TracebackType
from typing import Literal, Self from typing import Literal, Self
@ -27,7 +30,7 @@ from ahriman import __version__
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.exceptions import DuplicateRunError from ahriman.core.exceptions import DuplicateRunError
from ahriman.core.log import LazyLogging from ahriman.core.log import LazyLogging
from ahriman.core.status.client import Client from ahriman.core.status import Client
from ahriman.core.util import check_user from ahriman.core.util import check_user
from ahriman.models.build_status import BuildStatusEnum from ahriman.models.build_status import BuildStatusEnum
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
@ -36,7 +39,7 @@ from ahriman.models.waiter import Waiter
class Lock(LazyLogging): class Lock(LazyLogging):
""" """
wrapper for application lock file wrapper for application lock file. Credits for idea to https://github.com/bmhatfield/python-pidfile.git
Attributes: Attributes:
force(bool): remove lock file on start if any force(bool): remove lock file on start if any
@ -70,8 +73,13 @@ class Lock(LazyLogging):
repository_id(RepositoryId): repository unique identifier repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance configuration(Configuration): configuration instance
""" """
self.path: Path | None = \ self.path: Path | None = None
args.lock.with_stem(f"{args.lock.stem}_{repository_id.id}") if args.lock is not None else None if args.lock is not None:
self.path = args.lock.with_stem(f"{args.lock.stem}_{repository_id.id}")
if not self.path.is_absolute():
# prepend full path to the lock file
self.path = Path("/") / "run" / "ahriman" / self.path
self._pid_file: TextIOWrapper | None = None
self.force: bool = args.force self.force: bool = args.force
self.unsafe: bool = args.unsafe self.unsafe: bool = args.unsafe
@ -80,6 +88,72 @@ class Lock(LazyLogging):
self.paths = configuration.repository_paths self.paths = configuration.repository_paths
self.reporter = Client.load(repository_id, configuration, report=args.report) self.reporter = Client.load(repository_id, configuration, report=args.report)
@staticmethod
def perform_lock(fd: int) -> bool:
"""
perform file lock
Args:
fd(int): file descriptor
Returns:
bool: True in case if file is locked and False otherwise
"""
try:
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except OSError:
return False
return True
def _open(self) -> None:
"""
create lock file
"""
if self.path is None:
return
self._pid_file = self.path.open("a+")
def _watch(self) -> bool:
"""
watch until lock disappear
Returns:
bool: True in case if file is locked and False otherwise
"""
# there are reasons why we are not using inotify here. First of all, if we used it, it would lead to
# race conditions, because multiple processes would be notified at the same time. Secondly, it is a good library,
# but platform-specific, and we only need to check if file exists
if self._pid_file is None:
return False
waiter = Waiter(self.wait_timeout)
return bool(waiter.wait(lambda fd: not self.perform_lock(fd), self._pid_file.fileno()))
def _write(self, *, is_locked: bool = False) -> None:
"""
write pid to the lock file
Args:
is_locked(bool, optional): indicates if file was already locked or not (Default value = False)
Raises:
DuplicateRunError: if it cannot lock PID file
"""
if self._pid_file is None:
return
if not is_locked:
if not self.perform_lock(self._pid_file.fileno()):
raise DuplicateRunError
self._pid_file.seek(0) # reset position and remove file content if any
self._pid_file.truncate()
self._pid_file.write(str(os.getpid())) # write current pid
self._pid_file.flush() # flush data to disk
self._pid_file.seek(0) # reset position again
def check_user(self) -> None: def check_user(self) -> None:
""" """
check if current user is actually owner of ahriman root check if current user is actually owner of ahriman root
@ -100,46 +174,33 @@ class Lock(LazyLogging):
""" """
remove lock file remove lock file
""" """
if self.path is None: if self._pid_file is not None: # close file descriptor
return try:
self._pid_file.close()
except IOError:
pass # suppress any IO errors which occur
if self.path is not None: # remove lock file
self.path.unlink(missing_ok=True) self.path.unlink(missing_ok=True)
def create(self) -> None: def lock(self) -> None:
""" """
create lock file create pid file
Raises:
DuplicateRunError: if lock exists and no force flag supplied
""" """
if self.path is None: if self.force: # remove lock if force flag is set
return self.clear()
try: self._open()
self.path.touch(exist_ok=self.force) is_locked = self._watch()
except FileExistsError: self._write(is_locked=is_locked)
raise DuplicateRunError from None
def watch(self) -> None:
"""
watch until lock disappear
"""
# there are reasons why we are not using inotify here. First of all, if we would use it, it would bring to
# race conditions because multiple processes will be notified in the same time. Secondly, it is good library,
# but platform-specific, and we only need to check if file exists
if self.path is None:
return
waiter = Waiter(self.wait_timeout)
waiter.wait(self.path.is_file)
def __enter__(self) -> Self: def __enter__(self) -> Self:
""" """
default workflow is the following: default workflow is the following:
#. Check user UID #. Check user UID
#. Check if there is lock file
#. Check web status watcher status #. Check web status watcher status
#. Open lock file
#. Wait for lock file to be free #. Wait for lock file to be free
#. Create lock file and directory tree #. Write current PID to the lock file
#. Report to status page if enabled #. Report to status page if enabled
Returns: Returns:
@ -147,8 +208,7 @@ class Lock(LazyLogging):
""" """
self.check_user() self.check_user()
self.check_version() self.check_version()
self.watch() self.lock()
self.create()
self.reporter.status_update(BuildStatusEnum.Building) self.reporter.status_update(BuildStatusEnum.Building)
return self return self
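
The lock is now released by closing the descriptor rather than by deleting the file, so the advisory flock is what actually guards concurrent runs. A minimal standalone sketch of the same idea, assuming nothing about the handler wiring (the function name and error type are illustrative):

import fcntl
import os
from pathlib import Path

def acquire(path: Path):
    # the caller must keep the returned file object open for as long as the lock is needed
    pid_file = path.open("a+")
    try:
        fcntl.flock(pid_file.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError:
        pid_file.close()
        raise RuntimeError(f"{path} is already locked by another process")
    pid_file.seek(0)  # reset position and drop stale content
    pid_file.truncate()
    pid_file.write(str(os.getpid()))
    pid_file.flush()
    return pid_file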

View File

@ -38,12 +38,12 @@ class _Context:
""" """
self._content: dict[str, Any] = {} self._content: dict[str, Any] = {}
def get(self, key: ContextKey[T]) -> T: def get(self, key: ContextKey[T] | type[T]) -> T:
""" """
get value for the specified key get value for the specified key
Args: Args:
key(ContextKey[T]): context key name key(ContextKey[T] | type[T]): context key name
Returns: Returns:
T: value associated with the key T: value associated with the key
@ -52,29 +52,37 @@ class _Context:
KeyError: in case if the specified context variable was not found KeyError: in case if the specified context variable was not found
ValueError: in case if type of value is not an instance of specified return type ValueError: in case if type of value is not an instance of specified return type
""" """
if not isinstance(key, ContextKey):
key = ContextKey.from_type(key)
if key.key not in self._content: if key.key not in self._content:
raise KeyError(key.key) raise KeyError(key.key)
value = self._content[key.key] value = self._content[key.key]
if not isinstance(value, key.return_type): if not isinstance(value, key.return_type):
raise ValueError(f"Value {value} is not an instance of {key.return_type}") raise ValueError(f"Value {value} is not an instance of {key.return_type}")
return value return value
def set(self, key: ContextKey[T], value: T) -> None: def set(self, key: ContextKey[T] | type[T], value: T) -> None:
""" """
set value for the specified key set value for the specified key
Args: Args:
key(ContextKey[T]): context key name key(ContextKey[T] | type[T]): context key name
value(T): context value associated with the specified key value(T): context value associated with the specified key
Raises: Raises:
KeyError: in case if the specified context variable already exists KeyError: in case if the specified context variable already exists
ValueError: in case if type of value is not an instance of specified return type ValueError: in case if type of value is not an instance of specified return type
""" """
if not isinstance(key, ContextKey):
key = ContextKey.from_type(key)
if key.key in self._content: if key.key in self._content:
raise KeyError(key.key) raise KeyError(key.key)
if not isinstance(value, key.return_type): if not isinstance(value, key.return_type):
raise ValueError(f"Value {value} is not an instance of {key.return_type}") raise ValueError(f"Value {value} is not an instance of {key.return_type}")
self._content[key.key] = value self._content[key.key] = value
def __iter__(self) -> Iterator[str]: def __iter__(self) -> Iterator[str]:
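
With the widened signatures a bare type can be used as the key. A hedged example, assuming ContextKey.from_type derives the key name from the type as this change implies; ctx and configuration are hypothetical variables:

from ahriman.core.configuration import Configuration

ctx.set(Configuration, configuration)   # key is constructed from the type internally
configuration = ctx.get(Configuration)  # the same entry is resolved on read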

View File

@ -17,25 +17,33 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
import itertools
import shutil import shutil
import tarfile
from collections.abc import Callable, Generator from collections.abc import Generator, Iterable
from functools import cached_property from functools import cached_property
from pathlib import Path from pathlib import Path
from pyalpm import DB, Handle, Package, SIG_PACKAGE, error as PyalpmError # type: ignore[import-not-found] from pyalpm import DB, Handle, Package, SIG_DATABASE_OPTIONAL, SIG_PACKAGE_OPTIONAL # type: ignore[import-not-found]
from string import Template from string import Template
from ahriman.core.alpm.pacman_database import PacmanDatabase
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.log import LazyLogging from ahriman.core.log import LazyLogging
from ahriman.core.util import trim_package from ahriman.core.util import trim_package
from ahriman.models.pacman_synchronization import PacmanSynchronization from ahriman.models.pacman_synchronization import PacmanSynchronization
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
from ahriman.models.repository_paths import RepositoryPaths
class Pacman(LazyLogging): class Pacman(LazyLogging):
""" """
alpm wrapper alpm wrapper
Attributes:
configuration(Configuration): configuration instance
refresh_database(PacmanSynchronization): synchronize local cache to remote
repository_id(RepositoryId): repository unique identifier
repository_paths(RepositoryPaths): repository paths instance
""" """
def __init__(self, repository_id: RepositoryId, configuration: Configuration, *, def __init__(self, repository_id: RepositoryId, configuration: Configuration, *,
@ -48,8 +56,11 @@ class Pacman(LazyLogging):
configuration(Configuration): configuration instance configuration(Configuration): configuration instance
refresh_database(PacmanSynchronization): synchronize local cache to remote refresh_database(PacmanSynchronization): synchronize local cache to remote
""" """
self.__create_handle_fn: Callable[[], Handle] = lambda: self.__create_handle( self.configuration = configuration
repository_id, configuration, refresh_database=refresh_database) self.repository_id = repository_id
self.repository_paths = configuration.repository_paths
self.refresh_database = refresh_database
@cached_property @cached_property
def handle(self) -> Handle: def handle(self) -> Handle:
@ -59,40 +70,39 @@ class Pacman(LazyLogging):
Returns: Returns:
Handle: generated pyalpm handle instance Handle: generated pyalpm handle instance
""" """
return self.__create_handle_fn() return self.__create_handle(refresh_database=self.refresh_database)
def __create_handle(self, repository_id: RepositoryId, configuration: Configuration, *, def __create_handle(self, *, refresh_database: PacmanSynchronization) -> Handle:
refresh_database: PacmanSynchronization) -> Handle:
""" """
create lazy handle function create lazy handle function
Args: Args:
repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance
refresh_database(PacmanSynchronization): synchronize local cache to remote refresh_database(PacmanSynchronization): synchronize local cache to remote
Returns: Returns:
Handle: fully initialized pacman handle Handle: fully initialized pacman handle
""" """
root = configuration.getpath("alpm", "root") pacman_root = self.configuration.getpath("alpm", "database")
pacman_root = configuration.getpath("alpm", "database") use_ahriman_cache = self.configuration.getboolean("alpm", "use_ahriman_cache")
use_ahriman_cache = configuration.getboolean("alpm", "use_ahriman_cache")
mirror = configuration.get("alpm", "mirror")
paths = configuration.repository_paths
database_path = paths.pacman if use_ahriman_cache else pacman_root
database_path = self.repository_paths.pacman if use_ahriman_cache else pacman_root
root = self.configuration.getpath("alpm", "root")
handle = Handle(str(root), str(database_path)) handle = Handle(str(root), str(database_path))
for repository in configuration.getlist("alpm", "repositories"):
database = self.database_init(handle, repository, mirror, repository_id.architecture) for repository in self.configuration.getlist("alpm", "repositories"):
self.database_copy(handle, database, pacman_root, paths, use_ahriman_cache=use_ahriman_cache) database = self.database_init(handle, repository, self.repository_id.architecture)
self.database_copy(handle, database, pacman_root, use_ahriman_cache=use_ahriman_cache)
# install repository database too
local_database = self.database_init(handle, self.repository_id.name, self.repository_id.architecture)
self.database_copy(handle, local_database, pacman_root, use_ahriman_cache=use_ahriman_cache)
if use_ahriman_cache and refresh_database: if use_ahriman_cache and refresh_database:
self.database_sync(handle, force=refresh_database == PacmanSynchronization.Force) self.database_sync(handle, force=refresh_database == PacmanSynchronization.Force)
return handle return handle
def database_copy(self, handle: Handle, database: DB, pacman_root: Path, paths: RepositoryPaths, *, def database_copy(self, handle: Handle, database: DB, pacman_root: Path, *, use_ahriman_cache: bool) -> None:
use_ahriman_cache: bool) -> None:
""" """
copy database from the operating system root to the ahriman local home copy database from the operating system root to the ahriman local home
@ -100,7 +110,6 @@ class Pacman(LazyLogging):
handle(Handle): pacman handle which will be used for database copying handle(Handle): pacman handle which will be used for database copying
database(DB): pacman database instance to be copied database(DB): pacman database instance to be copied
pacman_root(Path): operating system pacman root pacman_root(Path): operating system pacman root
paths(RepositoryPaths): repository paths instance
use_ahriman_cache(bool): use local ahriman cache instead of system one use_ahriman_cache(bool): use local ahriman cache instead of system one
""" """
def repository_database(root: Path) -> Path: def repository_database(root: Path) -> Path:
@ -122,30 +131,36 @@ class Pacman(LazyLogging):
return # database for some reason does not exist return # database for some reason does not exist
self.logger.info("copy pacman database from operating system root to ahriman's home") self.logger.info("copy pacman database from operating system root to ahriman's home")
shutil.copy(src, dst) shutil.copy(src, dst)
paths.chown(dst) self.repository_paths.chown(dst)
def database_init(self, handle: Handle, repository: str, mirror: str, architecture: str) -> DB: def database_init(self, handle: Handle, repository: str, architecture: str) -> DB:
""" """
create database instance from pacman handler and set its properties create database instance from pacman handler and set its properties
Args: Args:
handle(Handle): pacman handle which will be used for database initializing handle(Handle): pacman handle which will be used for database initializing
repository(str): pacman repository name (e.g. core) repository(str): pacman repository name (e.g. core)
mirror(str): arch linux mirror url
architecture(str): repository architecture architecture(str): repository architecture
Returns: Returns:
DB: loaded pacman database instance DB: loaded pacman database instance
""" """
self.logger.info("loading pacman database %s", repository) self.logger.info("loading pacman database %s", repository)
database: DB = handle.register_syncdb(repository, SIG_PACKAGE) database: DB = handle.register_syncdb(repository, SIG_DATABASE_OPTIONAL | SIG_PACKAGE_OPTIONAL)
if repository != self.repository_id.name:
mirror = self.configuration.get("alpm", "mirror")
# replace variables in mirror address # replace variables in mirror address
variables = { variables = {
"arch": architecture, "arch": architecture,
"repo": repository, "repo": repository,
} }
database.servers = [Template(mirror).safe_substitute(variables)] server = Template(mirror).safe_substitute(variables)
else:
# special case, same database, use local storage instead
server = f"file://{self.repository_paths.repository}"
database.servers = [server]
return database return database
@ -160,13 +175,55 @@ class Pacman(LazyLogging):
self.logger.info("refresh ahriman's home pacman database (force refresh %s)", force) self.logger.info("refresh ahriman's home pacman database (force refresh %s)", force)
transaction = handle.init_transaction() transaction = handle.init_transaction()
for database in handle.get_syncdbs(): for database in handle.get_syncdbs():
try: PacmanDatabase(database, self.configuration).sync(force=force)
database.update(force)
except PyalpmError:
self.logger.exception("exception during update %s", database.name)
transaction.release() transaction.release()
def package_get(self, package_name: str) -> Generator[Package, None, None]: def files(self, packages: Iterable[str]) -> dict[str, set[str]]:
"""
extract lists of files for the specified packages from the files databases
Args:
packages(Iterable[str]): filter by package names
Returns:
dict[str, set[str]]: map of package name to its list of files
"""
def extract(tar: tarfile.TarFile, package_names: dict[str, str]) -> Generator[tuple[str, set[str]], None, None]:
for package_name, version in package_names.items():
path = Path(f"{package_name}-{version}") / "files"
try:
content = tar.extractfile(str(path))
except KeyError:
# in case if the database and its files database have gone out of sync somehow, extractfile will raise
# KeyError because the entry doesn't exist
content = None
if content is None:
continue
# this is just an array of files; however, the directories come with a trailing slash,
# which previously was removed by the conversion to ``pathlib.Path``
files = {filename.decode("utf8").rstrip().removesuffix("/") for filename in content.readlines()}
yield package_name, files
# sort is required for the following group by operation
descriptors = sorted(
(package for package_name in packages for package in self.package(package_name)),
key=lambda package: package.db.name
)
result: dict[str, set[str]] = {}
for database_name, pacman_packages in itertools.groupby(descriptors, lambda package: package.db.name):
database_file = self.repository_paths.pacman / "sync" / f"{database_name}.files.tar.gz"
if not database_file.is_file():
continue # no database file found
package_names = {package.name: package.version for package in pacman_packages}
with tarfile.open(database_file, "r:gz") as archive:
result.update(extract(archive, package_names))
return result
def package(self, package_name: str) -> Generator[Package, None, None]:
""" """
retrieve list of the packages from the repository by name retrieve list of the packages from the repository by name
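
A hedged usage sketch for the new files() helper; the package names are illustrative and "pacman" is assumed to be an initialized Pacman instance for the target repository:

files = pacman.files(["glibc", "python"])
for package_name, paths in files.items():
    # each value is the set of paths owned by the package, directories without trailing slash
    print(package_name, len(paths))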

View File

@ -0,0 +1,170 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import shutil
from email.utils import parsedate_to_datetime
from pathlib import Path
from pyalpm import DB # type: ignore[import-not-found]
from urllib.parse import urlparse
from ahriman.core.configuration import Configuration
from ahriman.core.exceptions import PacmanError
from ahriman.core.http import SyncHttpClient
class PacmanDatabase(SyncHttpClient):
"""
implementation for database sync, because pyalpm is not always enough
Attributes:
LAST_MODIFIED_HEADER(str): last modified header name
database(DB): pyalpm database object
repository_paths(RepositoryPaths): repository paths instance
sync_files_database(bool): sync files database
"""
LAST_MODIFIED_HEADER = "Last-Modified"
def __init__(self, database: DB, configuration: Configuration) -> None:
"""
default constructor
Args:
database(DB): pyalpm database object
configuration(Configuration): configuration instance
"""
SyncHttpClient.__init__(self)
self.timeout = None # reset timeout
self.database = database
self.repository_paths = configuration.repository_paths
self.sync_files_database = configuration.getboolean("alpm", "sync_files_database")
def copy(self, remote_path: Path, local_path: Path) -> None:
"""
copy local database file
Args:
remote_path(Path): path to source (remote) file
local_path(Path): path to locally stored file
"""
shutil.copy(remote_path, local_path)
def download(self, url: str, local_path: Path) -> None:
"""
download remote file and store it to local path with the correct last modified headers
Args:
url(str): remote url to request file
local_path(Path): path to locally stored file
Raises:
PacmanError: in case if no last-modified header was found
"""
response = self.make_request("GET", url, stream=True)
if self.LAST_MODIFIED_HEADER not in response.headers:
raise PacmanError("No last-modified header found")
with local_path.open("wb") as local_file:
for chunk in response.iter_content(chunk_size=1024):
local_file.write(chunk)
# set correct (a,m)time for the file
remote_changed = parsedate_to_datetime(response.headers[self.LAST_MODIFIED_HEADER]).timestamp()
os.utime(local_path, (remote_changed, remote_changed))
def is_outdated(self, url: str, local_path: Path) -> bool:
"""
check if local file is outdated
Args:
url(str): remote url to request last modified header
local_path(Path): path to locally stored file
Returns:
bool: True in case if remote file is newer than local file
Raises:
PacmanError: in case if no last-modified header was found
"""
if not local_path.is_file():
return True # no local file found, requires to update
response = self.make_request("HEAD", url)
if self.LAST_MODIFIED_HEADER not in response.headers:
raise PacmanError("No last-modified header found")
remote_changed = parsedate_to_datetime(response.headers["Last-Modified"]).timestamp()
local_changed = local_path.stat().st_mtime
return remote_changed > local_changed
def sync(self, *, force: bool) -> None:
"""
sync packages and files databases
Args:
force(bool): force database synchronization (same as ``pacman -Syy``)
"""
try:
self.sync_packages(force=force)
if self.sync_files_database:
self.sync_files(force=force)
except Exception:
self.logger.exception("exception during update %s", self.database.name)
def sync_files(self, *, force: bool) -> None:
"""
sync files by using http request
Args:
force(bool): force database synchronization (same as ``pacman -Syy``)
"""
server = next(iter(self.database.servers))
filename = f"{self.database.name}.files.tar.gz"
url = f"{server}/{filename}"
remote_uri = urlparse(url)
local_path = Path(self.repository_paths.pacman / "sync" / filename)
match remote_uri.scheme:
case "http" | "https":
if not force and not self.is_outdated(url, local_path):
return
self.download(url, local_path)
case "file":
# just copy file as it is relatively cheap operation, no need to check timestamps
self.copy(Path(remote_uri.path), local_path)
case other:
raise PacmanError(f"Unknown or unsupported URL scheme {other}")
def sync_packages(self, *, force: bool) -> None:
"""
sync packages by using built-in pyalpm methods
Args:
force(bool): force database synchronization (same as ``pacman -Syy``)
"""
self.database.update(force)
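
The class is driven from Pacman.database_sync shown earlier; a hedged sketch of that wiring, assuming handle and configuration are built the same way as in Pacman.__create_handle:

from ahriman.core.alpm.pacman_database import PacmanDatabase

for database in handle.get_syncdbs():
    # the packages database is synced via pyalpm, the *.files archive via HTTP with Last-Modified checks
    PacmanDatabase(database, configuration).sync(force=False)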

View File

@ -56,6 +56,6 @@ class OfficialSyncdb(Official):
raise UnknownPackageError(package_name) raise UnknownPackageError(package_name)
try: try:
return next(AURPackage.from_pacman(package) for package in pacman.package_get(package_name)) return next(AURPackage.from_pacman(package) for package in pacman.package(package_name))
except StopIteration: except StopIteration:
raise UnknownPackageError(package_name) from None raise UnknownPackageError(package_name) from None

View File

@ -21,7 +21,6 @@ from pathlib import Path
from ahriman.core.build_tools.sources import Sources from ahriman.core.build_tools.sources import Sources
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite
from ahriman.core.exceptions import BuildError from ahriman.core.exceptions import BuildError
from ahriman.core.log import LazyLogging from ahriman.core.log import LazyLogging
from ahriman.core.util import check_output from ahriman.core.util import check_output
@ -116,20 +115,20 @@ class Task(LazyLogging):
# e.g. in some cases packagelist command produces debug packages which were not actually built # e.g. in some cases packagelist command produces debug packages which were not actually built
return list(filter(lambda path: path.is_file(), map(Path, packages))) return list(filter(lambda path: path.is_file(), map(Path, packages)))
def init(self, sources_dir: Path, database: SQLite, local_version: str | None) -> str | None: def init(self, sources_dir: Path, patches: list[PkgbuildPatch], local_version: str | None) -> str | None:
""" """
fetch package from git fetch package from git
Args: Args:
sources_dir(Path): local path to fetch sources_dir(Path): local path to fetch
database(SQLite): database instance patches(list[PkgbuildPatch]): list of patches for the package
local_version(str | None): local version of the package. If set and equal to current version, it will local_version(str | None): local version of the package. If set and equal to current version, it will
automatically bump pkgrel automatically bump pkgrel
Returns: Returns:
str | None: current commit sha if available str | None: current commit sha if available
""" """
last_commit_sha = Sources.load(sources_dir, self.package, database.patches_get(self.package.base), self.paths) last_commit_sha = Sources.load(sources_dir, self.package, patches, self.paths)
if local_version is None: if local_version is None:
return last_commit_sha # there is no local package or pkgrel increment is disabled return last_commit_sha # there is no local package or pkgrel increment is disabled

View File

@ -89,6 +89,11 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
"path_exists": True, "path_exists": True,
"path_type": "dir", "path_type": "dir",
}, },
"sync_files_database": {
"type": "boolean",
"coerce": "boolean",
"required": True,
},
"use_ahriman_cache": { "use_ahriman_cache": {
"type": "boolean", "type": "boolean",
"coerce": "boolean", "coerce": "boolean",

View File

@ -0,0 +1,32 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__all__ = ["steps"]
steps = [
"""
create table package_dependencies (
package_base text not null,
repository text not null,
dependencies json not null,
unique (package_base, repository)
)
""",
]
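
A standalone illustration of the table this migration creates; the JSON payload below is a hypothetical path-to-packages mapping, the real structure is defined by the Dependencies model:

import json
import sqlite3

with sqlite3.connect(":memory:") as connection:
    connection.execute("""
        create table package_dependencies (
            package_base text not null,
            repository text not null,
            dependencies json not null,
            unique (package_base, repository)
        )
    """)
    connection.execute(
        "insert into package_dependencies (package_base, repository, dependencies) values (?, ?, ?)",
        ("ahriman", "x86_64-aur-clone", json.dumps({"/usr/lib/libalpm.so": ["pacman"]})),
    )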

View File

@ -20,6 +20,7 @@
from ahriman.core.database.operations.auth_operations import AuthOperations from ahriman.core.database.operations.auth_operations import AuthOperations
from ahriman.core.database.operations.build_operations import BuildOperations from ahriman.core.database.operations.build_operations import BuildOperations
from ahriman.core.database.operations.changes_operations import ChangesOperations from ahriman.core.database.operations.changes_operations import ChangesOperations
from ahriman.core.database.operations.dependencies_operations import DependenciesOperations
from ahriman.core.database.operations.logs_operations import LogsOperations from ahriman.core.database.operations.logs_operations import LogsOperations
from ahriman.core.database.operations.package_operations import PackageOperations from ahriman.core.database.operations.package_operations import PackageOperations
from ahriman.core.database.operations.patch_operations import PatchOperations from ahriman.core.database.operations.patch_operations import PatchOperations

View File

@ -64,7 +64,7 @@ class ChangesOperations(Operations):
def changes_insert(self, package_base: str, changes: Changes, repository_id: RepositoryId | None = None) -> None: def changes_insert(self, package_base: str, changes: Changes, repository_id: RepositoryId | None = None) -> None:
""" """
insert packages to build queue insert package changes
Args: Args:
package_base(str): package base to insert package_base(str): package base to insert
@ -117,27 +117,3 @@ class ChangesOperations(Operations):
}) })
return self.with_connection(run, commit=True) return self.with_connection(run, commit=True)
def hashes_get(self, repository_id: RepositoryId | None = None) -> dict[str, str]:
"""
extract last commit hashes if available
Args:
repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)
Returns:
dict[str, str]: map of package base to its last commit hash
"""
repository_id = repository_id or self._repository_id
def run(connection: Connection) -> dict[str, str]:
return {
row["package_base"]: row["last_commit_sha"]
for row in connection.execute(
"""select package_base, last_commit_sha from package_changes where repository = :repository""",
{"repository": repository_id.id}
)
}
return self.with_connection(run)

View File

@ -0,0 +1,116 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from sqlite3 import Connection
from ahriman.core.database.operations.operations import Operations
from ahriman.models.dependencies import Dependencies
from ahriman.models.repository_id import RepositoryId
class DependenciesOperations(Operations):
"""
operations for dependencies table
"""
def dependencies_get(self, package_base: str | None = None,
repository_id: RepositoryId | None = None) -> dict[str, Dependencies]:
"""
get dependencies for the specific package base if available
Args:
package_base(str | None): package base to search
repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)
Returns:
dict[str, Dependencies]: map of package base to its dependencies if available
"""
repository_id = repository_id or self._repository_id
def run(connection: Connection) -> dict[str, Dependencies]:
return {
row["package_base"]: Dependencies(row["dependencies"])
for row in connection.execute(
"""
select package_base, dependencies from package_dependencies
where (:package_base is null or package_base = :package_base)
and repository = :repository
""",
{
"package_base": package_base,
"repository": repository_id.id,
}
)
}
return self.with_connection(run)
def dependencies_insert(self, package_base: str, dependencies: Dependencies,
repository_id: RepositoryId | None = None) -> None:
"""
insert package dependencies
Args:
package_base(str): package base
dependencies(Dependencies): package dependencies
repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)
"""
repository_id = repository_id or self._repository_id
def run(connection: Connection) -> None:
connection.execute(
"""
insert into package_dependencies
(package_base, repository, dependencies)
values
(:package_base, :repository, :dependencies)
on conflict (package_base, repository) do update set
dependencies = :dependencies
""",
{
"package_base": package_base,
"repository": repository_id.id,
"dependencies": dependencies.paths,
})
return self.with_connection(run, commit=True)
def dependencies_remove(self, package_base: str | None, repository_id: RepositoryId | None = None) -> None:
"""
remove packages dependencies
Args:
package_base(str | None): optional filter by package base
repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)
"""
repository_id = repository_id or self._repository_id
def run(connection: Connection) -> None:
connection.execute(
"""
delete from package_dependencies
where (:package_base is null or package_base = :package_base)
and repository = :repository
""",
{
"package_base": package_base,
"repository": repository_id.id,
})
return self.with_connection(run, commit=True)
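
A hedged usage sketch of the new operations, assuming "database" is an ahriman.core.database.SQLite instance and that Dependencies accepts the stored mapping positionally, mirroring dependencies_get above; the mapping itself is illustrative:

from ahriman.models.dependencies import Dependencies

database.dependencies_insert("ahriman", Dependencies({"/usr/lib/libalpm.so": ["pacman"]}))
everything = database.dependencies_get()   # {package_base: Dependencies} for the current repository
database.dependencies_remove("ahriman")    # pass None to drop dependencies for every package base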

View File

@ -25,6 +25,7 @@ from typing import Any, TypeVar
from ahriman.core.log import LazyLogging from ahriman.core.log import LazyLogging
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
from ahriman.models.repository_paths import RepositoryPaths
T = TypeVar("T") T = TypeVar("T")
@ -38,7 +39,7 @@ class Operations(LazyLogging):
path(Path): path to the database file path(Path): path to the database file
""" """
def __init__(self, path: Path, repository_id: RepositoryId) -> None: def __init__(self, path: Path, repository_id: RepositoryId, repository_paths: RepositoryPaths) -> None:
""" """
default constructor default constructor
@ -48,6 +49,7 @@ class Operations(LazyLogging):
""" """
self.path = path self.path = path
self._repository_id = repository_id self._repository_id = repository_id
self._repository_paths = repository_paths
@staticmethod @staticmethod
def factory(cursor: sqlite3.Cursor, row: tuple[Any, ...]) -> dict[str, Any]: def factory(cursor: sqlite3.Cursor, row: tuple[Any, ...]) -> dict[str, Any]:

View File

@ -150,34 +150,6 @@ class PackageOperations(Operations):
""", """,
package_list) package_list)
@staticmethod
def _package_update_insert_status(connection: Connection, package_base: str, status: BuildStatus,
repository_id: RepositoryId) -> None:
"""
insert base package status into table
Args:
connection(Connection): database connection
package_base(str): package base name
status(BuildStatus): new build status
repository_id(RepositoryId): repository unique identifier
"""
connection.execute(
"""
insert into package_statuses
(package_base, status, last_updated, repository)
values
(:package_base, :status, :last_updated, :repository)
on conflict (package_base, repository) do update set
status = :status, last_updated = :last_updated
""",
{
"package_base": package_base,
"status": status.status.value,
"last_updated": status.timestamp,
"repository": repository_id.id,
})
@staticmethod @staticmethod
def _packages_get_select_package_bases(connection: Connection, repository_id: RepositoryId) -> dict[str, Package]: def _packages_get_select_package_bases(connection: Connection, repository_id: RepositoryId) -> dict[str, Package]:
""" """
@ -246,21 +218,6 @@ class PackageOperations(Operations):
) )
} }
def package_base_update(self, package: Package, repository_id: RepositoryId | None = None) -> None:
"""
update package base only
Args:
package(Package): package properties
repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)
"""
repository_id = repository_id or self._repository_id
def run(connection: Connection) -> None:
self._package_update_insert_base(connection, package, repository_id)
return self.with_connection(run, commit=True)
def package_remove(self, package_base: str, repository_id: RepositoryId | None = None) -> None: def package_remove(self, package_base: str, repository_id: RepositoryId | None = None) -> None:
""" """
remove package from database remove package from database
@ -277,20 +234,18 @@ class PackageOperations(Operations):
return self.with_connection(run, commit=True) return self.with_connection(run, commit=True)
def package_update(self, package: Package, status: BuildStatus, repository_id: RepositoryId | None = None) -> None: def package_update(self, package: Package, repository_id: RepositoryId | None = None) -> None:
""" """
update package status update package status
Args: Args:
package(Package): package properties package(Package): package properties
status(BuildStatus): new build status
repository_id(RepositoryId, optional): repository unique identifier override (Default value = None) repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)
""" """
repository_id = repository_id or self._repository_id repository_id = repository_id or self._repository_id
def run(connection: Connection) -> None: def run(connection: Connection) -> None:
self._package_update_insert_base(connection, package, repository_id) self._package_update_insert_base(connection, package, repository_id)
self._package_update_insert_status(connection, package.base, status, repository_id)
self._package_update_insert_packages(connection, package, repository_id) self._package_update_insert_packages(connection, package, repository_id)
self._package_remove_packages(connection, package.base, package.packages.keys(), repository_id) self._package_remove_packages(connection, package.base, package.packages.keys(), repository_id)
@ -317,22 +272,32 @@ class PackageOperations(Operations):
return self.with_connection(lambda connection: list(run(connection))) return self.with_connection(lambda connection: list(run(connection)))
def remotes_get(self, repository_id: RepositoryId | None = None) -> dict[str, RemoteSource]: def status_update(self, package_base: str, status: BuildStatus, repository_id: RepositoryId | None = None) -> None:
""" """
get packages remotes based on current settings insert base package status into table
Args: Args:
package_base(str): package base name
status(BuildStatus): new build status
repository_id(RepositoryId, optional): repository unique identifier override (Default value = None) repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)
Returns:
dict[str, RemoteSource]: map of package base to its remote sources
""" """
repository_id = repository_id or self._repository_id repository_id = repository_id or self._repository_id
def run(connection: Connection) -> dict[str, Package]: def run(connection: Connection) -> None:
return self._packages_get_select_package_bases(connection, repository_id) connection.execute(
"""
insert into package_statuses
(package_base, status, last_updated, repository)
values
(:package_base, :status, :last_updated, :repository)
on conflict (package_base, repository) do update set
status = :status, last_updated = :last_updated
""",
{
"package_base": package_base,
"status": status.status.value,
"last_updated": status.timestamp,
"repository": repository_id.id,
})
return { return self.with_connection(run, commit=True)
package_base: package.remote
for package_base, package in self.with_connection(run).items()
}

View File

@ -25,12 +25,19 @@ from typing import Self
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.database.migrations import Migrations from ahriman.core.database.migrations import Migrations
from ahriman.core.database.operations import AuthOperations, BuildOperations, ChangesOperations, LogsOperations, \ from ahriman.core.database.operations import AuthOperations, BuildOperations, ChangesOperations, \
PackageOperations, PatchOperations DependenciesOperations, LogsOperations, PackageOperations, PatchOperations
# pylint: disable=too-many-ancestors # pylint: disable=too-many-ancestors
class SQLite(AuthOperations, BuildOperations, ChangesOperations, LogsOperations, PackageOperations, PatchOperations): class SQLite(
AuthOperations,
BuildOperations,
ChangesOperations,
DependenciesOperations,
LogsOperations,
PackageOperations,
PatchOperations):
""" """
wrapper for sqlite3 database wrapper for sqlite3 database
@ -59,7 +66,7 @@ class SQLite(AuthOperations, BuildOperations, ChangesOperations, LogsOperations,
path = cls.database_path(configuration) path = cls.database_path(configuration)
_, repository_id = configuration.check_loaded() _, repository_id = configuration.check_loaded()
database = cls(path, repository_id) database = cls(path, repository_id, configuration.repository_paths)
database.init(configuration) database.init(configuration)
return database return database
@ -94,3 +101,24 @@ class SQLite(AuthOperations, BuildOperations, ChangesOperations, LogsOperations,
if configuration.getboolean("settings", "apply_migrations", fallback=True): if configuration.getboolean("settings", "apply_migrations", fallback=True):
self.with_connection(lambda connection: Migrations.migrate(connection, configuration)) self.with_connection(lambda connection: Migrations.migrate(connection, configuration))
paths.chown(self.path) paths.chown(self.path)
def package_clear(self, package_base: str) -> None:
"""
completely remove package from all tables
Args:
package_base(str): package base to remove
Examples:
This method completely removes the package from all tables and must be used, e.g. on package removal::
>>> database.package_clear("ahriman")
"""
self.build_queue_clear(package_base)
self.patches_remove(package_base, [])
self.logs_remove(package_base, None)
self.changes_remove(package_base)
self.dependencies_remove(package_base)
# remove local cache too
self._repository_paths.tree_clear(package_base)

View File

@ -219,6 +219,21 @@ class PackageInfoError(RuntimeError):
RuntimeError.__init__(self, f"There are errors during reading package information: `{details}`") RuntimeError.__init__(self, f"There are errors during reading package information: `{details}`")
class PacmanError(RuntimeError):
"""
exception in case of pacman operation errors
"""
def __init__(self, details: Any) -> None:
"""
default constructor
Args:
details(Any): error details
"""
RuntimeError.__init__(self, f"Could not perform operation with pacman: `{details}`")
class PathError(ValueError): class PathError(ValueError):
""" """
exception which will be raised on a path which does not belong to the root directory exception which will be raised on a path which does not belong to the root directory

View File

@ -25,9 +25,9 @@ from tempfile import TemporaryDirectory
from ahriman.core.build_tools.sources import Sources from ahriman.core.build_tools.sources import Sources
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite
from ahriman.core.exceptions import GitRemoteError from ahriman.core.exceptions import GitRemoteError
from ahriman.core.log import LazyLogging from ahriman.core.log import LazyLogging
from ahriman.core.status import Client
from ahriman.models.package import Package from ahriman.models.package import Package
from ahriman.models.package_source import PackageSource from ahriman.models.package_source import PackageSource
from ahriman.models.remote_source import RemoteSource from ahriman.models.remote_source import RemoteSource
@ -40,20 +40,20 @@ class RemotePush(LazyLogging):
Attributes: Attributes:
commit_author(tuple[str, str] | None): optional commit author in form of git config commit_author(tuple[str, str] | None): optional commit author in form of git config
database(SQLite): database instance
remote_source(RemoteSource): repository remote source (remote pull url and branch) remote_source(RemoteSource): repository remote source (remote pull url and branch)
reporter(Client): reporter client used for additional information retrieval
""" """
def __init__(self, database: SQLite, configuration: Configuration, section: str) -> None: def __init__(self, reporter: Client, configuration: Configuration, section: str) -> None:
""" """
default constructor default constructor
Args: Args:
database(SQLite): database instance reporter(Client): reporter client
configuration(Configuration): configuration instance configuration(Configuration): configuration instance
section(str): settings section name section(str): settings section name
""" """
self.database = database self.reporter = reporter
commit_email = configuration.get(section, "commit_email", fallback="ahriman@localhost") commit_email = configuration.get(section, "commit_email", fallback="ahriman@localhost")
commit_user = configuration.get(section, "commit_user", fallback="ahriman") commit_user = configuration.get(section, "commit_user", fallback="ahriman")
@ -92,7 +92,7 @@ class RemotePush(LazyLogging):
else: else:
shutil.rmtree(git_file) shutil.rmtree(git_file)
# ...copy all patches... # ...copy all patches...
for patch in self.database.patches_get(package.base): for patch in self.reporter.package_patches_get(package.base, None):
filename = f"ahriman-{package.base}.patch" if patch.key is None else f"ahriman-{patch.key}.patch" filename = f"ahriman-{package.base}.patch" if patch.key is None else f"ahriman-{patch.key}.patch"
patch.write(package_target_dir / filename) patch.write(package_target_dir / filename)
# ...and finally return path to the copied directory # ...and finally return path to the copied directory

View File

@ -19,10 +19,9 @@
# #
from ahriman.core import context from ahriman.core import context
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite
from ahriman.core.gitremote.remote_push import RemotePush from ahriman.core.gitremote.remote_push import RemotePush
from ahriman.core.status import Client
from ahriman.core.triggers import Trigger from ahriman.core.triggers import Trigger
from ahriman.models.context_key import ContextKey
from ahriman.models.package import Package from ahriman.models.package import Package
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
from ahriman.models.result import Result from ahriman.models.result import Result
@ -111,10 +110,10 @@ class RemotePushTrigger(Trigger):
GitRemoteError: if database is not set in context GitRemoteError: if database is not set in context
""" """
ctx = context.get() ctx = context.get()
database = ctx.get(ContextKey("database", SQLite)) reporter = ctx.get(Client)
for target in self.targets: for target in self.targets:
section, _ = self.configuration.gettype( section, _ = self.configuration.gettype(
target, self.repository_id, fallback=self.CONFIGURATION_SCHEMA_FALLBACK) target, self.repository_id, fallback=self.CONFIGURATION_SCHEMA_FALLBACK)
runner = RemotePush(database, self.configuration, section) runner = RemotePush(reporter, self.configuration, section)
runner.run(result) runner.run(result)

View File

@ -46,8 +46,8 @@ class SyncAhrimanClient(SyncHttpClient):
request.Session: created session object request.Session: created session object
""" """
if urlparse(self.address).scheme == "http+unix": if urlparse(self.address).scheme == "http+unix":
import requests_unixsocket # type: ignore[import-untyped] import requests_unixsocket
session: requests.Session = requests_unixsocket.Session() session: requests.Session = requests_unixsocket.Session() # type: ignore[no-untyped-call]
session.headers["User-Agent"] = f"ahriman/{__version__}" session.headers["User-Agent"] = f"ahriman/{__version__}"
return session return session
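To illustrate the http+unix branch above, a hedged sketch of talking to the service over a unix socket with the requests-unixsocket2 fork; the socket path and endpoint are assumptions, the actual values are configuration dependent:

    from urllib.parse import quote

    import requests_unixsocket

    # the socket path is percent-encoded into the authority part of the url
    socket = quote("/run/ahriman/ahriman-web.sock", safe="")
    session = requests_unixsocket.Session()
    response = session.get(f"http+unix://{socket}/api/v1/status")
    response.raise_for_status()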

View File

@ -38,7 +38,7 @@ class SyncHttpClient(LazyLogging):
Attributes: Attributes:
auth(tuple[str, str] | None): HTTP basic auth object if set auth(tuple[str, str] | None): HTTP basic auth object if set
suppress_errors(bool): suppress logging of request errors suppress_errors(bool): suppress logging of request errors
timeout(int): HTTP request timeout in seconds timeout(int | None): HTTP request timeout in seconds
""" """
def __init__(self, configuration: Configuration | None = None, section: str | None = None, *, def __init__(self, configuration: Configuration | None = None, section: str | None = None, *,
@ -60,7 +60,7 @@ class SyncHttpClient(LazyLogging):
password = configuration.get(section, "password", fallback=None) password = configuration.get(section, "password", fallback=None)
self.auth = (username, password) if username and password else None self.auth = (username, password) if username and password else None
self.timeout = configuration.getint(section, "timeout", fallback=30) self.timeout: int | None = configuration.getint(section, "timeout", fallback=30)
self.suppress_errors = suppress_errors self.suppress_errors = suppress_errors
@cached_property @cached_property
@ -90,25 +90,27 @@ class SyncHttpClient(LazyLogging):
result: str = exception.response.text if exception.response is not None else "" result: str = exception.response.text if exception.response is not None else ""
return result return result
def make_request(self, method: Literal["DELETE", "GET", "POST", "PUT"], url: str, *, def make_request(self, method: Literal["DELETE", "GET", "HEAD", "POST", "PUT"], url: str, *,
headers: dict[str, str] | None = None, headers: dict[str, str] | None = None,
params: list[tuple[str, str]] | None = None, params: list[tuple[str, str]] | None = None,
data: Any | None = None, data: Any | None = None,
json: dict[str, Any] | None = None, json: dict[str, Any] | None = None,
files: dict[str, MultipartType] | None = None, files: dict[str, MultipartType] | None = None,
stream: bool | None = None,
session: requests.Session | None = None, session: requests.Session | None = None,
suppress_errors: bool | None = None) -> requests.Response: suppress_errors: bool | None = None) -> requests.Response:
""" """
perform request with specified parameters perform request with specified parameters
Args: Args:
method(Literal["DELETE", "GET", "POST", "PUT"]): HTTP method to call method(Literal["DELETE", "GET", "HEAD", "POST", "PUT"]): HTTP method to call
url(str): remote url to call url(str): remote url to call
headers(dict[str, str] | None, optional): request headers (Default value = None) headers(dict[str, str] | None, optional): request headers (Default value = None)
params(list[tuple[str, str]] | None, optional): request query parameters (Default value = None) params(list[tuple[str, str]] | None, optional): request query parameters (Default value = None)
data(Any | None, optional): request raw data parameters (Default value = None) data(Any | None, optional): request raw data parameters (Default value = None)
json(dict[str, Any] | None, optional): request json parameters (Default value = None) json(dict[str, Any] | None, optional): request json parameters (Default value = None)
files(dict[str, MultipartType] | None, optional): multipart upload (Default value = None) files(dict[str, MultipartType] | None, optional): multipart upload (Default value = None)
stream(bool | None, optional): handle response as stream (Default value = None)
session(requests.Session | None, optional): session object if any (Default value = None) session(requests.Session | None, optional): session object if any (Default value = None)
suppress_errors(bool | None, optional): suppress logging errors (e.g. if no web server available). If none suppress_errors(bool | None, optional): suppress logging errors (e.g. if no web server available). If none
set, the instance-wide value will be used (Default value = None) set, the instance-wide value will be used (Default value = None)
@ -124,7 +126,7 @@ class SyncHttpClient(LazyLogging):
try: try:
response = session.request(method, url, params=params, data=data, headers=headers, files=files, json=json, response = session.request(method, url, params=params, data=data, headers=headers, files=files, json=json,
auth=self.auth, timeout=self.timeout) stream=stream, auth=self.auth, timeout=self.timeout)
response.raise_for_status() response.raise_for_status()
return response return response
except requests.HTTPError as ex: except requests.HTTPError as ex:
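A hedged example of the new stream flag, assuming client is a SyncHttpClient instance and the url is hypothetical; streaming lets large responses be written out chunk by chunk instead of being loaded into memory:

    # download a large file chunk by chunk
    response = client.make_request("GET", "https://example.org/big-archive.tar.gz", stream=True)
    with open("big-archive.tar.gz", "wb") as archive:
        for chunk in response.iter_content(chunk_size=8192):
            archive.write(chunk)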

View File

@ -22,6 +22,7 @@ import logging
from typing import Self from typing import Self
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.status import Client
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
@ -49,8 +50,6 @@ class HttpLogHandler(logging.Handler):
# we don't really care about those parameters because they will be handled by the reporter # we don't really care about those parameters because they will be handled by the reporter
logging.Handler.__init__(self) logging.Handler.__init__(self)
# client has to be imported here because of circular imports
from ahriman.core.status.client import Client
self.reporter = Client.load(repository_id, configuration, report=report) self.reporter = Client.load(repository_id, configuration, report=report)
self.suppress_errors = suppress_errors self.suppress_errors = suppress_errors
@ -92,7 +91,7 @@ class HttpLogHandler(logging.Handler):
return # in case if no package base supplied we need just skip log message return # in case if no package base supplied we need just skip log message
try: try:
self.reporter.package_logs(log_record_id, record) self.reporter.package_logs_add(log_record_id, record.created, record.getMessage())
except Exception: except Exception:
if self.suppress_errors: if self.suppress_errors:
return return

View File

@ -29,6 +29,7 @@ from ahriman.core.repository.package_info import PackageInfo
from ahriman.core.util import safe_filename from ahriman.core.util import safe_filename
from ahriman.models.changes import Changes from ahriman.models.changes import Changes
from ahriman.models.package import Package from ahriman.models.package import Package
from ahriman.models.package_archive import PackageArchive
from ahriman.models.package_description import PackageDescription from ahriman.models.package_description import PackageDescription
from ahriman.models.packagers import Packagers from ahriman.models.packagers import Packagers
from ahriman.models.result import Result from ahriman.models.result import Result
@ -57,7 +58,8 @@ class Executor(PackageInfo, Cleaner):
self.reporter.set_building(package.base) self.reporter.set_building(package.base)
task = Task(package, self.configuration, self.architecture, self.paths) task = Task(package, self.configuration, self.architecture, self.paths)
local_version = local_versions.get(package.base) if bump_pkgrel else None local_version = local_versions.get(package.base) if bump_pkgrel else None
commit_sha = task.init(local_path, self.database, local_version) patches = self.reporter.package_patches_get(package.base, None)
commit_sha = task.init(local_path, patches, local_version)
built = task.build(local_path, PACKAGER=packager_id) built = task.build(local_path, PACKAGER=packager_id)
for src in built: for src in built:
dst = self.paths.packages / src.name dst = self.paths.packages / src.name
@ -76,7 +78,11 @@ class Executor(PackageInfo, Cleaner):
packager = self.packager(packagers, single.base) packager = self.packager(packagers, single.base)
last_commit_sha = build_single(single, Path(dir_name), packager.packager_id) last_commit_sha = build_single(single, Path(dir_name), packager.packager_id)
# clear changes and update commit hash # clear changes and update commit hash
self.reporter.package_changes_set(single.base, Changes(last_commit_sha)) self.reporter.package_changes_update(single.base, Changes(last_commit_sha))
# update dependencies list
dependencies = PackageArchive(self.paths.build_directory, single, self.pacman).depends_on()
self.reporter.package_dependencies_update(single.base, dependencies)
# update result set
result.add_updated(single) result.add_updated(single)
except Exception: except Exception:
self.reporter.set_failed(single.base) self.reporter.set_failed(single.base)
@ -97,12 +103,7 @@ class Executor(PackageInfo, Cleaner):
""" """
def remove_base(package_base: str) -> None: def remove_base(package_base: str) -> None:
try: try:
self.paths.tree_clear(package_base) # remove all internal files self.reporter.package_remove(package_base)
self.database.build_queue_clear(package_base)
self.database.patches_remove(package_base, [])
self.database.logs_remove(package_base, None)
self.database.changes_remove(package_base)
self.reporter.package_remove(package_base) # we only update status page in case of base removal
except Exception: except Exception:
self.logger.exception("could not remove base %s", package_base) self.logger.exception("could not remove base %s", package_base)

View File

@ -43,14 +43,14 @@ class PackageInfo(RepositoryProperties):
Returns: Returns:
list[Package]: list of read packages list[Package]: list of read packages
""" """
sources = self.database.remotes_get() sources = {package.base: package.remote for package, _ in self.reporter.package_get(None)}
result: dict[str, Package] = {} result: dict[str, Package] = {}
# we are iterating over bases, not single packages # we are iterating over bases, not single packages
for full_path in packages: for full_path in packages:
try: try:
local = Package.from_archive(full_path, self.pacman) local = Package.from_archive(full_path, self.pacman)
if (source := sources.get(local.base)) is not None: if (source := sources.get(local.base)) is not None: # update source with remote
local.remote = source local.remote = source
current = result.setdefault(local.base, local) current = result.setdefault(local.base, local)
@ -78,7 +78,8 @@ class PackageInfo(RepositoryProperties):
""" """
with TemporaryDirectory(ignore_cleanup_errors=True) as dir_name: with TemporaryDirectory(ignore_cleanup_errors=True) as dir_name:
dir_path = Path(dir_name) dir_path = Path(dir_name)
current_commit_sha = Sources.load(dir_path, package, self.database.patches_get(package.base), self.paths) patches = self.reporter.package_patches_get(package.base, None)
current_commit_sha = Sources.load(dir_path, package, patches, self.paths)
changes: str | None = None changes: str | None = None
if current_commit_sha != last_commit_sha: if current_commit_sha != last_commit_sha:
@ -86,14 +87,21 @@ class PackageInfo(RepositoryProperties):
return Changes(last_commit_sha, changes) return Changes(last_commit_sha, changes)
def packages(self) -> list[Package]: def packages(self, filter_packages: Iterable[str] | None = None) -> list[Package]:
""" """
generate list of repository packages generate list of repository packages
Args:
filter_packages(Iterable[str] | None, optional): filter packages list by specified only
Returns: Returns:
list[Package]: list of packages properties list[Package]: list of packages properties
""" """
return self.load_archives(filter(package_like, self.paths.repository.iterdir())) packages = self.load_archives(filter(package_like, self.paths.repository.iterdir()))
if filter_packages:
packages = [package for package in packages if package.base in filter_packages]
return packages
def packages_built(self) -> list[Path]: def packages_built(self) -> list[Path]:
""" """

View File

@ -26,7 +26,7 @@ from ahriman.core.database import SQLite
from ahriman.core.repository.executor import Executor from ahriman.core.repository.executor import Executor
from ahriman.core.repository.update_handler import UpdateHandler from ahriman.core.repository.update_handler import UpdateHandler
from ahriman.core.sign.gpg import GPG from ahriman.core.sign.gpg import GPG
from ahriman.models.context_key import ContextKey from ahriman.core.status import Client
from ahriman.models.pacman_synchronization import PacmanSynchronization from ahriman.models.pacman_synchronization import PacmanSynchronization
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
@ -89,11 +89,12 @@ class Repository(Executor, UpdateHandler):
# directly without loader # directly without loader
ctx = _Context() ctx = _Context()
ctx.set(ContextKey("database", SQLite), self.database) ctx.set(SQLite, self.database)
ctx.set(ContextKey("configuration", Configuration), self.configuration) ctx.set(Configuration, self.configuration)
ctx.set(ContextKey("pacman", Pacman), self.pacman) ctx.set(Pacman, self.pacman)
ctx.set(ContextKey("sign", GPG), self.sign) ctx.set(GPG, self.sign)
ctx.set(Client, self.reporter)
ctx.set(ContextKey("repository", type(self)), self) ctx.set(type(self), self)
context.set(ctx) context.set(ctx)
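With the simplified context keys shown above, consumers can look values up by type directly; a sketch of how a trigger now retrieves the reporter (it mirrors the RemotePushTrigger change earlier in this diff):

    from ahriman.core import context
    from ahriman.core.status import Client

    # no explicit ContextKey required any more, the type itself acts as the key
    ctx = context.get()
    reporter = ctx.get(Client)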

View File

@ -23,7 +23,7 @@ from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite from ahriman.core.database import SQLite
from ahriman.core.log import LazyLogging from ahriman.core.log import LazyLogging
from ahriman.core.sign.gpg import GPG from ahriman.core.sign.gpg import GPG
from ahriman.core.status.client import Client from ahriman.core.status import Client
from ahriman.core.triggers import TriggerLoader from ahriman.core.triggers import TriggerLoader
from ahriman.models.packagers import Packagers from ahriman.models.packagers import Packagers
from ahriman.models.pacman_synchronization import PacmanSynchronization from ahriman.models.pacman_synchronization import PacmanSynchronization
@ -75,7 +75,7 @@ class RepositoryProperties(LazyLogging):
self.pacman = Pacman(repository_id, configuration, refresh_database=refresh_pacman_database) self.pacman = Pacman(repository_id, configuration, refresh_database=refresh_pacman_database)
self.sign = GPG(configuration) self.sign = GPG(configuration)
self.repo = Repo(self.name, self.paths, self.sign.repository_sign_args) self.repo = Repo(self.name, self.paths, self.sign.repository_sign_args)
self.reporter = Client.load(repository_id, configuration, report=report) self.reporter = Client.load(repository_id, configuration, database, report=report)
self.triggers = TriggerLoader.load(repository_id, configuration) self.triggers = TriggerLoader.load(repository_id, configuration)
@property @property

View File

@ -55,17 +55,13 @@ class UpdateHandler(PackageInfo, Cleaner):
continue continue
raise UnknownPackageError(package.base) raise UnknownPackageError(package.base)
local_versions = {package.base: package.version for package in self.packages()}
result: list[Package] = [] result: list[Package] = []
for local in self.packages(): for local in self.packages(filter_packages):
with self.in_package_context(local.base, local_versions.get(local.base)): with self.in_package_context(local.base, local.version):
if not local.remote.is_remote: if not local.remote.is_remote:
continue # avoid checking local packages continue # avoid checking local packages
if local.base in self.ignore_list: if local.base in self.ignore_list:
continue continue
if filter_packages and local.base not in filter_packages:
continue
try: try:
remote = load_remote(local) remote = load_remote(local)
@ -82,6 +78,45 @@ class UpdateHandler(PackageInfo, Cleaner):
return result return result
def updates_dependencies(self, filter_packages: Iterable[str]) -> list[Package]:
"""
check packages which are required to be rebuilt based on dynamic dependencies (e.g. linking, module paths, etc.)
Args:
filter_packages(Iterable[str]): do not check every package just specified in the list
Returns:
list[Package]: list of packages which have broken dynamic linking
"""
def extract_files(lookup_packages: Iterable[str]) -> dict[str, set[str]]:
database_files = self.pacman.files(lookup_packages)
files: dict[str, set[str]] = {}
for package_name, package_files in database_files.items(): # invert map
for package_file in package_files:
files.setdefault(package_file, set()).add(package_name)
return files
result: list[Package] = []
for package in self.packages(filter_packages):
dependencies = self.reporter.package_dependencies_get(package.base)
if not dependencies.paths:
continue # skip check if no package dependencies found
required_packages = {dep for dep_packages in dependencies.paths.values() for dep in dep_packages}
filesystem = extract_files(required_packages)
for path, packages in dependencies.paths.items():
found = filesystem.get(path, set())
if found.intersection(packages):
continue
# there are no packages found in filesystem with the same paths
result.append(package)
break
return result
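The rebuild check above boils down to an inverted index lookup; a standalone sketch with made-up data:

    # paths the package was linked against at build time (from the dependencies table)
    paths = {"usr/lib/libssl.so.3": {"openssl"}, "usr/lib/libfoo.so.1": {"foo"}}
    # current pacman files database, inverted to path -> providing packages
    filesystem = {"usr/lib/libssl.so.3": {"openssl"}}

    # a package is broken if at least one path is no longer provided by any of its former providers
    broken = any(not filesystem.get(path, set()) & packages for path, packages in paths.items())
    print(broken)  # True, libfoo.so.1 lost its provider and the package must be rebuilt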
def updates_local(self, *, vcs: bool) -> list[Package]: def updates_local(self, *, vcs: bool) -> list[Package]:
""" """
check local packages for updates check local packages for updates

View File

@ -17,3 +17,4 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
from ahriman.core.status.client import Client

View File

@ -17,16 +17,18 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
# pylint: disable=too-many-public-methods
from __future__ import annotations from __future__ import annotations
import logging
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite
from ahriman.models.build_status import BuildStatus, BuildStatusEnum from ahriman.models.build_status import BuildStatus, BuildStatusEnum
from ahriman.models.changes import Changes from ahriman.models.changes import Changes
from ahriman.models.dependencies import Dependencies
from ahriman.models.internal_status import InternalStatus from ahriman.models.internal_status import InternalStatus
from ahriman.models.log_record_id import LogRecordId from ahriman.models.log_record_id import LogRecordId
from ahriman.models.package import Package from ahriman.models.package import Package
from ahriman.models.pkgbuild_patch import PkgbuildPatch
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
@ -36,22 +38,31 @@ class Client:
""" """
@staticmethod @staticmethod
def load(repository_id: RepositoryId, configuration: Configuration, *, report: bool) -> Client: def load(repository_id: RepositoryId, configuration: Configuration, database: SQLite | None = None, *,
report: bool = True) -> Client:
""" """
load client from settings load client from settings
Args: Args:
repository_id(RepositoryId): repository unique identifier repository_id(RepositoryId): repository unique identifier
configuration(Configuration): configuration instance configuration(Configuration): configuration instance
report(bool): force enable or disable reporting database(SQLite | None, optional): database instance (Default value = None)
report(bool, optional): force enable or disable reporting (Default value = True)
Returns: Returns:
Client: client according to current settings Client: client according to current settings
""" """
def make_local_client() -> Client:
if database is None:
return Client()
from ahriman.core.status.local_client import LocalClient
return LocalClient(repository_id, database)
if not report: if not report:
return Client() return make_local_client()
if not configuration.getboolean("status", "enabled", fallback=True): # global switch if not configuration.getboolean("status", "enabled", fallback=True): # global switch
return Client() return make_local_client()
# new-style section # new-style section
address = configuration.get("status", "address", fallback=None) address = configuration.get("status", "address", fallback=None)
@ -65,16 +76,8 @@ class Client:
if address or legacy_address or (host and port) or socket: if address or legacy_address or (host and port) or socket:
from ahriman.core.status.web_client import WebClient from ahriman.core.status.web_client import WebClient
return WebClient(repository_id, configuration) return WebClient(repository_id, configuration)
return Client()
def package_add(self, package: Package, status: BuildStatusEnum) -> None: return make_local_client()
"""
add new package with status
Args:
package(Package): package properties
status(BuildStatusEnum): current package build status
"""
def package_changes_get(self, package_base: str) -> Changes: def package_changes_get(self, package_base: str) -> Changes:
""" """
@ -85,18 +88,52 @@ class Client:
Returns: Returns:
Changes: package changes if available and empty object otherwise Changes: package changes if available and empty object otherwise
"""
del package_base
return Changes()
def package_changes_set(self, package_base: str, changes: Changes) -> None: Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def package_changes_update(self, package_base: str, changes: Changes) -> None:
""" """
update package changes update package changes
Args: Args:
package_base(str): package base to update package_base(str): package base to update
changes(Changes): changes descriptor changes(Changes): changes descriptor
Raises:
NotImplementedError: not implemented method
""" """
raise NotImplementedError
def package_dependencies_get(self, package_base: str) -> Dependencies:
"""
get package dependencies
Args:
package_base(str): package base to retrieve
Returns:
Dependencies: package implicit dependencies if available
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def package_dependencies_update(self, package_base: str, dependencies: Dependencies) -> None:
"""
update package dependencies
Args:
package_base(str): package base to update
dependencies(Dependencies): dependencies descriptor
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def package_get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]: def package_get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
""" """
@ -107,18 +144,94 @@ class Client:
Returns: Returns:
list[tuple[Package, BuildStatus]]: list of current package description and status if it has been found list[tuple[Package, BuildStatus]]: list of current package description and status if it has been found
"""
del package_base
return []
def package_logs(self, log_record_id: LogRecordId, record: logging.LogRecord) -> None: Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def package_logs_add(self, log_record_id: LogRecordId, created: float, message: str) -> None:
""" """
post log record post log record
Args: Args:
log_record_id(LogRecordId): log record id log_record_id(LogRecordId): log record id
record(logging.LogRecord): log record to post to api created(float): log created timestamp
message(str): log message
""" """
# this method does not raise NotImplementedError because it is actively used as dummy client for http log
def package_logs_get(self, package_base: str, limit: int = -1, offset: int = 0) -> list[tuple[float, str]]:
"""
get package logs
Args:
package_base(str): package base
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
offset(int, optional): records offset (Default value = 0)
Returns:
list[tuple[float, str]]: package logs
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def package_logs_remove(self, package_base: str, version: str | None) -> None:
"""
remove package logs
Args:
package_base(str): package base
version(str | None): package version to remove logs. If None set, all logs will be removed
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def package_patches_get(self, package_base: str, variable: str | None) -> list[PkgbuildPatch]:
"""
get package patches
Args:
package_base(str): package base to retrieve
variable(str | None): optional filter by patch variable
Returns:
list[PkgbuildPatch]: list of patches for the specified package
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def package_patches_remove(self, package_base: str, variable: str | None) -> None:
"""
remove package patch
Args:
package_base(str): package base to update
variable(str | None): patch name. If None set, all patches will be removed
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def package_patches_update(self, package_base: str, patch: PkgbuildPatch) -> None:
"""
create or update package patch
Args:
package_base(str): package base to update
patch(PkgbuildPatch): package patch
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def package_remove(self, package_base: str) -> None: def package_remove(self, package_base: str) -> None:
""" """
@ -126,16 +239,37 @@ class Client:
Args: Args:
package_base(str): package base to remove package_base(str): package base to remove
"""
def package_update(self, package_base: str, status: BuildStatusEnum) -> None: Raises:
NotImplementedError: not implemented method
""" """
update package build status. Unlike :func:`package_add()` it does not update package properties raise NotImplementedError
def package_status_update(self, package_base: str, status: BuildStatusEnum) -> None:
"""
update package build status. Unlike :func:`package_update()` it does not update package properties
Args: Args:
package_base(str): package base to update package_base(str): package base to update
status(BuildStatusEnum): current package build status status(BuildStatusEnum): current package build status
Raises:
NotImplementedError: not implemented method
""" """
raise NotImplementedError
def package_update(self, package: Package, status: BuildStatusEnum) -> None:
"""
add new package or update existing one with status
Args:
package(Package): package properties
status(BuildStatusEnum): current package build status
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def set_building(self, package_base: str) -> None: def set_building(self, package_base: str) -> None:
""" """
@ -144,7 +278,7 @@ class Client:
Args: Args:
package_base(str): package base to update package_base(str): package base to update
""" """
return self.package_update(package_base, BuildStatusEnum.Building) self.package_status_update(package_base, BuildStatusEnum.Building)
def set_failed(self, package_base: str) -> None: def set_failed(self, package_base: str) -> None:
""" """
@ -153,7 +287,7 @@ class Client:
Args: Args:
package_base(str): package base to update package_base(str): package base to update
""" """
return self.package_update(package_base, BuildStatusEnum.Failed) self.package_status_update(package_base, BuildStatusEnum.Failed)
def set_pending(self, package_base: str) -> None: def set_pending(self, package_base: str) -> None:
""" """
@ -162,7 +296,7 @@ class Client:
Args: Args:
package_base(str): package base to update package_base(str): package base to update
""" """
return self.package_update(package_base, BuildStatusEnum.Pending) self.package_status_update(package_base, BuildStatusEnum.Pending)
def set_success(self, package: Package) -> None: def set_success(self, package: Package) -> None:
""" """
@ -171,16 +305,19 @@ class Client:
Args: Args:
package(Package): current package properties package(Package): current package properties
""" """
return self.package_add(package, BuildStatusEnum.Success) self.package_update(package, BuildStatusEnum.Success)
def set_unknown(self, package: Package) -> None: def set_unknown(self, package: Package) -> None:
""" """
set package status to unknown set package status to unknown. Unlike other methods, this method also checks if package is known,
and - in case it is - silently skips the update
Args: Args:
package(Package): current package properties package(Package): current package properties
""" """
return self.package_add(package, BuildStatusEnum.Unknown) if self.package_get(package.base):
return # skip update in case if package is already known
self.package_update(package, BuildStatusEnum.Unknown)
def status_get(self) -> InternalStatus: def status_get(self) -> InternalStatus:
""" """

View File

@ -0,0 +1,214 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.core.database import SQLite
from ahriman.core.status import Client
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
from ahriman.models.changes import Changes
from ahriman.models.dependencies import Dependencies
from ahriman.models.log_record_id import LogRecordId
from ahriman.models.package import Package
from ahriman.models.pkgbuild_patch import PkgbuildPatch
from ahriman.models.repository_id import RepositoryId
class LocalClient(Client):
"""
local database handler
Attributes:
database(SQLite): database instance
repository_id(RepositoryId): repository unique identifier
"""
def __init__(self, repository_id: RepositoryId, database: SQLite) -> None:
"""
default constructor
Args:
repository_id(RepositoryId): repository unique identifier
database(SQLite): database instance
"""
self.database = database
self.repository_id = repository_id
def package_changes_get(self, package_base: str) -> Changes:
"""
get package changes
Args:
package_base(str): package base to retrieve
Returns:
Changes: package changes if available and empty object otherwise
"""
return self.database.changes_get(package_base, self.repository_id)
def package_changes_update(self, package_base: str, changes: Changes) -> None:
"""
update package changes
Args:
package_base(str): package base to update
changes(Changes): changes descriptor
"""
self.database.changes_insert(package_base, changes, self.repository_id)
def package_dependencies_get(self, package_base: str) -> Dependencies:
"""
get package dependencies
Args:
package_base(str): package base to retrieve
Returns:
Dependencies: package implicit dependencies if available
"""
return self.database.dependencies_get(package_base, self.repository_id).get(package_base, Dependencies())
def package_dependencies_update(self, package_base: str, dependencies: Dependencies) -> None:
"""
update package dependencies
Args:
package_base(str): package base to update
dependencies(Dependencies): dependencies descriptor
"""
self.database.dependencies_insert(package_base, dependencies, self.repository_id)
def package_get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
"""
get package status
Args:
package_base(str | None): package base to get
Returns:
list[tuple[Package, BuildStatus]]: list of current package description and status if it has been found
"""
packages = self.database.packages_get(self.repository_id)
if package_base is None:
return packages
return [(package, status) for package, status in packages if package.base == package_base]
def package_logs_add(self, log_record_id: LogRecordId, created: float, message: str) -> None:
"""
post log record
Args:
log_record_id(LogRecordId): log record id
created(float): log created timestamp
message(str): log message
"""
self.database.logs_insert(log_record_id, created, message, self.repository_id)
def package_logs_get(self, package_base: str, limit: int = -1, offset: int = 0) -> list[tuple[float, str]]:
"""
get package logs
Args:
package_base(str): package base
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
offset(int, optional): records offset (Default value = 0)
Returns:
list[tuple[float, str]]: package logs
"""
return self.database.logs_get(package_base, limit, offset, self.repository_id)
def package_logs_remove(self, package_base: str, version: str | None) -> None:
"""
remove package logs
Args:
package_base(str): package base
version(str | None): package version to remove logs. If None set, all logs will be removed
"""
self.database.logs_remove(package_base, version, self.repository_id)
def package_patches_get(self, package_base: str, variable: str | None) -> list[PkgbuildPatch]:
"""
get package patches
Args:
package_base(str): package base to retrieve
variable(str | None): optional filter by patch variable
Returns:
list[PkgbuildPatch]: list of patches for the specified package
"""
variables = [variable] if variable is not None else None
return self.database.patches_list(package_base, variables).get(package_base, [])
def package_patches_remove(self, package_base: str, variable: str | None) -> None:
"""
remove package patch
Args:
package_base(str): package base to update
variable(str | None): patch name. If None set, all patches will be removed
"""
variables = [variable] if variable is not None else None
self.database.patches_remove(package_base, variables)
def package_patches_update(self, package_base: str, patch: PkgbuildPatch) -> None:
"""
create or update package patch
Args:
package_base(str): package base to update
patch(PkgbuildPatch): package patch
"""
self.database.patches_insert(package_base, [patch])
def package_remove(self, package_base: str) -> None:
"""
remove packages from watcher
Args:
package_base(str): package base to remove
"""
self.database.package_clear(package_base)
def package_status_update(self, package_base: str, status: BuildStatusEnum) -> None:
"""
update package build status. Unlike :func:`package_update()` it does not update package properties
Args:
package_base(str): package base to update
status(BuildStatusEnum): current package build status
"""
self.database.status_update(package_base, BuildStatus(status), self.repository_id)
def package_update(self, package: Package, status: BuildStatusEnum) -> None:
"""
add new package or update existing one with status
Args:
package(Package): package properties
status(BuildStatusEnum): current package build status
"""
self.database.package_update(package, self.repository_id)
self.database.status_update(package.base, BuildStatus(status), self.repository_id)
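A usage sketch of the local reporter mode, assuming repository_id, configuration and database are already loaded; Client.load falls back to LocalClient when a database is supplied and reporting is disabled:

    from ahriman.core.status import Client
    from ahriman.models.build_status import BuildStatusEnum

    # returns LocalClient here because reporting is disabled and a database is available
    reporter = Client.load(repository_id, configuration, database, report=False)
    reporter.package_status_update("ahriman", BuildStatusEnum.Pending)
    patches = reporter.package_patches_get("ahriman", None)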

View File

@ -17,17 +17,19 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
from collections.abc import Callable
from threading import Lock from threading import Lock
from typing import Any, Self
from ahriman.core.database import SQLite
from ahriman.core.exceptions import UnknownPackageError from ahriman.core.exceptions import UnknownPackageError
from ahriman.core.log import LazyLogging from ahriman.core.log import LazyLogging
from ahriman.core.status import Client
from ahriman.models.build_status import BuildStatus, BuildStatusEnum from ahriman.models.build_status import BuildStatus, BuildStatusEnum
from ahriman.models.changes import Changes from ahriman.models.changes import Changes
from ahriman.models.dependencies import Dependencies
from ahriman.models.log_record_id import LogRecordId from ahriman.models.log_record_id import LogRecordId
from ahriman.models.package import Package from ahriman.models.package import Package
from ahriman.models.pkgbuild_patch import PkgbuildPatch from ahriman.models.pkgbuild_patch import PkgbuildPatch
from ahriman.models.repository_id import RepositoryId
class Watcher(LazyLogging): class Watcher(LazyLogging):
@ -35,21 +37,18 @@ class Watcher(LazyLogging):
package status watcher package status watcher
Attributes: Attributes:
database(SQLite): database instance client(Client): reporter instance
repository_id(RepositoryId): repository unique identifier
status(BuildStatus): daemon status status(BuildStatus): daemon status
""" """
def __init__(self, repository_id: RepositoryId, database: SQLite) -> None: def __init__(self, client: Client) -> None:
""" """
default constructor default constructor
Args: Args:
repository_id(RepositoryId): repository unique identifier client(Client): reporter instance
database(SQLite): database instance
""" """
self.repository_id = repository_id self.client = client
self.database = database
self._lock = Lock() self._lock = Lock()
self._known: dict[str, tuple[Package, BuildStatus]] = {} self._known: dict[str, tuple[Package, BuildStatus]] = {}
@ -76,61 +75,16 @@ class Watcher(LazyLogging):
with self._lock: with self._lock:
self._known = { self._known = {
package.base: (package, status) package.base: (package, status)
for package, status in self.database.packages_get(self.repository_id) for package, status in self.client.package_get(None)
} }
def logs_get(self, package_base: str, limit: int = -1, offset: int = 0) -> list[tuple[float, str]]: package_changes_get: Callable[[str], Changes]
"""
extract logs for the package base
Args: package_changes_update: Callable[[str, Changes], None]
package_base(str): package base
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
offset(int, optional): records offset (Default value = 0)
Returns: package_dependencies_get: Callable[[str], Dependencies]
list[tuple[float, str]]: package logs
"""
self.package_get(package_base)
return self.database.logs_get(package_base, limit, offset, self.repository_id)
def logs_remove(self, package_base: str, version: str | None) -> None: package_dependencies_update: Callable[[str, Dependencies], None]
"""
remove package related logs
Args:
package_base(str): package base
version(str): package version
"""
self.database.logs_remove(package_base, version, self.repository_id)
def logs_update(self, log_record_id: LogRecordId, created: float, record: str) -> None:
"""
make new log record into database
Args:
log_record_id(LogRecordId): log record id
created(float): log created timestamp
record(str): log record
"""
if self._last_log_record_id != log_record_id:
# there is new log record, so we remove old ones
self.logs_remove(log_record_id.package_base, log_record_id.version)
self._last_log_record_id = log_record_id
self.database.logs_insert(log_record_id, created, record, self.repository_id)
def package_changes_get(self, package_base: str) -> Changes:
"""
retrieve package changes
Args:
package_base(str): package base
Returns:
Changes: package changes if available
"""
self.package_get(package_base)
return self.database.changes_get(package_base, self.repository_id)
def package_get(self, package_base: str) -> tuple[Package, BuildStatus]: def package_get(self, package_base: str) -> tuple[Package, BuildStatus]:
""" """
@ -151,6 +105,31 @@ class Watcher(LazyLogging):
except KeyError: except KeyError:
raise UnknownPackageError(package_base) from None raise UnknownPackageError(package_base) from None
def package_logs_add(self, log_record_id: LogRecordId, created: float, message: str) -> None:
"""
make new log record into database
Args:
log_record_id(LogRecordId): log record id
created(float): log created timestamp
message(str): log message
"""
if self._last_log_record_id != log_record_id:
# there is new log record, so we remove old ones
self.package_logs_remove(log_record_id.package_base, log_record_id.version)
self._last_log_record_id = log_record_id
self.client.package_logs_add(log_record_id, created, message)
package_logs_get: Callable[[str, int, int], list[tuple[float, str]]]
package_logs_remove: Callable[[str, str | None], None]
package_patches_get: Callable[[str, str | None], list[PkgbuildPatch]]
package_patches_remove: Callable[[str, str], None]
package_patches_update: Callable[[str, PkgbuildPatch], None]
def package_remove(self, package_base: str) -> None: def package_remove(self, package_base: str) -> None:
""" """
remove package base from known list if any remove package base from known list if any
@ -160,60 +139,33 @@ class Watcher(LazyLogging):
""" """
with self._lock: with self._lock:
self._known.pop(package_base, None) self._known.pop(package_base, None)
self.database.package_remove(package_base, self.repository_id) self.client.package_remove(package_base)
self.logs_remove(package_base, None) self.package_logs_remove(package_base, None)
def package_update(self, package_base: str, status: BuildStatusEnum, package: Package | None) -> None: def package_status_update(self, package_base: str, status: BuildStatusEnum) -> None:
""" """
update package status and description update package status
Args: Args:
package_base(str): package base to update package_base(str): package base to update
status(BuildStatusEnum): new build status status(BuildStatusEnum): new build status
package(Package | None): optional package description. In case if not set current properties will be used
""" """
if package is None:
package, _ = self.package_get(package_base) package, _ = self.package_get(package_base)
full_status = BuildStatus(status)
with self._lock: with self._lock:
self._known[package_base] = (package, full_status) self._known[package_base] = (package, BuildStatus(status))
self.database.package_update(package, full_status, self.repository_id) self.client.package_status_update(package_base, status)
def patches_get(self, package_base: str, variable: str | None) -> list[PkgbuildPatch]: def package_update(self, package: Package, status: BuildStatusEnum) -> None:
""" """
get patches for the package update package
Args: Args:
package_base(str): package base package(Package): package description
variable(str | None): patch variable name if any status(BuildStatusEnum): new build status
Returns:
list[PkgbuildPatch]: list of patches which are stored for the package
""" """
# patches are package base based, we don't know (and don't differentiate) to which package does them belong with self._lock:
# so here we skip checking if package exists or not self._known[package.base] = (package, BuildStatus(status))
variables = [variable] if variable is not None else None self.client.package_update(package, status)
return self.database.patches_list(package_base, variables).get(package_base, [])
def patches_remove(self, package_base: str, variable: str) -> None:
"""
remove package patch
Args:
package_base(str): package base
variable(str): patch variable name
"""
self.database.patches_remove(package_base, [variable])
def patches_update(self, package_base: str, patch: PkgbuildPatch) -> None:
"""
update package patch
Args:
package_base(str): package base
patch(PkgbuildPatch): package patch
"""
self.database.patches_insert(package_base, [patch])
def status_update(self, status: BuildStatusEnum) -> None: def status_update(self, status: BuildStatusEnum) -> None:
""" """
@ -223,3 +175,34 @@ class Watcher(LazyLogging):
status(BuildStatusEnum): new service status status(BuildStatusEnum): new service status
""" """
self.status = BuildStatus(status) self.status = BuildStatus(status)
def __call__(self, package_base: str | None) -> Self:
"""
extract client for future calls
Args:
package_base(str | None): package base to validate that package exists if applicable
Returns:
Self: instance of self to pass calls to the client
"""
if package_base is not None:
_ = self.package_get(package_base)
return self
def __getattr__(self, item: str) -> Any:
"""
proxy methods for reporter client
Args:
item(str): property name
Returns:
Any: attribute by its name
Raises:
AttributeError: in case if no such attribute found
"""
if (method := getattr(self.client, item, None)) is not None:
return method
raise AttributeError(f"'{self.__class__.__qualname__}' object has no attribute '{item}'")
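The combination of __call__ and __getattr__ above turns the watcher into a thin validating proxy over the reporter client. A minimal usage sketch, assuming "watcher" is an initialized Watcher bound to a reporter client and "ahriman" is an arbitrary package base (both names are illustrative, not taken from this diff):

from ahriman.models.build_status import BuildStatusEnum

reporter = watcher("ahriman")                          # __call__ validates that the package base exists
changes = reporter.package_changes_get("ahriman")      # unknown attributes are forwarded to the client
watcher(None).status_update(BuildStatusEnum.Success)   # no validation when no package base is given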


@ -18,18 +18,19 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
import contextlib import contextlib
import logging
from urllib.parse import quote_plus as urlencode from urllib.parse import quote_plus as urlencode
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.http import SyncAhrimanClient from ahriman.core.http import SyncAhrimanClient
from ahriman.core.status.client import Client from ahriman.core.status import Client
from ahriman.models.build_status import BuildStatus, BuildStatusEnum from ahriman.models.build_status import BuildStatus, BuildStatusEnum
from ahriman.models.changes import Changes from ahriman.models.changes import Changes
from ahriman.models.dependencies import Dependencies
from ahriman.models.internal_status import InternalStatus from ahriman.models.internal_status import InternalStatus
from ahriman.models.log_record_id import LogRecordId from ahriman.models.log_record_id import LogRecordId
from ahriman.models.package import Package from ahriman.models.package import Package
from ahriman.models.pkgbuild_patch import PkgbuildPatch
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
@ -92,10 +93,22 @@ class WebClient(Client, SyncAhrimanClient):
package_base(str): package base package_base(str): package base
Returns: Returns:
str: full url for web service for logs str: full url for web service for changes
""" """
return f"{self.address}/api/v1/packages/{urlencode(package_base)}/changes" return f"{self.address}/api/v1/packages/{urlencode(package_base)}/changes"
def _dependencies_url(self, package_base: str) -> str:
"""
get url for the dependencies api
Args:
package_base(str): package base
Returns:
str: full url for web service for dependencies
"""
return f"{self.address}/api/v1/packages/{urlencode(package_base)}/dependencies"
def _logs_url(self, package_base: str) -> str: def _logs_url(self, package_base: str) -> str:
""" """
get url for the logs api get url for the logs api
@ -110,7 +123,7 @@ class WebClient(Client, SyncAhrimanClient):
def _package_url(self, package_base: str = "") -> str: def _package_url(self, package_base: str = "") -> str:
""" """
url generator package url generator
Args: Args:
package_base(str, optional): package base to generate url (Default value = "") package_base(str, optional): package base to generate url (Default value = "")
@ -121,6 +134,20 @@ class WebClient(Client, SyncAhrimanClient):
suffix = f"/{urlencode(package_base)}" if package_base else "" suffix = f"/{urlencode(package_base)}" if package_base else ""
return f"{self.address}/api/v1/packages{suffix}" return f"{self.address}/api/v1/packages{suffix}"
def _patches_url(self, package_base: str, variable: str = "") -> str:
"""
patches url generator
Args:
package_base(str): package base
variable(str, optional): patch variable name to generate url (Default value = "")
Returns:
str: full url of web service for the package patch
"""
suffix = f"/{urlencode(variable)}" if variable else ""
return f"{self.address}/api/v1/packages/{urlencode(package_base)}/patches{suffix}"
def _status_url(self) -> str: def _status_url(self) -> str:
""" """
get url for the status api get url for the status api
@ -130,22 +157,6 @@ class WebClient(Client, SyncAhrimanClient):
""" """
return f"{self.address}/api/v1/status" return f"{self.address}/api/v1/status"
def package_add(self, package: Package, status: BuildStatusEnum) -> None:
"""
add new package with status
Args:
package(Package): package properties
status(BuildStatusEnum): current package build status
"""
payload = {
"status": status.value,
"package": package.view()
}
with contextlib.suppress(Exception):
self.make_request("POST", self._package_url(package.base),
params=self.repository_id.query(), json=payload)
def package_changes_get(self, package_base: str) -> Changes: def package_changes_get(self, package_base: str) -> Changes:
""" """
get package changes get package changes
@ -165,7 +176,7 @@ class WebClient(Client, SyncAhrimanClient):
return Changes() return Changes()
def package_changes_set(self, package_base: str, changes: Changes) -> None: def package_changes_update(self, package_base: str, changes: Changes) -> None:
""" """
update package changes update package changes
@ -177,6 +188,37 @@ class WebClient(Client, SyncAhrimanClient):
self.make_request("POST", self._changes_url(package_base), self.make_request("POST", self._changes_url(package_base),
params=self.repository_id.query(), json=changes.view()) params=self.repository_id.query(), json=changes.view())
def package_dependencies_get(self, package_base: str) -> Dependencies:
"""
get package dependencies
Args:
package_base(str): package base to retrieve
Returns:
Dependencies: package implicit dependencies if available
"""
with contextlib.suppress(Exception):
response = self.make_request("GET", self._dependencies_url(package_base),
params=self.repository_id.query())
response_json = response.json()
return Dependencies.from_json(response_json)
return Dependencies()
def package_dependencies_update(self, package_base: str, dependencies: Dependencies) -> None:
"""
update package dependencies
Args:
package_base(str): package base to update
dependencies(Dependencies): dependencies descriptor
"""
with contextlib.suppress(Exception):
self.make_request("POST", self._dependencies_url(package_base),
params=self.repository_id.query(), json=dependencies.view())
def package_get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]: def package_get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
""" """
get package status get package status
@ -199,17 +241,18 @@ class WebClient(Client, SyncAhrimanClient):
return [] return []
def package_logs(self, log_record_id: LogRecordId, record: logging.LogRecord) -> None: def package_logs_add(self, log_record_id: LogRecordId, created: float, message: str) -> None:
""" """
post log record post log record
Args: Args:
log_record_id(LogRecordId): log record id log_record_id(LogRecordId): log record id
record(logging.LogRecord): log record to post to api created(float): log created timestamp
message(str): log message
""" """
payload = { payload = {
"created": record.created, "created": created,
"message": record.getMessage(), "message": message,
"version": log_record_id.version, "version": log_record_id.version,
} }
@ -219,6 +262,83 @@ class WebClient(Client, SyncAhrimanClient):
self.make_request("POST", self._logs_url(log_record_id.package_base), self.make_request("POST", self._logs_url(log_record_id.package_base),
params=self.repository_id.query(), json=payload, suppress_errors=True) params=self.repository_id.query(), json=payload, suppress_errors=True)
def package_logs_get(self, package_base: str, limit: int = -1, offset: int = 0) -> list[tuple[float, str]]:
"""
get package logs
Args:
package_base(str): package base
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
offset(int, optional): records offset (Default value = 0)
Returns:
list[tuple[float, str]]: package logs
"""
with contextlib.suppress(Exception):
query = self.repository_id.query() + [("limit", str(limit)), ("offset", str(offset))]
response = self.make_request("GET", self._logs_url(package_base), params=query)
response_json = response.json()
return [(record["created"], record["message"]) for record in response_json]
return []
def package_logs_remove(self, package_base: str, version: str | None) -> None:
"""
remove package logs
Args:
package_base(str): package base
version(str | None): package version to remove logs. If None set, all logs will be removed
"""
with contextlib.suppress(Exception):
query = self.repository_id.query()
if version is not None:
query += [("version", version)]
self.make_request("DELETE", self._logs_url(package_base), params=query)
def package_patches_get(self, package_base: str, variable: str | None) -> list[PkgbuildPatch]:
"""
get package patches
Args:
package_base(str): package base to retrieve
variable(str | None): optional filter by patch variable
Returns:
list[PkgbuildPatch]: list of patches for the specified package
"""
with contextlib.suppress(Exception):
response = self.make_request("GET", self._patches_url(package_base, variable or ""))
response_json = response.json()
patches = response_json if variable is None else [response_json]
return [PkgbuildPatch.from_json(patch) for patch in patches]
return []
def package_patches_remove(self, package_base: str, variable: str | None) -> None:
"""
remove package patch
Args:
package_base(str): package base to update
variable(str | None): patch name. If None set, all patches will be removed
"""
with contextlib.suppress(Exception):
self.make_request("DELETE", self._patches_url(package_base, variable or ""))
def package_patches_update(self, package_base: str, patch: PkgbuildPatch) -> None:
"""
create or update package patch
Args:
package_base(str): package base to update
patch(PkgbuildPatch): package patch
"""
with contextlib.suppress(Exception):
self.make_request("POST", self._patches_url(package_base), json=patch.view())
def package_remove(self, package_base: str) -> None: def package_remove(self, package_base: str) -> None:
""" """
remove packages from watcher remove packages from watcher
@ -229,19 +349,41 @@ class WebClient(Client, SyncAhrimanClient):
with contextlib.suppress(Exception): with contextlib.suppress(Exception):
self.make_request("DELETE", self._package_url(package_base), params=self.repository_id.query()) self.make_request("DELETE", self._package_url(package_base), params=self.repository_id.query())
def package_update(self, package_base: str, status: BuildStatusEnum) -> None: def package_status_update(self, package_base: str, status: BuildStatusEnum) -> None:
""" """
update package build status. Unlike :func:`package_add()` it does not update package properties update package build status. Unlike :func:`package_update()` it does not update package properties
Args: Args:
package_base(str): package base to update package_base(str): package base to update
status(BuildStatusEnum): current package build status status(BuildStatusEnum): current package build status
Raises:
NotImplementedError: not implemented method
""" """
payload = {"status": status.value} payload = {"status": status.value}
with contextlib.suppress(Exception): with contextlib.suppress(Exception):
self.make_request("POST", self._package_url(package_base), self.make_request("POST", self._package_url(package_base),
params=self.repository_id.query(), json=payload) params=self.repository_id.query(), json=payload)
def package_update(self, package: Package, status: BuildStatusEnum) -> None:
"""
add new package or update existing one with status
Args:
package(Package): package properties
status(BuildStatusEnum): current package build status
Raises:
NotImplementedError: not implemented method
"""
payload = {
"status": status.value,
"package": package.view(),
}
with contextlib.suppress(Exception):
self.make_request("POST", self._package_url(package.base),
params=self.repository_id.query(), json=payload)
def status_get(self) -> InternalStatus: def status_get(self) -> InternalStatus:
""" """
get internal service status get internal service status
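To summarize the renamed reporter API in one place, a hedged sketch assuming "client" is an initialized WebClient and "package" is a Package model (both names are assumptions for illustration):

from ahriman.models.build_status import BuildStatusEnum

client.package_update(package, BuildStatusEnum.Building)             # full package view plus status
client.package_status_update(package.base, BuildStatusEnum.Success)  # status only, package must already exist
client.package_logs_remove(package.base, None)                       # drop logs for every version of the base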


@ -24,7 +24,6 @@ from ahriman.core.sign.gpg import GPG
from ahriman.core.support.package_creator import PackageCreator from ahriman.core.support.package_creator import PackageCreator
from ahriman.core.support.pkgbuild.keyring_generator import KeyringGenerator from ahriman.core.support.pkgbuild.keyring_generator import KeyringGenerator
from ahriman.core.triggers import Trigger from ahriman.core.triggers import Trigger
from ahriman.models.context_key import ContextKey
from ahriman.models.repository_id import RepositoryId from ahriman.models.repository_id import RepositoryId
@ -134,8 +133,8 @@ class KeyringTrigger(Trigger):
trigger action which will be called at the start of the application trigger action which will be called at the start of the application
""" """
ctx = context.get() ctx = context.get()
sign = ctx.get(ContextKey("sign", GPG)) sign = ctx.get(GPG)
database = ctx.get(ContextKey("database", SQLite)) database = ctx.get(SQLite)
for target in self.targets: for target in self.targets:
generator = KeyringGenerator(database, sign, self.repository_id, self.configuration, target) generator = KeyringGenerator(database, sign, self.repository_id, self.configuration, target)


@ -19,13 +19,13 @@
# #
import shutil import shutil
from pathlib import Path
from ahriman.core import context from ahriman.core import context
from ahriman.core.build_tools.sources import Sources from ahriman.core.build_tools.sources import Sources
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite from ahriman.core.status import Client
from ahriman.core.support.pkgbuild.pkgbuild_generator import PkgbuildGenerator from ahriman.core.support.pkgbuild.pkgbuild_generator import PkgbuildGenerator
from ahriman.models.build_status import BuildStatus
from ahriman.models.context_key import ContextKey
from ahriman.models.package import Package from ahriman.models.package import Package
@ -49,23 +49,39 @@ class PackageCreator:
self.configuration = configuration self.configuration = configuration
self.generator = generator self.generator = generator
def package_create(self, path: Path) -> None:
"""
create package files
Args:
path(Path): path to directory with package files
"""
# clear old tree if any
shutil.rmtree(path, ignore_errors=True)
# create local tree
path.mkdir(mode=0o755, parents=True, exist_ok=True)
self.generator.write_pkgbuild(path)
Sources.init(path)
def package_register(self, path: Path) -> None:
"""
register package in build worker
Args:
path(Path): path to directory with package files
"""
ctx = context.get()
reporter = ctx.get(Client)
_, repository_id = self.configuration.check_loaded()
package = Package.from_build(path, repository_id.architecture, None)
reporter.set_unknown(package)
def run(self) -> None: def run(self) -> None:
""" """
create new local package create new local package
""" """
local_path = self.configuration.repository_paths.cache_for(self.generator.pkgname) local_path = self.configuration.repository_paths.cache_for(self.generator.pkgname)
self.package_create(local_path)
# clear old tree if any self.package_register(local_path)
shutil.rmtree(local_path, ignore_errors=True)
# create local tree
local_path.mkdir(mode=0o755, parents=True, exist_ok=True)
self.generator.write_pkgbuild(local_path)
Sources.init(local_path)
# register package
ctx = context.get()
database: SQLite = ctx.get(ContextKey("database", SQLite))
_, repository_id = self.configuration.check_loaded()
package = Package.from_build(local_path, repository_id.architecture, None)
database.package_update(package, BuildStatus())


@ -57,6 +57,7 @@ class AURPackage:
provides(list[str]): list of packages which this package provides provides(list[str]): list of packages which this package provides
license(list[str]): list of package licenses license(list[str]): list of package licenses
keywords(list[str]): list of package keywords keywords(list[str]): list of package keywords
groups(list[str]): list of package groups
Examples: Examples:
Mainly this class must be used from class methods instead of default :func:`__init__()`:: Mainly this class must be used from class methods instead of default :func:`__init__()`::
@ -100,6 +101,7 @@ class AURPackage:
provides: list[str] = field(default_factory=list) provides: list[str] = field(default_factory=list)
license: list[str] = field(default_factory=list) license: list[str] = field(default_factory=list)
keywords: list[str] = field(default_factory=list) keywords: list[str] = field(default_factory=list)
groups: list[str] = field(default_factory=list)
@classmethod @classmethod
def from_json(cls, dump: dict[str, Any]) -> Self: def from_json(cls, dump: dict[str, Any]) -> Self:
@ -153,6 +155,7 @@ class AURPackage:
provides=package.provides, provides=package.provides,
license=package.licenses, license=package.licenses,
keywords=[], keywords=[],
groups=package.groups,
) )
@classmethod @classmethod
@ -191,6 +194,7 @@ class AURPackage:
provides=dump["provides"], provides=dump["provides"],
license=dump["licenses"], license=dump["licenses"],
keywords=[], keywords=[],
groups=dump["groups"],
) )
@staticmethod @staticmethod


@ -18,7 +18,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
from dataclasses import dataclass from dataclasses import dataclass
from typing import Generic, TypeVar from typing import Generic, Self, TypeVar
T = TypeVar("T") T = TypeVar("T")
@ -35,3 +35,16 @@ class ContextKey(Generic[T]):
""" """
key: str key: str
return_type: type[T] return_type: type[T]
@classmethod
def from_type(cls, return_type: type[T]) -> Self:
"""
construct key from type
Args:
return_type(type[T]): return type used for the specified context key
Returns:
Self: context key with the name autogenerated from the type name
"""
return cls(return_type.__name__, return_type)
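The simplified context access shown in the keyring trigger above (ctx.get(GPG) instead of ctx.get(ContextKey("sign", GPG))) relies on this constructor. A small illustration of the equivalence, assuming the context set/get helpers build the key via from_type whenever a bare type is passed:

from ahriman.core.sign.gpg import GPG
from ahriman.models.context_key import ContextKey

key = ContextKey.from_type(GPG)
assert key == ContextKey("GPG", GPG)  # the key name is autogenerated from the type name
# hence ctx.get(GPG) and ctx.get(ContextKey("GPG", GPG)) are expected to resolve the same value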


@ -0,0 +1,66 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from dataclasses import dataclass, field, fields
from typing import Any, Self
from ahriman.core.util import dataclass_view, filter_json
@dataclass(frozen=True)
class Dependencies:
"""
package paths dependencies
Attributes:
paths(dict[str, list[str]]): map of the paths used by this package to set of packages in which they were found
"""
paths: dict[str, list[str]] = field(default_factory=dict)
def __post_init__(self) -> None:
"""
remove empty paths
"""
paths = {path: packages for path, packages in self.paths.items() if packages}
object.__setattr__(self, "paths", paths)
@classmethod
def from_json(cls, dump: dict[str, Any]) -> Self:
"""
construct dependencies from the json dump
Args:
dump(dict[str, Any]): json dump body
Returns:
Self: dependencies object
"""
# filter to only known fields
known_fields = [pair.name for pair in fields(cls)]
return cls(**filter_json(dump, known_fields))
def view(self) -> dict[str, Any]:
"""
generate json dependencies view
Returns:
dict[str, Any]: json-friendly dictionary
"""
return dataclass_view(self)
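A quick round-trip sketch for the model above, assuming dataclass_view serializes the dataclass fields into a plain dictionary and filter_json drops unknown keys (both helpers are referenced but not shown in this diff):

from ahriman.models.dependencies import Dependencies

dependencies = Dependencies({"usr/lib/libalpm.so.13": ["pacman"], "usr/share/doc": []})
assert dependencies.paths == {"usr/lib/libalpm.so.13": ["pacman"]}  # empty entries are dropped in __post_init__
assert Dependencies.from_json(dependencies.view()) == dependencies  # the json view round-trips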


@ -0,0 +1,90 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
from collections.abc import Iterable
from dataclasses import dataclass, field
from pathlib import Path
from ahriman.core.util import trim_package
@dataclass(frozen=True, kw_only=True)
class FilesystemPackage:
"""
class representing a simplified model for the package installed to filesystem
Attributes:
package_name(str): package name
depends(set[str]): list of package dependencies
directories(list[Path]): list of directories this package contains
files(list[Path]): list of files this package contains
opt_depends(set[str]): list of package optional dependencies
"""
package_name: str
depends: set[str]
opt_depends: set[str]
directories: list[Path] = field(default_factory=list)
files: list[Path] = field(default_factory=list)
def __post_init__(self) -> None:
"""
update dependencies list accordingly
"""
object.__setattr__(self, "depends", {trim_package(package) for package in self.depends})
object.__setattr__(self, "opt_depends", {trim_package(package) for package in self.opt_depends})
def depends_on(self, package_name: str, *, include_optional: bool) -> bool:
"""
check if package depends on given package name
Args:
package_name(str): package name to check dependencies
include_optional(bool): include optional dependencies to check
Returns:
bool: ``True`` in case if the given package is in one of the dependency lists
"""
if package_name in self.depends:
return True
if include_optional and package_name in self.opt_depends:
return True
return False
def is_root_package(self, packages: Iterable[FilesystemPackage], *, include_optional: bool) -> bool:
"""
check if the package is the one of the root packages. This method checks if there are any packages which are
dependency of the package and - to avoid circular dependencies - does not depend on the package. In addition,
if ``include_optional`` is set to ``True``, then it will also check optional dependencies of the package
Args:
packages(Iterable[FilesystemPackage]): list of packages in which we need to search
include_optional(bool): include optional dependencies to check
Returns:
bool: ``True`` if none of the given packages is a dependency of this package, i.e. this package is a root within the given set
"""
return not any(
package
for package in packages
if self.depends_on(package.package_name, include_optional=include_optional)
and not package.depends_on(self.package_name, include_optional=False)
)
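A small sketch of how the two checks above interact, assuming trim_package strips version constraints from dependency entries (the package names below are made up):

from ahriman.models.filesystem_package import FilesystemPackage

libfoo = FilesystemPackage(package_name="libfoo", depends=set(), opt_depends=set())
app = FilesystemPackage(package_name="app", depends={"libfoo>=1.0"}, opt_depends=set())

assert app.depends == {"libfoo"}  # the version constraint is trimmed in __post_init__
assert app.depends_on("libfoo", include_optional=False)
assert libfoo.is_root_package([app, libfoo], include_optional=False)   # depends on nothing in the list
assert not app.is_root_package([app, libfoo], include_optional=False)  # depends on libfoo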


@ -0,0 +1,250 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from dataclasses import dataclass
from elftools.elf.dynamic import DynamicSection
from elftools.elf.elffile import ELFFile
from pathlib import Path
from typing import IO
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.alpm.remote import OfficialSyncdb
from ahriman.core.exceptions import UnknownPackageError
from ahriman.core.util import walk
from ahriman.models.dependencies import Dependencies
from ahriman.models.filesystem_package import FilesystemPackage
from ahriman.models.package import Package
@dataclass
class PackageArchive:
"""
helper for package archives
Attributes:
package(Package): package descriptor
root(Path): path to root filesystem
pacman(Pacman): alpm wrapper instance
"""
root: Path
package: Package
pacman: Pacman
@staticmethod
def dynamic_needed(binary_path: Path) -> list[str]:
"""
extract dynamic libraries required by the specified file
Args:
binary_path(Path): path to library, file, etc
Returns:
list[str]: libraries to which this file is linked dynamically. Returns an empty list in case if the file is not
a binary or no dynamic section has been found
"""
with binary_path.open("rb") as binary_file:
if not PackageArchive.is_elf(binary_file):
return []
elf_file = ELFFile(binary_file) # type: ignore[no-untyped-call]
dynamic_section = next(
(section for section in elf_file.iter_sections() # type: ignore[no-untyped-call]
if isinstance(section, DynamicSection)),
None)
if dynamic_section is None:
return []
return [
tag.needed
for tag in dynamic_section.iter_tags() # type: ignore[no-untyped-call]
if tag.entry.d_tag == "DT_NEEDED"
]
@staticmethod
def is_elf(content: IO[bytes]) -> bool:
"""
check if the content is actually elf file
Args:
content(IO[bytes]): content of the file
Returns:
bool: ``True`` in case if file has elf header and ``False`` otherwise
"""
expected = b"\x7fELF"
length = len(expected)
magic_bytes = content.read(length)
content.seek(0) # reset reading position
return magic_bytes == expected
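These two helpers only look at the ELF magic bytes and the DT_NEEDED entries of the dynamic section. A hedged usage sketch (output is host specific; the module path is an assumption based on the tree layout):

from pathlib import Path

from ahriman.models.package_archive import PackageArchive

print(PackageArchive.dynamic_needed(Path("/usr/bin/ls")))    # typically includes libc, e.g. ["libc.so.6", ...]
print(PackageArchive.dynamic_needed(Path("/etc/hostname")))  # not an ELF binary, so an empty list is returned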
def _load_pacman_package(self, path: Path) -> FilesystemPackage:
"""
load pacman package model from path
Args:
path(Path): path to package files database
Returns:
FilesystemPackage: generated pacman package model with empty paths
"""
package_name, *_ = path.parent.name.rsplit("-", 2)
try:
pacman_package = OfficialSyncdb.info(package_name, pacman=self.pacman)
return FilesystemPackage(
package_name=package_name,
depends=set(pacman_package.depends),
opt_depends=set(pacman_package.opt_depends),
)
except UnknownPackageError:
return FilesystemPackage(package_name=package_name, depends=set(), opt_depends=set())
def _raw_dependencies_packages(self) -> dict[Path, list[FilesystemPackage]]:
"""
extract the initial list of packages which contain specific path this package depends on
Returns:
dict[Path, list[FilesystemPackage]]: map of path to packages containing this path
"""
dependencies, roots = self.depends_on_paths()
installed_packages = self.installed_packages()
# build list of packages, which contains both the package itself and (possible) debug packages
packages = list(self.package.packages) + [f"{package}-debug" for package in self.package.packages]
# build initial map of file path -> packages containing this path
# in fact, keys will contain all libraries the package linked to and all directories it contains
dependencies_per_path: dict[Path, list[FilesystemPackage]] = {}
for package_base, package in installed_packages.items():
if package_base in packages:
continue # skip package itself
required_by = [directory for directory in package.directories if directory in roots]
required_by.extend(library for library in package.files if library.name in dependencies)
for path in required_by:
dependencies_per_path.setdefault(path, []).append(package)
return dependencies_per_path
def _refine_dependencies(self, source: dict[Path, list[FilesystemPackage]]) -> dict[Path, list[FilesystemPackage]]:
"""
reduce the initial dependency list by removing packages which are already satisfied (e.g. by other path or by
dependency list, or belonging to the base packages)
Args:
source(dict[Path, list[FilesystemPackage]]): the initial map of path to packages containing it
Returns:
dict[Path, list[FilesystemPackage]]: reduced source map of packages
"""
# base packages should be always excluded from checking
base_packages = OfficialSyncdb.info("base", pacman=self.pacman).depends
result: dict[Path, list[FilesystemPackage]] = {}
# sort items from children directories to root
for path, packages in reversed(sorted(source.items())):
# skip if this path belongs to the one of the base packages
if any(package.package_name in base_packages for package in packages):
continue
# remove explicit dependencies
packages = [package for package in packages if package.is_root_package(packages, include_optional=False)]
# remove optional dependencies
packages = [package for package in packages if package.is_root_package(packages, include_optional=True)]
# check if there is already parent of current path in the result and has the same packages
for children_path, children_packages in result.items():
if not children_path.is_relative_to(path):
continue
children_packages_names = {package.package_name for package in children_packages}
packages = [package for package in packages if package.package_name not in children_packages_names]
result[path] = packages
return result
def depends_on(self) -> Dependencies:
"""
extract packages and paths which are required for this package
Returns:
Dependencies: map of the package name to set of paths used by this package
"""
initial_packages = self._raw_dependencies_packages()
refined_packages = self._refine_dependencies(initial_packages)
paths = {
str(path): [package.package_name for package in packages]
for path, packages in refined_packages.items()
}
return Dependencies(paths)
def depends_on_paths(self) -> tuple[set[str], set[Path]]:
"""
extract dependencies from installation
Returns:
tuple[set[str], set[Path]]: tuple of dynamically linked libraries and directory paths
"""
dependencies = set()
roots: set[Path] = set()
for package in self.package.packages:
package_dir = self.root / "build" / self.package.base / "pkg" / package
for path in filter(lambda p: p.is_file(), walk(package_dir)):
dependencies.update(PackageArchive.dynamic_needed(path))
filesystem_path = Path(*path.relative_to(package_dir).parts)
roots.update(filesystem_path.parents[:-1]) # last element is always . because paths are relative
return dependencies, roots
def installed_packages(self) -> dict[str, FilesystemPackage]:
"""
extract list of the installed packages and their content
Returns:
dict[str, FilesystemPackage]: map of package name to list of directories and files contained
by this package
"""
result = {}
pacman_local_files = self.root / "var" / "lib" / "pacman" / "local"
for path in filter(lambda fn: fn.name == "files", walk(pacman_local_files)):
package = self._load_pacman_package(path)
is_files_section = False
for line in path.read_text(encoding="utf8").splitlines():
if not line: # skip empty lines
continue
if line.startswith("%") and line.endswith("%"): # directive started
is_files_section = line == "%FILES%"
if not is_files_section: # not a files directive
continue
entry = Path(line)
if line.endswith("/"): # simple check if it is directory
package.directories.append(entry)
else:
package.files.append(entry)
result[package.package_name] = package
return result
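installed_packages parses the files entries of the local pacman database (var/lib/pacman/local/&lt;name&gt;-&lt;version&gt;-&lt;release&gt;/files relative to the configured root). For reference, such an entry looks roughly like the snippet below, where lines ending in a slash become directories and the remaining lines become files:

%FILES%
usr/
usr/bin/
usr/bin/pacman
usr/share/man/man8/pacman.8.gz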


@ -19,11 +19,11 @@
# #
import shlex import shlex
from dataclasses import dataclass from dataclasses import dataclass, fields
from pathlib import Path from pathlib import Path
from typing import Any, Generator, Self from typing import Any, Generator, Self
from ahriman.core.util import dataclass_view from ahriman.core.util import dataclass_view, filter_json
@dataclass(frozen=True) @dataclass(frozen=True)
@ -84,6 +84,21 @@ class PkgbuildPatch:
raw_value = next(iter(value_parts), "") # extract raw value raw_value = next(iter(value_parts), "") # extract raw value
return cls(key, cls.parse(raw_value)) return cls(key, cls.parse(raw_value))
@classmethod
def from_json(cls, dump: dict[str, Any]) -> Self:
"""
construct patch descriptor from the json dump
Args:
dump(dict[str, Any]): json dump body
Returns:
Self: patch object
"""
# filter to only known fields
known_fields = [pair.name for pair in fields(cls)]
return cls(**filter_json(dump, known_fields))
@staticmethod @staticmethod
def parse(source: str) -> str | list[str]: def parse(source: str) -> str | list[str]:
""" """


@ -24,6 +24,7 @@ from collections.abc import Generator
from dataclasses import dataclass, field from dataclasses import dataclass, field
from functools import cached_property from functools import cached_property
from pathlib import Path from pathlib import Path
from pwd import getpwuid
from ahriman.core.exceptions import PathError from ahriman.core.exceptions import PathError
from ahriman.core.log import LazyLogging from ahriman.core.log import LazyLogging
@ -83,6 +84,17 @@ class RepositoryPaths(LazyLogging):
return Path(self.repository_id.architecture) # legacy tree suffix return Path(self.repository_id.architecture) # legacy tree suffix
return Path(self.repository_id.name) / self.repository_id.architecture return Path(self.repository_id.name) / self.repository_id.architecture
@property
def build_directory(self) -> Path:
"""
same as :attr:`chroot`, but points to the exact build chroot directory
Returns:
Path: path to directory in which build process is run
"""
uid, _ = self.owner(self.root)
return self.chroot / f"{self.repository_id.name}-{self.repository_id.architecture}" / getpwuid(uid).pw_name
@property @property
def cache(self) -> Path: def cache(self) -> Path:
""" """


@ -21,27 +21,87 @@ import time
from collections.abc import Callable from collections.abc import Callable
from dataclasses import dataclass, field from dataclasses import dataclass, field
from typing import ParamSpec from typing import Literal, ParamSpec
Params = ParamSpec("Params") Params = ParamSpec("Params")
@dataclass(frozen=True)
class WaiterResult:
"""
representation of a waiter result. This class should not be used directly, use derivatives instead
Attributes:
took(float): consumed time in seconds
"""
took: float
def __bool__(self) -> bool:
"""
indicates whether the waiter completed with success or not
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def __float__(self) -> float:
"""
extract time spent to retrieve the result in seconds
Returns:
float: consumed time in seconds
"""
return self.took
class WaiterTaskFinished(WaiterResult):
"""
a waiter result used to notify that the task has been completed successfully
"""
def __bool__(self) -> Literal[True]:
"""
indicates whether the waiter completed with success or not
Returns:
Literal[True]: always True
"""
return True
class WaiterTimedOut(WaiterResult):
"""
a waiter result used to notify that the waiter run out of time
"""
def __bool__(self) -> Literal[False]:
"""
indicates whether the waiter completed with success or not
Returns:
Literal[False]: always False
"""
return False
@dataclass(frozen=True) @dataclass(frozen=True)
class Waiter: class Waiter:
""" """
simple waiter implementation simple waiter implementation
Attributes: Attributes:
interval(int): interval in seconds between checks interval(float): interval in seconds between checks
start_time(float): monotonic time of the waiter start. More likely must not be assigned explicitly start_time(float): monotonic time of the waiter start. More likely must not be assigned explicitly
wait_timeout(int): timeout in seconds to wait for. Negative value will result in immediate exit. Zero value wait_timeout(float): timeout in seconds to wait for. Negative value will result in immediate exit. Zero value
means infinite timeout means infinite timeout
""" """
wait_timeout: int wait_timeout: float
start_time: float = field(default_factory=time.monotonic, kw_only=True) start_time: float = field(default_factory=time.monotonic, kw_only=True)
interval: int = field(default=10, kw_only=True) interval: float = field(default=10, kw_only=True)
def is_timed_out(self) -> bool: def is_timed_out(self) -> bool:
""" """
@ -51,10 +111,10 @@ class Waiter:
bool: True in case current monotonic time is more than :attr:`start_time` and :attr:`wait_timeout` bool: True in case current monotonic time is more than :attr:`start_time` and :attr:`wait_timeout`
doesn't equal to 0 doesn't equal to 0
""" """
since_start: float = time.monotonic() - self.start_time since_start = time.monotonic() - self.start_time
return self.wait_timeout != 0 and since_start > self.wait_timeout return self.wait_timeout != 0 and since_start > self.wait_timeout
def wait(self, in_progress: Callable[Params, bool], *args: Params.args, **kwargs: Params.kwargs) -> float: def wait(self, in_progress: Callable[Params, bool], *args: Params.args, **kwargs: Params.kwargs) -> WaiterResult:
""" """
wait until requirements are not met wait until requirements are not met
@ -64,9 +124,12 @@ class Waiter:
**kwargs(Params.kwargs): keyword arguments for check call **kwargs(Params.kwargs): keyword arguments for check call
Returns: Returns:
float: consumed time in seconds WaiterResult: consumed time in seconds
""" """
while not self.is_timed_out() and in_progress(*args, **kwargs): while not (timed_out := self.is_timed_out()) and in_progress(*args, **kwargs):
time.sleep(self.interval) time.sleep(self.interval)
took = time.monotonic() - self.start_time
return time.monotonic() - self.start_time if timed_out:
return WaiterTimedOut(took)
return WaiterTaskFinished(took)
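The boolean protocol above lets callers branch on the outcome while still reading the elapsed time via float(). A minimal sketch; the subprocess being polled is an assumption for illustration, and the module path follows the usual tree layout:

import subprocess

from ahriman.models.waiter import Waiter

process = subprocess.Popen(["sleep", "5"])
waiter = Waiter(wait_timeout=60, interval=1)
result = waiter.wait(lambda: process.poll() is None)  # keep waiting while the process is alive

if result:  # WaiterTaskFinished is truthy, WaiterTimedOut is falsy
    print(f"finished in {float(result):.1f} seconds")
else:
    print(f"timed out after {float(result):.1f} seconds")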


@ -22,4 +22,3 @@ from collections.abc import Awaitable, Callable
HandlerType = Callable[[Request], Awaitable[StreamResponse]] HandlerType = Callable[[Request], Awaitable[StreamResponse]]
MiddlewareType = Callable[[Request, HandlerType], Awaitable[StreamResponse]]


@ -21,6 +21,7 @@ import aiohttp_security
import socket import socket
import types import types
from aiohttp.typedefs import Middleware
from aiohttp.web import Application, Request, StaticResource, StreamResponse, middleware from aiohttp.web import Application, Request, StaticResource, StreamResponse, middleware
from aiohttp_session import setup as setup_session from aiohttp_session import setup as setup_session
from aiohttp_session.cookie_storage import EncryptedCookieStorage from aiohttp_session.cookie_storage import EncryptedCookieStorage
@ -30,7 +31,7 @@ from enum import Enum
from ahriman.core.auth import Auth from ahriman.core.auth import Auth
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.models.user_access import UserAccess from ahriman.models.user_access import UserAccess
from ahriman.web.middlewares import HandlerType, MiddlewareType from ahriman.web.middlewares import HandlerType
__all__ = ["setup_auth"] __all__ = ["setup_auth"]
@ -84,7 +85,7 @@ class _AuthorizationPolicy(aiohttp_security.AbstractAuthorizationPolicy):
return await self.validator.verify_access(identity, permission, context) return await self.validator.verify_access(identity, permission, context)
def _auth_handler(allow_read_only: bool) -> MiddlewareType: def _auth_handler(allow_read_only: bool) -> Middleware:
""" """
authorization and authentication middleware authorization and authentication middleware
@ -92,7 +93,7 @@ def _auth_handler(allow_read_only: bool) -> MiddlewareType:
allow_read_only: allow allow_read_only: allow
Returns: Returns:
MiddlewareType: built middleware Middleware: built middleware
""" """
@middleware @middleware
async def handle(request: Request, handler: HandlerType) -> StreamResponse: async def handle(request: Request, handler: HandlerType) -> StreamResponse:


@ -20,10 +20,11 @@
import aiohttp_jinja2 import aiohttp_jinja2
import logging import logging
from aiohttp.typedefs import Middleware
from aiohttp.web import HTTPClientError, HTTPException, HTTPMethodNotAllowed, HTTPNoContent, HTTPServerError, \ from aiohttp.web import HTTPClientError, HTTPException, HTTPMethodNotAllowed, HTTPNoContent, HTTPServerError, \
HTTPUnauthorized, Request, StreamResponse, json_response, middleware HTTPUnauthorized, Request, StreamResponse, json_response, middleware
from ahriman.web.middlewares import HandlerType, MiddlewareType from ahriman.web.middlewares import HandlerType
__all__ = ["exception_handler"] __all__ = ["exception_handler"]
@ -43,7 +44,7 @@ def _is_templated_unauthorized(request: Request) -> bool:
and "application/json" not in request.headers.getall("accept", []) and "application/json" not in request.headers.getall("accept", [])
def exception_handler(logger: logging.Logger) -> MiddlewareType: def exception_handler(logger: logging.Logger) -> Middleware:
""" """
exception handler middleware. Just log any exception (except for client ones) exception handler middleware. Just log any exception (except for client ones)
@ -51,7 +52,7 @@ def exception_handler(logger: logging.Logger) -> MiddlewareType:
logger(logging.Logger): class logger logger(logging.Logger): class logger
Returns: Returns:
MiddlewareType: built middleware Middleware: built middleware
Raises: Raises:
HTTPNoContent: OPTIONS method response HTTPNoContent: OPTIONS method response


@ -22,6 +22,7 @@ from ahriman.web.schemas.auth_schema import AuthSchema
from ahriman.web.schemas.build_options_schema import BuildOptionsSchema from ahriman.web.schemas.build_options_schema import BuildOptionsSchema
from ahriman.web.schemas.changes_schema import ChangesSchema from ahriman.web.schemas.changes_schema import ChangesSchema
from ahriman.web.schemas.counters_schema import CountersSchema from ahriman.web.schemas.counters_schema import CountersSchema
from ahriman.web.schemas.dependencies_schema import DependenciesSchema
from ahriman.web.schemas.error_schema import ErrorSchema from ahriman.web.schemas.error_schema import ErrorSchema
from ahriman.web.schemas.file_schema import FileSchema from ahriman.web.schemas.file_schema import FileSchema
from ahriman.web.schemas.info_schema import InfoSchema from ahriman.web.schemas.info_schema import InfoSchema
@ -36,6 +37,7 @@ from ahriman.web.schemas.package_patch_schema import PackagePatchSchema
from ahriman.web.schemas.package_properties_schema import PackagePropertiesSchema from ahriman.web.schemas.package_properties_schema import PackagePropertiesSchema
from ahriman.web.schemas.package_schema import PackageSchema from ahriman.web.schemas.package_schema import PackageSchema
from ahriman.web.schemas.package_status_schema import PackageStatusSchema, PackageStatusSimplifiedSchema from ahriman.web.schemas.package_status_schema import PackageStatusSchema, PackageStatusSimplifiedSchema
from ahriman.web.schemas.package_version_schema import PackageVersionSchema
from ahriman.web.schemas.pagination_schema import PaginationSchema from ahriman.web.schemas.pagination_schema import PaginationSchema
from ahriman.web.schemas.patch_name_schema import PatchNameSchema from ahriman.web.schemas.patch_name_schema import PatchNameSchema
from ahriman.web.schemas.patch_schema import PatchSchema from ahriman.web.schemas.patch_schema import PatchSchema


@ -0,0 +1,31 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from marshmallow import Schema, fields
class DependenciesSchema(Schema):
"""
request/response package dependencies schema
"""
paths = fields.Dict(
keys=fields.String(), values=fields.List(fields.String()), required=True, metadata={
"description": "Map of filesystem paths to packages which contain this path",
})


@ -0,0 +1,34 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from marshmallow import fields
from ahriman import __version__
from ahriman.web.schemas.repository_id_schema import RepositoryIdSchema
class PackageVersionSchema(RepositoryIdSchema):
"""
request package version schema
"""
version = fields.String(metadata={
"description": "Package version",
"example": __version__,
})


@ -25,8 +25,8 @@ class PatchSchema(Schema):
request and response patch schema request and response patch schema
""" """
key = fields.String(required=True, metadata={ key = fields.String(metadata={
"description": "environment variable name", "description": "environment variable name. Required in case if it is not full diff",
}) })
value = fields.String(metadata={ value = fields.String(metadata={
"description": "environment variable value", "description": "environment variable value",


@ -25,6 +25,7 @@ from typing import TypeVar
from ahriman.core.auth import Auth from ahriman.core.auth import Auth
from ahriman.core.configuration import Configuration from ahriman.core.configuration import Configuration
from ahriman.core.distributed import WorkersCache from ahriman.core.distributed import WorkersCache
from ahriman.core.exceptions import UnknownPackageError
from ahriman.core.sign.gpg import GPG from ahriman.core.sign.gpg import GPG
from ahriman.core.spawn import Spawn from ahriman.core.spawn import Spawn
from ahriman.core.status.watcher import Watcher from ahriman.core.status.watcher import Watcher
@ -218,12 +219,13 @@ class BaseView(View, CorsViewMixin):
return RepositoryId(architecture, name) return RepositoryId(architecture, name)
return next(iter(sorted(self.services.keys()))) return next(iter(sorted(self.services.keys())))
def service(self, repository_id: RepositoryId | None = None) -> Watcher: def service(self, repository_id: RepositoryId | None = None, package_base: str | None = None) -> Watcher:
""" """
get status watcher instance get status watcher instance
Args: Args:
repository_id(RepositoryId | None, optional): repository unique identifier (Default value = None) repository_id(RepositoryId | None, optional): repository unique identifier (Default value = None)
package_base(str | None, optional): package base to validate if exists (Default value = None)
Returns: Returns:
Watcher: build status watcher instance. If no repository provided, it will return the first one Watcher: build status watcher instance. If no repository provided, it will return the first one
@ -234,9 +236,11 @@ class BaseView(View, CorsViewMixin):
if repository_id is None: if repository_id is None:
repository_id = self.repository_id() repository_id = self.repository_id()
try: try:
return self.services[repository_id] return self.services[repository_id](package_base)
except KeyError: except KeyError:
raise HTTPNotFound(reason=f"Repository {repository_id.id} is unknown") raise HTTPNotFound(reason=f"Repository {repository_id.id} is unknown")
except UnknownPackageError:
raise HTTPNotFound(reason=f"Package {package_base} is unknown")
async def username(self) -> str | None: async def username(self) -> str | None:
""" """


@ -19,9 +19,8 @@
# #
import aiohttp_apispec # type: ignore[import-untyped] import aiohttp_apispec # type: ignore[import-untyped]
from aiohttp.web import HTTPBadRequest, HTTPNoContent, HTTPNotFound, Response, json_response from aiohttp.web import HTTPBadRequest, HTTPNoContent, Response, json_response
from ahriman.core.exceptions import UnknownPackageError
from ahriman.models.changes import Changes from ahriman.models.changes import Changes
from ahriman.models.user_access import UserAccess from ahriman.models.user_access import UserAccess
from ahriman.web.schemas import AuthSchema, ChangesSchema, ErrorSchema, PackageNameSchema, RepositoryIdSchema from ahriman.web.schemas import AuthSchema, ChangesSchema, ErrorSchema, PackageNameSchema, RepositoryIdSchema
@ -70,10 +69,7 @@ class ChangesView(StatusViewGuard, BaseView):
""" """
package_base = self.request.match_info["package"] package_base = self.request.match_info["package"]
try: changes = self.service(package_base=package_base).package_changes_get(package_base)
changes = self.service().package_changes_get(package_base)
except UnknownPackageError:
raise HTTPNotFound(reason=f"Package {package_base} is unknown")
return json_response(changes.view()) return json_response(changes.view())
@ -113,7 +109,6 @@ class ChangesView(StatusViewGuard, BaseView):
raise HTTPBadRequest(reason=str(ex)) raise HTTPBadRequest(reason=str(ex))
changes = Changes(last_commit_sha, change) changes = Changes(last_commit_sha, change)
repository_id = self.repository_id() self.service().package_changes_update(package_base, changes)
self.service(repository_id).database.changes_insert(package_base, changes, repository_id)
raise HTTPNoContent raise HTTPNoContent


@ -0,0 +1,113 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import aiohttp_apispec # type: ignore[import-untyped]
from aiohttp.web import HTTPBadRequest, HTTPNoContent, Response, json_response
from ahriman.models.dependencies import Dependencies
from ahriman.models.user_access import UserAccess
from ahriman.web.schemas import AuthSchema, DependenciesSchema, ErrorSchema, PackageNameSchema, RepositoryIdSchema
from ahriman.web.views.base import BaseView
from ahriman.web.views.status_view_guard import StatusViewGuard
class DependenciesView(StatusViewGuard, BaseView):
"""
package dependencies web view
Attributes:
GET_PERMISSION(UserAccess): (class attribute) get permissions of self
POST_PERMISSION(UserAccess): (class attribute) post permissions of self
"""
GET_PERMISSION = UserAccess.Reporter
POST_PERMISSION = UserAccess.Full
ROUTES = ["/api/v1/packages/{package}/dependencies"]
@aiohttp_apispec.docs(
tags=["Build"],
summary="Get package dependencies",
description="Retrieve package implicit dependencies",
responses={
200: {"description": "Success response", "schema": DependenciesSchema},
401: {"description": "Authorization required", "schema": ErrorSchema},
403: {"description": "Access is forbidden", "schema": ErrorSchema},
404: {"description": "Package base and/or repository are unknown", "schema": ErrorSchema},
500: {"description": "Internal server error", "schema": ErrorSchema},
},
security=[{"token": [GET_PERMISSION]}],
)
@aiohttp_apispec.cookies_schema(AuthSchema)
@aiohttp_apispec.match_info_schema(PackageNameSchema)
@aiohttp_apispec.querystring_schema(RepositoryIdSchema)
async def get(self) -> Response:
"""
get package dependencies
Returns:
Response: 200 with package implicit dependencies on success
Raises:
HTTPNotFound: if package base is unknown
"""
package_base = self.request.match_info["package"]
dependencies = self.service(package_base=package_base).package_dependencies_get(package_base)
return json_response(dependencies.view())
@aiohttp_apispec.docs(
tags=["Build"],
summary="Update package dependencies",
description="Set package implicit dependencies",
responses={
204: {"description": "Success response"},
400: {"description": "Bad data is supplied", "schema": ErrorSchema},
401: {"description": "Authorization required", "schema": ErrorSchema},
403: {"description": "Access is forbidden", "schema": ErrorSchema},
404: {"description": "Repository is unknown", "schema": ErrorSchema},
500: {"description": "Internal server error", "schema": ErrorSchema},
},
security=[{"token": [POST_PERMISSION]}],
)
@aiohttp_apispec.cookies_schema(AuthSchema)
@aiohttp_apispec.match_info_schema(PackageNameSchema)
@aiohttp_apispec.querystring_schema(RepositoryIdSchema)
@aiohttp_apispec.json_schema(DependenciesSchema)
async def post(self) -> None:
"""
insert new package dependencies
Raises:
HTTPBadRequest: if bad data is supplied
HTTPNoContent: in case of success response
"""
package_base = self.request.match_info["package"]
try:
data = await self.request.json()
data["package_base"] = package_base # read from path instead of object
dependencies = Dependencies.from_json(data)
except Exception as ex:
raise HTTPBadRequest(reason=str(ex))
self.service(package_base=package_base).package_dependencies_update(package_base, dependencies)
raise HTTPNoContent
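A hedged example of calling the new endpoint directly with requests; the address is an assumption, and authentication plus the repository selection query parameters are omitted for brevity:

import requests

address = "http://127.0.0.1:8080"  # assumption: local web service without authentication
payload = {"paths": {"usr/lib/libssl.so.3": ["openssl"]}}

# store implicit dependencies for a package base, then read them back
requests.post(f"{address}/api/v1/packages/ahriman/dependencies", json=payload, timeout=30).raise_for_status()
print(requests.get(f"{address}/api/v1/packages/ahriman/dependencies", timeout=30).json())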


@ -25,8 +25,8 @@ from ahriman.core.exceptions import UnknownPackageError
from ahriman.core.util import pretty_datetime from ahriman.core.util import pretty_datetime
from ahriman.models.log_record_id import LogRecordId from ahriman.models.log_record_id import LogRecordId
from ahriman.models.user_access import UserAccess from ahriman.models.user_access import UserAccess
from ahriman.web.schemas import AuthSchema, ErrorSchema, LogsSchema, PackageNameSchema, RepositoryIdSchema, \ from ahriman.web.schemas import AuthSchema, ErrorSchema, LogsSchema, PackageNameSchema, PackageVersionSchema, \
VersionedLogSchema RepositoryIdSchema, VersionedLogSchema
from ahriman.web.views.base import BaseView from ahriman.web.views.base import BaseView
from ahriman.web.views.status_view_guard import StatusViewGuard from ahriman.web.views.status_view_guard import StatusViewGuard
@ -60,7 +60,7 @@ class LogsView(StatusViewGuard, BaseView):
) )
@aiohttp_apispec.cookies_schema(AuthSchema) @aiohttp_apispec.cookies_schema(AuthSchema)
@aiohttp_apispec.match_info_schema(PackageNameSchema) @aiohttp_apispec.match_info_schema(PackageNameSchema)
@aiohttp_apispec.querystring_schema(RepositoryIdSchema) @aiohttp_apispec.querystring_schema(PackageVersionSchema)
async def delete(self) -> None: async def delete(self) -> None:
""" """
delete package logs delete package logs
@ -69,7 +69,8 @@ class LogsView(StatusViewGuard, BaseView):
HTTPNoContent: on success response HTTPNoContent: on success response
""" """
package_base = self.request.match_info["package"] package_base = self.request.match_info["package"]
self.service().logs_remove(package_base, None) version = self.request.query.get("version")
self.service().package_logs_remove(package_base, version)
raise HTTPNoContent raise HTTPNoContent
@ -103,7 +104,7 @@ class LogsView(StatusViewGuard, BaseView):
try: try:
_, status = self.service().package_get(package_base) _, status = self.service().package_get(package_base)
logs = self.service().logs_get(package_base) logs = self.service(package_base=package_base).package_logs_get(package_base, -1, 0)
except UnknownPackageError: except UnknownPackageError:
raise HTTPNotFound(reason=f"Package {package_base} is unknown") raise HTTPNotFound(reason=f"Package {package_base} is unknown")
@ -149,6 +150,6 @@ class LogsView(StatusViewGuard, BaseView):
except Exception as ex: except Exception as ex:
raise HTTPBadRequest(reason=str(ex)) raise HTTPBadRequest(reason=str(ex))
self.service().logs_update(LogRecordId(package_base, version), created, record) self.service().package_logs_add(LogRecordId(package_base, version), created, record)
raise HTTPNoContent raise HTTPNoContent


@@ -152,7 +152,10 @@ class PackageView(StatusViewGuard, BaseView):
             raise HTTPBadRequest(reason=str(ex))
         try:
-            self.service().package_update(package_base, status, package)
+            if package is None:
+                self.service().package_status_update(package_base, status)
+            else:
+                self.service().package_update(package, status)
         except UnknownPackageError:
             raise HTTPBadRequest(reason=f"Package {package_base} is unknown, but no package body set")

View File

@@ -63,7 +63,8 @@ class PatchView(StatusViewGuard, BaseView):
         """
         package_base = self.request.match_info["package"]
         variable = self.request.match_info["patch"]
-        self.service().patches_remove(package_base, variable)
+        self.service().package_patches_remove(package_base, variable)
         raise HTTPNoContent
@@ -95,7 +96,7 @@ class PatchView(StatusViewGuard, BaseView):
         package_base = self.request.match_info["package"]
         variable = self.request.match_info["patch"]
-        patches = self.service().patches_get(package_base, variable)
+        patches = self.service().package_patches_get(package_base, variable)
         selected = next((patch for patch in patches if patch.key == variable), None)
         if selected is None:

View File

@@ -63,7 +63,7 @@ class PatchesView(StatusViewGuard, BaseView):
             Response: 200 with package patches on success
         """
         package_base = self.request.match_info["package"]
-        patches = self.service().patches_get(package_base, None)
+        patches = self.service().package_patches_get(package_base, None)
         response = [patch.view() for patch in patches]
         return json_response(response)
@@ -96,11 +96,11 @@ class PatchesView(StatusViewGuard, BaseView):
         try:
             data = await self.request.json()
-            key = data["key"]
+            key = data.get("key")
             value = data["value"]
         except Exception as ex:
             raise HTTPBadRequest(reason=str(ex))
-        self.service().patches_update(package_base, PkgbuildPatch(key, value))
+        self.service().package_patches_update(package_base, PkgbuildPatch(key, value))
         raise HTTPNoContent
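
Reading the key with data.get("key") makes it optional, which matches the "allow empty key patches via api" change from this series. A hedged sketch of both payload forms; the route path is assumed.

# hedged sketch: route path assumed
import requests

# variable patch: key plus value
requests.post("http://localhost:8080/api/v1/packages/ahriman/patches",
              json={"key": "pkgrel", "value": "2"})

# key-less patch, e.g. a complete PKGBUILD override supplied as plain text
requests.post("http://localhost:8080/api/v1/packages/ahriman/patches",
              json={"value": "pkgname=ahriman\npkgver=2.14.0\n..."})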

View File

@@ -19,9 +19,8 @@
 #
 import aiohttp_apispec  # type: ignore[import-untyped]
-from aiohttp.web import HTTPNotFound, Response, json_response
+from aiohttp.web import Response, json_response
-from ahriman.core.exceptions import UnknownPackageError
 from ahriman.models.user_access import UserAccess
 from ahriman.web.schemas import AuthSchema, ErrorSchema, LogSchema, PackageNameSchema, PaginationSchema
 from ahriman.web.views.base import BaseView
@@ -68,10 +67,8 @@ class LogsView(StatusViewGuard, BaseView):
         """
         package_base = self.request.match_info["package"]
         limit, offset = self.page()
-        try:
-            logs = self.service().logs_get(package_base, limit, offset)
-        except UnknownPackageError:
-            raise HTTPNotFound(reason=f"Package {package_base} is unknown")
+        logs = self.service(package_base=package_base).package_logs_get(package_base, limit, offset)
         response = [
             {

View File

@@ -30,6 +30,7 @@ from ahriman.core.database import SQLite
 from ahriman.core.distributed import WorkersCache
 from ahriman.core.exceptions import InitializeError
 from ahriman.core.spawn import Spawn
+from ahriman.core.status import Client
 from ahriman.core.status.watcher import Watcher
 from ahriman.models.repository_id import RepositoryId
 from ahriman.web.apispec import setup_apispec
@@ -167,7 +168,8 @@ def setup_server(configuration: Configuration, spawner: Spawn, repositories: lis
     watchers: dict[RepositoryId, Watcher] = {}
     for repository_id in repositories:
         application.logger.info("load repository %s", repository_id)
-        watchers[repository_id] = Watcher(repository_id, database)
+        client = Client.load(repository_id, configuration, database, report=False)  # explicitly load local client
+        watchers[repository_id] = Watcher(client)
     application[WatcherKey] = watchers
     # workers cache
     application[WorkersKey] = WorkersCache(configuration)
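
The web application now builds each Watcher on top of a status client. A very rough sketch of the selection idea behind Client.load(..., report=False) follows; the class layout and constructor signatures are assumptions for illustration, not the project's actual code.

# hedged sketch of the factory idea only; names and signatures are assumed
class LocalClient:
    def __init__(self, repository_id, database): ...

class WebClient:
    def __init__(self, repository_id, configuration): ...

def load_status_client(repository_id, configuration, database, *, report):
    # report=False -> database-backed client for the web service's own watchers,
    # otherwise a remote reporter talking to a (possibly distributed) web instance
    return WebClient(repository_id, configuration) if report else LocalClient(repository_id, database)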

View File

@@ -93,8 +93,7 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
                                      side_effect=lambda *args: packages[args[0].name])
     packages_mock = mocker.patch("ahriman.application.application.Application._known_packages",
                                  return_value={"devtools", "python-build", "python-pytest"})
-    update_remote_mock = mocker.patch("ahriman.core.database.SQLite.package_base_update")
-    status_client_mock = mocker.patch("ahriman.core.status.client.Client.set_unknown")
+    status_client_mock = mocker.patch("ahriman.core.status.Client.set_unknown")

     result = application.with_dependencies([package_ahriman], process_dependencies=True)
     assert {package.base: package for package in result} == packages
@@ -107,11 +106,6 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
     ], any_order=True)
     packages_mock.assert_called_once_with()
-    update_remote_mock.assert_has_calls([
-        MockCall(package_python_schedule),
-        MockCall(packages["python"]),
-        MockCall(packages["python-installer"]),
-    ], any_order=True)
     status_client_mock.assert_has_calls([
         MockCall(package_python_schedule),
         MockCall(packages["python"]),

View File

@@ -41,11 +41,11 @@ def test_add_aur(application_packages: ApplicationPackages, package_ahriman: Pac
     """
     mocker.patch("ahriman.models.package.Package.from_aur", return_value=package_ahriman)
     build_queue_mock = mocker.patch("ahriman.core.database.SQLite.build_queue_insert")
-    update_remote_mock = mocker.patch("ahriman.core.database.SQLite.package_base_update")
+    status_client_mock = mocker.patch("ahriman.core.status.Client.set_unknown")

     application_packages._add_aur(package_ahriman.base, "packager")

     build_queue_mock.assert_called_once_with(package_ahriman)
-    update_remote_mock.assert_called_once_with(package_ahriman)
+    status_client_mock.assert_called_once_with(package_ahriman)


 def test_add_directory(application_packages: ApplicationPackages, package_ahriman: Package,
@@ -153,11 +153,11 @@ def test_add_repository(application_packages: ApplicationPackages, package_ahrim
     """
     mocker.patch("ahriman.models.package.Package.from_official", return_value=package_ahriman)
     build_queue_mock = mocker.patch("ahriman.core.database.SQLite.build_queue_insert")
-    update_remote_mock = mocker.patch("ahriman.core.database.SQLite.package_base_update")
+    status_client_mock = mocker.patch("ahriman.core.status.Client.set_unknown")

     application_packages._add_repository(package_ahriman.base, "packager")

     build_queue_mock.assert_called_once_with(package_ahriman)
-    update_remote_mock.assert_called_once_with(package_ahriman)
+    status_client_mock.assert_called_once_with(package_ahriman)


 def test_add_add_archive(application_packages: ApplicationPackages, package_ahriman: Package,

View File

@@ -1,15 +1,15 @@
 from ahriman.application.application.application_properties import ApplicationProperties


-def test_create_tree(application_properties: ApplicationProperties) -> None:
-    """
-    must have repository attribute
-    """
-    assert application_properties.repository


 def test_architecture(application_properties: ApplicationProperties) -> None:
     """
     must return repository architecture
     """
     assert application_properties.architecture == application_properties.repository_id.architecture


+def test_reporter(application_properties: ApplicationProperties) -> None:
+    """
+    must have reporter attribute
+    """
+    assert application_properties.reporter

View File

@@ -17,14 +17,12 @@ def test_changes(application_repository: ApplicationRepository, package_ahriman:
     must generate changes for the packages
     """
     changes = Changes("hash", "change")
-    hashes_mock = mocker.patch("ahriman.core.database.SQLite.hashes_get", return_value={
-        package_ahriman.base: changes.last_commit_sha,
-    })
+    hashes_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_changes_get", return_value=changes)
     changes_mock = mocker.patch("ahriman.core.repository.Repository.package_changes", return_value=changes)
-    report_mock = mocker.patch("ahriman.core.status.client.Client.package_changes_set")
+    report_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_changes_update")

     application_repository.changes([package_ahriman])

-    hashes_mock.assert_called_once_with()
+    hashes_mock.assert_called_once_with(package_ahriman.base)
     changes_mock.assert_called_once_with(package_ahriman, changes.last_commit_sha)
     report_mock.assert_called_once_with(package_ahriman.base, changes)
@@ -34,9 +32,8 @@ def test_changes_skip(application_repository: ApplicationRepository, package_ahr
     """
     must skip change generation if no last commit sha has been found
     """
-    mocker.patch("ahriman.core.database.SQLite.hashes_get", return_value={})
     changes_mock = mocker.patch("ahriman.core.repository.Repository.package_changes")
-    report_mock = mocker.patch("ahriman.core.status.client.Client.package_changes_set")
+    report_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_changes_update")

     application_repository.changes([package_ahriman])

     changes_mock.assert_not_called()
@@ -131,25 +128,6 @@ def test_sign_skip(application_repository: ApplicationRepository, package_ahrima
     application_repository.sign([])


-def test_sign_specific(application_repository: ApplicationRepository, package_ahriman: Package,
-                       package_python_schedule: Package, mocker: MockerFixture) -> None:
-    """
-    must sign only specified packages
-    """
-    mocker.patch("ahriman.core.repository.repository.Repository.packages",
-                 return_value=[package_ahriman, package_python_schedule])
-    sign_package_mock = mocker.patch("ahriman.core.sign.gpg.GPG.process_sign_package")
-    sign_repository_mock = mocker.patch("ahriman.core.sign.gpg.GPG.process_sign_repository")
-    on_result_mock = mocker.patch(
-        "ahriman.application.application.application_repository.ApplicationRepository.on_result")
-    filename = package_ahriman.packages[package_ahriman.base].filepath
-
-    application_repository.sign([package_ahriman.base])
-    sign_package_mock.assert_called_once_with(filename, None)
-    sign_repository_mock.assert_called_once_with(application_repository.repository.repo.repo_path)
-    on_result_mock.assert_called_once_with(Result())


 def test_unknown_no_aur(application_repository: ApplicationRepository, package_ahriman: Package,
                         mocker: MockerFixture) -> None:
     """
@@ -239,11 +217,13 @@ def test_updates_all(application_repository: ApplicationRepository, package_ahri
                                          return_value=[package_ahriman])
     updates_local_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_local")
     updates_manual_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_manual")
+    updates_deps_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_dependencies")

-    application_repository.updates([], aur=True, local=True, manual=True, vcs=True)
+    application_repository.updates([], aur=True, local=True, manual=True, vcs=True, check_files=True)
     updates_aur_mock.assert_called_once_with([], vcs=True)
     updates_local_mock.assert_called_once_with(vcs=True)
     updates_manual_mock.assert_called_once_with()
+    updates_deps_mock.assert_called_once_with([])


 def test_updates_disabled(application_repository: ApplicationRepository, mocker: MockerFixture) -> None:
@@ -253,11 +233,13 @@ def test_updates_disabled(application_repository: ApplicationRepository, mocker:
     updates_aur_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_aur")
     updates_local_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_local")
     updates_manual_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_manual")
+    updates_deps_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_dependencies")

-    application_repository.updates([], aur=False, local=False, manual=False, vcs=True)
+    application_repository.updates([], aur=False, local=False, manual=False, vcs=True, check_files=False)
     updates_aur_mock.assert_not_called()
     updates_local_mock.assert_not_called()
     updates_manual_mock.assert_not_called()
+    updates_deps_mock.assert_not_called()


 def test_updates_no_aur(application_repository: ApplicationRepository, mocker: MockerFixture) -> None:
@@ -267,11 +249,13 @@ def test_updates_no_aur(application_repository: ApplicationRepository, mocker: M
     updates_aur_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_aur")
     updates_local_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_local")
     updates_manual_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_manual")
+    updates_deps_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_dependencies")

-    application_repository.updates([], aur=False, local=True, manual=True, vcs=True)
+    application_repository.updates([], aur=False, local=True, manual=True, vcs=True, check_files=True)
     updates_aur_mock.assert_not_called()
     updates_local_mock.assert_called_once_with(vcs=True)
     updates_manual_mock.assert_called_once_with()
+    updates_deps_mock.assert_called_once_with([])


 def test_updates_no_local(application_repository: ApplicationRepository, mocker: MockerFixture) -> None:
@@ -281,11 +265,13 @@ def test_updates_no_local(application_repository: ApplicationRepository, mocker:
     updates_aur_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_aur")
     updates_local_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_local")
     updates_manual_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_manual")
+    updates_deps_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_dependencies")

-    application_repository.updates([], aur=True, local=False, manual=True, vcs=True)
+    application_repository.updates([], aur=True, local=False, manual=True, vcs=True, check_files=True)
     updates_aur_mock.assert_called_once_with([], vcs=True)
     updates_local_mock.assert_not_called()
     updates_manual_mock.assert_called_once_with()
+    updates_deps_mock.assert_called_once_with([])


 def test_updates_no_manual(application_repository: ApplicationRepository, mocker: MockerFixture) -> None:
@@ -295,11 +281,13 @@ def test_updates_no_manual(application_repository: ApplicationRepository, mocker
     updates_aur_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_aur")
     updates_local_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_local")
     updates_manual_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_manual")
+    updates_deps_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_dependencies")

-    application_repository.updates([], aur=True, local=True, manual=False, vcs=True)
+    application_repository.updates([], aur=True, local=True, manual=False, vcs=True, check_files=True)
     updates_aur_mock.assert_called_once_with([], vcs=True)
     updates_local_mock.assert_called_once_with(vcs=True)
     updates_manual_mock.assert_not_called()
+    updates_deps_mock.assert_called_once_with([])


 def test_updates_no_vcs(application_repository: ApplicationRepository, mocker: MockerFixture) -> None:
@@ -309,11 +297,29 @@ def test_updates_no_vcs(application_repository: ApplicationRepository, mocker: M
     updates_aur_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_aur")
     updates_local_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_local")
     updates_manual_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_manual")
+    updates_deps_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_dependencies")

-    application_repository.updates([], aur=True, local=True, manual=True, vcs=False)
+    application_repository.updates([], aur=True, local=True, manual=True, vcs=False, check_files=True)
     updates_aur_mock.assert_called_once_with([], vcs=False)
     updates_local_mock.assert_called_once_with(vcs=False)
     updates_manual_mock.assert_called_once_with()
+    updates_deps_mock.assert_called_once_with([])
+
+
+def test_updates_no_check_files(application_repository: ApplicationRepository, mocker: MockerFixture) -> None:
+    """
+    must get updates without checking broken links
+    """
+    updates_aur_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_aur")
+    updates_local_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_local")
+    updates_manual_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_manual")
+    updates_deps_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_dependencies")
+
+    application_repository.updates([], aur=True, local=True, manual=True, vcs=True, check_files=False)
+    updates_aur_mock.assert_called_once_with([], vcs=True)
+    updates_local_mock.assert_called_once_with(vcs=True)
+    updates_manual_mock.assert_called_once_with()
+    updates_deps_mock.assert_not_called()


 def test_updates_with_filter(application_repository: ApplicationRepository, mocker: MockerFixture) -> None:
@@ -323,8 +329,10 @@ def test_updates_with_filter(application_repository: ApplicationRepository, mock
     updates_aur_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_aur")
     updates_local_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_local")
     updates_manual_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_manual")
+    updates_deps_mock = mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.updates_dependencies")

-    application_repository.updates(["filter"], aur=True, local=True, manual=True, vcs=True)
+    application_repository.updates(["filter"], aur=True, local=True, manual=True, vcs=True, check_files=True)
     updates_aur_mock.assert_called_once_with(["filter"], vcs=True)
     updates_local_mock.assert_called_once_with(vcs=True)
     updates_manual_mock.assert_called_once_with()
+    updates_deps_mock.assert_called_once_with(["filter"])

View File

@@ -62,11 +62,11 @@ def test_run_with_patches(args: argparse.Namespace, configuration: Configuration
     args.variable = ["KEY=VALUE"]
     mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
     mocker.patch("ahriman.application.application.Application.add")
-    application_mock = mocker.patch("ahriman.core.database.SQLite.patches_insert")
+    application_mock = mocker.patch("ahriman.core.status.local_client.LocalClient.package_patches_update")

     _, repository_id = configuration.check_loaded()
     Add.run(args, repository_id, configuration, report=False)
-    application_mock.assert_called_once_with(args.package[0], [PkgbuildPatch("KEY", "VALUE")])
+    application_mock.assert_called_once_with(args.package[0], PkgbuildPatch("KEY", "VALUE"))


 def test_run_with_updates(args: argparse.Namespace, configuration: Configuration, repository: Repository,
@@ -89,7 +89,8 @@ def test_run_with_updates(args: argparse.Namespace, configuration: Configuration
     _, repository_id = configuration.check_loaded()
     Add.run(args, repository_id, configuration, report=False)
-    updates_mock.assert_called_once_with(args.package, aur=False, local=False, manual=True, vcs=False)
+    updates_mock.assert_called_once_with(args.package,
+                                         aur=False, local=False, manual=True, vcs=False, check_files=False)
     application_mock.assert_called_once_with([package_ahriman],
                                              Packagers(args.username, {package_ahriman.base: "packager"}),
                                              bump_pkgrel=args.increment)

View File

@@ -31,7 +31,7 @@ def test_run(args: argparse.Namespace, configuration: Configuration, mocker: Moc
     mocker.patch("ahriman.application.handlers.Backup.get_paths", return_value=[Path("path")])
     tarfile = MagicMock()
    add_mock = tarfile.__enter__.return_value = MagicMock()
-    mocker.patch("tarfile.TarFile.__new__", return_value=tarfile)
+    mocker.patch("ahriman.application.handlers.backup.tarfile.open", return_value=tarfile)

     _, repository_id = configuration.check_loaded()
     Backup.run(args, repository_id, configuration, report=False)
@@ -45,7 +45,7 @@ def test_get_paths(configuration: Configuration, mocker: MockerFixture) -> None:
     # gnupg export mock
     mocker.patch("pathlib.Path.is_dir", return_value=True)
     mocker.patch.object(RepositoryPaths, "root_owner", (42, 42))
-    getpwuid_mock = mocker.patch("pwd.getpwuid", return_value=MagicMock())
+    getpwuid_mock = mocker.patch("ahriman.application.handlers.backup.getpwuid", return_value=MagicMock())
     # well database does not exist so we override it
     database_mock = mocker.patch("ahriman.core.database.SQLite.database_path", return_value=configuration.path)
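
Both hunks above switch the tests to patching names through the handler's own module. The general idiom is to patch a name where it is looked up, not where it is defined, otherwise "from pwd import getpwuid"-style imports keep the original reference. A small self-contained illustration; the stand-in module below is invented for the example.

# hedged illustration of "patch where it is used"; the handler_like module is made up
import sys
from types import ModuleType
from unittest import mock

import pwd

handler = ModuleType("handler_like")   # pretend module that did "from pwd import getpwuid"
handler.getpwuid = pwd.getpwuid        # the import copies the reference into the module
handler.owner = lambda uid: handler.getpwuid(uid).pw_name
sys.modules["handler_like"] = handler

# patching "pwd.getpwuid" would leave handler.getpwuid untouched; patching the
# attribute on the consuming module is what the tests above rely on
with mock.patch("handler_like.getpwuid", return_value=mock.Mock(pw_name="builder")):
    assert handler.owner(0) == "builder"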

Some files were not shown because too many files have changed in this diff.