Mirror of https://github.com/arcan1s/ahriman.git (synced 2025-11-14 20:43:42 +00:00)

Compare commits: 15 commits, 2.9.0...d495163fdd

| SHA1 |
|---|
| d495163fdd |
| 041e9536bf |
| 1625fddccd |
| 8f2c33fe5d |
| d064e46af6 |
| 5627c9cca0 |
| 8731cee8ea |
| 01a4d68ce6 |
| 8f0a6cf6b5 |
| d96e3c97db |
| 1cf027cb55 |
| 8e0e57e193 |
| f855e645cc |
| 8ab0df2bed |
| 840921f585 |
.github/workflows/setup.sh (vendored, 2 lines changed)

@@ -10,7 +10,7 @@ echo -e '[arcanisrepo]\nServer = http://repo.arcanis.me/$arch\nSigLevel = Never'
# refresh the image
pacman --noconfirm -Syu
# main dependencies
-pacman --noconfirm -Sy base-devel devtools git pyalpm python-cerberus python-inflection python-passlib python-requests python-srcinfo sudo
+pacman --noconfirm -Sy base-devel devtools git pyalpm python-cerberus python-inflection python-passlib python-requests python-srcinfo python-systemd sudo
# make dependencies
pacman --noconfirm -Sy python-build python-installer python-wheel
# optional dependencies
@@ -6,7 +6,7 @@ formats:

build:
  os: ubuntu-20.04
  tools:
-    python: "3.10"
+    python: "3.11"

sphinx:
  builder: html
@@ -34,7 +34,7 @@ Again, the most checks can be performed by `make check` command, though some add
        do foo. With very very very long
        docstring

-    Note:
+    Notes:
        Very important note about this function

    Args:

@@ -102,6 +102,9 @@ Again, the most checks can be performed by `make check` command, though some add
    @property
    def property(self) -> Any: ...

+    @cached_property
+    def property_cached(self) -> Any: ...  # cached property has to be treated as normal one
+
    @classmethod
    def class_method(cls) -> Self: ...
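Putting the fragments above together, a fully documented member written in this style might look like the following sketch (the class and argument names are invented for illustration and are not part of the ahriman codebase):

```python
from functools import cached_property
from typing import Any, Self


class Example:
    """example class used only to illustrate the docstring convention"""

    def do_foo(self, argument: str) -> Any:
        """
        do foo. With very very very long
        docstring

        Notes:
            Very important note about this function

        Args:
            argument(str): description of the argument

        Returns:
            Any: description of the returned value
        """

    @cached_property
    def property_cached(self) -> Any: ...  # cached property has to be treated as normal one

    @classmethod
    def class_method(cls) -> Self: ...
```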
Dockerfile (15 lines changed)

@@ -6,7 +6,7 @@ ENV AHRIMAN_DEBUG=""
ENV AHRIMAN_FORCE_ROOT=""
ENV AHRIMAN_HOST="0.0.0.0"
ENV AHRIMAN_MULTILIB="yes"
-ENV AHRIMAN_OUTPUT="syslog"
+ENV AHRIMAN_OUTPUT=""
ENV AHRIMAN_PACKAGER="ahriman bot <ahriman@example.com>"
ENV AHRIMAN_PACMAN_MIRROR=""
ENV AHRIMAN_PORT=""

@@ -28,15 +28,12 @@ RUN useradd -m -d "/home/build" -s "/usr/bin/nologin" build && \
COPY "docker/install-aur-package.sh" "/usr/local/bin/install-aur-package"
## install package dependencies
## darcs is not installed by reasons, because it requires a lot haskell packages which dramatically increase image size
-RUN pacman --noconfirm -Sy devtools git pyalpm python-cerberus python-inflection python-passlib python-requests python-srcinfo && \
-    pacman --noconfirm -Sy python-build python-installer python-wheel && \
-    pacman --noconfirm -Sy breezy mercurial python-aiohttp python-aiohttp-cors python-boto3 python-cryptography python-jinja python-requests-unixsocket rsync subversion && \
+RUN pacman -Sy --noconfirm --asdeps devtools git pyalpm python-cerberus python-inflection python-passlib python-requests python-srcinfo && \
+    pacman -Sy --noconfirm --asdeps python-build python-installer python-wheel && \
+    pacman -Sy --noconfirm --asdeps breezy mercurial python-aiohttp python-aiohttp-cors python-boto3 python-cryptography python-jinja python-requests-unixsocket python-systemd rsync subversion && \
    runuser -u build -- install-aur-package python-aioauth-client python-aiohttp-apispec-git python-aiohttp-jinja2 \
        python-aiohttp-debugtoolbar python-aiohttp-session python-aiohttp-security
-
-# cleanup unused
-RUN find "/var/cache/pacman/pkg" -type f -delete

# install ahriman
## copy tree
COPY --chown=build . "/home/build/ahriman"

@@ -48,6 +45,10 @@ RUN cd "/home/build/ahriman" && \
    runuser -u build -- makepkg --noconfirm --install --skipchecksums && \
    cd / && rm -r "/home/build/ahriman"

+# cleanup unused
+RUN find "/var/cache/pacman/pkg" -type f -delete
+RUN pacman -Qdtq | pacman -Rscn --noconfirm -
+
VOLUME ["/var/lib/ahriman"]

# minimal runtime ahriman setup
@@ -15,15 +15,10 @@ database = $AHRIMAN_REPOSITORY_ROOT/ahriman.db
host = $AHRIMAN_HOST

EOF
-sed -i "s|handlers = syslog_handler|handlers = ${AHRIMAN_OUTPUT}_handler|g" "/etc/ahriman.ini.d/logging.ini"

AHRIMAN_DEFAULT_ARGS=("--architecture" "$AHRIMAN_ARCHITECTURE")
-if [[ "$AHRIMAN_OUTPUT" == "syslog" ]]; then
-    if [ ! -e "/dev/log" ]; then
-        # by default ahriman uses syslog which is not available inside container
-        # to make noise less we force quiet mode in case if /dev/log was not mounted
-        AHRIMAN_DEFAULT_ARGS+=("--quiet")
-    fi
+if [ -n "$AHRIMAN_OUTPUT" ]; then
+    AHRIMAN_DEFAULT_ARGS+=("--log-handler" "$AHRIMAN_OUTPUT")
fi

# create repository root inside the [[mounted]] directory and set correct ownership
@ -1,9 +1,9 @@
|
||||
.TH AHRIMAN "1" "2023\-05\-22" "ahriman" "Generated Python Manual"
|
||||
.TH AHRIMAN "1" "2023\-05\-28" "ahriman" "Generated Python Manual"
|
||||
.SH NAME
|
||||
ahriman
|
||||
.SH SYNOPSIS
|
||||
.B ahriman
|
||||
[-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--report | --no-report] [-q] [--unsafe] [-V] {aur-search,search,help,help-commands-unsafe,help-updates,help-version,version,package-add,add,package-update,package-remove,remove,package-status,status,package-status-remove,package-status-update,status-update,patch-add,patch-list,patch-remove,patch-set-add,repo-backup,repo-check,check,repo-daemon,daemon,repo-rebuild,rebuild,repo-remove-unknown,remove-unknown,repo-report,report,repo-restore,repo-sign,sign,repo-status-update,repo-sync,sync,repo-tree,repo-triggers,repo-update,update,service-clean,clean,repo-clean,service-config,config,repo-config,service-config-validate,config-validate,repo-config-validate,service-key-import,key-import,service-setup,init,repo-init,repo-setup,setup,service-shell,shell,user-add,user-list,user-remove,web} ...
|
||||
[-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--log-handler {console,syslog,journald}] [--report | --no-report] [-q] [--unsafe] [-V] {aur-search,search,help,help-commands-unsafe,help-updates,help-version,version,package-add,add,package-update,package-remove,remove,package-status,status,package-status-remove,package-status-update,status-update,patch-add,patch-list,patch-remove,patch-set-add,repo-backup,repo-check,check,repo-create-keyring,repo-create-mirrorlist,repo-daemon,daemon,repo-rebuild,rebuild,repo-remove-unknown,remove-unknown,repo-report,report,repo-restore,repo-sign,sign,repo-status-update,repo-sync,sync,repo-tree,repo-triggers,repo-update,update,service-clean,clean,repo-clean,service-config,config,repo-config,service-config-validate,config-validate,repo-config-validate,service-key-import,key-import,service-setup,init,repo-init,repo-setup,setup,service-shell,shell,user-add,user-list,user-remove,web} ...
|
||||
.SH DESCRIPTION
|
||||
ArcH linux ReposItory MANager
|
||||
|
||||
@ -24,6 +24,10 @@ force run, remove file lock
|
||||
\fB\-l\fR \fI\,LOCK\/\fR, \fB\-\-lock\fR \fI\,LOCK\/\fR
|
||||
lock file
|
||||
|
||||
.TP
|
||||
\fB\-\-log\-handler\fR \fI\,{console,syslog,journald}\/\fR
|
||||
explicit log handler specification. If none set, the handler will be guessed from environment
|
||||
|
||||
.TP
|
||||
\fB\-\-report\fR, \fB\-\-no\-report\fR
|
||||
force enable or disable reporting to web service
|
||||
@ -91,6 +95,12 @@ backup repository data
|
||||
\fBahriman\fR \fI\,repo\-check\/\fR
|
||||
check for updates
|
||||
.TP
|
||||
\fBahriman\fR \fI\,repo\-create\-keyring\/\fR
|
||||
create keyring package
|
||||
.TP
|
||||
\fBahriman\fR \fI\,repo\-create\-mirrorlist\/\fR
|
||||
create mirrorlist package
|
||||
.TP
|
||||
\fBahriman\fR \fI\,repo\-daemon\/\fR
|
||||
run application as daemon
|
||||
.TP
|
||||
@ -400,6 +410,16 @@ fetch actual version of VCS packages
|
||||
\fB\-y\fR, \fB\-\-refresh\fR
|
||||
download fresh package databases from the mirror before actions, \-yy to force refresh even if up to date
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-create\-keyring'\/\fR
|
||||
usage: ahriman repo\-create\-keyring [\-h]
|
||||
|
||||
create package which contains list of trusted keys as set by configuration. Note, that this action will only create package, the package itself has to be built manually
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-create\-mirrorlist'\/\fR
|
||||
usage: ahriman repo\-create\-mirrorlist [\-h]
|
||||
|
||||
create package which contains list of available mirrors as set by configuration. Note, that this action will only create package, the package itself has to be built manually
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-daemon'\/\fR
|
||||
usage: ahriman repo\-daemon [\-h] [\-i INTERVAL] [\-\-aur | \-\-no\-aur] [\-\-dependencies | \-\-no\-dependencies]
|
||||
[\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual] [\-\-vcs | \-\-no\-vcs] [\-y]
|
||||
@ -437,6 +457,7 @@ download fresh package databases from the mirror before actions, \-yy to force r
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-rebuild'\/\fR
|
||||
usage: ahriman repo\-rebuild [\-h] [\-\-depends\-on DEPENDS_ON] [\-\-dry\-run] [\-\-from\-database] [\-e]
|
||||
[\-s {unknown,pending,building,failed,success}]
|
||||
|
||||
force rebuild whole repository
|
||||
|
||||
@ -459,6 +480,10 @@ original ahriman instance run with web service and have run repo\-update at leas
|
||||
\fB\-e\fR, \fB\-\-exit\-code\fR
|
||||
return non\-zero exit status if result is empty
|
||||
|
||||
.TP
|
||||
\fB\-s\fR \fI\,{unknown,pending,building,failed,success}\/\fR, \fB\-\-status\fR \fI\,{unknown,pending,building,failed,success}\/\fR
|
||||
filter packages by status. Requires \-\-from\-database to be set
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-remove\-unknown'\/\fR
|
||||
usage: ahriman repo\-remove\-unknown [\-h] [\-\-dry\-run]
|
||||
|
||||
|
||||
@ -60,6 +60,14 @@ ahriman.core.database.migrations.m006\_packages\_architecture\_required module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.database.migrations.m007\_check\_depends module
|
||||
------------------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.database.migrations.m007_check_depends
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
||||
@ -20,6 +20,14 @@ ahriman.core.log.http\_log\_handler module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.log.journal\_handler module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.log.journal_handler
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.log.lazy\_logging module
|
||||
-------------------------------------
|
||||
|
||||
|
||||
@ -19,6 +19,7 @@ Subpackages
|
||||
ahriman.core.repository
|
||||
ahriman.core.sign
|
||||
ahriman.core.status
|
||||
ahriman.core.support
|
||||
ahriman.core.triggers
|
||||
ahriman.core.upload
|
||||
|
||||
|
||||
37
docs/ahriman.core.support.pkgbuild.rst
Normal file
37
docs/ahriman.core.support.pkgbuild.rst
Normal file
@ -0,0 +1,37 @@
|
||||
ahriman.core.support.pkgbuild package
|
||||
=====================================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
ahriman.core.support.pkgbuild.keyring\_generator module
|
||||
-------------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.support.pkgbuild.keyring_generator
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.support.pkgbuild.mirrorlist\_generator module
|
||||
----------------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.support.pkgbuild.mirrorlist_generator
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.support.pkgbuild.pkgbuild\_generator module
|
||||
--------------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.support.pkgbuild.pkgbuild_generator
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: ahriman.core.support.pkgbuild
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
45
docs/ahriman.core.support.rst
Normal file
45
docs/ahriman.core.support.rst
Normal file
@ -0,0 +1,45 @@
|
||||
ahriman.core.support package
|
||||
============================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 4
|
||||
|
||||
ahriman.core.support.pkgbuild
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
ahriman.core.support.keyring\_trigger module
|
||||
--------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.support.keyring_trigger
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.support.mirrorlist\_trigger module
|
||||
-----------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.support.mirrorlist_trigger
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.support.package\_creator module
|
||||
--------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.support.package_creator
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: ahriman.core.support
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
@ -60,6 +60,14 @@ ahriman.models.internal\_status module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.models.log\_handler module
|
||||
----------------------------------
|
||||
|
||||
.. automodule:: ahriman.models.log_handler
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.models.log\_record\_id module
|
||||
-------------------------------------
|
||||
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
# AUTOMATICALLY GENERATED by `shtab`
|
||||
|
||||
_shtab_ahriman_subparsers=('aur-search' 'search' 'help' 'help-commands-unsafe' 'help-updates' 'help-version' 'version' 'package-add' 'add' 'package-update' 'package-remove' 'remove' 'package-status' 'status' 'package-status-remove' 'package-status-update' 'status-update' 'patch-add' 'patch-list' 'patch-remove' 'patch-set-add' 'repo-backup' 'repo-check' 'check' 'repo-daemon' 'daemon' 'repo-rebuild' 'rebuild' 'repo-remove-unknown' 'remove-unknown' 'repo-report' 'report' 'repo-restore' 'repo-sign' 'sign' 'repo-status-update' 'repo-sync' 'sync' 'repo-tree' 'repo-triggers' 'repo-update' 'update' 'service-clean' 'clean' 'repo-clean' 'service-config' 'config' 'repo-config' 'service-config-validate' 'config-validate' 'repo-config-validate' 'service-key-import' 'key-import' 'service-setup' 'init' 'repo-init' 'repo-setup' 'setup' 'service-shell' 'shell' 'user-add' 'user-list' 'user-remove' 'web')
|
||||
_shtab_ahriman_subparsers=('aur-search' 'search' 'help' 'help-commands-unsafe' 'help-updates' 'help-version' 'version' 'package-add' 'add' 'package-update' 'package-remove' 'remove' 'package-status' 'status' 'package-status-remove' 'package-status-update' 'status-update' 'patch-add' 'patch-list' 'patch-remove' 'patch-set-add' 'repo-backup' 'repo-check' 'check' 'repo-create-keyring' 'repo-create-mirrorlist' 'repo-daemon' 'daemon' 'repo-rebuild' 'rebuild' 'repo-remove-unknown' 'remove-unknown' 'repo-report' 'report' 'repo-restore' 'repo-sign' 'sign' 'repo-status-update' 'repo-sync' 'sync' 'repo-tree' 'repo-triggers' 'repo-update' 'update' 'service-clean' 'clean' 'repo-clean' 'service-config' 'config' 'repo-config' 'service-config-validate' 'config-validate' 'repo-config-validate' 'service-key-import' 'key-import' 'service-setup' 'init' 'repo-init' 'repo-setup' 'setup' 'service-shell' 'shell' 'user-add' 'user-list' 'user-remove' 'web')
|
||||
|
||||
_shtab_ahriman_option_strings=('-h' '--help' '-a' '--architecture' '-c' '--configuration' '--force' '-l' '--lock' '--report' '--no-report' '-q' '--quiet' '--unsafe' '-V' '--version')
|
||||
_shtab_ahriman_option_strings=('-h' '--help' '-a' '--architecture' '-c' '--configuration' '--force' '-l' '--lock' '--log-handler' '--report' '--no-report' '-q' '--quiet' '--unsafe' '-V' '--version')
|
||||
_shtab_ahriman_aur_search_option_strings=('-h' '--help' '-e' '--exit-code' '--info' '--no-info' '--sort-by')
|
||||
_shtab_ahriman_search_option_strings=('-h' '--help' '-e' '--exit-code' '--info' '--no-info' '--sort-by')
|
||||
_shtab_ahriman_help_option_strings=('-h' '--help')
|
||||
@ -27,10 +27,12 @@ _shtab_ahriman_patch_set_add_option_strings=('-h' '--help' '-t' '--track')
|
||||
_shtab_ahriman_repo_backup_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_check_option_strings=('-h' '--help' '-e' '--exit-code' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_check_option_strings=('-h' '--help' '-e' '--exit-code' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_repo_create_keyring_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_create_mirrorlist_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_repo_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '-e' '--exit-code')
|
||||
_shtab_ahriman_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '-e' '--exit-code')
|
||||
_shtab_ahriman_repo_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '-e' '--exit-code' '-s' '--status')
|
||||
_shtab_ahriman_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '-e' '--exit-code' '-s' '--status')
|
||||
_shtab_ahriman_repo_remove_unknown_option_strings=('-h' '--help' '--dry-run')
|
||||
_shtab_ahriman_remove_unknown_option_strings=('-h' '--help' '--dry-run')
|
||||
_shtab_ahriman_repo_report_option_strings=('-h' '--help')
|
||||
@ -70,7 +72,8 @@ _shtab_ahriman_web_option_strings=('-h' '--help')
|
||||
|
||||
|
||||
|
||||
_shtab_ahriman_pos_0_choices=('aur-search' 'search' 'help' 'help-commands-unsafe' 'help-updates' 'help-version' 'version' 'package-add' 'add' 'package-update' 'package-remove' 'remove' 'package-status' 'status' 'package-status-remove' 'package-status-update' 'status-update' 'patch-add' 'patch-list' 'patch-remove' 'patch-set-add' 'repo-backup' 'repo-check' 'check' 'repo-daemon' 'daemon' 'repo-rebuild' 'rebuild' 'repo-remove-unknown' 'remove-unknown' 'repo-report' 'report' 'repo-restore' 'repo-sign' 'sign' 'repo-status-update' 'repo-sync' 'sync' 'repo-tree' 'repo-triggers' 'repo-update' 'update' 'service-clean' 'clean' 'repo-clean' 'service-config' 'config' 'repo-config' 'service-config-validate' 'config-validate' 'repo-config-validate' 'service-key-import' 'key-import' 'service-setup' 'init' 'repo-init' 'repo-setup' 'setup' 'service-shell' 'shell' 'user-add' 'user-list' 'user-remove' 'web')
|
||||
_shtab_ahriman_pos_0_choices=('aur-search' 'search' 'help' 'help-commands-unsafe' 'help-updates' 'help-version' 'version' 'package-add' 'add' 'package-update' 'package-remove' 'remove' 'package-status' 'status' 'package-status-remove' 'package-status-update' 'status-update' 'patch-add' 'patch-list' 'patch-remove' 'patch-set-add' 'repo-backup' 'repo-check' 'check' 'repo-create-keyring' 'repo-create-mirrorlist' 'repo-daemon' 'daemon' 'repo-rebuild' 'rebuild' 'repo-remove-unknown' 'remove-unknown' 'repo-report' 'report' 'repo-restore' 'repo-sign' 'sign' 'repo-status-update' 'repo-sync' 'sync' 'repo-tree' 'repo-triggers' 'repo-update' 'update' 'service-clean' 'clean' 'repo-clean' 'service-config' 'config' 'repo-config' 'service-config-validate' 'config-validate' 'repo-config-validate' 'service-key-import' 'key-import' 'service-setup' 'init' 'repo-init' 'repo-setup' 'setup' 'service-shell' 'shell' 'user-add' 'user-list' 'user-remove' 'web')
|
||||
_shtab_ahriman___log_handler_choices=('console' 'syslog' 'journald')
|
||||
_shtab_ahriman_aur_search___sort_by_choices=('description' 'first_submitted' 'id' 'last_modified' 'maintainer' 'name' 'num_votes' 'out_of_date' 'package_base' 'package_base_id' 'popularity' 'repository' 'submitter' 'url' 'url_path' 'version')
|
||||
_shtab_ahriman_search___sort_by_choices=('description' 'first_submitted' 'id' 'last_modified' 'maintainer' 'name' 'num_votes' 'out_of_date' 'package_base' 'package_base_id' 'popularity' 'repository' 'submitter' 'url' 'url_path' 'version')
|
||||
_shtab_ahriman_package_add__s_choices=('auto' 'archive' 'aur' 'directory' 'local' 'remote' 'repository')
|
||||
@ -87,6 +90,10 @@ _shtab_ahriman_package_status_update__s_choices=('unknown' 'pending' 'building'
|
||||
_shtab_ahriman_package_status_update___status_choices=('unknown' 'pending' 'building' 'failed' 'success')
|
||||
_shtab_ahriman_status_update__s_choices=('unknown' 'pending' 'building' 'failed' 'success')
|
||||
_shtab_ahriman_status_update___status_choices=('unknown' 'pending' 'building' 'failed' 'success')
|
||||
_shtab_ahriman_repo_rebuild__s_choices=('unknown' 'pending' 'building' 'failed' 'success')
|
||||
_shtab_ahriman_repo_rebuild___status_choices=('unknown' 'pending' 'building' 'failed' 'success')
|
||||
_shtab_ahriman_rebuild__s_choices=('unknown' 'pending' 'building' 'failed' 'success')
|
||||
_shtab_ahriman_rebuild___status_choices=('unknown' 'pending' 'building' 'failed' 'success')
|
||||
_shtab_ahriman_repo_status_update__s_choices=('unknown' 'pending' 'building' 'failed' 'success')
|
||||
_shtab_ahriman_repo_status_update___status_choices=('unknown' 'pending' 'building' 'failed' 'success')
|
||||
_shtab_ahriman_service_setup___sign_target_choices=('disabled' 'packages' 'repository')
|
||||
@ -230,6 +237,10 @@ _shtab_ahriman_check___vcs_nargs=0
|
||||
_shtab_ahriman_check___no_vcs_nargs=0
|
||||
_shtab_ahriman_check__y_nargs=0
|
||||
_shtab_ahriman_check___refresh_nargs=0
|
||||
_shtab_ahriman_repo_create_keyring__h_nargs=0
|
||||
_shtab_ahriman_repo_create_keyring___help_nargs=0
|
||||
_shtab_ahriman_repo_create_mirrorlist__h_nargs=0
|
||||
_shtab_ahriman_repo_create_mirrorlist___help_nargs=0
|
||||
_shtab_ahriman_repo_daemon__h_nargs=0
|
||||
_shtab_ahriman_repo_daemon___help_nargs=0
|
||||
_shtab_ahriman_repo_daemon___aur_nargs=0
|
||||
|
||||
@ -36,6 +36,8 @@ _shtab_ahriman_commands() {
|
||||
"repo-clean:remove local caches"
|
||||
"repo-config:dump configuration for the specified architecture"
|
||||
"repo-config-validate:validate configuration and print found errors"
|
||||
"repo-create-keyring:create package which contains list of trusted keys as set by configuration. Note, that this action will only create package, the package itself has to be built manually"
|
||||
"repo-create-mirrorlist:create package which contains list of available mirrors as set by configuration. Note, that this action will only create package, the package itself has to be built manually"
|
||||
"repo-daemon:start process which periodically will run update process"
|
||||
"repo-init:create initial service configuration, requires root"
|
||||
"repo-rebuild:force rebuild whole repository"
|
||||
@ -79,6 +81,7 @@ _shtab_ahriman_options=(
|
||||
{-c,--configuration}"[configuration path]:configuration:"
|
||||
"--force[force run, remove file lock]"
|
||||
{-l,--lock}"[lock file]:lock:"
|
||||
"--log-handler[explicit log handler specification. If none set, the handler will be guessed from environment]:log_handler:(console syslog journald)"
|
||||
{--report,--no-report}"[force enable or disable reporting to web service]:report:"
|
||||
{-q,--quiet}"[force disable any logging]"
|
||||
"--unsafe[allow to run ahriman as non-ahriman user. Some actions might be unavailable]"
|
||||
@ -259,6 +262,7 @@ _shtab_ahriman_rebuild_options=(
|
||||
"--dry-run[just perform check for packages without rebuild process itself]"
|
||||
"--from-database[read packages from database instead of filesystem. This feature in particular is required in case if you would like to restore repository from another repository instance. Note, however, that in order to restore packages you need to have original ahriman instance run with web service and have run repo-update at least once.]"
|
||||
{-e,--exit-code}"[return non-zero exit status if result is empty]"
|
||||
{-s,--status}"[filter packages by status. Requires --from-database to be set]:status:(unknown pending building failed success)"
|
||||
)
|
||||
|
||||
_shtab_ahriman_remove_options=(
|
||||
@ -303,6 +307,14 @@ _shtab_ahriman_repo_config_validate_options=(
|
||||
{-e,--exit-code}"[return non-zero exit status if configuration is invalid]"
|
||||
)
|
||||
|
||||
_shtab_ahriman_repo_create_keyring_options=(
|
||||
"(- : *)"{-h,--help}"[show this help message and exit]"
|
||||
)
|
||||
|
||||
_shtab_ahriman_repo_create_mirrorlist_options=(
|
||||
"(- : *)"{-h,--help}"[show this help message and exit]"
|
||||
)
|
||||
|
||||
_shtab_ahriman_repo_daemon_options=(
|
||||
"(- : *)"{-h,--help}"[show this help message and exit]"
|
||||
{-i,--interval}"[interval between runs in seconds]:interval:"
|
||||
@ -336,6 +348,7 @@ _shtab_ahriman_repo_rebuild_options=(
|
||||
"--dry-run[just perform check for packages without rebuild process itself]"
|
||||
"--from-database[read packages from database instead of filesystem. This feature in particular is required in case if you would like to restore repository from another repository instance. Note, however, that in order to restore packages you need to have original ahriman instance run with web service and have run repo-update at least once.]"
|
||||
{-e,--exit-code}"[return non-zero exit status if result is empty]"
|
||||
{-s,--status}"[filter packages by status. Requires --from-database to be set]:status:(unknown pending building failed success)"
|
||||
)
|
||||
|
||||
_shtab_ahriman_repo_remove_unknown_options=(
|
||||
@ -595,6 +608,8 @@ _shtab_ahriman() {
|
||||
repo-clean) _arguments -C $_shtab_ahriman_repo_clean_options ;;
|
||||
repo-config) _arguments -C $_shtab_ahriman_repo_config_options ;;
|
||||
repo-config-validate) _arguments -C $_shtab_ahriman_repo_config_validate_options ;;
|
||||
repo-create-keyring) _arguments -C $_shtab_ahriman_repo_create_keyring_options ;;
|
||||
repo-create-mirrorlist) _arguments -C $_shtab_ahriman_repo_create_mirrorlist_options ;;
|
||||
repo-daemon) _arguments -C $_shtab_ahriman_repo_daemon_options ;;
|
||||
repo-init) _arguments -C $_shtab_ahriman_repo_init_options ;;
|
||||
repo-rebuild) _arguments -C $_shtab_ahriman_repo_rebuild_options ;;
|
||||
|
||||
10
docs/conf.py
10
docs/conf.py
@ -14,7 +14,6 @@ import os
|
||||
import sys
|
||||
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
from ahriman.version import __version__
|
||||
|
||||
@ -24,13 +23,6 @@ sys.path.insert(0, str(basedir))
|
||||
|
||||
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
|
||||
|
||||
for module in (
|
||||
"pyalpm",
|
||||
):
|
||||
if module in sys.modules:
|
||||
continue
|
||||
sys.modules[module] = mock.Mock()
|
||||
|
||||
|
||||
# -- Project information -----------------------------------------------------
|
||||
|
||||
@ -92,6 +84,8 @@ autoclass_content = "both"
|
||||
|
||||
autodoc_member_order = "groupwise"
|
||||
|
||||
autodoc_mock_imports = ["pyalpm"]
|
||||
|
||||
autodoc_default_options = {
|
||||
"no-undoc-members": True,
|
||||
}
|
||||
|
||||
@ -30,8 +30,8 @@ Base configuration settings.
|
||||
* ``logging`` - path to logging configuration, string, required. Check ``logging.ini`` for reference.
|
||||
* ``suppress_http_log_errors`` - suppress http log errors, boolean, optional, default ``no``. If set to ``yes``, any http log errors (e.g. if web server is not available, but http logging is enabled) will be suppressed.
|
||||
|
||||
``alpm`` group
|
||||
--------------
|
||||
``alpm:*`` groups
|
||||
-----------------
|
||||
|
||||
libalpm and AUR related configuration. Group name can refer to architecture, e.g. ``alpm:x86_64`` can be used for x86_64 architecture specific settings.
|
||||
|
||||
@ -69,6 +69,7 @@ Build related configuration. Group name can refer to architecture, e.g. ``build:
|
||||
* ``makepkg_flags`` - additional flags passed to ``makepkg`` command, space separated list of strings, optional.
|
||||
* ``makechrootpkg_flags`` - additional flags passed to ``makechrootpkg`` command, space separated list of strings, optional.
|
||||
* ``triggers`` - list of ``ahriman.core.triggers.Trigger`` class implementation (e.g. ``ahriman.core.report.ReportTrigger ahriman.core.upload.UploadTrigger``) which will be loaded and run at the end of processing, space separated list of strings, optional. You can also specify triggers by their paths, e.g. ``/usr/lib/python3.10/site-packages/ahriman/core/report/report.py.ReportTrigger``. Triggers are run in the order of mention.
|
||||
* ``triggers_known`` - optional list of ``ahriman.core.triggers.Trigger`` class implementations which are not run automatically and used only for trigger discovery and configuration validation.
|
||||
* ``vcs_allowed_age`` - maximal age in seconds of the VCS packages before their version will be updated with its remote source, int, optional, default ``604800``.
|
||||
|
||||
``repository`` group
|
||||
@ -107,6 +108,41 @@ Web server settings. If any of ``host``/``port`` is not set, web integration wil
|
||||
* ``unix_socket_unsafe`` - set unsafe (o+w) permissions to unix socket, boolean, optional, default ``yes``. This option is enabled by default, because it is supposed that unix socket is created in safe environment (only web service is supposed to be used in unsafe), but it can be disabled by configuration.
|
||||
* ``username`` - username to authorize in web service in order to update service status, string, required in case if authorization enabled.
|
||||
|
||||
``keyring`` group
|
||||
--------------------
|
||||
|
||||
Keyring package generator plugin.
|
||||
|
||||
* ``target`` - list of generator settings sections, space separated list of strings, required. It must point to valid section name.
|
||||
|
||||
Keyring generator plugin
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
* ``description`` - keyring package description, string, optional, default is ``repo PGP keyring``, where ``repo`` is the repository name.
|
||||
* ``homepage`` - url to homepage location if any, string, optional.
|
||||
* ``license`` - list of licenses which are applied to this package, space separated list of strings, optional, default is ``Unlicense``.
|
||||
* ``package`` - keyring package name, string, optional, default is ``repo-keyring``, where ``repo`` is the repository name.
|
||||
* ``packagers`` - list of packagers keys, space separated list of strings, optional, if not set, the ``key_*`` options from ``sign`` group will be used.
|
||||
* ``revoked`` - list of revoked packagers keys, space separated list of strings, optional.
|
||||
* ``trusted`` - list of master keys, space separated list of strings, optional, if not set, the ``key`` option from ``sign`` group will be used.
|
||||
|
||||
``mirrorlist`` group
|
||||
--------------------
|
||||
|
||||
Mirrorlist package generator plugin.
|
||||
|
||||
* ``target`` - list of generator settings sections, space separated list of strings, required. It must point to valid section name.
|
||||
|
||||
Mirrorlist generator plugin
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
* ``description`` - mirrorlist package description, string, optional, default is ``repo mirror list for use by pacman``, where ``repo`` is the repository name.
|
||||
* ``homepage`` - url to homepage location if any, string, optional.
|
||||
* ``license`` - list of licenses which are applied to this package, space separated list of strings, optional, default is ``Unlicense``.
|
||||
* ``package`` - mirrorlist package name, string, optional, default is ``repo-mirrorlist``, where ``repo`` is the repository name.
|
||||
* ``path`` - absolute path to generated mirrorlist file, string, optional, default is ``/etc/pacman.d/repo-mirrorlist``, where ``repo`` is the repository name.
|
||||
* ``servers`` - list of repository mirrors, space separated list of strings, required.
|
||||
|
||||
``remote-pull`` group
|
||||
---------------------
|
||||
|
||||
|
||||
106
docs/faq.rst
106
docs/faq.rst
@ -1,4 +1,3 @@
|
||||
|
||||
FAQ
|
||||
===
|
||||
|
||||
@ -392,7 +391,7 @@ The following environment variables are supported:
|
||||
* ``AHRIMAN_FORCE_ROOT`` - force run ahriman as root instead of guessing by subcommand.
|
||||
* ``AHRIMAN_HOST`` - host for the web interface, default is ``0.0.0.0``.
|
||||
* ``AHRIMAN_MULTILIB`` - if set (default) multilib repository will be used, disabled otherwise.
|
||||
* ``AHRIMAN_OUTPUT`` - controls logging handler, e.g. ``syslog``, ``console``. The name must be found in logging configuration. Note that if ``syslog`` (the default) handler is used you will need to mount ``/dev/log`` inside container because it is not available there.
|
||||
* ``AHRIMAN_OUTPUT`` - controls logging handler, e.g. ``syslog``, ``console``. The name must be found in logging configuration. Note that if ``syslog`` handler is used you will need to mount ``/dev/log`` inside container because it is not available there.
|
||||
* ``AHRIMAN_PACKAGER`` - packager name from which packages will be built, default is ``ahriman bot <ahriman@example.com>``.
|
||||
* ``AHRIMAN_PACMAN_MIRROR`` - override pacman mirror server if set.
|
||||
* ``AHRIMAN_PORT`` - HTTP server port if any, default is empty.
|
||||
@ -664,7 +663,7 @@ How to report by email
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
yay -S python-jinja
|
||||
yay -S --asdeps python-jinja
|
||||
|
||||
#.
|
||||
Configure the service:
|
||||
@ -691,7 +690,7 @@ How to generate index page for S3
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
yay -S python-jinja
|
||||
yay -S --asdeps python-jinja
|
||||
|
||||
#.
|
||||
Configure the service:
|
||||
@ -715,7 +714,7 @@ How to post build report to telegram
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
yay -S python-jinja
|
||||
yay -S --asdeps python-jinja
|
||||
|
||||
#.
|
||||
Register bot in telegram. You can do it by talking with `@BotFather <https://t.me/botfather>`_. For more details please refer to `official documentation <https://core.telegram.org/bots>`_.
|
||||
@@ -754,6 +753,80 @@ If you did everything fine you should receive the message with the next update.

(replace ``${CHAT_ID}`` and ``${API_KEY}`` with the values from configuration).

Maintenance packages
--------------------

Generate keyring package
^^^^^^^^^^^^^^^^^^^^^^^^

The application provides special plugin which generates keyring package. This plugin heavily depends on ``sign`` group settings, however it is possible to override them. The minimal package can be generated in the following way:

#.
   Edit configuration:

   .. code-block:: ini

      [keyring]
      target = keyring_generator

   By default it will use ``sign.key`` as trusted key and all other keys as packagers ones. For all available options refer to :doc:`configuration <configuration>`.

#.
   Create package source files:

   .. code-block:: shell

      sudo -u ahriman ahriman repo-create-keyring

   This command will generate PKGBUILD, revoked and trusted listings and keyring itself and will register the package in database.

#.
   Build new package as usual:

   .. code-block:: shell

      sudo -u ahriman ahriman package-add aur-clone-keyring --source local --now

   where ``aur-clone`` is your repository name.

This plugin might have some issues, in case of any of them, kindly create `new issue <https://github.com/arcan1s/ahriman/issues/new/choose>`_.

Generate mirrorlist package
^^^^^^^^^^^^^^^^^^^^^^^^^^^

The application provides special plugin which generates mirrorlist package also. It is possible to distribute this package as usual later. The package can be generated in the following way:

#.
   Edit configuration:

   .. code-block:: ini

      [mirrorlist]
      target = mirrorlist_generator

      [mirrorlist_generator]
      servers = https://repo.example.com/$arch

   The ``mirrorlist_generator.servers`` must contain list of available mirrors, the ``$arch`` and ``$repo`` variables are supported. For more options kindly refer to :doc:`configuration <configuration>`.

#.
   Create package source files:

   .. code-block:: shell

      sudo -u ahriman ahriman repo-create-mirrorlist

   This command will generate PKGBUILD and mirrorlist file and will register the package in database.

#.
   Build new package as usual:

   .. code-block:: shell

      sudo -u ahriman ahriman package-add aur-clone-mirrorlist --source local --now

   where ``aur-clone`` is your repository name.

Web service
-----------
@ -765,7 +838,7 @@ How to setup web service
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
yay -S python-aiohttp python-aiohttp-jinja2
|
||||
yay -S --asdeps python-aiohttp python-aiohttp-jinja2 python-aiohttp-apispec>=3.0.0 python-aiohttp-cors
|
||||
|
||||
#.
|
||||
Configure service:
|
||||
@ -786,7 +859,7 @@ How to enable basic authorization
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
yay -S python-aiohttp-security python-aiohttp-session python-cryptography
|
||||
yay -S --asdeps python-aiohttp-security python-aiohttp-session python-cryptography
|
||||
|
||||
#.
|
||||
Configure the service to enable authorization:
|
||||
@ -842,7 +915,7 @@ How to enable OAuth authorization
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
yay -S python-aiohttp-security python-aiohttp-session python-cryptography python-aioauth-client
|
||||
yay -S --asdeps python-aiohttp-security python-aiohttp-session python-cryptography python-aioauth-client
|
||||
|
||||
#.
|
||||
Configure the service:
|
||||
@ -954,6 +1027,11 @@ Don't know, haven't tried it. But it lacks of documentation at least.
|
||||
* ``repoctl`` is able to store old packages.
|
||||
* Ability to host repository from same command in ``repoctl`` vs external services (e.g. nginx) in ``ahriman``.
|
||||
|
||||
`repod <https://gitlab.archlinux.org/archlinux/repod>`_
|
||||
"""""""""""""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
Official tool provided by distribution, has clean logic, but it is just a helper for ``repo-add``, e.g. it doesn't work with AUR and all packages builds have to be handled separately.
|
||||
|
||||
`repo-scripts <https://github.com/arcan1s/repo-scripts>`_
|
||||
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
@@ -975,13 +1053,17 @@ It is automation tools for ``repoctl`` mentioned above. Except for using shell i

How to check service logs
^^^^^^^^^^^^^^^^^^^^^^^^^

-By default, the service writes logs to ``/dev/log`` which can be accessed by using ``journalctl`` command (logs are written to the journal of the user under which command is run).
-
-You can also edit configuration and forward logs to ``stderr``, just change ``handlers`` value, e.g.:
+By default, the service writes logs to ``journald`` which can be accessed by using ``journalctl`` command (logs are written to the journal of the user under which command is run). In order to retrieve logs for the process you can use the following command:

.. code-block:: shell

-    sed -i 's/handlers = syslog_handler/handlers = console_handler/g' /etc/ahriman.ini.d/logging.ini
+    sudo journalctl SYSLOG_IDENTIFIER=ahriman
+
+You can also ask to forward logs to ``stderr``, just set ``--log-handler`` flag, e.g.:
+
+.. code-block:: shell
+
+    ahriman --log-handler console ...

You can even configure logging as you wish, but kindly refer to python ``logging`` module `configuration <https://docs.python.org/3/library/logging.config.html>`_. The application uses java concept to log messages, e.g. class ``Application`` imported from ``ahriman.application.application`` package will have logger called ``ahriman.application.application.Application``. In order to e.g. change logger name for whole application package it is possible to change values for ``ahriman.application`` package; thus editing ``ahriman`` logger configuration will change logging for whole application (unless there are overrides for another logger).
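Since logger names follow the class layout described in the paragraph above, logging for a whole subtree can also be tuned programmatically; a hypothetical one-liner using the package name mentioned there:

```python
import logging

# classes under ahriman.application inherit this logger configuration,
# e.g. ahriman.application.application.Application
logging.getLogger("ahriman.application").setLevel(logging.INFO)
```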
@@ -31,6 +31,16 @@ This trigger will be called right after build process (``on_result``). It will p
Trigger which can be used for reporting. It implements ``on_result`` method and thus being called on each build update and generates report (e.g. html, telegram etc) according to the current settings.

+``ahriman.core.support.KeyringTrigger``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Generator for keyring package. This trigger will extract keys from local keychain and pack them into keyring specific format. This trigger will generate sources including PKGBUILD, which can be used later for package building.
+
+``ahriman.core.support.MirrorlistTrigger``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Simple generator for mirrorlist package, based on the urls which were set by configuration. This trigger will generate sources including PKGBUILD, which can be used later for package building.
+
``ahriman.core.upload.UploadTrigger``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -73,7 +83,7 @@ The ``context`` also implements collection methods such as ``__iter__`` and ``__
Trigger example
---------------

-Lets consider example of reporting trigger (e.g. `slack <https://slack.com/>`_, which provides easy HTTP API for integration triggers).gre
+Lets consider example of reporting trigger (e.g. `slack <https://slack.com/>`_, which provides easy HTTP API for integration triggers).

In order to post message to slack we will need a specific trigger url (something like ``https://hooks.slack.com/services/company_id/trigger_id``), channel (e.g. ``#archrepo``) and username (``repo-bot``).
@@ -7,7 +7,7 @@ pkgdesc="ArcH linux ReposItory MANager"
arch=('any')
url="https://github.com/arcan1s/ahriman"
license=('GPL3')
-depends=('devtools' 'git' 'pyalpm' 'python-cerberus' 'python-inflection' 'python-passlib' 'python-requests' 'python-srcinfo')
+depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-cerberus' 'python-inflection' 'python-passlib' 'python-requests' 'python-srcinfo')
makedepends=('python-build' 'python-installer' 'python-wheel')
optdepends=('breezy: -bzr packages support'
            'darcs: -darcs packages support'

@@ -24,11 +24,13 @@ optdepends=('breezy: -bzr packages support'
            'python-cryptography: web server with authorization'
            'python-requests-unixsocket: client report to web server by unix socket'
            'python-jinja: html report generation'
+           'python-systemd: journal support'
            'rsync: sync by using rsync'
            'subversion: -svn packages support')
source=("https://github.com/arcan1s/ahriman/releases/download/$pkgver/$pkgname-$pkgver-src.tar.xz"
        'ahriman.sysusers'
        'ahriman.tmpfiles')
+install="$pkgname.install"
backup=('etc/ahriman.ini'
        'etc/ahriman.ini.d/logging.ini')

@@ -52,6 +54,6 @@ package() {
    install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf"
}

-sha512sums=('112b0d8aac68e5330bbdd2b86a59c8a9af8ab7a7c636489623c8460bb90f1318585851edd2a97a8ce20e2d2ad93b847b522685df707c190aa39d23ab908fa8ef'
+sha512sums=('19841842641520b573cdde6cb80a7cfcd69756d323fdfeebc2eee2d264a1325ead4ab2f8383bb369f7896bfc1de59d7358f133f4afeb90a9b9f0695f482a58d0'
            '53d37efec812afebf86281716259f9ea78a307b83897166c72777251c3eebcb587ecee375d907514781fb2a5c808cbb24ef9f3f244f12740155d0603bf213131'
            '62b2eccc352d33853ef243c9cddd63663014aa97b87242f1b5bc5099a7dbd69ff3821f24ffc58e1b7f2387bd4e9e9712cc4c67f661b1724ad99cdf09b3717794')
package/archlinux/ahriman.install (new file, 25 lines)

@@ -0,0 +1,25 @@
post_upgrade() {
    local breakpoints=(
        2.9.0-1
    )

    for v in "${breakpoints[@]}"; do
        if [[ $(vercmp "$v" "$2") -eq 1 ]]; then
            "_${v//[.-]/_}_changes"
        fi
    done
}

_2_9_0_1_changes() {
    cat << EOF
It was found that you were upgrading from old-devtools package to the new one, which requires manual intervention:

* make sure that devtools are upgraded to the latest release;
* merge service configuration if required;
* run setup command (i.e. sudo ahriman service-setup) with the same arguments as you did before;
* remove build chroot: sudo rm -r /var/lib/ahriman/chroot/ahriman-x86_64/;
* update local databases: sudo -u ahriman ahriman update --no-aur --no-local --no-manual -yy;

For more information kindly refer to changelog https://github.com/arcan1s/ahriman/releases/tag/2.9.0
EOF
}
@@ -1 +1 @@
u ahriman 643 "ArcH linux ReposItory MANager" /var/lib/ahriman
u ahriman 643 "ArcH linux ReposItory MANager" /var/lib/ahriman

@@ -1,2 +1 @@
d /var/lib/ahriman 0755 ahriman ahriman
-d /var/log/ahriman 0755 ahriman ahriman
@@ -25,6 +25,7 @@ ignore_packages =
makechrootpkg_flags =
makepkg_flags = --nocolor --ignorearch
triggers = ahriman.core.gitremote.RemotePullTrigger ahriman.core.report.ReportTrigger ahriman.core.upload.UploadTrigger ahriman.core.gitremote.RemotePushTrigger
+triggers_known = ahriman.core.gitremote.RemotePullTrigger ahriman.core.gitremote.RemotePushTrigger ahriman.core.report.ReportTrigger ahriman.core.upload.UploadTrigger ahriman.core.support.KeyringTrigger ahriman.core.support.MirrorlistTrigger
vcs_allowed_age = 604800

[repository]

@@ -34,6 +35,12 @@ root = /var/lib/ahriman
[sign]
target =

+[keyring]
+target =
+
+[mirrorlist]
+target =
+
[remote-pull]
target =
@@ -2,17 +2,23 @@
keys = root,http,stderr,boto3,botocore,nose,s3transfer

[handlers]
-keys = console_handler,syslog_handler
+keys = console_handler,journald_handler,syslog_handler

[formatters]
keys = generic_format,syslog_format

[handler_console_handler]
-class = StreamHandler
+class = logging.StreamHandler
level = DEBUG
formatter = generic_format
args = (sys.stderr,)

+[handler_journald_handler]
+class = ahriman.core.log.journal_handler.JournalHandler
+level = DEBUG
+formatter = syslog_format
+kwargs = {"SYSLOG_IDENTIFIER": "ahriman"}
+
[handler_syslog_handler]
class = logging.handlers.SysLogHandler
level = DEBUG

@@ -21,20 +27,16 @@ args = ("/dev/log",)

[formatter_generic_format]
format = [%(levelname)s %(asctime)s] [%(name)s]: %(message)s
datefmt =

[formatter_syslog_format]
format = [%(levelname)s] [%(name)s]: %(message)s
datefmt =

[logger_root]
level = DEBUG
handlers = syslog_handler
qualname = root

[logger_http]
level = DEBUG
handlers = syslog_handler
qualname = http
propagate = 0

@@ -45,24 +47,20 @@ qualname = stderr

[logger_boto3]
level = INFO
handlers = syslog_handler
qualname = boto3
propagate = 0

[logger_botocore]
level = INFO
handlers = syslog_handler
qualname = botocore
propagate = 0

[logger_nose]
level = INFO
handlers = syslog_handler
qualname = nose
propagate = 0

[logger_s3transfer]
level = INFO
handlers = syslog_handler
qualname = s3transfer
propagate = 0
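As a side note, a configuration in this format, including the ``kwargs`` key used by the new journald handler above, can be loaded with the standard library; a minimal sketch, assuming the file is installed to the path used elsewhere in this changeset:

```python
import logging
import logging.config

# fileConfig understands the class/level/formatter/args/kwargs keys
# used by the handler sections above (kwargs is supported since Python 3.7)
logging.config.fileConfig("/etc/ahriman.ini.d/logging.ini", disable_existing_loggers=True)

logging.getLogger("ahriman").debug("logging configured")
```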
setup.py (3 lines changed)

@@ -121,6 +121,9 @@ setup(
        "sphinx-rtd-theme>=1.1.1",  # https://stackoverflow.com/a/74355734
        "sphinxcontrib-napoleon",
    ],
+    "journald": [
+        "systemd-python",
+    ],
    # FIXME technically this dependency is required, but in some cases we do not have access to
    # the libalpm which is required in order to install the package. Thus in case if we do not
    # really need to run the application we can move it to "optional" dependencies
@@ -30,6 +30,7 @@ from ahriman.application import handlers
from ahriman.core.util import enum_values
from ahriman.models.action import Action
from ahriman.models.build_status import BuildStatusEnum
+from ahriman.models.log_handler import LogHandler
from ahriman.models.package_source import PackageSource
from ahriman.models.sign_settings import SignSettings
from ahriman.models.user_access import UserAccess

@@ -58,6 +59,7 @@ def _formatter(prog: str) -> argparse.HelpFormatter:
    return argparse.ArgumentDefaultsHelpFormatter(prog, width=120)


+# pylint: disable=too-many-statements
def _parser() -> argparse.ArgumentParser:
    """
    command line parser generator

@@ -75,6 +77,9 @@ def _parser() -> argparse.ArgumentParser:
    parser.add_argument("--force", help="force run, remove file lock", action="store_true")
    parser.add_argument("-l", "--lock", help="lock file", type=Path,
                        default=Path(tempfile.gettempdir()) / "ahriman.lock")
+    parser.add_argument("--log-handler", help="explicit log handler specification. If none set, the handler will be "
+                                              "guessed from environment",
+                        type=LogHandler, choices=enum_values(LogHandler))
    parser.add_argument("--report", help="force enable or disable reporting to web service",
                        action=argparse.BooleanOptionalAction, default=True)
    parser.add_argument("-q", "--quiet", help="force disable any logging", action="store_true")
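The new ``--log-handler`` flag is typed with ``ahriman.models.log_handler.LogHandler``. Judging by the choices shown in this diff and in the shell completions, the model is presumably a small string-backed enum along these lines (a sketch; the member names are guessed, only the values are confirmed by the diff):

```python
from enum import Enum


class LogHandler(str, Enum):
    """possible log handler targets; the values match the CLI choices console/syslog/journald"""

    Console = "console"
    Syslog = "syslog"
    Journald = "journald"
```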
@ -100,6 +105,8 @@ def _parser() -> argparse.ArgumentParser:
|
||||
_set_patch_set_add_parser(subparsers)
|
||||
_set_repo_backup_parser(subparsers)
|
||||
_set_repo_check_parser(subparsers)
|
||||
_set_repo_create_keyring_parser(subparsers)
|
||||
_set_repo_create_mirrorlist_parser(subparsers)
|
||||
_set_repo_daemon_parser(subparsers)
|
||||
_set_repo_rebuild_parser(subparsers)
|
||||
_set_repo_remove_unknown_parser(subparsers)
|
||||
@ -478,6 +485,44 @@ def _set_repo_check_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
return parser
|
||||
|
||||
|
||||
def _set_repo_create_keyring_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
"""
|
||||
add parser for create-keyring subcommand
|
||||
|
||||
Args:
|
||||
root(SubParserAction): subparsers for the commands
|
||||
|
||||
Returns:
|
||||
argparse.ArgumentParser: created argument parser
|
||||
"""
|
||||
parser = root.add_parser("repo-create-keyring", help="create keyring package",
|
||||
description="create package which contains list of trusted keys as set by "
|
||||
"configuration. Note, that this action will only create package, the package "
|
||||
"itself has to be built manually",
|
||||
formatter_class=_formatter)
|
||||
parser.set_defaults(handler=handlers.Triggers, trigger=["ahriman.core.support.KeyringTrigger"])
|
||||
return parser
|
||||
|
||||
|
||||
def _set_repo_create_mirrorlist_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
"""
|
||||
add parser for create-mirrorlist subcommand
|
||||
|
||||
Args:
|
||||
root(SubParserAction): subparsers for the commands
|
||||
|
||||
Returns:
|
||||
argparse.ArgumentParser: created argument parser
|
||||
"""
|
||||
parser = root.add_parser("repo-create-mirrorlist", help="create mirrorlist package",
|
||||
        description="create package which contains list of available mirrors as set by "
                    "configuration. Note, that this action will only create package, the package "
                    "itself has to be built manually",
        formatter_class=_formatter)
    parser.set_defaults(handler=handlers.Triggers, trigger=["ahriman.core.support.MirrorlistTrigger"])
    return parser


def _set_repo_daemon_parser(root: SubParserAction) -> argparse.ArgumentParser:
    """
    add parser for daemon subcommand
@ -531,6 +576,8 @@ def _set_repo_rebuild_parser(root: SubParserAction) -> argparse.ArgumentParser:
                             "ahriman instance run with web service and have run repo-update at least once.",
                        action="store_true")
    parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
    parser.add_argument("-s", "--status", help="filter packages by status. Requires --from-database to be set",
                        type=BuildStatusEnum, choices=enum_values(BuildStatusEnum))
    parser.set_defaults(handler=handlers.Rebuild)
    return parser


@ -26,6 +26,7 @@ from typing import Any

from ahriman.application.application.application_properties import ApplicationProperties
from ahriman.core.build_tools.sources import Sources
from ahriman.core.exceptions import UnknownPackageError
from ahriman.core.util import package_like
from ahriman.models.package import Package
from ahriman.models.package_source import PackageSource
@ -43,8 +44,14 @@ class ApplicationPackages(ApplicationProperties):

        Args:
            source(str): path to package archive

        Raises:
            UnknownPackageError: if specified path doesn't exist
        """
        local_path = Path(source)
        if not local_path.is_file():
            raise UnknownPackageError(source)

        dst = self.repository.paths.packages / local_path.name
        shutil.copy(local_path, dst)

@ -68,6 +75,9 @@ class ApplicationPackages(ApplicationProperties):
            source(str): path to local directory
        """
        local_dir = Path(source)
        if not local_dir.is_dir():
            raise UnknownPackageError(source)

        for full_path in filter(package_like, local_dir.iterdir()):
            self._add_archive(str(full_path))

@ -77,12 +87,19 @@ class ApplicationPackages(ApplicationProperties):

        Args:
            source(str): path to directory with local source files

        Raises:
            UnknownPackageError: if specified package is unknown or doesn't exist
        """
        source_dir = Path(source)
        package = Package.from_build(source_dir, self.architecture)
        cache_dir = self.repository.paths.cache_for(package.base)
        shutil.copytree(source_dir, cache_dir)  # copy package to store in caches
        Sources.init(cache_dir)  # we need to run init command in directory where we do have permissions
        if (source_dir := Path(source)).is_dir():
            package = Package.from_build(source_dir, self.architecture)
            cache_dir = self.repository.paths.cache_for(package.base)
            shutil.copytree(source_dir, cache_dir)  # copy package to store in caches
            Sources.init(cache_dir)  # we need to run init command in directory where we do have permissions
        elif (source_dir := self.repository.paths.cache_for(source)).is_dir():
            package = Package.from_build(source_dir, self.architecture)
        else:
            raise UnknownPackageError(source)

        self.database.build_queue_insert(package)

@ -95,8 +112,11 @@ class ApplicationPackages(ApplicationProperties):
        """
        dst = self.repository.paths.packages / Path(source).name  # URL is path, is not it?
        # timeout=None to suppress pylint warns. Also suppress bandit warnings
        response = requests.get(source, stream=True, timeout=None)  # nosec
        response.raise_for_status()
        try:
            response = requests.get(source, stream=True, timeout=None)  # nosec
            response.raise_for_status()
        except Exception:
            raise UnknownPackageError(source)

        with dst.open("wb") as local_file:
            for chunk in response.iter_content(chunk_size=1024):
@ -94,7 +94,8 @@ class Handler:
        """
        try:
            configuration = Configuration.from_path(args.configuration, architecture)
            Log.load(configuration, quiet=args.quiet, report=args.report)
            log_handler = Log.handler(args.log_handler)
            Log.load(configuration, log_handler, quiet=args.quiet, report=args.report)
            with Lock(args, architecture, configuration):
                cls.run(args, architecture, configuration, report=args.report, unsafe=args.unsafe)
            return True

@ -23,6 +23,7 @@ from ahriman.application.application import Application
from ahriman.application.handlers import Handler
from ahriman.core.configuration import Configuration
from ahriman.core.formatters import UpdatePrinter
from ahriman.models.build_status import BuildStatusEnum
from ahriman.models.package import Package


@ -47,7 +48,7 @@ class Rebuild(Handler):
        application = Application(architecture, configuration, report=report, unsafe=unsafe)
        application.on_start()

        packages = Rebuild.extract_packages(application, from_database=args.from_database)
        packages = Rebuild.extract_packages(application, args.status, from_database=args.from_database)
        updates = application.repository.packages_depend_on(packages, args.depends_on or None)

        Rebuild.check_if_empty(args.exit_code, not updates)
@ -60,17 +61,24 @@
        Rebuild.check_if_empty(args.exit_code, result.is_empty)

    @staticmethod
    def extract_packages(application: Application, *, from_database: bool) -> list[Package]:
    def extract_packages(application: Application, status: BuildStatusEnum | None, *,
                         from_database: bool) -> list[Package]:
        """
        extract packages from database file

        Args:
            application(Application): application instance
            status(BuildStatusEnum | None): optional filter by package status
            from_database(bool): extract packages from database instead of repository filesystem

        Returns:
            list[Package]: list of packages which were stored in database
        """
        if from_database:
            return [package for (package, _) in application.database.packages_get()]
            return [
                package
                for (package, last_status) in application.database.packages_get()
                if status is None or last_status.status == status
            ]

        return application.repository.packages()

@ -136,7 +136,7 @@ class Setup(Handler):
        """
        create configuration for devtools based on ``source`` configuration

        Note:
        Notes:
            devtools does not allow to specify the pacman configuration, thus we still have to use configuration in /usr

        Args:

@ -79,5 +79,6 @@ class UnsafeCommands(Handler):
        """
        # should never fail
        # pylint: disable=protected-access
        subparser = next(action for action in parser._actions if isinstance(action, argparse._SubParsersAction))
        return sorted(action_name for action_name, action in subparser.choices.items() if action.get_default("unsafe"))
        subparser = next((action for action in parser._actions if isinstance(action, argparse._SubParsersAction)), None)
        actions = subparser.choices if subparser is not None else {}
        return sorted(action_name for action_name, action in actions.items() if action.get_default("unsafe"))

@ -78,7 +78,9 @@ class Validate(Handler):

        # create trigger loader instance
        loader = TriggerLoader()
        for trigger in loader.selected_triggers(configuration):
        triggers = loader.selected_triggers(configuration) + loader.known_triggers(configuration)

        for trigger in set(triggers):
            try:
                trigger_class = loader.load_trigger_class(trigger)
            except ExtensionError:

@ -19,6 +19,8 @@
#
import argparse

from collections.abc import Generator

from ahriman.application.handlers import Handler
from ahriman.core.configuration import Configuration
from ahriman.core.spawn import Spawn
@ -31,6 +33,7 @@ class Web(Handler):

    ALLOW_AUTO_ARCHITECTURE_RUN = False
    ALLOW_MULTI_ARCHITECTURE_RUN = False  # required to be able to spawn external processes
    COMMAND_ARGS_WHITELIST = ["force", "log_handler", ""]

    @classmethod
    def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *,
@ -48,7 +51,8 @@ class Web(Handler):
        # we are using local import for optional dependencies
        from ahriman.web.web import run_server, setup_service

        spawner = Spawn(args.parser(), architecture, configuration)
        spawner_args = Web.extract_arguments(args, architecture, configuration)
        spawner = Spawn(args.parser(), architecture, list(spawner_args))
        spawner.start()

        application = setup_service(architecture, configuration, spawner)
@ -57,3 +61,33 @@
        # terminate spawn process at the last
        spawner.stop()
        spawner.join()

    @staticmethod
    def extract_arguments(args: argparse.Namespace, architecture: str,
                          configuration: Configuration) -> Generator[str, None, None]:
        """
        extract list of arguments used for current command, except for command specific ones

        Args:
            args(argparse.Namespace): command line args
            architecture(str): repository architecture
            configuration(Configuration): configuration instance

        Returns:
            Generator[str, None, None]: command line arguments which were used for this specific command
        """
        # read architecture from the same argument list
        yield from ["--architecture", architecture]
        # read configuration path from current settings
        if (configuration_path := configuration.path) is not None:
            yield from ["--configuration", str(configuration_path)]

        # arguments from command line
        if args.force:
            yield "--force"
        if args.log_handler is not None:
            yield from ["--log-handler", args.log_handler.value]
        if args.quiet:
            yield "--quiet"
        if args.unsafe:
            yield "--unsafe"
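Web.extract_arguments above only re-emits the global flags, so the spawned child process reuses the parent command line; a minimal sketch of the expected output, with purely illustrative values (the architecture and configuration path are assumptions, not taken from this diff):

    # illustrative only
    args = argparse.Namespace(force=True, log_handler=None, quiet=False, unsafe=False)
    spawner_args = list(Web.extract_arguments(args, "x86_64", configuration))
    # assuming configuration.path is Path("/etc/ahriman.ini"), this yields
    # ["--architecture", "x86_64", "--configuration", "/etc/ahriman.ini", "--force"]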
@ -20,9 +20,9 @@
import shutil

from collections.abc import Callable, Generator
from functools import cached_property
from pathlib import Path
from pyalpm import DB, Handle, Package, SIG_PACKAGE, error as PyalpmError  # type: ignore[import]
from typing import Any

from ahriman.core.configuration import Configuration
from ahriman.core.log import LazyLogging
@ -34,13 +34,8 @@ from ahriman.models.repository_paths import RepositoryPaths
class Pacman(LazyLogging):
    """
    alpm wrapper

    Attributes:
        handle(Handle): pyalpm root ``Handle``
    """

    handle: Handle

    def __init__(self, architecture: str, configuration: Configuration, *,
                 refresh_database: PacmanSynchronization) -> None:
        """
@ -84,6 +79,16 @@ class Pacman(LazyLogging):

        return handle

    @cached_property
    def handle(self) -> Handle:
        """
        pyalpm handle

        Returns:
            Handle: generated pyalpm handle instance
        """
        return self.__create_handle_fn()

    def database_copy(self, handle: Handle, database: DB, pacman_root: Path, paths: RepositoryPaths, *,
                      use_ahriman_cache: bool) -> None:
        """
@ -184,22 +189,3 @@ class Pacman(LazyLogging):
        result.update(trim_package(provides) for provides in package.provides)

        return result

    def __getattr__(self, item: str) -> Any:
        """
        pacman handle extractor

        Args:
            item(str): property name

        Returns:
            Any: attribute by its name

        Raises:
            AttributeError: in case if no such attribute found
        """
        if item == "handle":
            handle = self.__create_handle_fn()
            setattr(self, item, handle)
            return handle
        return super().__getattr__(item)  # required for logging attribute
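The removed __getattr__ trick and the new cached_property achieve the same lazy, once-only handle creation; a minimal standalone sketch of the pattern (the class and values here are illustrative, not part of ahriman):

    from functools import cached_property

    class Lazy:
        @cached_property
        def handle(self) -> int:
            print("creating handle")  # runs only on the first access
            return 42

    lazy = Lazy()
    lazy.handle  # prints "creating handle" and returns 42
    lazy.handle  # returns the cached 42 without recomputation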
@ -125,6 +125,12 @@ class Sources(LazyLogging):
        Sources._check_output("git", "init", "--initial-branch", instance.DEFAULT_BRANCH,
                              cwd=sources_dir, logger=instance.logger)

        # extract local files...
        files = ["PKGBUILD", ".SRCINFO"] + [str(path) for path in Package.local_files(sources_dir)]
        instance.add(sources_dir, *files)
        # ...and commit them
        instance.commit(sources_dir, author="ahriman <ahriman@localhost>")

    @staticmethod
    def load(sources_dir: Path, package: Package, patches: list[PkgbuildPatch], paths: RepositoryPaths) -> None:
        """

@ -99,6 +99,16 @@ class Configuration(configparser.RawConfigParser):
        """
        return self.getpath("settings", "logging")

    @property
    def repository_name(self) -> str:
        """
        repository name as defined by configuration

        Returns:
            str: repository name from configuration
        """
        return self.get("repository", "name")

    @property
    def repository_paths(self) -> RepositoryPaths:
        """

@ -163,6 +163,11 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
        "coerce": "list",
        "schema": {"type": "string"},
    },
    "triggers_known": {
        "type": "list",
        "coerce": "list",
        "schema": {"type": "string"},
    },
    "vcs_allowed_age": {
        "type": "integer",
        "coerce": "integer",

@ -144,6 +144,24 @@ class Validator(RootValidator):
        if constraint and url.scheme not in constraint:
            self._error(field, f"Url {value} scheme must be one of {constraint}")

    def _validate_path_is_absolute(self, constraint: bool, field: str, value: Path) -> None:
        """
        check if path is absolute or not

        Args:
            constraint(bool): True in case if path must be absolute and False if it must be relative
            field(str): field name to be checked
            value(Path): value to be checked

        Examples:
            The rule's arguments are validated against this schema:
                {"type": "boolean"}
        """
        if constraint and not value.is_absolute():
            self._error(field, f"Path {value} must be absolute")
        if not constraint and value.is_absolute():
            self._error(field, f"Path {value} must be relative")

    def _validate_path_exists(self, constraint: bool, field: str, value: Path) -> None:
        """
        check if paths exists
@ -159,3 +177,5 @@
        """
        if constraint and not value.exists():
            self._error(field, f"Path {value} must exist")
        if not constraint and value.exists():
            self._error(field, f"Path {value} must not exist")
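The new path_is_absolute rule is meant to be referenced from configuration schemas; a minimal schema fragment using it, mirroring the mirrorlist generator schema further below in this comparison:

    # illustrative schema fragment; the real usage appears in MirrorlistTrigger.CONFIGURATION_SCHEMA
    "path": {
        "type": "string",
        "path_is_absolute": True,
    },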
80
src/ahriman/core/database/migrations/m007_check_depends.py
Normal file
@ -0,0 +1,80 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from sqlite3 import Connection

from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.core.util import package_like
from ahriman.models.package import Package
from ahriman.models.pacman_synchronization import PacmanSynchronization


__all__ = ["migrate_data", "steps"]


steps = [
    """
    alter table packages add column check_depends json
    """,
]


def migrate_data(connection: Connection, configuration: Configuration) -> None:
    """
    perform data migration

    Args:
        connection(Connection): database connection
        configuration(Configuration): configuration instance
    """
    migrate_package_check_depends(connection, configuration)


def migrate_package_check_depends(connection: Connection, configuration: Configuration) -> None:
    """
    migrate package check depends fields

    Args:
        connection(Connection): database connection
        configuration(Configuration): configuration instance
    """
    if not configuration.repository_paths.repository.is_dir():
        return

    _, architecture = configuration.check_loaded()
    pacman = Pacman(architecture, configuration, refresh_database=PacmanSynchronization.Disabled)

    package_list = []
    for full_path in filter(package_like, configuration.repository_paths.repository.iterdir()):
        base = Package.from_archive(full_path, pacman, remote=None)
        for package, description in base.packages.items():
            package_list.append({
                "check_depends": description.check_depends,
                "package": package,
            })

    connection.executemany(
        """
        update packages set
            check_depends = :check_depends
        where package = :package
        """,
        package_list
    )

@ -113,17 +113,17 @@ class PackageOperations(Operations):
                (package, package_base, architecture, archive_size,
                 build_date, depends, description, filename,
                 "groups", installed_size, licenses, provides,
                 url, make_depends, opt_depends)
                 url, make_depends, opt_depends, check_depends)
                values
                (:package, :package_base, :architecture, :archive_size,
                 :build_date, :depends, :description, :filename,
                 :groups, :installed_size, :licenses, :provides,
                 :url, :make_depends, :opt_depends)
                 :url, :make_depends, :opt_depends, :check_depends)
                on conflict (package, architecture) do update set
                package_base = :package_base, archive_size = :archive_size,
                build_date = :build_date, depends = :depends, description = :description, filename = :filename,
                "groups" = :groups, installed_size = :installed_size, licenses = :licenses, provides = :provides,
                url = :url, make_depends = :make_depends, opt_depends = :opt_depends
                url = :url, make_depends = :make_depends, opt_depends = :opt_depends, check_depends = :check_depends
                """,
                package_list)


@ -194,6 +194,18 @@ class PasswordError(ValueError):
        ValueError.__init__(self, f"Password error: {details}")


class PkgbuildGeneratorError(RuntimeError):
    """
    exception class for support type triggers
    """

    def __init__(self) -> None:
        """
        default constructor
        """
        RuntimeError.__init__(self, "Could not generate package")


class ReportError(RuntimeError):
    """
    report generation exception
47
src/ahriman/core/log/journal_handler.py
Normal file
@ -0,0 +1,47 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from logging import NullHandler
from typing import Any


__all__ = ["JournalHandler"]


class _JournalHandler(NullHandler):
    """
    wrapper for unexpected args and kwargs
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """
        default constructor

        Args:
            *args(Any): positional arguments
            **kwargs(Any): keyword arguments
        """
        NullHandler.__init__(self)
        del args, kwargs


try:
    from systemd.journal import JournalHandler  # type: ignore[import]
except ImportError:
    JournalHandler = _JournalHandler

@ -21,18 +21,24 @@ import contextlib
import logging

from collections.abc import Generator
from functools import cached_property
from typing import Any


class LazyLogging:
    """
    wrapper for the logger library inspired by scala lazy logging module

    Attributes:
        logger(logging.Logger): class logger instance
    """

    logger: logging.Logger
    @cached_property
    def logger(self) -> logging.Logger:
        """
        get class logger instance

        Returns:
            logging.Logger: class logger instance
        """
        return logging.getLogger(self.logger_name)

    @property
    def logger_name(self) -> str:
@ -89,22 +95,3 @@ class LazyLogging:
            yield
        finally:
            self._package_logger_reset()

    def __getattr__(self, item: str) -> Any:
        """
        logger extractor

        Args:
            item(str): property name

        Returns:
            Any: attribute by its name

        Raises:
            AttributeError: in case if no such attribute found
        """
        if item == "logger":
            logger = logging.getLogger(self.logger_name)
            setattr(self, item, logger)
            return logger
        raise AttributeError(f"'{self.__class__.__qualname__}' object has no attribute '{item}'")
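With logger now being a cached_property, any class mixing in LazyLogging still gets its logger on first use; a minimal usage sketch (the class name is illustrative):

    # illustrative only
    class DatabaseSync(LazyLogging):
        def run(self) -> None:
            self.logger.info("sync started")  # logger is created lazily and cached on the instance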
@ -20,9 +20,11 @@
import logging

from logging.config import fileConfig
from pathlib import Path

from ahriman.core.configuration import Configuration
from ahriman.core.log.http_log_handler import HttpLogHandler
from ahriman.models.log_handler import LogHandler


class Log:
@ -32,24 +34,65 @@ class Log:
    Attributes:
        DEFAULT_LOG_FORMAT(str): (class attribute) default log format (in case of fallback)
        DEFAULT_LOG_LEVEL(int): (class attribute) default log level (in case of fallback)
        DEFAULT_SYSLOG_DEVICE(Path): (class attribute) default path to syslog device
    """

    DEFAULT_LOG_FORMAT = "[%(levelname)s %(asctime)s] [%(filename)s:%(lineno)d %(funcName)s]: %(message)s"
    DEFAULT_LOG_LEVEL = logging.DEBUG
    DEFAULT_SYSLOG_DEVICE = Path("/dev") / "log"

    @staticmethod
    def load(configuration: Configuration, *, quiet: bool, report: bool) -> None:
    def handler(selected: LogHandler | None) -> LogHandler:
        """
        try to guess default log handler. In case if ``selected`` is set, it will return specified value with appended
        _handler suffix. Otherwise, it will try to import journald handler and returns ``journald_handler`` if library
        is available. Otherwise, it will check if there is ``/dev/log`` device and returns ``syslog_handler`` in this
        case. And, finally, it will fall back to ``console_handler`` if none were found

        Args:
            selected(LogHandler | None): user specified handler if any

        Returns:
            LogHandler: selected log handler
        """
        if selected is not None:
            return selected

        try:
            from systemd.journal import JournalHandler  # type: ignore[import]
            del JournalHandler
            return LogHandler.Journald  # journald import was found
        except ImportError:
            if Log.DEFAULT_SYSLOG_DEVICE.exists():
                return LogHandler.Syslog
            return LogHandler.Console

    @staticmethod
    def load(configuration: Configuration, handler: LogHandler, *, quiet: bool, report: bool) -> None:
        """
        setup logging settings from configuration

        Args:
            configuration(Configuration): configuration instance
            handler(LogHandler): selected default log handler, which will be used if no handlers were set
            quiet(bool): force disable any log messages
            report(bool): force enable or disable reporting
        """
        default_handler = f"{handler.value}_handler"

        try:
            path = configuration.logging_path
            fileConfig(path)
            log_configuration = Configuration()
            log_configuration.read(configuration.logging_path)

            # set handlers if they are not set
            for section in filter(lambda s: s.startswith("logger_"), log_configuration.sections()):
                if "handlers" in log_configuration[section]:
                    continue
                log_configuration.set_option(section, "handlers", default_handler)

            # load logging configuration
            fileConfig(log_configuration, disable_existing_loggers=True)
            logging.debug("using %s logger", default_handler)
        except Exception:
            logging.basicConfig(filename=None, format=Log.DEFAULT_LOG_FORMAT,
                                level=Log.DEFAULT_LOG_LEVEL)
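A minimal sketch of how the new handler selection and the extended load signature are wired together, matching the Handler change earlier in this comparison:

    # illustrative only
    log_handler = Log.handler(None)  # Journald if python-systemd imports, else Syslog when /dev/log exists, else Console
    Log.load(configuration, log_handler, quiet=False, report=True)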
@ -57,4 +57,4 @@ class HTML(Report, JinjaTemplate):
            result(Result): build result
        """
        html = self.make_html(Result(success=packages), self.template_path)
        self.report_path.write_text(html)
        self.report_path.write_text(html, encoding="utf8")

@ -75,7 +75,7 @@ class JinjaTemplate:

        # base template vars
        self.homepage = configuration.get(section, "homepage", fallback=None)
        self.name = configuration.get("repository", "name")
        self.name = configuration.repository_name

        self.sign_targets, self.default_pgp_key = GPG.sign_options(configuration)


@ -67,7 +67,7 @@ class RepositoryProperties(LazyLogging):
        self.configuration = configuration
        self.database = database

        self.name = configuration.get("repository", "name")
        self.name = configuration.repository_name
        self.vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0)

        self.paths: RepositoryPaths = configuration.repository_paths  # additional workaround for pycharm typing
@ -79,7 +79,7 @@ class RepositoryProperties(LazyLogging):

        self.ignore_list = configuration.getlist("build", "ignore_packages", fallback=[])
        self.pacman = Pacman(architecture, configuration, refresh_database=refresh_pacman_database)
        self.sign = GPG(architecture, configuration)
        self.sign = GPG(configuration)
        self.repo = Repo(self.name, self.paths, self.sign.repository_sign_args)
        self.reporter = Client.load(configuration, report=report)
        self.triggers = TriggerLoader.load(architecture, configuration)

@ -19,6 +19,7 @@
#
import requests

from collections.abc import Generator
from pathlib import Path

from ahriman.core.configuration import Configuration
@ -34,7 +35,6 @@ class GPG(LazyLogging):

    Attributes:
        DEFAULT_TIMEOUT(int): (class attribute) HTTP request timeout in seconds
        architecture(str): repository architecture
        configuration(Configuration): configuration instance
        default_key(str | None): default PGP key ID to use
        targets(set[SignSettings]): list of targets to sign (repository, package etc)
@ -43,15 +43,13 @@ class GPG(LazyLogging):
    _check_output = check_output
    DEFAULT_TIMEOUT = 30

    def __init__(self, architecture: str, configuration: Configuration) -> None:
    def __init__(self, configuration: Configuration) -> None:
        """
        default constructor

        Args:
            architecture(str): repository architecture
            configuration(Configuration): configuration instance
        """
        self.architecture = architecture
        self.configuration = configuration
        self.targets, self.default_key = self.sign_options(configuration)

@ -128,6 +126,34 @@ class GPG(LazyLogging):
            raise
        return response.text

    def key_export(self, key: str) -> str:
        """
        export public key from stored keychain

        Args:
            key(str): key ID to export

        Returns:
            str: PGP key in .asc format
        """
        return GPG._check_output("gpg", "--armor", "--no-emit-version", "--export", key, logger=self.logger)

    def key_fingerprint(self, key: str) -> str:
        """
        get full key fingerprint from short key id

        Args:
            key(str): key ID to lookup

        Returns:
            str: full PGP key fingerprint
        """
        metadata = GPG._check_output("gpg", "--with-colons", "--fingerprint", key, logger=self.logger)
        # fingerprint line will be like
        # fpr:::::::::43A663569A07EE1E4ECC55CC7E3A4240CE3C45C2:
        fingerprint = next(filter(lambda line: line[:3] == "fpr", metadata.splitlines()))
        return fingerprint.split(":")[-2]

    def key_import(self, server: str, key: str) -> None:
        """
        import key to current user and sign it locally
@ -139,6 +165,21 @@
        key_body = self.key_download(server, key)
        GPG._check_output("gpg", "--import", input_data=key_body, logger=self.logger)

    def keys(self) -> list[str]:
        """
        extract list of keys described in configuration

        Returns:
            list[str]: list of unique keys which are set in configuration
        """
        def generator() -> Generator[str, None, None]:
            if self.default_key is not None:
                yield self.default_key
            for _, value in filter(lambda pair: pair[0].startswith("key_"), self.configuration["sign"].items()):
                yield value

        return sorted(set(generator()))

    def process(self, path: Path, key: str) -> list[Path]:
        """
        gpg command wrapper
@ -179,7 +220,7 @@
        """
        sign repository if required by configuration

        Note:
        Notes:
            More likely you just want to pass ``repository_sign_args`` to repo wrapper

        Args:
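The new key helpers are consumed by the keyring generator added below; a minimal sketch of the intended flow (no specific key ids are implied):

    # illustrative only
    sign = GPG(configuration)
    for key in sign.keys():                      # default key plus every sign.key_* option
        armored = sign.key_export(key)           # ASCII-armored public key
        fingerprint = sign.key_fingerprint(key)  # full fingerprint parsed from the fpr record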
@ -26,7 +26,6 @@ from collections.abc import Callable, Iterable
from multiprocessing import Process, Queue
from threading import Lock, Thread

from ahriman.core.configuration import Configuration
from ahriman.core.log import LazyLogging
from ahriman.models.package_source import PackageSource

@ -39,23 +38,24 @@ class Spawn(Thread, LazyLogging):
    Attributes:
        active(dict[str, Process]): map of active child processes required to avoid zombies
        architecture(str): repository architecture
        configuration(Configuration): configuration instance
        command_arguments(list[str]): base command line arguments
        queue(Queue[tuple[str, bool]]): multiprocessing queue to read updates from processes
    """

    def __init__(self, args_parser: argparse.ArgumentParser, architecture: str, configuration: Configuration) -> None:
    def __init__(self, args_parser: argparse.ArgumentParser, architecture: str, command_arguments: list[str]) -> None:
        """
        default constructor

        Args:
            args_parser(argparse.ArgumentParser): command line parser for the application
            architecture(str): repository architecture
            configuration(Configuration): configuration instance
            command_arguments(list[str]): base command line arguments
        """
        Thread.__init__(self, name="spawn")
        self.architecture = architecture

        self.args_parser = args_parser
        self.configuration = configuration
        self.command_arguments = command_arguments

        self.lock = Lock()
        self.active: dict[str, Process] = {}
@ -88,9 +88,7 @@ class Spawn(Thread, LazyLogging):
            **kwargs(str): named command arguments
        """
        # default arguments
        arguments = ["--architecture", self.architecture]
        if self.configuration.path is not None:
            arguments.extend(["--configuration", str(self.configuration.path)])
        arguments = self.command_arguments[:]
        # positional command arguments
        arguments.append(command)
        arguments.extend(args)
@ -172,7 +170,6 @@ class Spawn(Thread, LazyLogging):
        process = self.active.pop(process_id, None)

        if process is not None:
            process.terminate()  # make sure lol
            process.join()

    def stop(self) -> None:

@ -102,15 +102,22 @@ class WebClient(Client, LazyLogging):
        return address, False

    @contextlib.contextmanager
    def __get_session(self) -> Generator[requests.Session, None, None]:
    def __get_session(self, session: requests.Session | None = None) -> Generator[requests.Session, None, None]:
        """
        execute request and handle exceptions

        Args:
            session(requests.Session | None, optional): session to be used or stored instance property otherwise
                (Default value = None)

        Yields:
            requests.Session: session for requests
        """
        try:
            yield self.__session
            if session is not None:
                yield session  # use session from arguments
            else:
                yield self.__session  # use instance generated session
        except requests.RequestException as e:
            if self.suppress_errors:
                return
@ -136,13 +143,16 @@ class WebClient(Client, LazyLogging):
            return session

        session = requests.Session()
        self._login()
        self._login(session)

        return session

    def _login(self) -> None:
    def _login(self, session: requests.Session) -> None:
        """
        process login to the service

        Args:
            session(requests.Session): request session to login
        """
        if self.user is None:
            return  # no auth configured
@ -152,7 +162,7 @@ class WebClient(Client, LazyLogging):
            "password": self.user.password
        }

        with self.__get_session() as session:
        with self.__get_session(session):
            response = session.post(self._login_url, json=payload)
            response.raise_for_status()
21
src/ahriman/core/support/__init__.py
Normal file
@ -0,0 +1,21 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.core.support.keyring_trigger import KeyringTrigger
from ahriman.core.support.mirrorlist_trigger import MirrorlistTrigger
114
src/ahriman/core/support/keyring_trigger.py
Normal file
@ -0,0 +1,114 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.core import context
from ahriman.core.configuration import Configuration
from ahriman.core.sign.gpg import GPG
from ahriman.core.support.package_creator import PackageCreator
from ahriman.core.support.pkgbuild.keyring_generator import KeyringGenerator
from ahriman.core.triggers import Trigger
from ahriman.models.context_key import ContextKey


class KeyringTrigger(Trigger):
    """
    keyring generator trigger

    Attributes:
        targets(list[str]): git remote target list
    """

    CONFIGURATION_SCHEMA = {
        "keyring": {
            "type": "dict",
            "schema": {
                "target": {
                    "type": "list",
                    "coerce": "list",
                    "schema": {"type": "string"},
                },
            },
        },
        "keyring_generator": {
            "type": "dict",
            "schema": {
                "description": {
                    "type": "string",
                },
                "homepage": {
                    "type": "string",
                },
                "license": {
                    "type": "list",
                    "coerce": "list",
                },
                "package": {
                    "type": "string",
                },
                "packagers": {
                    "type": "list",
                    "coerce": "list",
                },
                "revoked": {
                    "type": "list",
                    "coerce": "list",
                },
                "trusted": {
                    "type": "list",
                    "coerce": "list",
                },
            },
        },
    }

    def __init__(self, architecture: str, configuration: Configuration) -> None:
        """
        default constructor

        Args:
            architecture(str): repository architecture
            configuration(Configuration): configuration instance
        """
        Trigger.__init__(self, architecture, configuration)
        self.targets = self.configuration_sections(configuration)

    @classmethod
    def configuration_sections(cls, configuration: Configuration) -> list[str]:
        """
        extract configuration sections from configuration

        Args:
            configuration(Configuration): configuration instance

        Returns:
            list[str]: read configuration sections belong to this trigger
        """
        return configuration.getlist("keyring", "target", fallback=[])

    def on_start(self) -> None:
        """
        trigger action which will be called at the start of the application
        """
        ctx = context.get()
        sign = ctx.get(ContextKey("sign", GPG))

        for target in self.targets:
            generator = KeyringGenerator(sign, self.configuration, target)
            runner = PackageCreator(self.configuration, generator)
            runner.run()

105
src/ahriman/core/support/mirrorlist_trigger.py
Normal file
@ -0,0 +1,105 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.core.configuration import Configuration
from ahriman.core.support.package_creator import PackageCreator
from ahriman.core.support.pkgbuild.mirrorlist_generator import MirrorlistGenerator
from ahriman.core.triggers import Trigger


class MirrorlistTrigger(Trigger):
    """
    mirrorlist generator trigger

    Attributes:
        targets(list[str]): git remote target list
    """

    CONFIGURATION_SCHEMA = {
        "mirrorlist": {
            "type": "dict",
            "schema": {
                "target": {
                    "type": "list",
                    "coerce": "list",
                    "schema": {"type": "string"},
                },
            },
        },
        "mirrorlist_generator": {
            "type": "dict",
            "schema": {
                "description": {
                    "type": "string",
                },
                "homepage": {
                    "type": "string",
                },
                "license": {
                    "type": "list",
                    "coerce": "list",
                },
                "package": {
                    "type": "string",
                },
                "path": {
                    "type": "string",
                    "path_is_absolute": True,
                },
                "servers": {
                    "type": "list",
                    "coerce": "list",
                    "required": True,
                },
            },
        },
    }

    def __init__(self, architecture: str, configuration: Configuration) -> None:
        """
        default constructor

        Args:
            architecture(str): repository architecture
            configuration(Configuration): configuration instance
        """
        Trigger.__init__(self, architecture, configuration)
        self.targets = self.configuration_sections(configuration)

    @classmethod
    def configuration_sections(cls, configuration: Configuration) -> list[str]:
        """
        extract configuration sections from configuration

        Args:
            configuration(Configuration): configuration instance

        Returns:
            list[str]: read configuration sections belong to this trigger
        """
        return configuration.getlist("mirrorlist", "target", fallback=[])

    def on_start(self) -> None:
        """
        trigger action which will be called at the start of the application
        """
        for target in self.targets:
            generator = MirrorlistGenerator(self.configuration, target)
            runner = PackageCreator(self.configuration, generator)
            runner.run()

71
src/ahriman/core/support/package_creator.py
Normal file
@ -0,0 +1,71 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import shutil

from ahriman.core import context
from ahriman.core.build_tools.sources import Sources
from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite
from ahriman.core.support.pkgbuild.pkgbuild_generator import PkgbuildGenerator
from ahriman.models.build_status import BuildStatus
from ahriman.models.context_key import ContextKey
from ahriman.models.package import Package


class PackageCreator:
    """
    helper which creates packages based on pkgbuild generator

    Attributes:
        configuration(Configuration): configuration instance
        generator(PkgbuildGenerator): PKGBUILD generator instance
    """

    def __init__(self, configuration: Configuration, generator: PkgbuildGenerator) -> None:
        """
        default constructor

        Args:
            configuration(Configuration): configuration instance
            generator(PkgbuildGenerator): PKGBUILD generator instance
        """
        self.configuration = configuration
        self.generator = generator

    def run(self) -> None:
        """
        create new local package
        """
        local_path = self.configuration.repository_paths.cache_for(self.generator.pkgname)

        # clear old tree if any
        shutil.rmtree(local_path, ignore_errors=True)

        # create local tree
        local_path.mkdir(mode=0o755, parents=True, exist_ok=True)
        self.generator.write_pkgbuild(local_path)
        Sources.init(local_path)

        # register package
        ctx = context.get()
        database: SQLite = ctx.get(ContextKey("database", SQLite))
        _, architecture = self.configuration.check_loaded()
        package = Package.from_build(local_path, architecture)
        database.package_update(package, BuildStatus())

19
src/ahriman/core/support/pkgbuild/__init__.py
Normal file
@ -0,0 +1,19 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

194
src/ahriman/core/support/pkgbuild/keyring_generator.py
Normal file
@ -0,0 +1,194 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from collections.abc import Callable
from pathlib import Path

from ahriman.core.configuration import Configuration
from ahriman.core.exceptions import PkgbuildGeneratorError
from ahriman.core.sign.gpg import GPG
from ahriman.core.support.pkgbuild.pkgbuild_generator import PkgbuildGenerator


class KeyringGenerator(PkgbuildGenerator):
    """
    generator for keyring PKGBUILD

    Attributes:
        sign(GPG): GPG wrapper instance
        name(str): repository name
        packagers(list[str]): list of packagers PGP keys
        pkgbuild_license(list[str]): keyring package license
        pkgbuild_pkgdesc(str): keyring package description
        pkgbuild_pkgname(str): keyring package name
        pkgbuild_url(str): keyring package home page
        revoked(list[str]): list of revoked PGP keys
        trusted(list[str]): lif of trusted PGP keys
    """

    def __init__(self, sign: GPG, configuration: Configuration, section: str) -> None:
        """
        default constructor

        Args:
            sign(GPG): GPG wrapper instance
            configuration(Configuration): configuration instance
            section(str): settings section name
        """
        self.sign = sign
        self.name = configuration.repository_name

        # configuration fields
        self.packagers = configuration.getlist(section, "packagers", fallback=sign.keys())
        self.revoked = configuration.getlist(section, "revoked", fallback=[])
        self.trusted = configuration.getlist(
            section, "trusted", fallback=[sign.default_key] if sign.default_key is not None else [])
        # pkgbuild description fields
        self.pkgbuild_pkgname = configuration.get(section, "package", fallback=f"{self.name}-keyring")
        self.pkgbuild_pkgdesc = configuration.get(section, "description", fallback=f"{self.name} PGP keyring")
        self.pkgbuild_license = configuration.getlist(section, "license", fallback=["Unlicense"])
        self.pkgbuild_url = configuration.get(section, "homepage", fallback="")

    @property
    def license(self) -> list[str]:
        """
        package licenses list

        Returns:
            list[str]: package licenses as PKGBUILD property
        """
        return self.pkgbuild_license

    @property
    def pkgdesc(self) -> str:
        """
        package description

        Returns:
            str: package description as PKGBUILD property
        """
        return self.pkgbuild_pkgdesc

    @property
    def pkgname(self) -> str:
        """
        package name

        Returns:
            str: package name as PKGBUILD property
        """
        return self.pkgbuild_pkgname

    @property
    def url(self) -> str:
        """
        package upstream url

        Returns:
            str: package upstream url as PKGBUILD property
        """
        return self.pkgbuild_url

    def _generate_gpg(self, source_path: Path) -> None:
        """
        generate GPG keychain

        Args:
            source_path(Path): destination of the file content
        """
        with source_path.open("w") as source_file:
            for key in sorted(set(self.trusted + self.packagers + self.revoked)):
                public_key = self.sign.key_export(key)
                source_file.write(public_key)
                source_file.write("\n")

    def _generate_revoked(self, source_path: Path) -> None:
        """
        generate revoked PGP keys

        Args:
            source_path(Path): destination of the file content
        """
        with source_path.open("w") as source_file:
            for key in sorted(set(self.revoked)):
                fingerprint = self.sign.key_fingerprint(key)
                source_file.write(fingerprint)
                source_file.write("\n")

    def _generate_trusted(self, source_path: Path) -> None:
        """
        generate trusted PGP keys

        Args:
            source_path(Path): destination of the file content
        """
        if not self.trusted:
            raise PkgbuildGeneratorError
        with source_path.open("w") as source_file:
            for key in sorted(set(self.trusted)):
                fingerprint = self.sign.key_fingerprint(key)
                source_file.write(fingerprint)
                source_file.write(":4:\n")

    def install(self) -> str | None:
        """
        content of the install functions

        Returns:
            str | None: content of the install functions if any
        """
        # copy-paste from archlinux-keyring
        return f"""post_upgrade() {{
    if usr/bin/pacman-key -l >/dev/null 2>&1; then
        usr/bin/pacman-key --populate {self.name}
        usr/bin/pacman-key --updatedb
    fi
}}

post_install() {{
    if [ -x usr/bin/pacman-key ]; then
        post_upgrade
    fi
}}"""

    def package(self) -> str:
        """
        package function generator

        Returns:
            str: package() function for PKGBUILD
        """
        return f"""{{
    install -Dm644 "{Path("$srcdir") / f"{self.name}.gpg"}" "{Path("$pkgdir") / "usr" / "share" / "pacman" / "keyrings" / f"{self.name}.gpg"}"
    install -Dm644 "{Path("$srcdir") / f"{self.name}-revoked"}" "{Path("$pkgdir") / "usr" / "share" / "pacman" / "keyrings" / f"{self.name}-revoked"}"
    install -Dm644 "{Path("$srcdir") / f"{self.name}-trusted"}" "{Path("$pkgdir") / "usr" / "share" / "pacman" / "keyrings" / f"{self.name}-trusted"}"
}}"""

    def sources(self) -> dict[str, Callable[[Path], None]]:
        """
        return list of sources for the package

        Returns:
            dict[str, Callable[[Path], None]]: map of source identifier (e.g. filename) to its generator function
        """
        return {
            f"{self.name}.gpg": self._generate_gpg,
            f"{self.name}-revoked": self._generate_revoked,
            f"{self.name}-trusted": self._generate_trusted,
        }

143
src/ahriman/core/support/pkgbuild/mirrorlist_generator.py
Normal file
@ -0,0 +1,143 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from collections.abc import Callable
from pathlib import Path

from ahriman.core.configuration import Configuration
from ahriman.core.support.pkgbuild.pkgbuild_generator import PkgbuildGenerator
from ahriman.models.pkgbuild_patch import PkgbuildPatch


class MirrorlistGenerator(PkgbuildGenerator):
    """
    generator for mirrorlist PKGBUILD

    Attributes:
        path(Path): path to mirrorlist relative to /
        pkgbuild_license(list[str]): mirrorlist package license
        pkgbuild_pkgdesc(str): mirrorlist package description
        pkgbuild_pkgname(str): mirrorlist package name
        pkgbuild_url(str): mirrorlist package home page
        servers(list[str]): list of mirror servers
    """

    def __init__(self, configuration: Configuration, section: str) -> None:
        """
        default constructor

        Args:
            configuration(Configuration): configuration instance
            section(str): settings section name
        """
        name = configuration.repository_name

        # configuration fields
        self.servers = configuration.getlist(section, "servers")
        self.path = configuration.getpath(section, "path", fallback=Path("/etc") / "pacman.d" / f"{name}-mirrorlist")
        self.path = self.path.relative_to("/")  # in pkgbuild we are always operating with relative to / path
        # pkgbuild description fields
        self.pkgbuild_pkgname = configuration.get(section, "package", fallback=f"{name}-mirrorlist")
        self.pkgbuild_pkgdesc = configuration.get(
            section, "description", fallback=f"{name} mirror list for use by pacman")
        self.pkgbuild_license = configuration.getlist(section, "license", fallback=["Unlicense"])
        self.pkgbuild_url = configuration.get(section, "homepage", fallback="")

    @property
    def license(self) -> list[str]:
        """
        package licenses list

        Returns:
            list[str]: package licenses as PKGBUILD property
        """
        return self.pkgbuild_license

    @property
    def pkgdesc(self) -> str:
        """
        package description

        Returns:
            str: package description as PKGBUILD property
        """
        return self.pkgbuild_pkgdesc

    @property
    def pkgname(self) -> str:
        """
        package name

        Returns:
            str: package name as PKGBUILD property
        """
        return self.pkgbuild_pkgname

    @property
    def url(self) -> str:
        """
        package upstream url

        Returns:
            str: package upstream url as PKGBUILD property
        """
        return self.pkgbuild_url

    def _generate_mirrorlist(self, source_path: Path) -> None:
        """
        generate mirrorlist file

        Args:
            source_path(Path): destination of the mirrorlist content
        """
        content = "".join([f"Server = {server}\n" for server in self.servers])
        source_path.write_text(content, encoding="utf8")

    def package(self) -> str:
        """
        package function generator

        Returns:
            str: package() function for PKGBUILD
        """
        return f"""{{
    install -Dm644 "{Path("$srcdir") / "mirrorlist"}" "{Path("$pkgdir") / self.path}"
}}"""

    def patches(self) -> list[PkgbuildPatch]:
        """
        list of additional PKGBUILD properties

        Returns:
            list[PkgbuildPatch]: list of patches which generate PKGBUILD content
        """
        return [
            PkgbuildPatch("backup", [str(self.path)]),
        ]

    def sources(self) -> dict[str, Callable[[Path], None]]:
        """
        return list of sources for the package

        Returns:
            dict[str, Callable[[Path], None]]: map of source identifier (e.g. filename) to its generator function
        """
        return {
            "mirrorlist": self._generate_mirrorlist,
        }

201
src/ahriman/core/support/pkgbuild/pkgbuild_generator.py
Normal file
@ -0,0 +1,201 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import hashlib
import itertools

from collections.abc import Callable, Generator
from pathlib import Path

from ahriman.core.util import utcnow
from ahriman.models.pkgbuild_patch import PkgbuildPatch


class PkgbuildGenerator:
    """
    main class for generating PKGBUILDs

    Attributes:
        PKGBUILD_STATIC_PROPERTIES(list[PkgbuildPatch]): (class attribute) list of default pkgbuild static properties
    """

    PKGBUILD_STATIC_PROPERTIES = [
        PkgbuildPatch("pkgrel", "1"),
        PkgbuildPatch("arch", ["any"]),
    ]

    @property
    def license(self) -> list[str]:
        """
        package licenses list

        Returns:
            list[str]: package licenses as PKGBUILD property
        """
        return []

    @property
    def pkgdesc(self) -> str:
        """
        package description

        Returns:
            str: package description as PKGBUILD property

        Raises:
            NotImplementedError: not implemented method
        """
        raise NotImplementedError

    @property
    def pkgname(self) -> str:
        """
        package name

        Returns:
            str: package name as PKGBUILD property

        Raises:
            NotImplementedError: not implemented method
        """
        raise NotImplementedError

    @property
    def pkgver(self) -> str:
        """
        package version

        Returns:
            str: package version as PKGBUILD property
        """
        return utcnow().strftime("%Y%m%d")

    @property
    def url(self) -> str:
        """
        package upstream url

        Returns:
            str: package upstream url as PKGBUILD property
        """
        return ""

    def install(self) -> str | None:
        """
        content of the install functions

        Returns:
            str | None: content of the install functions if any
        """

    def package(self) -> str:
        """
        package function generator

        Returns:
            str: package() function for PKGBUILD

        Raises:
            NotImplementedError: not implemented method
        """
        raise NotImplementedError

    def patches(self) -> list[PkgbuildPatch]:
        """
        list of additional PKGBUILD properties

        Returns:
            list[PkgbuildPatch]: list of patches which generate PKGBUILD content
        """
        return []

    def sources(self) -> dict[str, Callable[[Path], None]]:
        """
        return list of sources for the package

        Returns:
            dict[str, Callable[[Path], None]]: map of source identifier (e.g. filename) to its generator function
        """
        return {}

    def write_install(self, source_dir: Path) -> list[PkgbuildPatch]:
        """
        generate content of install file
|
||||
|
||||
Args:
|
||||
source_dir(Path): path to directory in which sources must be generated
|
||||
|
||||
Returns:
|
||||
list[PkgbuildPatch]: patch for the pkgbuild if install file exists and empty list otherwise
|
||||
"""
|
||||
content: str | None = self.install()
|
||||
if content is None:
|
||||
return []
|
||||
|
||||
source_path = source_dir / f"{self.pkgname}.install"
|
||||
source_path.write_text(content)
|
||||
return [PkgbuildPatch("install", source_path.name)]
|
||||
|
||||
def write_pkgbuild(self, source_dir: Path) -> None:
|
||||
"""
|
||||
generate PKGBUILD content to the specified path
|
||||
|
||||
Args:
|
||||
source_dir(Path): path to directory in which sources must be generated
|
||||
"""
|
||||
patches = self.PKGBUILD_STATIC_PROPERTIES # default static properties...
|
||||
patches.extend([
|
||||
PkgbuildPatch("license", self.license),
|
||||
PkgbuildPatch("pkgdesc", self.pkgdesc),
|
||||
PkgbuildPatch("pkgname", self.pkgname),
|
||||
PkgbuildPatch("pkgver", self.pkgver),
|
||||
PkgbuildPatch("url", self.url),
|
||||
]) # ...main properties as defined by derived class...
|
||||
patches.extend(self.patches()) # ...optional properties as defined by derived class...
|
||||
patches.extend(self.write_install(source_dir)) # ...install function...
|
||||
patches.append(PkgbuildPatch("package()", self.package())) # ...package function...
|
||||
|
||||
patches.extend(self.write_sources(source_dir)) # ...and finally source files
|
||||
|
||||
for patch in patches:
|
||||
patch.write(source_dir / "PKGBUILD")
|
||||
|
||||
def write_sources(self, source_dir: Path) -> list[PkgbuildPatch]:
|
||||
"""
|
||||
write sources and returns valid PKGBUILD properties for them
|
||||
|
||||
Args:
|
||||
source_dir(Path): path to directory in which sources must be generated
|
||||
|
||||
Returns:
|
||||
list[PkgbuildPatch]: list of patches to be applied to the PKGBUILD
|
||||
"""
|
||||
def sources_generator() -> Generator[tuple[str, str], None, None]:
|
||||
for source, generator in sorted(self.sources().items()):
|
||||
source_path = source_dir / source
|
||||
generator(source_path)
|
||||
with source_path.open("rb") as source_file:
|
||||
source_hash = hashlib.sha512(source_file.read())
|
||||
yield source, source_hash.hexdigest()
|
||||
|
||||
sources_iter, hashes_iter = itertools.tee(sources_generator())
|
||||
return [
|
||||
PkgbuildPatch("source", [source for source, _ in sources_iter]),
|
||||
PkgbuildPatch("sha512sums", [sha512 for _, sha512 in hashes_iter]),
|
||||
]
|
||||
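To make the contract above concrete, here is a minimal sketch of a derived generator; the class and its values are illustrative only and are not part of this change. Only pkgname, pkgdesc and package() are mandatory, everything else falls back to the defaults defined above.

class HelloGenerator(PkgbuildGenerator):

    @property
    def pkgname(self) -> str:
        return "hello-meta"

    @property
    def pkgdesc(self) -> str:
        return "example meta package produced by a PkgbuildGenerator subclass"

    def package(self) -> str:
        return """{
    true
}"""

Calling write_pkgbuild() on such an instance emits the static properties (pkgrel, arch), the main properties (license, pkgdesc, pkgname, pkgver, url), any extra patches, the optional install file and the package() function, followed by the source and sha512sums arrays for the declared sources.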
@ -17,6 +17,8 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from collections.abc import Callable
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.configuration.schema import ConfigurationSchema
|
||||
from ahriman.core.log import LazyLogging
|
||||
@ -128,8 +130,10 @@ class Trigger(LazyLogging):
|
||||
result(Result): build result
|
||||
packages(list[Package]): list of all available packages
|
||||
"""
|
||||
if (run := getattr(self, "run", None)) is not None:
|
||||
run(result, packages) # compatibility with old triggers
|
||||
# compatibility with old triggers
|
||||
run: Callable[[Result, list[Package]], None] | None = getattr(self, "run", None)
|
||||
if run is not None:
|
||||
run(result, packages)
|
||||
|
||||
def on_start(self) -> None:
|
||||
"""
|
||||
|
||||
@ -84,6 +84,20 @@ class TriggerLoader(LazyLogging):
|
||||
|
||||
return instance
|
||||
|
||||
@staticmethod
|
||||
def known_triggers(configuration: Configuration) -> list[str]:
|
||||
"""
|
||||
read configuration and return the list of known triggers. Unlike ``selected_triggers`` this list is used mainly
for configuration and validation, and the mentioned triggers are not executed automatically
|
||||
|
||||
Args:
|
||||
configuration(Configuration): configuration instance
|
||||
|
||||
Returns:
|
||||
list[str]: list of registered, but not enabled, triggers
|
||||
"""
|
||||
return configuration.getlist("build", "triggers_known", fallback=[])
|
||||
|
||||
@staticmethod
|
||||
def selected_triggers(configuration: Configuration) -> list[str]:
|
||||
"""
|
||||
|
||||
@ -17,6 +17,7 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# pylint: disable=too-many-lines
|
||||
import datetime
|
||||
import io
|
||||
import itertools
|
||||
@ -48,6 +49,8 @@ __all__ = [
|
||||
"pretty_datetime",
|
||||
"pretty_size",
|
||||
"safe_filename",
|
||||
"srcinfo_property",
|
||||
"srcinfo_property_list",
|
||||
"trim_package",
|
||||
"utcnow",
|
||||
"walk",
|
||||
@ -117,8 +120,6 @@ def check_output(*args: str, exception: Exception | None = None, cwd: Path | Non
|
||||
result: list[str] = []
|
||||
for line in iter(get_io(process, "stdout").readline, ""):
|
||||
line = line.strip()
|
||||
if not line: # skip empty lines
|
||||
continue
|
||||
result.append(line)
|
||||
log(line)
|
||||
|
||||
@ -133,7 +134,7 @@ def check_output(*args: str, exception: Exception | None = None, cwd: Path | Non
|
||||
raise exception
|
||||
raise subprocess.CalledProcessError(status_code, process.args)
|
||||
|
||||
return "\n".join(result)
|
||||
return "\n".join(result).rstrip("\n") # remove newline at the end of any
|
||||
|
||||
|
||||
def check_user(paths: RepositoryPaths, *, unsafe: bool) -> None:
|
||||
@ -328,6 +329,47 @@ def safe_filename(source: str) -> str:
|
||||
return re.sub(r"[^A-Za-z\d\-._~:\[\]@]", "-", source)
|
||||
|
||||
|
||||
def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any], *,
|
||||
default: Any = None) -> Any:
|
||||
"""
|
||||
extract property from SRCINFO. This method extracts property from package if this property is present in
|
||||
``package``. Otherwise, it looks for the same property in root srcinfo. If none found, the default value will be
|
||||
returned
|
||||
|
||||
Args:
|
||||
key(str): key to extract from srcinfo
|
||||
srcinfo(dict[str, Any]): root structure of SRCINFO
|
||||
package_srcinfo(dict[str, Any]): package specific SRCINFO
|
||||
default(Any, optional): the default value for the specified key (Default value = None)
|
||||
|
||||
Returns:
|
||||
Any: extracted value from SRCINFO
|
||||
"""
|
||||
return package_srcinfo.get(key) or srcinfo.get(key) or default
|
||||
|
||||
|
||||
def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any], *,
|
||||
architecture: str | None = None) -> list[Any]:
|
||||
"""
|
||||
extract list property from SRCINFO. Unlike ``srcinfo_property`` it assumes that the default return value is always an
empty list. If ``architecture`` is supplied, it will also look up architecture specific values and
append them at the end of the result
|
||||
|
||||
Args:
|
||||
key(str): key to extract from srcinfo
|
||||
srcinfo(dict[str, Any]): root structure of SRCINFO
|
||||
package_srcinfo(dict[str, Any]): package specific SRCINFO
|
||||
architecture(str | None, optional): package architecture if set (Default value = None)
|
||||
|
||||
Returns:
|
||||
list[Any]: list of extracted properties from SRCINFO
|
||||
"""
|
||||
values: list[Any] = srcinfo_property(key, srcinfo, package_srcinfo, default=[])
|
||||
if architecture is not None:
|
||||
values.extend(srcinfo_property(f"{key}_{architecture}", srcinfo, package_srcinfo, default=[]))
|
||||
return values
|
||||
|
||||
|
||||
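A small, self-contained illustration of the two helpers above; the dictionaries are made up for the example and are not taken from any real .SRCINFO.

from ahriman.core.util import srcinfo_property, srcinfo_property_list

srcinfo = {"pkgdesc": "demo package", "depends": ["glibc"]}
package_srcinfo = {"depends_x86_64": ["gcc-libs"]}

# package specific values win over root values, the default is used only when both are missing
assert srcinfo_property("pkgdesc", srcinfo, package_srcinfo, default="") == "demo package"
# generic values come first, architecture specific values are appended afterwards
assert srcinfo_property_list("depends", srcinfo, package_srcinfo, architecture="x86_64") == ["glibc", "gcc-libs"]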
def trim_package(package_name: str) -> str:
|
||||
"""
|
||||
remove version bound and description from package name. Pacman allows to specify version bound (=, <=, >= etc) for
|
||||
|
||||
@ -52,6 +52,7 @@ class AURPackage:
|
||||
depends(list[str]): list of package dependencies
|
||||
make_depends(list[str]): list of package make dependencies
|
||||
opt_depends(list[str]): list of package optional dependencies
|
||||
check_depends(list[str]): list of package test dependencies
|
||||
conflicts(list[str]): conflicts list for the package
|
||||
provides(list[str]): list of packages which this package provides
|
||||
license(list[str]): list of package licenses
|
||||
@ -94,6 +95,7 @@ class AURPackage:
|
||||
depends: list[str] = field(default_factory=list)
|
||||
make_depends: list[str] = field(default_factory=list)
|
||||
opt_depends: list[str] = field(default_factory=list)
|
||||
check_depends: list[str] = field(default_factory=list)
|
||||
conflicts: list[str] = field(default_factory=list)
|
||||
provides: list[str] = field(default_factory=list)
|
||||
license: list[str] = field(default_factory=list)
|
||||
@ -146,6 +148,7 @@ class AURPackage:
|
||||
depends=package.depends,
|
||||
make_depends=package.makedepends,
|
||||
opt_depends=package.optdepends,
|
||||
check_depends=package.checkdepends,
|
||||
conflicts=package.conflicts,
|
||||
provides=package.provides,
|
||||
license=package.licenses,
|
||||
@ -185,6 +188,7 @@ class AURPackage:
|
||||
depends=dump["depends"],
|
||||
make_depends=dump["makedepends"],
|
||||
opt_depends=dump["optdepends"],
|
||||
check_depends=dump["checkdepends"],
|
||||
conflicts=dump["conflicts"],
|
||||
provides=dump["provides"],
|
||||
license=dump["licenses"],
|
||||
|
||||
src/ahriman/models/log_handler.py (new file, 35 lines)
@@ -0,0 +1,35 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from enum import Enum


class LogHandler(str, Enum):
    """
    log handler as described by default configuration

    Attributes:
        Console(LogHandler): (class attribute) write logs to console
        Syslog(LogHandler): (class attribute) write logs to syslog device /dev/log
        Journald(LogHandler): (class attribute) write logs to journald directly
    """

    Console = "console"
    Syslog = "syslog"
    Journald = "journald"
|
||||
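The tests further down (test_handler, test_handler_syslog, test_handler_console) exercise how the default handler is chosen. The following is a hedged sketch of that fallback order, written here for illustration rather than copied from the Log.handler implementation:

from pathlib import Path

from ahriman.models.log_handler import LogHandler


def guess_log_handler() -> LogHandler:
    try:
        from systemd import journal  # noqa: F401, prefer journald when python-systemd is importable
        return LogHandler.Journald
    except ImportError:
        pass
    if Path("/dev/log").exists():  # syslog device is present
        return LogHandler.Syslog
    return LogHandler.Console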
@ -22,18 +22,19 @@ from __future__ import annotations
|
||||
|
||||
import copy
|
||||
|
||||
from collections.abc import Iterable
|
||||
from collections.abc import Callable, Generator, Iterable
|
||||
from dataclasses import asdict, dataclass
|
||||
from pathlib import Path
|
||||
from pyalpm import vercmp # type: ignore[import]
|
||||
from srcinfo.parse import parse_srcinfo # type: ignore[import]
|
||||
from typing import Any, Self
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from ahriman.core.alpm.pacman import Pacman
|
||||
from ahriman.core.alpm.remote import AUR, Official, OfficialSyncdb
|
||||
from ahriman.core.exceptions import PackageInfoError
|
||||
from ahriman.core.log import LazyLogging
|
||||
from ahriman.core.util import check_output, full_version, utcnow
|
||||
from ahriman.core.util import check_output, full_version, srcinfo_property_list, utcnow
|
||||
from ahriman.models.package_description import PackageDescription
|
||||
from ahriman.models.package_source import PackageSource
|
||||
from ahriman.models.remote_source import RemoteSource
|
||||
@ -87,7 +88,7 @@ class Package(LazyLogging):
|
||||
Returns:
|
||||
list[str]: sum of dependencies per each package
|
||||
"""
|
||||
return sorted(set(sum((package.depends for package in self.packages.values()), start=[])))
|
||||
return self._package_list_property(lambda package: package.depends)
|
||||
|
||||
@property
|
||||
def depends_build(self) -> set[str]:
|
||||
@ -97,7 +98,17 @@ class Package(LazyLogging):
|
||||
Returns:
|
||||
set[str]: full dependencies list used by devtools
|
||||
"""
|
||||
return (set(self.depends) | set(self.depends_make)).difference(self.packages_full)
|
||||
return (set(self.depends) | set(self.depends_make) | set(self.depends_check)).difference(self.packages_full)
|
||||
|
||||
@property
|
||||
def depends_check(self) -> list[str]:
|
||||
"""
|
||||
get package test dependencies
|
||||
|
||||
Returns:
|
||||
list[str]: sum of test dependencies per each package
|
||||
"""
|
||||
return self._package_list_property(lambda package: package.check_depends)
|
||||
|
||||
@property
|
||||
def depends_make(self) -> list[str]:
|
||||
@ -107,7 +118,7 @@ class Package(LazyLogging):
|
||||
Returns:
|
||||
list[str]: sum of make dependencies per each package
|
||||
"""
|
||||
return sorted(set(sum((package.make_depends for package in self.packages.values()), start=[])))
|
||||
return self._package_list_property(lambda package: package.make_depends)
|
||||
|
||||
@property
|
||||
def depends_opt(self) -> list[str]:
|
||||
@ -117,7 +128,7 @@ class Package(LazyLogging):
|
||||
Returns:
|
||||
list[str]: sum of optional dependencies per each package
|
||||
"""
|
||||
return sorted(set(sum((package.opt_depends for package in self.packages.values()), start=[])))
|
||||
return self._package_list_property(lambda package: package.opt_depends)
|
||||
|
||||
@property
|
||||
def groups(self) -> list[str]:
|
||||
@ -127,7 +138,7 @@ class Package(LazyLogging):
|
||||
Returns:
|
||||
list[str]: sum of groups per each package
|
||||
"""
|
||||
return sorted(set(sum((package.groups for package in self.packages.values()), start=[])))
|
||||
return self._package_list_property(lambda package: package.groups)
|
||||
|
||||
@property
|
||||
def is_single_package(self) -> bool:
|
||||
@ -162,7 +173,7 @@ class Package(LazyLogging):
|
||||
Returns:
|
||||
list[str]: sum of licenses per each package
|
||||
"""
|
||||
return sorted(set(sum((package.licenses for package in self.packages.values()), start=[])))
|
||||
return self._package_list_property(lambda package: package.licenses)
|
||||
|
||||
@property
|
||||
def packages_full(self) -> list[str]:
|
||||
@ -235,23 +246,26 @@ class Package(LazyLogging):
|
||||
if errors:
|
||||
raise PackageInfoError(errors)
|
||||
|
||||
def get_property(key: str, properties: dict[str, Any], default: Any) -> Any:
|
||||
return properties.get(key) or srcinfo.get(key) or default
|
||||
|
||||
def get_list(key: str, properties: dict[str, Any]) -> Any:
|
||||
return get_property(key, properties, []) + get_property(f"{key}_{architecture}", properties, [])
|
||||
|
||||
packages = {
|
||||
package: PackageDescription(
|
||||
depends=get_list("depends", properties),
|
||||
make_depends=get_list("makedepends", properties),
|
||||
opt_depends=get_list("optdepends", properties),
|
||||
depends=srcinfo_property_list("depends", srcinfo, properties, architecture=architecture),
|
||||
make_depends=srcinfo_property_list("makedepends", srcinfo, properties, architecture=architecture),
|
||||
opt_depends=srcinfo_property_list("optdepends", srcinfo, properties, architecture=architecture),
|
||||
check_depends=srcinfo_property_list("checkdepends", srcinfo, properties, architecture=architecture),
|
||||
)
|
||||
for package, properties in srcinfo["packages"].items()
|
||||
}
|
||||
version = full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"])
|
||||
|
||||
return cls(base=srcinfo["pkgbase"], version=version, remote=None, packages=packages)
|
||||
remote = RemoteSource(
|
||||
git_url=path.absolute().as_uri(),
|
||||
web_url="",
|
||||
path=".",
|
||||
branch="master",
|
||||
source=PackageSource.Local,
|
||||
)
|
||||
|
||||
return cls(base=srcinfo["pkgbase"], version=version, remote=remote, packages=packages)
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, dump: dict[str, Any]) -> Self:
|
||||
@ -293,6 +307,41 @@ class Package(LazyLogging):
|
||||
remote=remote,
|
||||
packages={package.name: PackageDescription.from_aur(package)})
|
||||
|
||||
@staticmethod
|
||||
def local_files(path: Path) -> Generator[Path, None, None]:
|
||||
"""
|
||||
extract list of local files
|
||||
|
||||
Args:
|
||||
path(Path): path to package sources directory
|
||||
|
||||
Returns:
|
||||
Generator[Path, None, None]: list of paths of files which belong to the package and are distributed together
|
||||
with this tarball. All paths are relative to the ``path``
|
||||
"""
|
||||
srcinfo_source = Package._check_output("makepkg", "--printsrcinfo", cwd=path)
|
||||
srcinfo, errors = parse_srcinfo(srcinfo_source)
|
||||
if errors:
|
||||
raise PackageInfoError(errors)
|
||||
|
||||
# we could use arch property, but for consistency it is better to call special method
|
||||
architectures = Package.supported_architectures(path)
|
||||
|
||||
for architecture in architectures:
|
||||
for source in srcinfo_property_list("source", srcinfo, {}, architecture=architecture):
|
||||
if "::" in source:
|
||||
_, source = source.split("::", 1) # in case if filename is specified, remove it
|
||||
|
||||
if urlparse(source).scheme:
|
||||
# the file scheme requires an absolute path, which is impossible if we are distributing
# files together with the PKGBUILD, so such sources are skipped as well
|
||||
continue
|
||||
|
||||
yield Path(source)
|
||||
|
||||
if (install := srcinfo.get("install", None)) is not None:
|
||||
yield Path(install)
|
||||
|
||||
@staticmethod
|
||||
def supported_architectures(path: Path) -> set[str]:
|
||||
"""
|
||||
@ -313,6 +362,26 @@ class Package(LazyLogging):
|
||||
raise PackageInfoError(errors)
|
||||
return set(srcinfo.get("arch", []))
|
||||
|
||||
def _package_list_property(self, extractor: Callable[[PackageDescription], list[str]]) -> list[str]:
|
||||
"""
|
||||
extract list property from single packages and combine them into one list
|
||||
|
||||
Notes:
|
||||
Basically this method is generic for type of ``list[T]``, but there is no trait ``Comparable`` in default
|
||||
packages, thus we limit this method only to new types
|
||||
|
||||
Args:
|
||||
extractor(Callable[[PackageDescription], list[str]]): package property extractor
|
||||
|
||||
Returns:
|
||||
list[str]: combined list of unique entries in properties list
|
||||
"""
|
||||
def generator() -> Generator[str, None, None]:
|
||||
for package in self.packages.values():
|
||||
yield from extractor(package)
|
||||
|
||||
return sorted(set(generator()))
|
||||
|
||||
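An illustrative sketch of what the combined properties built on top of this helper return; the split-package values below are assumptions made up for the example.

from ahriman.models.package_description import PackageDescription

descriptions = {
    "demo": PackageDescription(depends=["glibc"], check_depends=["python-pytest"]),
    "demo-docs": PackageDescription(depends=["glibc", "python-sphinx"]),
}
# equivalent of Package.depends for these descriptions: a sorted union without duplicates
combined = sorted({dep for description in descriptions.values() for dep in description.depends})
assert combined == ["glibc", "python-sphinx"]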
def actual_version(self, paths: RepositoryPaths) -> str:
|
||||
"""
|
||||
additional method to handle VCS package versions
|
||||
|
||||
@ -35,6 +35,7 @@ class PackageDescription:
|
||||
architecture(str | None): package architecture
|
||||
archive_size(int | None): package archive size
|
||||
build_date(int | None): package build date
|
||||
check_depends(list[str]): package dependencies list used for check functions
|
||||
depends(list[str]): package dependencies list
|
||||
opt_depends(list[str]): optional package dependencies list
|
||||
make_depends(list[str]): package dependencies list used for building
|
||||
@ -70,6 +71,7 @@ class PackageDescription:
|
||||
depends: list[str] = field(default_factory=list)
|
||||
make_depends: list[str] = field(default_factory=list)
|
||||
opt_depends: list[str] = field(default_factory=list)
|
||||
check_depends: list[str] = field(default_factory=list)
|
||||
description: str | None = None
|
||||
filename: str | None = None
|
||||
groups: list[str] = field(default_factory=list)
|
||||
@ -85,6 +87,7 @@ class PackageDescription:
|
||||
self.depends = [trim_package(package) for package in self.depends]
|
||||
self.opt_depends = [trim_package(package) for package in self.opt_depends]
|
||||
self.make_depends = [trim_package(package) for package in self.make_depends]
|
||||
self.check_depends = [trim_package(package) for package in self.check_depends]
|
||||
|
||||
@property
|
||||
def filepath(self) -> Path | None:
|
||||
@ -111,6 +114,7 @@ class PackageDescription:
|
||||
depends=package.depends,
|
||||
make_depends=package.make_depends,
|
||||
opt_depends=package.opt_depends,
|
||||
check_depends=package.check_depends,
|
||||
description=package.description,
|
||||
licenses=package.license,
|
||||
provides=package.provides,
|
||||
@ -151,6 +155,7 @@ class PackageDescription:
|
||||
depends=package.depends,
|
||||
make_depends=package.makedepends,
|
||||
opt_depends=package.optdepends,
|
||||
check_depends=package.checkdepends,
|
||||
description=package.desc,
|
||||
filename=path.name,
|
||||
groups=package.groups,
|
||||
|
||||
@ -49,6 +49,10 @@ class PackagePropertiesSchema(Schema):
|
||||
"description": "Package optional dependencies list",
|
||||
"example": ["python-aiohttp"],
|
||||
})
|
||||
check_depends = fields.List(fields.String(), metadata={
|
||||
"description": "Package test dependencies list",
|
||||
"example": ["python-pytest"],
|
||||
})
|
||||
description = fields.String(metadata={
|
||||
"description": "Package description",
|
||||
"example": "ArcH linux ReposItory MANager",
|
||||
|
||||
@ -74,7 +74,7 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
|
||||
|
||||
package_mock = mocker.patch("ahriman.models.package.Package.from_aur", side_effect=lambda p, _: packages[p])
|
||||
packages_mock = mocker.patch("ahriman.application.application.Application._known_packages",
|
||||
return_value=["devtools", "python-build"])
|
||||
return_value=["devtools", "python-build", "python-pytest"])
|
||||
|
||||
result = application.with_dependencies([package_ahriman], process_dependencies=True)
|
||||
assert {package.base: package for package in result} == packages
|
||||
|
||||
@ -5,25 +5,36 @@ from pytest_mock import MockerFixture
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from ahriman.application.application.application_packages import ApplicationPackages
|
||||
from ahriman.core.exceptions import UnknownPackageError
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.package_description import PackageDescription
|
||||
from ahriman.models.package_source import PackageSource
|
||||
from ahriman.models.result import Result
|
||||
|
||||
|
||||
def test_add_archive(
|
||||
application_packages: ApplicationPackages,
|
||||
package_ahriman: Package,
|
||||
mocker: MockerFixture) -> None:
|
||||
def test_add_archive(application_packages: ApplicationPackages, package_ahriman: Package,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must add package from archive
|
||||
"""
|
||||
is_file_mock = mocker.patch("pathlib.Path.is_file", return_value=True)
|
||||
copy_mock = mocker.patch("shutil.copy")
|
||||
|
||||
application_packages._add_archive(package_ahriman.base)
|
||||
is_file_mock.assert_called_once_with()
|
||||
copy_mock.assert_called_once_with(
|
||||
Path(package_ahriman.base), application_packages.repository.paths.packages / package_ahriman.base)
|
||||
|
||||
|
||||
def test_add_archive_missing(application_packages: ApplicationPackages, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise UnknownPackageError on unknown path
|
||||
"""
|
||||
mocker.patch("pathlib.Path.is_file", return_value=False)
|
||||
with pytest.raises(UnknownPackageError):
|
||||
application_packages._add_archive("package")
|
||||
|
||||
|
||||
def test_add_aur(application_packages: ApplicationPackages, package_ahriman: Package, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must add package from AUR
|
||||
@ -37,21 +48,29 @@ def test_add_aur(application_packages: ApplicationPackages, package_ahriman: Pac
|
||||
update_remote_mock.assert_called_once_with(package_ahriman)
|
||||
|
||||
|
||||
def test_add_directory(
|
||||
application_packages: ApplicationPackages,
|
||||
package_ahriman: Package,
|
||||
mocker: MockerFixture) -> None:
|
||||
def test_add_directory(application_packages: ApplicationPackages, package_ahriman: Package,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must add packages from directory
|
||||
"""
|
||||
iterdir_mock = mocker.patch("pathlib.Path.iterdir",
|
||||
return_value=[package.filepath for package in package_ahriman.packages.values()])
|
||||
copy_mock = mocker.patch("shutil.copy")
|
||||
is_dir_mock = mocker.patch("pathlib.Path.is_dir", return_value=True)
|
||||
filename = package_ahriman.packages[package_ahriman.base].filepath
|
||||
iterdir_mock = mocker.patch("pathlib.Path.iterdir", return_value=[filename])
|
||||
add_mock = mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._add_archive")
|
||||
|
||||
application_packages._add_directory(package_ahriman.base)
|
||||
is_dir_mock.assert_called_once_with()
|
||||
iterdir_mock.assert_called_once_with()
|
||||
copy_mock.assert_called_once_with(filename, application_packages.repository.paths.packages / filename.name)
|
||||
add_mock.assert_called_once_with(str(filename))
|
||||
|
||||
|
||||
def test_add_directory_missing(application_packages: ApplicationPackages, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise UnknownPackageError on unknown directory path
|
||||
"""
|
||||
mocker.patch("pathlib.Path.is_dir", return_value=False)
|
||||
with pytest.raises(UnknownPackageError):
|
||||
application_packages._add_directory("package")
|
||||
|
||||
|
||||
def test_add_local(application_packages: ApplicationPackages, package_ahriman: Package, mocker: MockerFixture) -> None:
|
||||
@ -59,17 +78,46 @@ def test_add_local(application_packages: ApplicationPackages, package_ahriman: P
|
||||
must add package from local sources
|
||||
"""
|
||||
mocker.patch("ahriman.models.package.Package.from_build", return_value=package_ahriman)
|
||||
is_dir_mock = mocker.patch("pathlib.Path.is_dir", return_value=True)
|
||||
init_mock = mocker.patch("ahriman.core.build_tools.sources.Sources.init")
|
||||
copytree_mock = mocker.patch("shutil.copytree")
|
||||
build_queue_mock = mocker.patch("ahriman.core.database.SQLite.build_queue_insert")
|
||||
|
||||
application_packages._add_local(package_ahriman.base)
|
||||
is_dir_mock.assert_called_once_with()
|
||||
copytree_mock.assert_called_once_with(
|
||||
Path(package_ahriman.base), application_packages.repository.paths.cache_for(package_ahriman.base))
|
||||
init_mock.assert_called_once_with(application_packages.repository.paths.cache_for(package_ahriman.base))
|
||||
build_queue_mock.assert_called_once_with(package_ahriman)
|
||||
|
||||
|
||||
def test_add_local_cache(application_packages: ApplicationPackages, package_ahriman: Package,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must add package from local source if there is cache
|
||||
"""
|
||||
mocker.patch("ahriman.models.package.Package.from_build", return_value=package_ahriman)
|
||||
mocker.patch("pathlib.Path.is_dir", autospec=True,
|
||||
side_effect=lambda p: True if p.is_relative_to(application_packages.repository.paths.cache) else False)
|
||||
init_mock = mocker.patch("ahriman.core.build_tools.sources.Sources.init")
|
||||
copytree_mock = mocker.patch("shutil.copytree")
|
||||
build_queue_mock = mocker.patch("ahriman.core.database.SQLite.build_queue_insert")
|
||||
|
||||
application_packages._add_local(package_ahriman.base)
|
||||
copytree_mock.assert_not_called()
|
||||
init_mock.assert_not_called()
|
||||
build_queue_mock.assert_called_once_with(package_ahriman)
|
||||
|
||||
|
||||
def test_add_local_missing(application_packages: ApplicationPackages, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise UnknownPackageError if package wasn't found
|
||||
"""
|
||||
mocker.patch("pathlib.Path.is_dir", return_value=False)
|
||||
with pytest.raises(UnknownPackageError):
|
||||
application_packages._add_local("package")
|
||||
|
||||
|
||||
def test_add_remote(application_packages: ApplicationPackages, package_description_ahriman: PackageDescription,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
@ -87,6 +135,15 @@ def test_add_remote(application_packages: ApplicationPackages, package_descripti
|
||||
response_mock.raise_for_status.assert_called_once_with()
|
||||
|
||||
|
||||
def test_add_remote_missing(application_packages: ApplicationPackages, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise UnknownPackageError if the remote package cannot be downloaded
|
||||
"""
|
||||
mocker.patch("requests.get", side_effect=Exception())
|
||||
with pytest.raises(UnknownPackageError):
|
||||
application_packages._add_remote("url")
|
||||
|
||||
|
||||
def test_add_repository(application_packages: ApplicationPackages, package_ahriman: Package,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
@ -112,10 +169,8 @@ def test_add_add_archive(application_packages: ApplicationPackages, package_ahri
|
||||
add_mock.assert_called_once_with(package_ahriman.base)
|
||||
|
||||
|
||||
def test_add_add_aur(
|
||||
application_packages: ApplicationPackages,
|
||||
package_ahriman: Package,
|
||||
mocker: MockerFixture) -> None:
|
||||
def test_add_add_aur(application_packages: ApplicationPackages, package_ahriman: Package,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must add package from AUR via add function
|
||||
"""
|
||||
|
||||
@ -7,6 +7,7 @@ from pytest_mock import MockerFixture
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.exceptions import ExitCode, MissingArchitectureError, MultipleArchitecturesError
|
||||
from ahriman.models.log_handler import LogHandler
|
||||
|
||||
|
||||
def test_architectures_extract(args: argparse.Namespace, configuration: Configuration, mocker: MockerFixture) -> None:
|
||||
@ -56,17 +57,20 @@ def test_call(args: argparse.Namespace, configuration: Configuration, mocker: Mo
|
||||
must call inside lock
|
||||
"""
|
||||
args.configuration = Path("")
|
||||
args.log_handler = LogHandler.Console
|
||||
args.quiet = False
|
||||
args.report = False
|
||||
mocker.patch("ahriman.application.handlers.Handler.run")
|
||||
configuration_mock = mocker.patch("ahriman.core.configuration.Configuration.from_path", return_value=configuration)
|
||||
log_handler_mock = mocker.patch("ahriman.core.log.Log.handler", return_value=args.log_handler)
|
||||
log_load_mock = mocker.patch("ahriman.core.log.Log.load")
|
||||
enter_mock = mocker.patch("ahriman.application.lock.Lock.__enter__")
|
||||
exit_mock = mocker.patch("ahriman.application.lock.Lock.__exit__")
|
||||
|
||||
assert Handler.call(args, "x86_64")
|
||||
configuration_mock.assert_called_once_with(args.configuration, "x86_64")
|
||||
log_load_mock.assert_called_once_with(configuration, quiet=args.quiet, report=args.report)
|
||||
log_handler_mock.assert_called_once_with(args.log_handler)
|
||||
log_load_mock.assert_called_once_with(configuration, args.log_handler, quiet=args.quiet, report=args.report)
|
||||
enter_mock.assert_called_once_with()
|
||||
exit_mock.assert_called_once_with(None, None, None)
|
||||
|
||||
|
||||
@ -8,6 +8,7 @@ from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Rebuild
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.repository import Repository
|
||||
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.result import Result
|
||||
|
||||
@ -26,6 +27,7 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
|
||||
args.dry_run = False
|
||||
args.from_database = False
|
||||
args.exit_code = False
|
||||
args.status = None
|
||||
return args
|
||||
|
||||
|
||||
@ -46,7 +48,7 @@ def test_run(args: argparse.Namespace, package_ahriman: Package, configuration:
|
||||
on_start_mock = mocker.patch("ahriman.application.application.Application.on_start")
|
||||
|
||||
Rebuild.run(args, "x86_64", configuration, report=False, unsafe=False)
|
||||
extract_mock.assert_called_once_with(pytest.helpers.anyvar(int), from_database=args.from_database)
|
||||
extract_mock.assert_called_once_with(pytest.helpers.anyvar(int), args.status, from_database=args.from_database)
|
||||
application_packages_mock.assert_called_once_with([package_ahriman], None)
|
||||
application_mock.assert_called_once_with([package_ahriman])
|
||||
check_mock.assert_has_calls([MockCall(False, False), MockCall(False, False)])
|
||||
@ -56,7 +58,7 @@ def test_run(args: argparse.Namespace, package_ahriman: Package, configuration:
|
||||
def test_run_extract_packages(args: argparse.Namespace, configuration: Configuration, repository: Repository,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must run command
|
||||
must run command from database
|
||||
"""
|
||||
args = _default_args(args)
|
||||
args.from_database = True
|
||||
@ -66,7 +68,7 @@ def test_run_extract_packages(args: argparse.Namespace, configuration: Configura
|
||||
extract_mock = mocker.patch("ahriman.application.handlers.Rebuild.extract_packages", return_value=[])
|
||||
|
||||
Rebuild.run(args, "x86_64", configuration, report=False, unsafe=False)
|
||||
extract_mock.assert_called_once_with(pytest.helpers.anyvar(int), from_database=args.from_database)
|
||||
extract_mock.assert_called_once_with(pytest.helpers.anyvar(int), args.status, from_database=args.from_database)
|
||||
|
||||
|
||||
def test_run_dry_run(args: argparse.Namespace, configuration: Configuration, repository: Repository,
|
||||
@ -156,7 +158,19 @@ def test_extract_packages(application: Application, mocker: MockerFixture) -> No
|
||||
must extract packages from database
|
||||
"""
|
||||
packages_mock = mocker.patch("ahriman.core.repository.repository.Repository.packages")
|
||||
Rebuild.extract_packages(application, from_database=False)
|
||||
Rebuild.extract_packages(application, None, from_database=False)
|
||||
packages_mock.assert_called_once_with()
|
||||
|
||||
|
||||
def test_extract_packages_by_status(application: Application, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must extract packages from database and filter them by status
|
||||
"""
|
||||
packages_mock = mocker.patch("ahriman.core.database.SQLite.packages_get", return_value=[
|
||||
("package1", BuildStatus(BuildStatusEnum.Success)),
|
||||
("package2", BuildStatus(BuildStatusEnum.Failed)),
|
||||
])
|
||||
assert Rebuild.extract_packages(application, BuildStatusEnum.Failed, from_database=True) == ["package2"]
|
||||
packages_mock.assert_called_once_with()
|
||||
|
||||
|
||||
@ -165,5 +179,5 @@ def test_extract_packages_from_database(application: Application, mocker: Mocker
|
||||
must extract packages from database
|
||||
"""
|
||||
packages_mock = mocker.patch("ahriman.core.database.SQLite.packages_get")
|
||||
Rebuild.extract_packages(application, from_database=True)
|
||||
Rebuild.extract_packages(application, None, from_database=True)
|
||||
packages_mock.assert_called_once_with()
|
||||
|
||||
@ -61,7 +61,14 @@ def test_schema(configuration: Configuration) -> None:
|
||||
assert schema.pop("console")
|
||||
assert schema.pop("email")
|
||||
assert schema.pop("github")
|
||||
assert schema.pop("gitremote")
|
||||
assert schema.pop("html")
|
||||
assert schema.pop("keyring")
|
||||
assert schema.pop("keyring_generator")
|
||||
assert schema.pop("mirrorlist")
|
||||
assert schema.pop("mirrorlist_generator")
|
||||
assert schema.pop("remote-pull")
|
||||
assert schema.pop("remote-push")
|
||||
assert schema.pop("report")
|
||||
assert schema.pop("rsync")
|
||||
assert schema.pop("s3")
|
||||
@ -76,6 +83,7 @@ def test_schema_invalid_trigger(configuration: Configuration) -> None:
|
||||
must skip trigger if it caused exception on load
|
||||
"""
|
||||
configuration.set_option("build", "triggers", "some.invalid.trigger.path.Trigger")
|
||||
configuration.remove_option("build", "triggers_known")
|
||||
assert Validate.schema("x86_64", configuration) == CONFIGURATION_SCHEMA
|
||||
|
||||
|
||||
|
||||
@ -6,6 +6,7 @@ from pytest_mock import MockerFixture
|
||||
from ahriman.application.handlers import Web
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.repository import Repository
|
||||
from ahriman.models.log_handler import LogHandler
|
||||
|
||||
|
||||
def _default_args(args: argparse.Namespace) -> argparse.Namespace:
|
||||
@ -19,6 +20,11 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
|
||||
argparse.Namespace: generated arguments for these test cases
|
||||
"""
|
||||
args.parser = lambda: True
|
||||
args.force = False
|
||||
args.log_handler = None
|
||||
args.report = True
|
||||
args.quiet = False
|
||||
args.unsafe = False
|
||||
return args
|
||||
|
||||
|
||||
@ -43,6 +49,63 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
|
||||
join_mock.assert_called_once_with()
|
||||
|
||||
|
||||
def test_extract_arguments(args: argparse.Namespace, configuration: Configuration):
|
||||
"""
|
||||
must extract correct args
|
||||
"""
|
||||
expected = [
|
||||
"--architecture", "x86_64",
|
||||
"--configuration", str(configuration.path),
|
||||
]
|
||||
|
||||
probe = _default_args(args)
|
||||
assert list(Web.extract_arguments(probe, "x86_64", configuration)) == expected
|
||||
|
||||
probe.force = True
|
||||
expected.extend(["--force"])
|
||||
assert list(Web.extract_arguments(probe, "x86_64", configuration)) == expected
|
||||
|
||||
probe.log_handler = LogHandler.Console
|
||||
expected.extend(["--log-handler", probe.log_handler.value])
|
||||
assert list(Web.extract_arguments(probe, "x86_64", configuration)) == expected
|
||||
|
||||
probe.quiet = True
|
||||
expected.extend(["--quiet"])
|
||||
assert list(Web.extract_arguments(probe, "x86_64", configuration)) == expected
|
||||
|
||||
probe.unsafe = True
|
||||
expected.extend(["--unsafe"])
|
||||
assert list(Web.extract_arguments(probe, "x86_64", configuration)) == expected
|
||||
|
||||
|
||||
def test_extract_arguments_full(parser: argparse.ArgumentParser, configuration: Configuration):
|
||||
"""
|
||||
must extract all available args except for blacklisted
|
||||
"""
|
||||
# append all options from parser
|
||||
args = argparse.Namespace()
|
||||
for action in parser._actions:
|
||||
if action.default == argparse.SUPPRESS:
|
||||
continue
|
||||
# extract option from the following list
|
||||
value = action.const or \
|
||||
next(iter(action.choices or []), None) or \
|
||||
(not action.default if isinstance(action.default, bool) else None) or \
|
||||
"random string"
|
||||
if action.type is not None:
|
||||
value = action.type(value)
|
||||
setattr(args, action.dest, value)
|
||||
|
||||
assert list(Web.extract_arguments(args, "x86_64", configuration)) == [
|
||||
"--architecture", "x86_64",
|
||||
"--configuration", str(configuration.path),
|
||||
"--force",
|
||||
"--log-handler", "console",
|
||||
"--quiet",
|
||||
"--unsafe",
|
||||
]
|
||||
|
||||
|
||||
def test_disallow_auto_architecture_run() -> None:
|
||||
"""
|
||||
must not allow auto architecture run
|
||||
|
||||
@ -6,6 +6,7 @@ from pytest_mock import MockerFixture
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.models.action import Action
|
||||
from ahriman.models.build_status import BuildStatusEnum
|
||||
from ahriman.models.log_handler import LogHandler
|
||||
from ahriman.models.sign_settings import SignSettings
|
||||
from ahriman.models.user_access import UserAccess
|
||||
|
||||
@ -37,6 +38,14 @@ def test_parser_option_lock(parser: argparse.ArgumentParser) -> None:
|
||||
assert isinstance(args.lock, Path)
|
||||
|
||||
|
||||
def test_parser_option_log_handler(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
must convert log-handler option to LogHandler instance
|
||||
"""
|
||||
args = parser.parse_args(["--log-handler", "console", "service-config"])
|
||||
assert isinstance(args.log_handler, LogHandler)
|
||||
|
||||
|
||||
def test_multiple_architectures(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
must accept multiple architectures
|
||||
@ -373,6 +382,42 @@ def test_subparsers_repo_check_option_refresh(parser: argparse.ArgumentParser) -
|
||||
assert args.refresh == 2
|
||||
|
||||
|
||||
def test_subparsers_repo_create_keyring(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
repo-create-keyring command must imply trigger
|
||||
"""
|
||||
args = parser.parse_args(["repo-create-keyring"])
|
||||
assert args.trigger == ["ahriman.core.support.KeyringTrigger"]
|
||||
|
||||
|
||||
def test_subparsers_repo_create_keyring_architecture(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
repo-create-keyring command must correctly parse architecture list
|
||||
"""
|
||||
args = parser.parse_args(["repo-create-keyring"])
|
||||
assert args.architecture is None
|
||||
args = parser.parse_args(["-a", "x86_64", "repo-create-keyring"])
|
||||
assert args.architecture == ["x86_64"]
|
||||
|
||||
|
||||
def test_subparsers_repo_create_mirrorlist(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
repo-create-mirrorlist command must imply trigger
|
||||
"""
|
||||
args = parser.parse_args(["repo-create-mirrorlist"])
|
||||
assert args.trigger == ["ahriman.core.support.MirrorlistTrigger"]
|
||||
|
||||
|
||||
def test_subparsers_repo_create_mirrorlist_architecture(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
repo-create-mirrorlist command must correctly parse architecture list
|
||||
"""
|
||||
args = parser.parse_args(["repo-create-mirrorlist"])
|
||||
assert args.architecture is None
|
||||
args = parser.parse_args(["-a", "x86_64", "repo-create-mirrorlist"])
|
||||
assert args.architecture == ["x86_64"]
|
||||
|
||||
|
||||
def test_subparsers_repo_daemon(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
repo-daemon command must imply dry run, exit code and package
|
||||
@ -415,6 +460,14 @@ def test_subparsers_repo_rebuild_architecture(parser: argparse.ArgumentParser) -
|
||||
assert args.architecture == ["x86_64"]
|
||||
|
||||
|
||||
def test_subparsers_repo_rebuild_option_status(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
repo-rebuild command must convert status option to BuildStatusEnum instance
|
||||
"""
|
||||
args = parser.parse_args(["-a", "x86_64", "repo-rebuild", "--status", "failed"])
|
||||
assert isinstance(args.status, BuildStatusEnum)
|
||||
|
||||
|
||||
def test_subparsers_repo_remove_unknown_architecture(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
repo-remove-unknown command must correctly parse architecture list
|
||||
@ -488,7 +541,7 @@ def test_subparsers_repo_status_update(parser: argparse.ArgumentParser) -> None:
|
||||
|
||||
def test_subparsers_repo_status_update_option_status(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
repo-status-update command must convert status option to buildstatusenum instance
|
||||
repo-status-update command must convert status option to BuildStatusEnum instance
|
||||
"""
|
||||
args = parser.parse_args(["-a", "x86_64", "repo-status-update"])
|
||||
assert isinstance(args.status, BuildStatusEnum)
|
||||
@ -655,7 +708,7 @@ def test_subparsers_service_setup_option_from_configuration(parser: argparse.Arg
|
||||
|
||||
def test_subparsers_service_setup_option_sign_target(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
service-setup command must convert sign-target option to signsettings instance
|
||||
service-setup command must convert sign-target option to SignSettings instance
|
||||
"""
|
||||
args = parser.parse_args(["-a", "x86_64", "service-setup", "--packager", "John Doe <john@doe.com>",
|
||||
"--repository", "aur-clone", "--sign-target", "packages"])
|
||||
@ -694,7 +747,7 @@ def test_subparsers_user_add_architecture(parser: argparse.ArgumentParser) -> No
|
||||
|
||||
def test_subparsers_user_add_option_role(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
user-add command must convert role option to useraccess instance
|
||||
user-add command must convert role option to UserAccess instance
|
||||
"""
|
||||
args = parser.parse_args(["user-add", "username"])
|
||||
assert isinstance(args.role, UserAccess)
|
||||
@ -726,7 +779,7 @@ def test_subparsers_user_list_architecture(parser: argparse.ArgumentParser) -> N
|
||||
|
||||
def test_subparsers_user_list_option_role(parser: argparse.ArgumentParser) -> None:
|
||||
"""
|
||||
user-list command must convert role option to useraccess instance
|
||||
user-list command must convert role option to UserAccess instance
|
||||
"""
|
||||
args = parser.parse_args(["user-list", "--role", "full"])
|
||||
assert isinstance(args.role, UserAccess)
|
||||
|
||||
@ -146,6 +146,9 @@ def aur_package_ahriman() -> AURPackage:
|
||||
"rsync",
|
||||
"subversion",
|
||||
],
|
||||
check_depends=[
|
||||
"python-pytest",
|
||||
],
|
||||
conflicts=[],
|
||||
provides=[],
|
||||
license=["GPL3"],
|
||||
@ -335,6 +338,9 @@ def package_description_ahriman() -> PackageDescription:
|
||||
"rsync",
|
||||
"subversion",
|
||||
],
|
||||
check_depends=[
|
||||
"python-pytest",
|
||||
],
|
||||
description="ArcH linux ReposItory MANager",
|
||||
filename="ahriman-2.6.0-1-any.pkg.tar.zst",
|
||||
groups=[],
|
||||
@ -482,7 +488,7 @@ def spawner(configuration: Configuration) -> Spawn:
|
||||
Returns:
|
||||
Spawn: spawner fixture
|
||||
"""
|
||||
return Spawn(MagicMock(), "x86_64", configuration)
|
||||
return Spawn(MagicMock(), "x86_64", ["--architecture", "x86_64", "--configuration", str(configuration.path)])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
||||
@ -135,12 +135,17 @@ def test_init(mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must create empty repository at the specified path
|
||||
"""
|
||||
mocker.patch("ahriman.models.package.Package.local_files", return_value=[Path("local")])
|
||||
add_mock = mocker.patch("ahriman.core.build_tools.sources.Sources.add")
|
||||
check_output_mock = mocker.patch("ahriman.core.build_tools.sources.Sources._check_output")
|
||||
commit_mock = mocker.patch("ahriman.core.build_tools.sources.Sources.commit")
|
||||
|
||||
local = Path("local")
|
||||
Sources.init(local)
|
||||
check_output_mock.assert_called_once_with("git", "init", "--initial-branch", Sources.DEFAULT_BRANCH,
|
||||
cwd=local, logger=pytest.helpers.anyvar(int))
|
||||
add_mock.assert_called_once_with(local, "PKGBUILD", ".SRCINFO", "local")
|
||||
commit_mock.assert_called_once_with(local, author="ahriman <ahriman@localhost>")
|
||||
|
||||
|
||||
def test_load(package_ahriman: Package, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
|
||||
|
||||
@ -10,6 +10,13 @@ from ahriman.core.exceptions import InitializeError
|
||||
from ahriman.models.repository_paths import RepositoryPaths
|
||||
|
||||
|
||||
def test_repository_name(configuration: Configuration) -> None:
|
||||
"""
|
||||
must return valid repository name
|
||||
"""
|
||||
assert configuration.repository_name == "aur-clone"
|
||||
|
||||
|
||||
def test_repository_paths(configuration: Configuration, repository_paths: RepositoryPaths) -> None:
|
||||
"""
|
||||
must return repository paths
|
||||
|
||||
@ -73,6 +73,30 @@ def test_validate_is_ip_address(validator: Validator, mocker: MockerFixture) ->
|
||||
])
|
||||
|
||||
|
||||
def test_validate_path_is_absolute(validator: Validator, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must validate that path is absolute
|
||||
"""
|
||||
error_mock = mocker.patch("ahriman.core.configuration.validator.Validator._error")
|
||||
|
||||
mocker.patch("pathlib.Path.is_absolute", return_value=False)
|
||||
validator._validate_path_is_absolute(False, "field", Path("1"))
|
||||
|
||||
mocker.patch("pathlib.Path.is_absolute", return_value=True)
|
||||
validator._validate_path_is_absolute(False, "field", Path("2"))
|
||||
|
||||
mocker.patch("pathlib.Path.is_absolute", return_value=False)
|
||||
validator._validate_path_is_absolute(True, "field", Path("3"))
|
||||
|
||||
mocker.patch("pathlib.Path.is_absolute", return_value=True)
|
||||
validator._validate_path_is_absolute(True, "field", Path("4"))
|
||||
|
||||
error_mock.assert_has_calls([
|
||||
MockCall("field", "Path 2 must be relative"),
|
||||
MockCall("field", "Path 3 must be absolute"),
|
||||
])
|
||||
|
||||
|
||||
def test_validate_is_url(validator: Validator, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must validate url correctly
|
||||
@ -105,12 +129,16 @@ def test_validate_path_exists(validator: Validator, mocker: MockerFixture) -> No
|
||||
mocker.patch("pathlib.Path.exists", return_value=False)
|
||||
validator._validate_path_exists(False, "field", Path("1"))
|
||||
|
||||
mocker.patch("pathlib.Path.exists", return_value=False)
|
||||
validator._validate_path_exists(True, "field", Path("2"))
|
||||
|
||||
mocker.patch("pathlib.Path.exists", return_value=True)
|
||||
validator._validate_path_exists(False, "field", Path("2"))
|
||||
|
||||
mocker.patch("pathlib.Path.exists", return_value=False)
|
||||
validator._validate_path_exists(True, "field", Path("3"))
|
||||
|
||||
mocker.patch("pathlib.Path.exists", return_value=True)
|
||||
validator._validate_path_exists(True, "field", Path("4"))
|
||||
|
||||
error_mock.assert_has_calls([
|
||||
MockCall("field", "Path 2 must exist"),
|
||||
MockCall("field", "Path 2 must not exist"),
|
||||
MockCall("field", "Path 3 must exist"),
|
||||
])
|
||||
|
||||
@ -4,6 +4,7 @@ import pytest
|
||||
from ahriman.core.alpm.repo import Repo
|
||||
from ahriman.core.build_tools.task import Task
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.sign.gpg import GPG
|
||||
from ahriman.core.tree import Leaf
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.repository_paths import RepositoryPaths
|
||||
@ -63,6 +64,20 @@ def repo(configuration: Configuration, repository_paths: RepositoryPaths) -> Rep
|
||||
return Repo(configuration.get("repository", "name"), repository_paths, [])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def gpg(configuration: Configuration) -> GPG:
|
||||
"""
|
||||
fixture for empty GPG
|
||||
|
||||
Args:
|
||||
configuration(Configuration): configuration fixture
|
||||
|
||||
Returns:
|
||||
GPG: GPG test instance
|
||||
"""
|
||||
return GPG(configuration)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def task_ahriman(package_ahriman: Package, configuration: Configuration, repository_paths: RepositoryPaths) -> Task:
|
||||
"""
|
||||
|
||||
@ -0,0 +1,52 @@
|
||||
import pytest
|
||||
|
||||
from pytest_mock import MockerFixture
|
||||
from sqlite3 import Connection
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.database.migrations.m007_check_depends import migrate_data, migrate_package_check_depends, steps
|
||||
from ahriman.models.package import Package
|
||||
|
||||
|
||||
def test_migration_check_depends() -> None:
|
||||
"""
|
||||
migration must not be empty
|
||||
"""
|
||||
assert steps
|
||||
|
||||
|
||||
def test_migrate_data(connection: Connection, configuration: Configuration, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must perform data migration
|
||||
"""
|
||||
depends_mock = mocker.patch("ahriman.core.database.migrations.m007_check_depends.migrate_package_check_depends")
|
||||
migrate_data(connection, configuration)
|
||||
depends_mock.assert_called_once_with(connection, configuration)
|
||||
|
||||
|
||||
def test_migrate_package_depends(connection: Connection, configuration: Configuration, package_ahriman: Package,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must update check depends list
|
||||
"""
|
||||
mocker.patch("pathlib.Path.is_dir", return_value=True)
|
||||
mocker.patch("pathlib.Path.iterdir", return_value=[package_ahriman.packages[package_ahriman.base].filepath])
|
||||
package_mock = mocker.patch("ahriman.models.package.Package.from_archive", return_value=package_ahriman)
|
||||
|
||||
migrate_package_check_depends(connection, configuration)
|
||||
package_mock.assert_called_once_with(
|
||||
package_ahriman.packages[package_ahriman.base].filepath, pytest.helpers.anyvar(int), remote=None)
|
||||
connection.executemany.assert_called_once_with(pytest.helpers.anyvar(str, strict=True), [{
|
||||
"check_depends": package_ahriman.packages[package_ahriman.base].check_depends,
|
||||
"package": package_ahriman.base,
|
||||
}])
|
||||
|
||||
|
||||
def test_migrate_package_depends_skip(connection: Connection, configuration: Configuration,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must skip check depends update if no repository directory was found
|
||||
"""
|
||||
mocker.patch("pathlib.Path.is_dir", return_value=False)
|
||||
migrate_package_check_depends(connection, configuration)
|
||||
connection.executemany.assert_not_called()
|
||||
tests/ahriman/core/log/test_journal_handler.py (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
import sys
|
||||
|
||||
from pytest_mock import MockerFixture
|
||||
|
||||
|
||||
# because of how imports work it must be first test
|
||||
def test_dummy_journal_handler(mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must import dummy journal handler if upstream systemd was not found
|
||||
"""
|
||||
mocker.patch.dict(sys.modules, {"systemd.journal": None})
|
||||
from logging import NullHandler
|
||||
from ahriman.core.log.journal_handler import JournalHandler
|
||||
assert issubclass(JournalHandler, NullHandler)
|
||||
|
||||
|
||||
def test_init() -> None:
|
||||
"""
|
||||
must init dummy handler
|
||||
"""
|
||||
from ahriman.core.log.journal_handler import _JournalHandler
|
||||
assert _JournalHandler(42, answer=42)
|
||||
|
||||
|
||||
def test_journal_handler() -> None:
|
||||
"""
|
||||
must import journal handler
|
||||
"""
|
||||
from systemd.journal import JournalHandler as UpstreamJournalHandler
|
||||
from ahriman.core.log.journal_handler import JournalHandler
|
||||
assert JournalHandler is UpstreamJournalHandler
|
||||
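
The three tests above pin down an optional-dependency fallback: when python-systemd is missing, JournalHandler must degrade to a no-op handler. A minimal sketch of that pattern, consistent with the assertions above but not the module's verbatim source:

# Sketch only: prefer systemd's JournalHandler, degrade to a no-op handler otherwise.
from logging import NullHandler
from typing import Any


class _JournalHandler(NullHandler):
    """dummy handler which accepts and silently ignores any arguments"""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        NullHandler.__init__(self)
        del args, kwargs


try:
    from systemd.journal import JournalHandler  # provided by the python-systemd package
except ImportError:
    JournalHandler = _JournalHandler  # type: ignore[assignment,misc]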
@ -1,21 +1,59 @@
import logging
import pytest
import sys

from logging.config import fileConfig
from pytest_mock import MockerFixture
from systemd.journal import JournalHandler

from ahriman.core.configuration import Configuration
from ahriman.core.log import Log
from ahriman.models.log_handler import LogHandler


def test_handler() -> None:
    """
    must extract journald handler if available
    """
    assert Log.handler(None) == LogHandler.Journald


def test_handler_selected() -> None:
    """
    must return selected log handler
    """
    assert Log.handler(LogHandler.Console) == LogHandler.Console


def test_handler_syslog(mocker: MockerFixture) -> None:
    """
    must return syslog handler if no journal is available
    """
    mocker.patch("pathlib.Path.exists", return_value=True)
    mocker.patch.dict(sys.modules, {"systemd.journal": None})
    assert Log.handler(None) == LogHandler.Syslog


def test_handler_console(mocker: MockerFixture) -> None:
    """
    must return console handler if no journal is available and no log device was found
    """
    mocker.patch("pathlib.Path.exists", return_value=False)
    mocker.patch.dict(sys.modules, {"systemd.journal": None})
    assert Log.handler(None) == LogHandler.Console


def test_load(configuration: Configuration, mocker: MockerFixture) -> None:
    """
    must load logging
    """
    logging_mock = mocker.patch("ahriman.core.log.log.fileConfig")
    logging_mock = mocker.patch("ahriman.core.log.log.fileConfig", side_effect=fileConfig)
    http_log_mock = mocker.patch("ahriman.core.log.http_log_handler.HttpLogHandler.load")

    Log.load(configuration, quiet=False, report=False)
    logging_mock.assert_called_once_with(configuration.logging_path)
    Log.load(configuration, LogHandler.Journald, quiet=False, report=False)
    logging_mock.assert_called_once_with(pytest.helpers.anyvar(int), disable_existing_loggers=True)
    http_log_mock.assert_called_once_with(configuration, report=False)
    assert all(isinstance(handler, JournalHandler) for handler in logging.getLogger().handlers)


def test_load_fallback(configuration: Configuration, mocker: MockerFixture) -> None:
@ -23,7 +61,7 @@ def test_load_fallback(configuration: Configuration, mocker: MockerFixture) -> N
    must fall back to stderr without errors
    """
    mocker.patch("ahriman.core.log.log.fileConfig", side_effect=PermissionError())
    Log.load(configuration, quiet=False, report=False)
    Log.load(configuration, LogHandler.Journald, quiet=False, report=False)


def test_load_quiet(configuration: Configuration, mocker: MockerFixture) -> None:
@ -31,5 +69,5 @@ def test_load_quiet(configuration: Configuration, mocker: MockerFixture) -> None
    must disable logging in case if quiet flag set
    """
    disable_mock = mocker.patch("logging.disable")
    Log.load(configuration, quiet=True, report=False)
    Log.load(configuration, LogHandler.Journald, quiet=True, report=False)
    disable_mock.assert_called_once_with(logging.WARNING)
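
Read together, the Log.handler tests describe an auto-detection order: an explicit choice wins, journald is preferred when python-systemd imports, syslog is used when a log device exists, and console is the last resort. A hedged sketch of that selection logic follows; the /dev/log path stands in for the "log device" mentioned in the docstring and is an assumption, not taken from the source:

# Sketch of the selection order asserted above; not the project's exact implementation.
from pathlib import Path

from ahriman.models.log_handler import LogHandler


def choose_handler(selected: LogHandler | None) -> LogHandler:
    """keep an explicit choice, otherwise prefer journald, then syslog, then console"""
    if selected is not None:
        return selected
    try:
        import systemd.journal  # noqa: F401
        return LogHandler.Journald
    except ImportError:
        pass
    if Path("/dev/log").exists():  # assumed syslog device location
        return LogHandler.Syslog
    return LogHandler.Console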
@ -16,4 +16,4 @@ def test_generate(configuration: Configuration, package_ahriman: Package, mocker

    report = HTML("x86_64", configuration, "html")
    report.generate([package_ahriman], Result())
    write_mock.assert_called_once_with(pytest.helpers.anyvar(int))
    write_mock.assert_called_once_with(pytest.helpers.anyvar(int), encoding="utf8")
@ -1,23 +1,8 @@
import pytest

from ahriman.core.configuration import Configuration
from ahriman.core.sign.gpg import GPG


@pytest.fixture
def gpg(configuration: Configuration) -> GPG:
    """
    fixture for empty GPG

    Args:
        configuration(Configuration): configuration fixture

    Returns:
        GPG: GPG test instance
    """
    return GPG("x86_64", configuration)


@pytest.fixture
def gpg_with_key(gpg: GPG) -> GPG:
    """
@ -97,6 +97,33 @@ def test_key_download_failure(gpg: GPG, mocker: MockerFixture) -> None:
    gpg.key_download("keyserver.ubuntu.com", "0xE989490C")


def test_key_export(gpg: GPG, mocker: MockerFixture) -> None:
    """
    must export gpg key correctly
    """
    check_output_mock = mocker.patch("ahriman.core.sign.gpg.GPG._check_output", return_value="key")
    assert gpg.key_export("k") == "key"
    check_output_mock.assert_called_once_with("gpg", "--armor", "--no-emit-version", "--export", "k",
                                              logger=pytest.helpers.anyvar(int))


def test_key_fingerprint(gpg: GPG, mocker: MockerFixture) -> None:
    """
    must extract fingerprint
    """
    check_output_mock = mocker.patch(
        "ahriman.core.sign.gpg.GPG._check_output",
        return_value="""tru::1:1576103830:0:3:1:5
fpr:::::::::C6EBB9222C3C8078631A0DE4BD2AC8C5E989490C:
sub:-:4096:1:7E3A4240CE3C45C2:1615121387::::::e::::::23:
fpr:::::::::43A663569A07EE1E4ECC55CC7E3A4240CE3C45C2:""")

    key = "0xCE3C45C2"
    assert gpg.key_fingerprint(key) == "C6EBB9222C3C8078631A0DE4BD2AC8C5E989490C"
    check_output_mock.assert_called_once_with("gpg", "--with-colons", "--fingerprint", key,
                                              logger=pytest.helpers.anyvar(int))


def test_key_import(gpg: GPG, mocker: MockerFixture) -> None:
    """
    must import PGP key from the server
@ -108,6 +135,21 @@ def test_key_import(gpg: GPG, mocker: MockerFixture) -> None:
    check_output_mock.assert_called_once_with("gpg", "--import", input_data="key", logger=pytest.helpers.anyvar(int))


def test_keys(gpg: GPG) -> None:
    """
    must extract keys
    """
    assert gpg.keys() == []

    gpg.default_key = "key"
    assert gpg.keys() == [gpg.default_key]

    gpg.configuration.set_option("sign", "key_a", "key1")
    gpg.configuration.set_option("sign", "key_b", "key1")
    gpg.configuration.set_option("sign", "key_c", "key2")
    assert gpg.keys() == ["key", "key1", "key2"]


def test_process(gpg_with_key: GPG, mocker: MockerFixture) -> None:
    """
    must call process method correctly
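
For context on test_key_fingerprint: `gpg --with-colons --fingerprint` emits machine-readable records, and the assertion expects the primary key fingerprint back. One way to read that format, consistent with the test data above (a sketch, not necessarily the upstream implementation):

# Sketch: the fingerprint is the tenth field of an "fpr" record; the first such
# record belongs to the primary key.
def primary_fingerprint(colons_output: str) -> str:
    for line in colons_output.splitlines():
        fields = line.split(":")
        if fields[0] == "fpr":
            return fields[9]
    raise ValueError("no fingerprint record found")


assert primary_fingerprint(
    "fpr:::::::::C6EBB9222C3C8078631A0DE4BD2AC8C5E989490C:") == "C6EBB9222C3C8078631A0DE4BD2AC8C5E989490C"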
@ -55,7 +55,7 @@ def test_create_session(web_client: WebClient, mocker: MockerFixture) -> None:
    session = web_client._create_session(use_unix_socket=False)
    assert isinstance(session, requests.Session)
    assert not isinstance(session, requests_unixsocket.Session)
    login_mock.assert_called_once_with()
    login_mock.assert_called_once_with(pytest.helpers.anyvar(int))


def test_create_session_unix_socket(web_client: WebClient, mocker: MockerFixture) -> None:
@ -80,7 +80,7 @@ def test_login(web_client: WebClient, user: User, mocker: MockerFixture) -> None
        "password": user.password
    }

    web_client._login()
    web_client._login(requests.Session())
    requests_mock.assert_called_once_with(pytest.helpers.anyvar(str, True), json=payload)


@ -90,7 +90,7 @@ def test_login_failed(web_client: WebClient, user: User, mocker: MockerFixture)
    """
    web_client.user = user
    mocker.patch("requests.Session.post", side_effect=Exception())
    web_client._login()
    web_client._login(requests.Session())


def test_login_failed_http_error(web_client: WebClient, user: User, mocker: MockerFixture) -> None:
@ -99,7 +99,7 @@ def test_login_failed_http_error(web_client: WebClient, user: User, mocker: Mock
    """
    web_client.user = user
    mocker.patch("requests.Session.post", side_effect=requests.exceptions.HTTPError())
    web_client._login()
    web_client._login(requests.Session())


def test_login_skip(web_client: WebClient, mocker: MockerFixture) -> None:
@ -107,7 +107,7 @@ def test_login_skip(web_client: WebClient, mocker: MockerFixture) -> None:
    must skip login if no user set
    """
    requests_mock = mocker.patch("requests.Session.post")
    web_client._login()
    web_client._login(requests.Session())
    requests_mock.assert_not_called()
34
tests/ahriman/core/support/conftest.py
Normal file
@ -0,0 +1,34 @@
import pytest

from ahriman.core.configuration import Configuration
from ahriman.core.support.package_creator import PackageCreator
from ahriman.core.support.pkgbuild.mirrorlist_generator import MirrorlistGenerator


@pytest.fixture
def mirrorlist_generator(configuration: Configuration) -> MirrorlistGenerator:
    """
    fixture for mirrorlist pkgbuild generator

    Args:
        configuration(Configuration): configuration fixture

    Returns:
        MirrorlistGenerator: mirrorlist pkgbuild generator test instance
    """
    return MirrorlistGenerator(configuration, "mirrorlist")


@pytest.fixture
def package_creator(configuration: Configuration, mirrorlist_generator: MirrorlistGenerator) -> PackageCreator:
    """
    package creator fixture

    Args:
        configuration(Configuration): configuration fixture
        mirrorlist_generator(MirrorlistGenerator): mirrorlist pkgbuild generator fixture

    Returns:
        PackageCreator: package creator test instance
    """
    return PackageCreator(configuration, mirrorlist_generator)
32
tests/ahriman/core/support/pkgbuild/conftest.py
Normal file
@ -0,0 +1,32 @@
import pytest

from ahriman.core.configuration import Configuration
from ahriman.core.sign.gpg import GPG
from ahriman.core.support.pkgbuild.keyring_generator import KeyringGenerator
from ahriman.core.support.pkgbuild.pkgbuild_generator import PkgbuildGenerator


@pytest.fixture
def keyring_generator(gpg: GPG, configuration: Configuration) -> KeyringGenerator:
    """
    fixture for keyring pkgbuild generator

    Args:
        gpg(GPG): empty GPG fixture
        configuration(Configuration): configuration fixture

    Returns:
        KeyringGenerator: keyring generator test instance
    """
    return KeyringGenerator(gpg, configuration, "keyring")


@pytest.fixture
def pkgbuild_generator() -> PkgbuildGenerator:
    """
    fixture for dummy pkgbuild generator

    Returns:
        PkgbuildGenerator: pkgbuild generator test instance
    """
    return PkgbuildGenerator()
185
tests/ahriman/core/support/pkgbuild/test_keyring_generator.py
Normal file
@ -0,0 +1,185 @@
import pytest

from pathlib import Path
from pytest_mock import MockerFixture
from unittest.mock import MagicMock, call as MockCall

from ahriman.core.configuration import Configuration
from ahriman.core.exceptions import PkgbuildGeneratorError
from ahriman.core.sign.gpg import GPG
from ahriman.core.support.pkgbuild.keyring_generator import KeyringGenerator


def test_init_packagers(gpg: GPG, configuration: Configuration, mocker: MockerFixture) -> None:
    """
    must extract packagers keys
    """
    mocker.patch("ahriman.core.sign.gpg.GPG.keys", return_value=["key"])

    assert KeyringGenerator(gpg, configuration, "keyring").packagers == ["key"]

    configuration.set_option("keyring", "packagers", "key1")
    assert KeyringGenerator(gpg, configuration, "keyring").packagers == ["key1"]


def test_init_revoked(gpg: GPG, configuration: Configuration) -> None:
    """
    must extract revoked keys
    """
    assert KeyringGenerator(gpg, configuration, "keyring").revoked == []

    configuration.set_option("keyring", "revoked", "key1")
    assert KeyringGenerator(gpg, configuration, "keyring").revoked == ["key1"]


def test_init_trusted(gpg: GPG, configuration: Configuration) -> None:
    """
    must extract trusted keys
    """
    assert KeyringGenerator(gpg, configuration, "keyring").trusted == []

    gpg.default_key = "key"
    assert KeyringGenerator(gpg, configuration, "keyring").trusted == ["key"]

    configuration.set_option("keyring", "trusted", "key1")
    assert KeyringGenerator(gpg, configuration, "keyring").trusted == ["key1"]


def test_license(gpg: GPG, configuration: Configuration) -> None:
    """
    must generate correct licenses list
    """
    assert KeyringGenerator(gpg, configuration, "keyring").license == ["Unlicense"]

    configuration.set_option("keyring", "license", "GPL MPL")
    assert KeyringGenerator(gpg, configuration, "keyring").license == ["GPL", "MPL"]


def test_pkgdesc(gpg: GPG, configuration: Configuration) -> None:
    """
    must generate correct pkgdesc property
    """
    assert KeyringGenerator(gpg, configuration, "keyring").pkgdesc == "aur-clone PGP keyring"

    configuration.set_option("keyring", "description", "description")
    assert KeyringGenerator(gpg, configuration, "keyring").pkgdesc == "description"


def test_pkgname(gpg: GPG, configuration: Configuration) -> None:
    """
    must generate correct pkgname property
    """
    assert KeyringGenerator(gpg, configuration, "keyring").pkgname == "aur-clone-keyring"

    configuration.set_option("keyring", "package", "keyring")
    assert KeyringGenerator(gpg, configuration, "keyring").pkgname == "keyring"


def test_url(gpg: GPG, configuration: Configuration) -> None:
    """
    must generate correct url property
    """
    assert KeyringGenerator(gpg, configuration, "keyring").url == ""

    configuration.set_option("keyring", "homepage", "homepage")
    assert KeyringGenerator(gpg, configuration, "keyring").url == "homepage"


def test_generate_gpg(keyring_generator: KeyringGenerator, mocker: MockerFixture) -> None:
    """
    must correctly generate file with all PGP keys
    """
    file_mock = MagicMock()
    export_mock = mocker.patch("ahriman.core.sign.gpg.GPG.key_export", side_effect=lambda key: key)
    open_mock = mocker.patch("pathlib.Path.open")
    open_mock.return_value.__enter__.return_value = file_mock
    keyring_generator.packagers = ["key"]
    keyring_generator.revoked = ["revoked"]
    keyring_generator.trusted = ["trusted", "key"]

    keyring_generator._generate_gpg(Path("local"))
    open_mock.assert_called_once_with("w")
    export_mock.assert_has_calls([MockCall("key"), MockCall("revoked"), MockCall("trusted")])
    file_mock.write.assert_has_calls([
        MockCall("key"), MockCall("\n"),
        MockCall("revoked"), MockCall("\n"),
        MockCall("trusted"), MockCall("\n"),
    ])


def test_generate_revoked(keyring_generator: KeyringGenerator, mocker: MockerFixture) -> None:
    """
    must correctly generate file with revoked keys
    """
    file_mock = MagicMock()
    fingerprint_mock = mocker.patch("ahriman.core.sign.gpg.GPG.key_fingerprint", side_effect=lambda key: key)
    open_mock = mocker.patch("pathlib.Path.open")
    open_mock.return_value.__enter__.return_value = file_mock
    keyring_generator.revoked = ["revoked"]

    keyring_generator._generate_revoked(Path("local"))
    open_mock.assert_called_once_with("w")
    fingerprint_mock.assert_called_once_with("revoked")
    file_mock.write.assert_has_calls([MockCall("revoked"), MockCall("\n")])


def test_generate_trusted(keyring_generator: KeyringGenerator, mocker: MockerFixture) -> None:
    """
    must correctly generate file with trusted keys
    """
    file_mock = MagicMock()
    fingerprint_mock = mocker.patch("ahriman.core.sign.gpg.GPG.key_fingerprint", side_effect=lambda key: key)
    open_mock = mocker.patch("pathlib.Path.open")
    open_mock.return_value.__enter__.return_value = file_mock
    keyring_generator.trusted = ["trusted", "trusted"]

    keyring_generator._generate_trusted(Path("local"))
    open_mock.assert_called_once_with("w")
    fingerprint_mock.assert_called_once_with("trusted")
    file_mock.write.assert_has_calls([MockCall("trusted"), MockCall(":4:\n")])


def test_generate_trusted_empty(keyring_generator: KeyringGenerator) -> None:
    """
    must raise PkgbuildGeneratorError if no trusted keys set
    """
    with pytest.raises(PkgbuildGeneratorError):
        keyring_generator._generate_trusted(Path("local"))


def test_install(keyring_generator: KeyringGenerator) -> None:
    """
    must return install functions
    """
    assert keyring_generator.install() == """post_upgrade() {
if usr/bin/pacman-key -l >/dev/null 2>&1; then
usr/bin/pacman-key --populate aur-clone
usr/bin/pacman-key --updatedb
fi
}

post_install() {
if [ -x usr/bin/pacman-key ]; then
post_upgrade
fi
}"""


def test_package(keyring_generator: KeyringGenerator) -> None:
    """
    must generate package function correctly
    """
    assert keyring_generator.package() == """{
install -Dm644 "$srcdir/aur-clone.gpg" "$pkgdir/usr/share/pacman/keyrings/aur-clone.gpg"
install -Dm644 "$srcdir/aur-clone-revoked" "$pkgdir/usr/share/pacman/keyrings/aur-clone-revoked"
install -Dm644 "$srcdir/aur-clone-trusted" "$pkgdir/usr/share/pacman/keyrings/aur-clone-trusted"
}"""


def test_sources(keyring_generator: KeyringGenerator) -> None:
    """
    must return valid sources files list
    """
    assert keyring_generator.sources().get("aur-clone.gpg")
    assert keyring_generator.sources().get("aur-clone-revoked")
    assert keyring_generator.sources().get("aur-clone-trusted")
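
The _generate_trusted tests above fix the on-disk format: one "<fingerprint>:4:" line per trusted key, keys deduplicated, and a PkgbuildGeneratorError when nothing is trusted. A hedged sketch of a function with that behaviour; the free-standing signature (explicit sign and trusted parameters) is an assumption rather than the class's real layout:

# Sketch of the behaviour asserted by test_generate_trusted*; parameter names are assumed.
from pathlib import Path

from ahriman.core.exceptions import PkgbuildGeneratorError
from ahriman.core.sign.gpg import GPG


def generate_trusted(sign: GPG, trusted: list[str], target: Path) -> None:
    if not trusted:
        raise PkgbuildGeneratorError
    with target.open("w") as trusted_file:
        for key in sorted(set(trusted)):  # deduplicate keys before writing
            trusted_file.write(sign.key_fingerprint(key))
            trusted_file.write(":4:\n")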
@ -0,0 +1,90 @@
from pathlib import Path
from pytest_mock import MockerFixture

from ahriman.core.configuration import Configuration
from ahriman.core.support.pkgbuild.mirrorlist_generator import MirrorlistGenerator


def test_init_path(configuration: Configuration) -> None:
    """
    must set relative path to mirrorlist
    """
    assert MirrorlistGenerator(configuration, "mirrorlist").path == Path("etc") / "pacman.d" / "aur-clone-mirrorlist"

    configuration.set_option("mirrorlist", "path", "/etc")
    assert MirrorlistGenerator(configuration, "mirrorlist").path == Path("etc")


def test_license(configuration: Configuration) -> None:
    """
    must generate correct licenses list
    """
    assert MirrorlistGenerator(configuration, "mirrorlist").license == ["Unlicense"]

    configuration.set_option("mirrorlist", "license", "GPL MPL")
    assert MirrorlistGenerator(configuration, "mirrorlist").license == ["GPL", "MPL"]


def test_pkgdesc(configuration: Configuration) -> None:
    """
    must generate correct pkgdesc property
    """
    assert MirrorlistGenerator(configuration, "mirrorlist").pkgdesc == "aur-clone mirror list for use by pacman"

    configuration.set_option("mirrorlist", "description", "description")
    assert MirrorlistGenerator(configuration, "mirrorlist").pkgdesc == "description"


def test_pkgname(configuration: Configuration) -> None:
    """
    must generate correct pkgname property
    """
    assert MirrorlistGenerator(configuration, "mirrorlist").pkgname == "aur-clone-mirrorlist"

    configuration.set_option("mirrorlist", "package", "mirrorlist")
    assert MirrorlistGenerator(configuration, "mirrorlist").pkgname == "mirrorlist"


def test_url(configuration: Configuration) -> None:
    """
    must generate correct url property
    """
    assert MirrorlistGenerator(configuration, "mirrorlist").url == ""

    configuration.set_option("mirrorlist", "homepage", "homepage")
    assert MirrorlistGenerator(configuration, "mirrorlist").url == "homepage"


def test_generate_mirrorlist(mirrorlist_generator: MirrorlistGenerator, mocker: MockerFixture) -> None:
    """
    must correctly generate mirrorlist file
    """
    write_mock = mocker.patch("pathlib.Path.write_text")
    mirrorlist_generator._generate_mirrorlist(Path("local"))
    write_mock.assert_called_once_with("Server = http://localhost\n", encoding="utf8")


def test_package(mirrorlist_generator: MirrorlistGenerator) -> None:
    """
    must generate package function correctly
    """
    assert mirrorlist_generator.package() == """{
install -Dm644 "$srcdir/mirrorlist" "$pkgdir/etc/pacman.d/aur-clone-mirrorlist"
}"""


def test_patches(mirrorlist_generator: MirrorlistGenerator) -> None:
    """
    must generate additional patch list
    """
    patches = {patch.key: patch for patch in mirrorlist_generator.patches()}

    assert "backup" in patches
    assert patches["backup"].value == [str(mirrorlist_generator.path)]


def test_sources(mirrorlist_generator: MirrorlistGenerator) -> None:
    """
    must return valid sources files list
    """
    assert mirrorlist_generator.sources().get("mirrorlist")
141
tests/ahriman/core/support/pkgbuild/test_pkgbuild_generator.py
Normal file
@ -0,0 +1,141 @@
import datetime
import pytest

from pathlib import Path
from pytest_mock import MockerFixture
from unittest.mock import MagicMock, call as MockCall

from ahriman.core.support.pkgbuild.pkgbuild_generator import PkgbuildGenerator
from ahriman.core.util import utcnow
from ahriman.models.pkgbuild_patch import PkgbuildPatch


def test_license(pkgbuild_generator: PkgbuildGenerator) -> None:
    """
    must return empty license list
    """
    assert pkgbuild_generator.license == []


def test_pkgdesc(pkgbuild_generator: PkgbuildGenerator) -> None:
    """
    must raise NotImplementedError on missing pkgdesc property
    """
    with pytest.raises(NotImplementedError):
        assert pkgbuild_generator.pkgdesc


def test_pkgname(pkgbuild_generator: PkgbuildGenerator) -> None:
    """
    must raise NotImplementedError on missing pkgname property
    """
    with pytest.raises(NotImplementedError):
        assert pkgbuild_generator.pkgname


def test_pkgver(pkgbuild_generator: PkgbuildGenerator, mocker: MockerFixture) -> None:
    """
    must implement default version as current date
    """
    mocker.patch("ahriman.core.support.pkgbuild.pkgbuild_generator.utcnow", return_value=datetime.datetime(2002, 3, 11))
    assert pkgbuild_generator.pkgver == utcnow().strftime("20020311")


def test_url(pkgbuild_generator: PkgbuildGenerator) -> None:
    """
    must return empty url
    """
    assert pkgbuild_generator.url == ""


def test_install(pkgbuild_generator: PkgbuildGenerator) -> None:
    """
    must return empty install function
    """
    assert pkgbuild_generator.install() is None


def test_package(pkgbuild_generator: PkgbuildGenerator) -> None:
    """
    must raise NotImplementedError on missing package function
    """
    with pytest.raises(NotImplementedError):
        pkgbuild_generator.package()


def test_patches(pkgbuild_generator: PkgbuildGenerator) -> None:
    """
    must return empty patches list
    """
    assert pkgbuild_generator.patches() == []


def test_sources(pkgbuild_generator: PkgbuildGenerator) -> None:
    """
    must return empty sources list
    """
    assert pkgbuild_generator.sources() == {}


def test_write_install(pkgbuild_generator: PkgbuildGenerator, mocker: MockerFixture) -> None:
    """
    must write install file
    """
    mocker.patch.object(PkgbuildGenerator, "pkgname", "package")
    mocker.patch("ahriman.core.support.pkgbuild.pkgbuild_generator.PkgbuildGenerator.install", return_value="content")
    write_mock = mocker.patch("pathlib.Path.write_text")

    assert pkgbuild_generator.write_install(Path("local")) == [PkgbuildPatch("install", "package.install")]
    write_mock.assert_called_once_with("content")


def test_write_install_empty(pkgbuild_generator: PkgbuildGenerator) -> None:
    """
    must return empty patch list for missing install function
    """
    assert pkgbuild_generator.write_install(Path("local")) == []


def test_write_pkgbuild(pkgbuild_generator: PkgbuildGenerator, mocker: MockerFixture) -> None:
    """
    must write PKGBUILD content to file
    """
    path = Path("local")
    for prop in ("pkgdesc", "pkgname"):
        mocker.patch.object(PkgbuildGenerator, prop, "")
    mocker.patch("ahriman.core.support.pkgbuild.pkgbuild_generator.PkgbuildGenerator.package", return_value="{}")
    patches_mock = mocker.patch("ahriman.core.support.pkgbuild.pkgbuild_generator.PkgbuildGenerator.patches",
                                return_value=[PkgbuildPatch("property", "value")])
    install_mock = mocker.patch("ahriman.core.support.pkgbuild.pkgbuild_generator.PkgbuildGenerator.write_install",
                                return_value=[PkgbuildPatch("install", "pkgname.install")])
    sources_mock = mocker.patch("ahriman.core.support.pkgbuild.pkgbuild_generator.PkgbuildGenerator.write_sources",
                                return_value=[PkgbuildPatch("source", []), PkgbuildPatch("sha512sums", [])])
    write_mock = mocker.patch("ahriman.models.pkgbuild_patch.PkgbuildPatch.write")

    pkgbuild_generator.write_pkgbuild(path)
    patches_mock.assert_called_once_with()
    install_mock.assert_called_once_with(path)
    sources_mock.assert_called_once_with(path)
    write_mock.assert_has_calls([MockCall(path / "PKGBUILD")] * 12)


def test_write_sources(pkgbuild_generator: PkgbuildGenerator, mocker: MockerFixture) -> None:
    """
    must write sources files
    """
    path = Path("local")
    generator_mock = MagicMock()
    sources_mock = mocker.patch("ahriman.core.support.pkgbuild.pkgbuild_generator.PkgbuildGenerator.sources",
                                return_value={"source": generator_mock})
    open_mock = mocker.patch("pathlib.Path.open")
    hash_mock = MagicMock()
    hash_mock.hexdigest.return_value = "hash"
    mocker.patch("hashlib.sha512", return_value=hash_mock)

    assert pkgbuild_generator.write_sources(path) == [
        PkgbuildPatch("source", ["source"]),
        PkgbuildPatch("sha512sums", ["hash"]),
    ]
    generator_mock.assert_called_once_with(path / "source")
    sources_mock.assert_called_once_with()
    open_mock.assert_called_once_with("rb")
30
tests/ahriman/core/support/test_keyring_trigger.py
Normal file
@ -0,0 +1,30 @@
from pytest_mock import MockerFixture

from ahriman.core.configuration import Configuration
from ahriman.core.sign.gpg import GPG
from ahriman.core.support import KeyringTrigger
from ahriman.models.context_key import ContextKey


def test_configuration_sections(configuration: Configuration) -> None:
    """
    must correctly parse target list
    """
    configuration.set_option("keyring", "target", "a b c")
    assert KeyringTrigger.configuration_sections(configuration) == ["a", "b", "c"]

    configuration.remove_option("keyring", "target")
    assert KeyringTrigger.configuration_sections(configuration) == []


def test_on_start(configuration: Configuration, mocker: MockerFixture) -> None:
    """
    must run report for specified targets
    """
    gpg_mock = mocker.patch("ahriman.core._Context.get")
    run_mock = mocker.patch("ahriman.core.support.package_creator.PackageCreator.run")

    trigger = KeyringTrigger("x86_64", configuration)
    trigger.on_start()
    gpg_mock.assert_called_once_with(ContextKey("sign", GPG))
    run_mock.assert_called_once_with()
26
tests/ahriman/core/support/test_mirrorlist_trigger.py
Normal file
@ -0,0 +1,26 @@
from pytest_mock import MockerFixture

from ahriman.core.configuration import Configuration
from ahriman.core.support import MirrorlistTrigger


def test_configuration_sections(configuration: Configuration) -> None:
    """
    must correctly parse target list
    """
    configuration.set_option("mirrorlist", "target", "a b c")
    assert MirrorlistTrigger.configuration_sections(configuration) == ["a", "b", "c"]

    configuration.remove_option("mirrorlist", "target")
    assert MirrorlistTrigger.configuration_sections(configuration) == []


def test_on_start(configuration: Configuration, mocker: MockerFixture) -> None:
    """
    must run report for specified targets
    """
    run_mock = mocker.patch("ahriman.core.support.package_creator.PackageCreator.run")

    trigger = MirrorlistTrigger("x86_64", configuration)
    trigger.on_start()
    run_mock.assert_called_once_with()
40
tests/ahriman/core/support/test_package_creator.py
Normal file
@ -0,0 +1,40 @@
import pytest

from pytest_mock import MockerFixture

from ahriman.core.database import SQLite
from ahriman.core.support.package_creator import PackageCreator
from ahriman.models.context_key import ContextKey
from ahriman.models.package import Package
from ahriman.models.package_description import PackageDescription


def test_run(package_creator: PackageCreator, database: SQLite, mocker: MockerFixture) -> None:
    """
    must correctly process package creation
    """
    package = Package(
        base=package_creator.generator.pkgname,
        version=package_creator.generator.pkgver,
        remote=None,
        packages={package_creator.generator.pkgname: PackageDescription()},
    )
    local_path = package_creator.configuration.repository_paths.cache_for(package_creator.generator.pkgname)

    rmtree_mock = mocker.patch("shutil.rmtree")
    database_mock = mocker.patch("ahriman.core._Context.get", return_value=database)
    init_mock = mocker.patch("ahriman.core.build_tools.sources.Sources.init")
    insert_mock = mocker.patch("ahriman.core.database.SQLite.package_update")
    mkdir_mock = mocker.patch("pathlib.Path.mkdir")
    package_mock = mocker.patch("ahriman.models.package.Package.from_build", return_value=package)
    write_mock = mocker.patch("ahriman.core.support.pkgbuild.pkgbuild_generator.PkgbuildGenerator.write_pkgbuild")

    package_creator.run()
    rmtree_mock.assert_called_once_with(local_path, ignore_errors=True)
    mkdir_mock.assert_called_once_with(mode=0o755, parents=True, exist_ok=True)
    write_mock.assert_called_once_with(local_path)
    init_mock.assert_called_once_with(local_path)

    package_mock.assert_called_once_with(local_path, "x86_64")
    database_mock.assert_called_once_with(ContextKey("database", SQLite))
    insert_mock.assert_called_once_with(package, pytest.helpers.anyvar(int))
@ -44,10 +44,11 @@ def test_spawn_process(spawner: Spawn, mocker: MockerFixture) -> None:

    spawner._spawn_process("add", "ahriman", now="", maybe="?")
    start_mock.assert_called_once_with()
    spawner.args_parser.parse_args.assert_called_once_with([
        "--architecture", spawner.architecture, "--configuration", str(spawner.configuration.path),
        "add", "ahriman", "--now", "--maybe", "?"
    ])
    spawner.args_parser.parse_args.assert_called_once_with(
        spawner.command_arguments + [
            "add", "ahriman", "--now", "--maybe", "?"
        ]
    )


def test_key_import(spawner: Spawn, mocker: MockerFixture) -> None:
@ -140,9 +141,7 @@ def test_run_pop(spawner: Spawn) -> None:

    spawner.run()

    first.terminate.assert_called_once_with()
    first.join.assert_called_once_with()
    second.terminate.assert_called_once_with()
    second.join.assert_called_once_with()
    assert not spawner.active
@ -12,7 +12,8 @@ from unittest.mock import MagicMock

from ahriman.core.exceptions import BuildError, OptionError, UnsafeRunError
from ahriman.core.util import check_output, check_user, enum_values, exception_response_text, filter_json, \
    full_version, package_like, partition, pretty_datetime, pretty_size, safe_filename, trim_package, utcnow, walk
    full_version, package_like, partition, pretty_datetime, pretty_size, safe_filename, srcinfo_property, \
    srcinfo_property_list, trim_package, utcnow, walk
from ahriman.models.package import Package
from ahriman.models.package_source import PackageSource
from ahriman.models.repository_paths import RepositoryPaths
@ -81,12 +82,12 @@ def test_check_output_with_user(passwd: Any, mocker: MockerFixture) -> None:
    """
    must run command as specified user and set its homedir
    """
    assert check_output("python", "-c", "import os; print(os.getenv('HOME'))") != passwd.pw_dir
    assert check_output("python", "-c", """import os; print(os.getenv("HOME"))""") != passwd.pw_dir

    getpwuid_mock = mocker.patch("ahriman.core.util.getpwuid", return_value=passwd)
    user = os.getuid()

    assert check_output("python", "-c", "import os; print(os.getenv('HOME'))", user=user) == passwd.pw_dir
    assert check_output("python", "-c", """import os; print(os.getenv("HOME"))""", user=user) == passwd.pw_dir
    getpwuid_mock.assert_called_once_with(user)


@ -331,6 +332,31 @@ def test_safe_filename() -> None:
    assert safe_filename("tolua++-1.0.93-4-x86_64.pkg.tar.zst") == "tolua---1.0.93-4-x86_64.pkg.tar.zst"


def test_srcinfo_property() -> None:
    """
    must correctly extract properties
    """
    assert srcinfo_property("key", {"key": "root"}, {"key": "overrides"}, default="default") == "overrides"
    assert srcinfo_property("key", {"key": "root"}, {}, default="default") == "root"
    assert srcinfo_property("key", {}, {"key": "overrides"}, default="default") == "overrides"
    assert srcinfo_property("key", {}, {}, default="default") == "default"
    assert srcinfo_property("key", {}, {}) is None


def test_srcinfo_property_list() -> None:
    """
    must correctly extract property list
    """
    assert srcinfo_property_list("key", {"key": ["root"]}, {"key": ["overrides"]}) == ["overrides"]
    assert srcinfo_property_list("key", {"key": ["root"]}, {"key_x86_64": ["overrides"]}, architecture="x86_64") == [
        "root", "overrides"
    ]
    assert srcinfo_property_list("key", {"key": ["root"], "key_x86_64": ["overrides"]}, {}, architecture="x86_64") == [
        "root", "overrides"
    ]
    assert srcinfo_property_list("key", {"key_x86_64": ["overrides"]}, {}, architecture="x86_64") == ["overrides"]


def test_trim_package() -> None:
    """
    must trim package version
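
The two new test cases above define the lookup rules for .SRCINFO properties: a package-level value overrides the root section, and for lists the architecture-suffixed key is appended after the plain one. A small sketch that satisfies exactly the assertions above (not claimed to be the upstream implementation):

# Sketch reproducing the override/append rules asserted in the tests above.
from typing import Any


def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any], default: Any = None) -> Any:
    # package section wins, then the root section, then the default
    return package_srcinfo.get(key, srcinfo.get(key, default))


def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[str, Any],
                          architecture: str | None = None) -> list[Any]:
    values = list(srcinfo_property(key, srcinfo, package_srcinfo, default=[]))
    if architecture is not None:
        values.extend(srcinfo_property(f"{key}_{architecture}", srcinfo, package_srcinfo, default=[]))
    return values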
@ -11,6 +11,17 @@ from ahriman.models.package import Package
from ahriman.models.result import Result


def test_known_triggers(configuration: Configuration) -> None:
    """
    must return used triggers
    """
    configuration.set_option("build", "triggers_known", "a b c")
    assert TriggerLoader.known_triggers(configuration) == ["a", "b", "c"]

    configuration.remove_option("build", "triggers_known")
    assert TriggerLoader.known_triggers(configuration) == []


def test_selected_triggers(configuration: Configuration) -> None:
    """
    must return used triggers
@ -22,7 +22,7 @@ def test_calculate_hash_small(resource_path_root: Path) -> None:
    must calculate checksum for path which is single chunk
    """
    path = resource_path_root / "models" / "package_ahriman_srcinfo"
    assert HttpUpload.calculate_hash(path) == "79b0f84e0232ed34fd191a85c383ecc5"
    assert HttpUpload.calculate_hash(path) == "2635e2898452d594025517cfe529b1f2"


def test_get_body_get_hashes() -> None:
@ -30,7 +30,7 @@ def test_calculate_etag_small(resource_path_root: Path) -> None:
    must calculate checksum for path which is single chunk
    """
    path = resource_path_root / "models" / "package_ahriman_srcinfo"
    assert S3.calculate_etag(path, _chunk_size) == "79b0f84e0232ed34fd191a85c383ecc5"
    assert S3.calculate_etag(path, _chunk_size) == "2635e2898452d594025517cfe529b1f2"


def test_files_remove(s3_remote_objects: list[Any]) -> None:

@ -115,6 +115,7 @@ def pyalpm_package_ahriman(aur_package_ahriman: AURPackage) -> MagicMock:
    type(mock).makedepends = PropertyMock(return_value=aur_package_ahriman.make_depends)
    type(mock).name = PropertyMock(return_value=aur_package_ahriman.name)
    type(mock).optdepends = PropertyMock(return_value=aur_package_ahriman.opt_depends)
    type(mock).checkdepends = PropertyMock(return_value=aur_package_ahriman.check_depends)
    type(mock).provides = PropertyMock(return_value=aur_package_ahriman.provides)
    type(mock).version = PropertyMock(return_value=aur_package_ahriman.version)
    type(mock).url = PropertyMock(return_value=aur_package_ahriman.url)
@ -139,6 +140,7 @@ def pyalpm_package_description_ahriman(package_description_ahriman: PackageDescr
    type(mock).depends = PropertyMock(return_value=package_description_ahriman.depends)
    type(mock).makedepends = PropertyMock(return_value=package_description_ahriman.make_depends)
    type(mock).optdepends = PropertyMock(return_value=package_description_ahriman.opt_depends)
    type(mock).checkdepends = PropertyMock(return_value=package_description_ahriman.check_depends)
    type(mock).desc = PropertyMock(return_value=package_description_ahriman.description)
    type(mock).groups = PropertyMock(return_value=package_description_ahriman.groups)
    type(mock).isize = PropertyMock(return_value=package_description_ahriman.installed_size)
0
tests/ahriman/models/test_log_handler.py
Normal file
Some files were not shown because too many files have changed in this diff