Compare commits

...

18 Commits
2.6.1 ... 2.7.1

Author SHA1 Message Date
ec0550a275 Release 2.7.1 2023-03-06 01:15:47 +02:00
df23be9269 gracefully terminate web server
In previous revisions the server was terminated by itself, thus no lock or
socket was removed. In the new version, graceful termination of the queue
has been added, and the server now handles signals
2023-03-06 01:13:41 +02:00
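A minimal sketch of the pattern this commit describes, assuming an aiohttp-based service with a job queue stored under app["queue"] (names are illustrative, not the actual ahriman internals):

    import asyncio
    from aiohttp import web

    async def drain_queue(app: web.Application) -> None:
        # wait until every scheduled job has been processed before exiting
        queue: asyncio.Queue = app["queue"]
        await queue.join()

    def run(app: web.Application, port: int = 8080) -> None:
        app.on_shutdown.append(drain_queue)
        # handle_signals=True lets aiohttp translate SIGINT/SIGTERM into a clean
        # shutdown, so the process no longer has to be killed from outside and
        # the caller gets a chance to remove its lock and socket afterwards
        web.run_app(app, port=port, handle_signals=True)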
a8c40a6b87 replace InitializeException with InitializeError in docs 2023-03-02 11:07:59 +02:00
a274f91677 simplify login ttl processing 2023-02-24 16:52:55 +02:00
13faf66bdb add more validation rules 2023-02-23 15:18:56 +02:00
4fb9335df9 add ability to read cookie secret from config 2023-02-22 18:47:56 +02:00
d517d8bfbb Release 2.7.0 2023-02-20 03:05:08 +02:00
37e57c13c8 update dependencies before build (#91)
The old implementation used an add step in order to fetch dependencies,
which could lead to build errors in case the dependency list was updated.

The new solution takes the dependencies which are declared at the current
version and fetches them (if required and if enabled) before the update process.

Closes #90
2023-02-12 06:02:30 +03:00
19bb19e9f5 handle .gitignore file correctly in remote push trigger 2023-02-11 04:41:24 +02:00
3a4e8f4d97 mask mypy warning
The newest mypy produces the following warning:

src/ahriman/application/handlers/search.py:43: error: Non-overlapping identity check (left operand type: "Union[_DefaultFactory[Any], Literal[_MISSING_TYPE.MISSING]]", right operand type: "Type[List[Any]]")  [comparison-overlap]

which is most likely caused by the dataclass models having been updated to protocols (however, decorators are still callable). This commit masks the problematic line from checking.
2023-02-09 22:46:08 +02:00
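For reference, a hedged sketch of the masking approach on an illustrative dataclass (not the actual ahriman code): keep the runtime check and silence only the flagged comparison on that single line:

    from dataclasses import dataclass, field, fields
    from typing import List

    @dataclass
    class Model:
        values: List[str] = field(default_factory=list)

    # comparing default_factory against the list type trips the
    # comparison-overlap check in recent mypy, hence the targeted ignore
    LIST_FIELDS = {f.name for f in fields(Model) if f.default_factory is not list}  # type: ignore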
4db8ad8e8d hide passwords and secrets from repo-config subcommand by default 2023-02-05 16:44:48 +02:00
117f096d41 note about local database update (see #85) 2023-01-31 14:47:58 +02:00
917ec48be5 handle architecture specific fields for dependencies
This change requires srcinfo version 0.1.2 at least. Unfortunately, the AUR
API doesn't support architecture specific arrays for now, so we just leave
it as is

Closes #82
2023-01-31 14:34:09 +02:00
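A hedged sketch of how architecture specific arrays from .SRCINFO could be merged with the generic ones; it assumes the srcinfo package (0.1.2 or newer), and the helper name is illustrative:

    from typing import List
    from srcinfo.parse import parse_srcinfo

    def build_depends(srcinfo_text: str, architecture: str) -> List[str]:
        parsed, errors = parse_srcinfo(srcinfo_text)
        if errors:
            raise ValueError(f"could not parse .SRCINFO: {errors}")
        # generic array plus the architecture specific one, e.g. depends_x86_64
        return parsed.get("depends", []) + parsed.get(f"depends_{architecture}", [])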
0a2ba4ae07 filter empty packages from database
In some cases (e.g. during addition of a package to the build queue) we don't have
full information about the package itself; in these cases we produce rows
with an empty architecture, which duplicate the normal ones.

This commit changes the architecture column to required and also
filters out packages which don't have the architecture set yet.

Closes #83
2023-01-30 17:57:13 +02:00
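A hedged sketch of what such a migration could look like in SQLite, following the steps-list convention used by the ahriman migrations (the column set is illustrative; the actual migration is m006_packages_architecture_required and may rebuild the table differently):

    steps = [
        # rows inserted before the full package information was known have an
        # empty architecture and duplicate the fully populated ones
        """delete from packages where architecture is null""",
        # rebuild the table so that the architecture column becomes required
        """alter table packages rename to packages_""",
        """
        create table packages (
            package_base text not null,
            package text not null,
            architecture text not null,
            unique (package_base, package, architecture)
        )
        """,
        """
        insert into packages (package_base, package, architecture)
        select package_base, package, architecture from packages_
        """,
        """drop table packages_""",
    ]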
9d7f63e549 trim version from provides list
Closes #87
2023-01-30 17:27:34 +02:00
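A hedged sketch of the trimming itself; pacman provides entries may carry a version or soname suffix (e.g. gtk3=1:3.24.36 or libfoo.so=1-64), while only the plain name should be stored and compared:

    def trim_package(package_name: str) -> str:
        # strip any version constraint from a depends/provides entry
        for symbol in ("<", "=", ">"):
            package_name = package_name.partition(symbol)[0]
        return package_name

    # trim_package("gtk3=1:3.24.36") -> "gtk3"
    # trim_package("libfoo.so=1-64") -> "libfoo.so"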
25eee9ca5e add ability to suppress http logging errors (#86) 2023-01-30 17:19:01 +02:00
5af84955ac calculate dependencies based on package information (#89) 2023-01-30 17:28:05 +03:00
d3ad4c3c08 remove debug line 2023-01-27 16:41:41 +02:00
118 changed files with 5245 additions and 5063 deletions

View File

@ -1 +1 @@
skips: ['B101', 'B105', 'B106', 'B404']
skips: ['B101', 'B104', 'B105', 'B106', 'B404']

View File

@ -533,5 +533,5 @@ valid-metaclass-classmethod-first-arg=cls
# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=BaseException,
Exception
overgeneral-exceptions=builtins.BaseException,
builtins.Exception

File diff suppressed because it is too large

Binary image changed (673 KiB before, 660 KiB after).

View File

@ -1,4 +1,4 @@
.TH AHRIMAN "1" "2023\-01\-25" "ahriman" "Generated Python Manual"
.TH AHRIMAN "1" "2023\-03\-06" "ahriman" "Generated Python Manual"
.SH NAME
ahriman
.SH SYNOPSIS
@ -156,7 +156,7 @@ web server
.SH COMMAND \fI\,'ahriman aur\-search'\/\fR
usage: ahriman aur\-search [\-h] [\-e] [\-\-info | \-\-no\-info]
[\-\-sort\-by {description,first_submitted,id,last_modified,maintainer,name,num_votes,out_of_date,package_base,package_base_id,popularity,repository,url,url_path,version}]
[\-\-sort\-by {description,first_submitted,id,last_modified,maintainer,name,num_votes,out_of_date,package_base,package_base_id,popularity,repository,submitter,url,url_path,version}]
search [search ...]
search for package in AUR using API
@ -175,7 +175,7 @@ return non\-zero exit status if result is empty
show additional package information (default: False)
.TP
\fB\-\-sort\-by\fR \fI\,{description,first_submitted,id,last_modified,maintainer,name,num_votes,out_of_date,package_base,package_base_id,popularity,repository,url,url_path,version}\/\fR
\fB\-\-sort\-by\fR \fI\,{description,first_submitted,id,last_modified,maintainer,name,num_votes,out_of_date,package_base,package_base_id,popularity,repository,submitter,url,url_path,version}\/\fR
sort field by this field. In case if two packages have the same value of the specified field, they will be always sorted
by name
@ -215,8 +215,8 @@ usage: ahriman help\-version [\-h]
print application and its dependencies versions
.SH COMMAND \fI\,'ahriman package\-add'\/\fR
usage: ahriman package\-add [\-h] [\-e] [\-n] [\-y] [\-s {auto,archive,aur,directory,local,remote,repository}]
[\-\-without\-dependencies]
usage: ahriman package\-add [\-h] [\-\-dependencies | \-\-no\-dependencies] [\-e] [\-n] [\-y]
[\-s {auto,archive,aur,directory,local,remote,repository}]
package [package ...]
add existing or new package to the build queue
@ -226,6 +226,10 @@ add existing or new package to the build queue
package source (base name, path to local files, remote URL)
.SH OPTIONS \fI\,'ahriman package\-add'\/\fR
.TP
\fB\-\-dependencies\fR, \fB\-\-no\-dependencies\fR
process missing package dependencies (default: True)
.TP
\fB\-e\fR, \fB\-\-exit\-code\fR
return non\-zero exit status if result is empty
@ -242,10 +246,6 @@ download fresh package databases from the mirror before actions, \-yy to force r
\fB\-s\fR \fI\,{auto,archive,aur,directory,local,remote,repository}\/\fR, \fB\-\-source\fR \fI\,{auto,archive,aur,directory,local,remote,repository}\/\fR
explicitly specify the package source for this command
.TP
\fB\-\-without\-dependencies\fR
do not add dependencies
.SH COMMAND \fI\,'ahriman package\-remove'\/\fR
usage: ahriman package\-remove [\-h] package [package ...]
@ -401,8 +401,8 @@ fetch actual version of VCS packages (default: True)
download fresh package databases from the mirror before actions, \-yy to force refresh even if up to date
.SH COMMAND \fI\,'ahriman repo\-daemon'\/\fR
usage: ahriman repo\-daemon [\-h] [\-i INTERVAL] [\-\-aur | \-\-no\-aur] [\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual]
[\-\-vcs | \-\-no\-vcs] [\-y]
usage: ahriman repo\-daemon [\-h] [\-i INTERVAL] [\-\-aur | \-\-no\-aur] [\-\-dependencies | \-\-no\-dependencies]
[\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual] [\-\-vcs | \-\-no\-vcs] [\-y]
start process which periodically will run update process
@ -415,6 +415,10 @@ interval between runs in seconds
\fB\-\-aur\fR, \fB\-\-no\-aur\fR
enable or disable checking for AUR updates (default: True)
.TP
\fB\-\-dependencies\fR, \fB\-\-no\-dependencies\fR
process missing package dependencies (default: True)
.TP
\fB\-\-local\fR, \fB\-\-no\-local\fR
enable or disable checking of local packages for updates (default: True)
@ -523,8 +527,8 @@ run triggers on empty build result as configured by settings
instead of running all triggers as set by configuration, just process specified ones in order of mention
.SH COMMAND \fI\,'ahriman repo\-update'\/\fR
usage: ahriman repo\-update [\-h] [\-\-dry\-run] [\-e] [\-\-aur | \-\-no\-aur] [\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual]
[\-\-vcs | \-\-no\-vcs] [\-y]
usage: ahriman repo\-update [\-h] [\-\-aur | \-\-no\-aur] [\-\-dependencies | \-\-no\-dependencies] [\-\-dry\-run] [\-e]
[\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual] [\-\-vcs | \-\-no\-vcs] [\-y]
[package ...]
check for packages updates and run build process if requested
@ -534,6 +538,14 @@ check for packages updates and run build process if requested
filter check by package base
.SH OPTIONS \fI\,'ahriman repo\-update'\/\fR
.TP
\fB\-\-aur\fR, \fB\-\-no\-aur\fR
enable or disable checking for AUR updates (default: True)
.TP
\fB\-\-dependencies\fR, \fB\-\-no\-dependencies\fR
process missing package dependencies (default: True)
.TP
\fB\-\-dry\-run\fR
just perform check for updates, same as check command
@ -542,10 +554,6 @@ just perform check for updates, same as check command
\fB\-e\fR, \fB\-\-exit\-code\fR
return non\-zero exit status if result is empty
.TP
\fB\-\-aur\fR, \fB\-\-no\-aur\fR
enable or disable checking for AUR updates (default: True)
.TP
\fB\-\-local\fR, \fB\-\-no\-local\fR
enable or disable checking of local packages for updates (default: True)
@ -590,10 +598,15 @@ clear directory with built packages (default: False)
clear directory with pacman local database cache (default: False)
.SH COMMAND \fI\,'ahriman service\-config'\/\fR
usage: ahriman service\-config [\-h]
usage: ahriman service\-config [\-h] [\-\-secure | \-\-no\-secure]
dump configuration for the specified architecture
.SH OPTIONS \fI\,'ahriman service\-config'\/\fR
.TP
\fB\-\-secure\fR, \fB\-\-no\-secure\fR
hide passwords and secrets from output (default: True)
.SH COMMAND \fI\,'ahriman service\-config\-validate'\/\fR
usage: ahriman service\-config\-validate [\-h] [\-e]

View File

@ -1,45 +0,0 @@
ahriman.core.database.data package
==================================
Submodules
----------
ahriman.core.database.data.package\_remotes module
--------------------------------------------------
.. automodule:: ahriman.core.database.data.package_remotes
:members:
:no-undoc-members:
:show-inheritance:
ahriman.core.database.data.package\_statuses module
---------------------------------------------------
.. automodule:: ahriman.core.database.data.package_statuses
:members:
:no-undoc-members:
:show-inheritance:
ahriman.core.database.data.patches module
-----------------------------------------
.. automodule:: ahriman.core.database.data.patches
:members:
:no-undoc-members:
:show-inheritance:
ahriman.core.database.data.users module
---------------------------------------
.. automodule:: ahriman.core.database.data.users
:members:
:no-undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: ahriman.core.database.data
:members:
:no-undoc-members:
:show-inheritance:

View File

@ -44,6 +44,22 @@ ahriman.core.database.migrations.m004\_logs module
:no-undoc-members:
:show-inheritance:
ahriman.core.database.migrations.m005\_make\_opt\_depends module
----------------------------------------------------------------
.. automodule:: ahriman.core.database.migrations.m005_make_opt_depends
:members:
:no-undoc-members:
:show-inheritance:
ahriman.core.database.migrations.m006\_packages\_architecture\_required module
------------------------------------------------------------------------------
.. automodule:: ahriman.core.database.migrations.m006_packages_architecture_required
:members:
:no-undoc-members:
:show-inheritance:
Module contents
---------------

View File

@ -7,7 +7,6 @@ Subpackages
.. toctree::
:maxdepth: 4
ahriman.core.database.data
ahriman.core.database.migrations
ahriman.core.database.operations

View File

@ -196,14 +196,6 @@ ahriman.models.user\_access module
:no-undoc-members:
:show-inheritance:
ahriman.models.user\_identity module
------------------------------------
.. automodule:: ahriman.models.user_identity
:members:
:no-undoc-members:
:show-inheritance:
Module contents
---------------

View File

@ -114,7 +114,7 @@ Schema and data migrations
The schema migrations are applied according to the current ``pragma user_version`` value; they are located in the ``ahriman.core.database.migrations`` package and named ``m000_migration_name.py`` (the preceding ``m`` is required in order to import migration content for tests). The ``ahriman.core.database.migrations.Migrations`` class reads all migrations automatically and applies them in alphabetical order.
There are also data migrations which are located in the ``ahriman.core.database.data`` package and move data from the old style (e.g. JSON files in the filesystem, directory trees, etc) to the database. They are also part of the migration process and (unlike schema migrations) are applied only at specific version breakpoints (e.g. if ``user_version`` is greater than 0, no initial migration will be applied).
These migrations can also contain data migrations. Though the recommended way is to migrate data directly with SQL statements, sometimes external data (like the packages list) is required in order to set correct values. To do so, the special method ``migrate_data`` is used.
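A minimal sketch of how a migration module could be laid out under these conventions (the module name, table, and ``migrate_data`` signature are illustrative, not the actual ahriman code):

    # hypothetical module: ahriman/core/database/migrations/m999_example.py
    from sqlite3 import Connection
    from typing import List

    # plain SQL statements, applied in order by the migrations runner
    steps: List[str] = [
        """alter table packages add column example_field text""",
    ]

    def migrate_data(connection: Connection) -> None:
        # data migration hook for cases where plain SQL is not enough,
        # e.g. when external data such as the packages list is required
        connection.execute("""update packages set example_field = ?""", ("default",))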
Type conversions
^^^^^^^^^^^^^^^^

View File

@ -10,9 +10,9 @@ _shtab_ahriman_help_commands_unsafe_option_strings=('-h' '--help' '--command')
_shtab_ahriman_help_updates_option_strings=('-h' '--help' '-e' '--exit-code')
_shtab_ahriman_help_version_option_strings=('-h' '--help')
_shtab_ahriman_version_option_strings=('-h' '--help')
_shtab_ahriman_package_add_option_strings=('-h' '--help' '-e' '--exit-code' '-n' '--now' '-y' '--refresh' '-s' '--source' '--without-dependencies')
_shtab_ahriman_add_option_strings=('-h' '--help' '-e' '--exit-code' '-n' '--now' '-y' '--refresh' '-s' '--source' '--without-dependencies')
_shtab_ahriman_package_update_option_strings=('-h' '--help' '-e' '--exit-code' '-n' '--now' '-y' '--refresh' '-s' '--source' '--without-dependencies')
_shtab_ahriman_package_add_option_strings=('-h' '--help' '--dependencies' '--no-dependencies' '-e' '--exit-code' '-n' '--now' '-y' '--refresh' '-s' '--source')
_shtab_ahriman_add_option_strings=('-h' '--help' '--dependencies' '--no-dependencies' '-e' '--exit-code' '-n' '--now' '-y' '--refresh' '-s' '--source')
_shtab_ahriman_package_update_option_strings=('-h' '--help' '--dependencies' '--no-dependencies' '-e' '--exit-code' '-n' '--now' '-y' '--refresh' '-s' '--source')
_shtab_ahriman_package_remove_option_strings=('-h' '--help')
_shtab_ahriman_remove_option_strings=('-h' '--help')
_shtab_ahriman_package_status_option_strings=('-h' '--help' '--ahriman' '-e' '--exit-code' '--info' '--no-info' '-s' '--status')
@ -27,8 +27,8 @@ _shtab_ahriman_patch_set_add_option_strings=('-h' '--help' '-t' '--track')
_shtab_ahriman_repo_backup_option_strings=('-h' '--help')
_shtab_ahriman_repo_check_option_strings=('-h' '--help' '-e' '--exit-code' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_check_option_strings=('-h' '--help' '-e' '--exit-code' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_repo_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_repo_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_repo_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '-e' '--exit-code')
_shtab_ahriman_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '-e' '--exit-code')
_shtab_ahriman_repo_remove_unknown_option_strings=('-h' '--help' '--dry-run')
@ -43,14 +43,14 @@ _shtab_ahriman_repo_sync_option_strings=('-h' '--help')
_shtab_ahriman_sync_option_strings=('-h' '--help')
_shtab_ahriman_repo_tree_option_strings=('-h' '--help')
_shtab_ahriman_repo_triggers_option_strings=('-h' '--help')
_shtab_ahriman_repo_update_option_strings=('-h' '--help' '--dry-run' '-e' '--exit-code' '--aur' '--no-aur' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_update_option_strings=('-h' '--help' '--dry-run' '-e' '--exit-code' '--aur' '--no-aur' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_repo_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
_shtab_ahriman_service_clean_option_strings=('-h' '--help' '--cache' '--no-cache' '--chroot' '--no-chroot' '--manual' '--no-manual' '--packages' '--no-packages' '--pacman' '--no-pacman')
_shtab_ahriman_clean_option_strings=('-h' '--help' '--cache' '--no-cache' '--chroot' '--no-chroot' '--manual' '--no-manual' '--packages' '--no-packages' '--pacman' '--no-pacman')
_shtab_ahriman_repo_clean_option_strings=('-h' '--help' '--cache' '--no-cache' '--chroot' '--no-chroot' '--manual' '--no-manual' '--packages' '--no-packages' '--pacman' '--no-pacman')
_shtab_ahriman_service_config_option_strings=('-h' '--help')
_shtab_ahriman_config_option_strings=('-h' '--help')
_shtab_ahriman_repo_config_option_strings=('-h' '--help')
_shtab_ahriman_service_config_option_strings=('-h' '--help' '--secure' '--no-secure')
_shtab_ahriman_config_option_strings=('-h' '--help' '--secure' '--no-secure')
_shtab_ahriman_repo_config_option_strings=('-h' '--help' '--secure' '--no-secure')
_shtab_ahriman_service_config_validate_option_strings=('-h' '--help' '-e' '--exit-code')
_shtab_ahriman_config_validate_option_strings=('-h' '--help' '-e' '--exit-code')
_shtab_ahriman_repo_config_validate_option_strings=('-h' '--help' '-e' '--exit-code')
@ -71,8 +71,8 @@ _shtab_ahriman_web_option_strings=('-h' '--help')
_shtab_ahriman_pos_0_choices=('aur-search' 'search' 'help' 'help-commands-unsafe' 'help-updates' 'help-version' 'version' 'package-add' 'add' 'package-update' 'package-remove' 'remove' 'package-status' 'status' 'package-status-remove' 'package-status-update' 'status-update' 'patch-add' 'patch-list' 'patch-remove' 'patch-set-add' 'repo-backup' 'repo-check' 'check' 'repo-daemon' 'daemon' 'repo-rebuild' 'rebuild' 'repo-remove-unknown' 'remove-unknown' 'repo-report' 'report' 'repo-restore' 'repo-sign' 'sign' 'repo-status-update' 'repo-sync' 'sync' 'repo-tree' 'repo-triggers' 'repo-update' 'update' 'service-clean' 'clean' 'repo-clean' 'service-config' 'config' 'repo-config' 'service-config-validate' 'config-validate' 'repo-config-validate' 'service-key-import' 'key-import' 'service-setup' 'init' 'repo-init' 'repo-setup' 'setup' 'service-shell' 'shell' 'user-add' 'user-list' 'user-remove' 'web')
_shtab_ahriman_aur_search___sort_by_choices=('description' 'first_submitted' 'id' 'last_modified' 'maintainer' 'name' 'num_votes' 'out_of_date' 'package_base' 'package_base_id' 'popularity' 'repository' 'url' 'url_path' 'version')
_shtab_ahriman_search___sort_by_choices=('description' 'first_submitted' 'id' 'last_modified' 'maintainer' 'name' 'num_votes' 'out_of_date' 'package_base' 'package_base_id' 'popularity' 'repository' 'url' 'url_path' 'version')
_shtab_ahriman_aur_search___sort_by_choices=('description' 'first_submitted' 'id' 'last_modified' 'maintainer' 'name' 'num_votes' 'out_of_date' 'package_base' 'package_base_id' 'popularity' 'repository' 'submitter' 'url' 'url_path' 'version')
_shtab_ahriman_search___sort_by_choices=('description' 'first_submitted' 'id' 'last_modified' 'maintainer' 'name' 'num_votes' 'out_of_date' 'package_base' 'package_base_id' 'popularity' 'repository' 'submitter' 'url' 'url_path' 'version')
_shtab_ahriman_package_add__s_choices=('auto' 'archive' 'aur' 'directory' 'local' 'remote' 'repository')
_shtab_ahriman_package_add___source_choices=('auto' 'archive' 'aur' 'directory' 'local' 'remote' 'repository')
_shtab_ahriman_add__s_choices=('auto' 'archive' 'aur' 'directory' 'local' 'remote' 'repository')
@ -139,33 +139,36 @@ _shtab_ahriman_version___help_nargs=0
_shtab_ahriman_package_add_pos_0_nargs=+
_shtab_ahriman_package_add__h_nargs=0
_shtab_ahriman_package_add___help_nargs=0
_shtab_ahriman_package_add___dependencies_nargs=0
_shtab_ahriman_package_add___no_dependencies_nargs=0
_shtab_ahriman_package_add__e_nargs=0
_shtab_ahriman_package_add___exit_code_nargs=0
_shtab_ahriman_package_add__n_nargs=0
_shtab_ahriman_package_add___now_nargs=0
_shtab_ahriman_package_add__y_nargs=0
_shtab_ahriman_package_add___refresh_nargs=0
_shtab_ahriman_package_add___without_dependencies_nargs=0
_shtab_ahriman_add_pos_0_nargs=+
_shtab_ahriman_add__h_nargs=0
_shtab_ahriman_add___help_nargs=0
_shtab_ahriman_add___dependencies_nargs=0
_shtab_ahriman_add___no_dependencies_nargs=0
_shtab_ahriman_add__e_nargs=0
_shtab_ahriman_add___exit_code_nargs=0
_shtab_ahriman_add__n_nargs=0
_shtab_ahriman_add___now_nargs=0
_shtab_ahriman_add__y_nargs=0
_shtab_ahriman_add___refresh_nargs=0
_shtab_ahriman_add___without_dependencies_nargs=0
_shtab_ahriman_package_update_pos_0_nargs=+
_shtab_ahriman_package_update__h_nargs=0
_shtab_ahriman_package_update___help_nargs=0
_shtab_ahriman_package_update___dependencies_nargs=0
_shtab_ahriman_package_update___no_dependencies_nargs=0
_shtab_ahriman_package_update__e_nargs=0
_shtab_ahriman_package_update___exit_code_nargs=0
_shtab_ahriman_package_update__n_nargs=0
_shtab_ahriman_package_update___now_nargs=0
_shtab_ahriman_package_update__y_nargs=0
_shtab_ahriman_package_update___refresh_nargs=0
_shtab_ahriman_package_update___without_dependencies_nargs=0
_shtab_ahriman_package_remove_pos_0_nargs=+
_shtab_ahriman_package_remove__h_nargs=0
_shtab_ahriman_package_remove___help_nargs=0
@ -231,6 +234,8 @@ _shtab_ahriman_repo_daemon__h_nargs=0
_shtab_ahriman_repo_daemon___help_nargs=0
_shtab_ahriman_repo_daemon___aur_nargs=0
_shtab_ahriman_repo_daemon___no_aur_nargs=0
_shtab_ahriman_repo_daemon___dependencies_nargs=0
_shtab_ahriman_repo_daemon___no_dependencies_nargs=0
_shtab_ahriman_repo_daemon___local_nargs=0
_shtab_ahriman_repo_daemon___no_local_nargs=0
_shtab_ahriman_repo_daemon___manual_nargs=0
@ -243,6 +248,8 @@ _shtab_ahriman_daemon__h_nargs=0
_shtab_ahriman_daemon___help_nargs=0
_shtab_ahriman_daemon___aur_nargs=0
_shtab_ahriman_daemon___no_aur_nargs=0
_shtab_ahriman_daemon___dependencies_nargs=0
_shtab_ahriman_daemon___no_dependencies_nargs=0
_shtab_ahriman_daemon___local_nargs=0
_shtab_ahriman_daemon___no_local_nargs=0
_shtab_ahriman_daemon___manual_nargs=0
@ -295,11 +302,13 @@ _shtab_ahriman_repo_triggers___help_nargs=0
_shtab_ahriman_repo_update_pos_0_nargs=*
_shtab_ahriman_repo_update__h_nargs=0
_shtab_ahriman_repo_update___help_nargs=0
_shtab_ahriman_repo_update___aur_nargs=0
_shtab_ahriman_repo_update___no_aur_nargs=0
_shtab_ahriman_repo_update___dependencies_nargs=0
_shtab_ahriman_repo_update___no_dependencies_nargs=0
_shtab_ahriman_repo_update___dry_run_nargs=0
_shtab_ahriman_repo_update__e_nargs=0
_shtab_ahriman_repo_update___exit_code_nargs=0
_shtab_ahriman_repo_update___aur_nargs=0
_shtab_ahriman_repo_update___no_aur_nargs=0
_shtab_ahriman_repo_update___local_nargs=0
_shtab_ahriman_repo_update___no_local_nargs=0
_shtab_ahriman_repo_update___manual_nargs=0
@ -311,11 +320,13 @@ _shtab_ahriman_repo_update___refresh_nargs=0
_shtab_ahriman_update_pos_0_nargs=*
_shtab_ahriman_update__h_nargs=0
_shtab_ahriman_update___help_nargs=0
_shtab_ahriman_update___aur_nargs=0
_shtab_ahriman_update___no_aur_nargs=0
_shtab_ahriman_update___dependencies_nargs=0
_shtab_ahriman_update___no_dependencies_nargs=0
_shtab_ahriman_update___dry_run_nargs=0
_shtab_ahriman_update__e_nargs=0
_shtab_ahriman_update___exit_code_nargs=0
_shtab_ahriman_update___aur_nargs=0
_shtab_ahriman_update___no_aur_nargs=0
_shtab_ahriman_update___local_nargs=0
_shtab_ahriman_update___no_local_nargs=0
_shtab_ahriman_update___manual_nargs=0
@ -362,10 +373,16 @@ _shtab_ahriman_repo_clean___pacman_nargs=0
_shtab_ahriman_repo_clean___no_pacman_nargs=0
_shtab_ahriman_service_config__h_nargs=0
_shtab_ahriman_service_config___help_nargs=0
_shtab_ahriman_service_config___secure_nargs=0
_shtab_ahriman_service_config___no_secure_nargs=0
_shtab_ahriman_config__h_nargs=0
_shtab_ahriman_config___help_nargs=0
_shtab_ahriman_config___secure_nargs=0
_shtab_ahriman_config___no_secure_nargs=0
_shtab_ahriman_repo_config__h_nargs=0
_shtab_ahriman_repo_config___help_nargs=0
_shtab_ahriman_repo_config___secure_nargs=0
_shtab_ahriman_repo_config___no_secure_nargs=0
_shtab_ahriman_service_config_validate__h_nargs=0
_shtab_ahriman_service_config_validate___help_nargs=0
_shtab_ahriman_service_config_validate__e_nargs=0

View File

@ -87,11 +87,11 @@ _shtab_ahriman_options=(
_shtab_ahriman_add_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: \%(default)s)]:dependencies:"
{-e,--exit-code}"[return non-zero exit status if result is empty]"
{-n,--now}"[run update function after]"
"*"{-y,--refresh}"[download fresh package databases from the mirror before actions, -yy to force refresh even if up to date]"
{-s,--source}"[explicitly specify the package source for this command]:source:(auto archive aur directory local remote repository)"
"--without-dependencies[do not add dependencies]"
"(*):package source (base name, path to local files, remote URL):"
)
@ -99,7 +99,7 @@ _shtab_ahriman_aur_search_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{-e,--exit-code}"[return non-zero exit status if result is empty]"
{--info,--no-info}"[show additional package information (default\: \%(default)s)]:info:"
"--sort-by[sort field by this field. In case if two packages have the same value of the specified field, they will be always sorted by name]:sort_by:(description first_submitted id last_modified maintainer name num_votes out_of_date package_base package_base_id popularity repository url url_path version)"
"--sort-by[sort field by this field. In case if two packages have the same value of the specified field, they will be always sorted by name]:sort_by:(description first_submitted id last_modified maintainer name num_votes out_of_date package_base package_base_id popularity repository submitter url url_path version)"
"(*):search terms, can be specified multiple times, the result will match all terms:"
)
@ -122,6 +122,7 @@ _shtab_ahriman_clean_options=(
_shtab_ahriman_config_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{--secure,--no-secure}"[hide passwords and secrets from output (default\: \%(default)s)]:secure:"
)
_shtab_ahriman_config_validate_options=(
@ -133,6 +134,7 @@ _shtab_ahriman_daemon_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{-i,--interval}"[interval between runs in seconds]:interval:"
{--aur,--no-aur}"[enable or disable checking for AUR updates (default\: \%(default)s)]:aur:"
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: \%(default)s)]:dependencies:"
{--local,--no-local}"[enable or disable checking of local packages for updates (default\: \%(default)s)]:local:"
{--manual,--no-manual}"[include or exclude manual updates (default\: \%(default)s)]:manual:"
{--vcs,--no-vcs}"[fetch actual version of VCS packages (default\: \%(default)s)]:vcs:"
@ -182,11 +184,11 @@ _shtab_ahriman_key_import_options=(
_shtab_ahriman_package_add_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: \%(default)s)]:dependencies:"
{-e,--exit-code}"[return non-zero exit status if result is empty]"
{-n,--now}"[run update function after]"
"*"{-y,--refresh}"[download fresh package databases from the mirror before actions, -yy to force refresh even if up to date]"
{-s,--source}"[explicitly specify the package source for this command]:source:(auto archive aur directory local remote repository)"
"--without-dependencies[do not add dependencies]"
"(*):package source (base name, path to local files, remote URL):"
)
@ -217,11 +219,11 @@ _shtab_ahriman_package_status_update_options=(
_shtab_ahriman_package_update_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: \%(default)s)]:dependencies:"
{-e,--exit-code}"[return non-zero exit status if result is empty]"
{-n,--now}"[run update function after]"
"*"{-y,--refresh}"[download fresh package databases from the mirror before actions, -yy to force refresh even if up to date]"
{-s,--source}"[explicitly specify the package source for this command]:source:(auto archive aur directory local remote repository)"
"--without-dependencies[do not add dependencies]"
"(*):package source (base name, path to local files, remote URL):"
)
@ -293,6 +295,7 @@ _shtab_ahriman_repo_clean_options=(
_shtab_ahriman_repo_config_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{--secure,--no-secure}"[hide passwords and secrets from output (default\: \%(default)s)]:secure:"
)
_shtab_ahriman_repo_config_validate_options=(
@ -304,6 +307,7 @@ _shtab_ahriman_repo_daemon_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{-i,--interval}"[interval between runs in seconds]:interval:"
{--aur,--no-aur}"[enable or disable checking for AUR updates (default\: \%(default)s)]:aur:"
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: \%(default)s)]:dependencies:"
{--local,--no-local}"[enable or disable checking of local packages for updates (default\: \%(default)s)]:local:"
{--manual,--no-manual}"[include or exclude manual updates (default\: \%(default)s)]:manual:"
{--vcs,--no-vcs}"[fetch actual version of VCS packages (default\: \%(default)s)]:vcs:"
@ -390,9 +394,10 @@ _shtab_ahriman_repo_triggers_options=(
_shtab_ahriman_repo_update_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{--aur,--no-aur}"[enable or disable checking for AUR updates (default\: \%(default)s)]:aur:"
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: \%(default)s)]:dependencies:"
"--dry-run[just perform check for updates, same as check command]"
{-e,--exit-code}"[return non-zero exit status if result is empty]"
{--aur,--no-aur}"[enable or disable checking for AUR updates (default\: \%(default)s)]:aur:"
{--local,--no-local}"[enable or disable checking of local packages for updates (default\: \%(default)s)]:local:"
{--manual,--no-manual}"[include or exclude manual updates (default\: \%(default)s)]:manual:"
{--vcs,--no-vcs}"[fetch actual version of VCS packages (default\: \%(default)s)]:vcs:"
@ -408,7 +413,7 @@ _shtab_ahriman_search_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{-e,--exit-code}"[return non-zero exit status if result is empty]"
{--info,--no-info}"[show additional package information (default\: \%(default)s)]:info:"
"--sort-by[sort field by this field. In case if two packages have the same value of the specified field, they will be always sorted by name]:sort_by:(description first_submitted id last_modified maintainer name num_votes out_of_date package_base package_base_id popularity repository url url_path version)"
"--sort-by[sort field by this field. In case if two packages have the same value of the specified field, they will be always sorted by name]:sort_by:(description first_submitted id last_modified maintainer name num_votes out_of_date package_base package_base_id popularity repository submitter url url_path version)"
"(*):search terms, can be specified multiple times, the result will match all terms:"
)
@ -423,6 +428,7 @@ _shtab_ahriman_service_clean_options=(
_shtab_ahriman_service_config_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{--secure,--no-secure}"[hide passwords and secrets from output (default\: \%(default)s)]:secure:"
)
_shtab_ahriman_service_config_validate_options=(
@ -504,9 +510,10 @@ _shtab_ahriman_sync_options=(
_shtab_ahriman_update_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{--aur,--no-aur}"[enable or disable checking for AUR updates (default\: \%(default)s)]:aur:"
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: \%(default)s)]:dependencies:"
"--dry-run[just perform check for updates, same as check command]"
{-e,--exit-code}"[return non-zero exit status if result is empty]"
{--aur,--no-aur}"[enable or disable checking for AUR updates (default\: \%(default)s)]:aur:"
{--local,--no-local}"[enable or disable checking of local packages for updates (default\: \%(default)s)]:local:"
{--manual,--no-manual}"[include or exclude manual updates (default\: \%(default)s)]:manual:"
{--vcs,--no-vcs}"[fetch actual version of VCS packages (default\: \%(default)s)]:vcs:"

View File

@ -28,6 +28,7 @@ Base configuration settings.
* ``include`` - path to directory with configuration files overrides, string, required.
* ``database`` - path to SQLite database, string, required.
* ``logging`` - path to logging configuration, string, required. Check ``logging.ini`` for reference.
* ``suppress_http_log_errors`` - suppress http log errors, boolean, optional, default ``no``. If set to ``yes``, any http log errors (e.g. if web server is not available, but http logging is enabled) will be suppressed.
``alpm`` group
--------------
@ -49,6 +50,7 @@ Base authorization settings. ``OAuth`` provider requires ``aioauth-client`` libr
* ``allow_read_only`` - allow requesting status APIs without authorization, boolean, required.
* ``client_id`` - OAuth2 application client ID, string, required in case if ``oauth`` is used.
* ``client_secret`` - OAuth2 application client secret key, string, required in case if ``oauth`` is used.
* ``cookie_secret_key`` - secret key which will be used for cookies encryption, string, optional. It must be 32 url-safe base64-encoded bytes and can be generated as following ``base64.urlsafe_b64encode(os.urandom(32)).decode("utf8")``. If not set, it will be generated automatically; note, however, that in this case, all sessions will be automatically expired during restart.
* ``max_age`` - parameter which controls both cookie expiration and token expiration inside the service, integer, optional, default is 7 days.
* ``oauth_provider`` - OAuth2 provider class name as is in ``aioauth-client`` (e.g. ``GoogleClient``, ``GithubClient`` etc), string, required in case if ``oauth`` is used.
* ``oauth_scopes`` - scopes list for OAuth2 provider, which will allow retrieving user email (which is used for checking user permissions), e.g. ``https://www.googleapis.com/auth/userinfo.email`` for ``GoogleClient`` or ``user:email`` for ``GithubClient``, space separated list of strings, required in case if ``oauth`` is used.
@ -67,7 +69,7 @@ Build related configuration. Group name can refer to architecture, e.g. ``build:
* ``makepkg_flags`` - additional flags passed to ``makepkg`` command, space separated list of strings, optional.
* ``makechrootpkg_flags`` - additional flags passed to ``makechrootpkg`` command, space separated list of strings, optional.
* ``triggers`` - list of ``ahriman.core.triggers.Trigger`` class implementation (e.g. ``ahriman.core.report.ReportTrigger ahriman.core.upload.UploadTrigger``) which will be loaded and run at the end of processing, space separated list of strings, optional. You can also specify triggers by their paths, e.g. ``/usr/lib/python3.10/site-packages/ahriman/core/report/report.py.ReportTrigger``. Triggers are run in the order of mention.
* ``vcs_allowed_age`` - maximal age in seconds of the VCS packages before their version will be updated with its remote source, int, optional, default ``0``.
* ``vcs_allowed_age`` - maximal age in seconds of the VCS packages before their version will be updated with its remote source, int, optional, default ``604800``.
``repository`` group
--------------------

View File

@ -88,4 +88,6 @@ Initial setup
.. code-block:: shell
sudo -u ahriman ahriman -a x86_64 package-add ahriman --now
sudo -u ahriman ahriman -a x86_64 package-add ahriman --now --refresh
The ``--refresh`` flag is required in order to handle local database update.

View File

@ -1,7 +1,7 @@
# Maintainer: Evgeniy Alekseev
pkgname='ahriman'
pkgver=2.6.1
pkgver=2.7.1
pkgrel=1
pkgdesc="ArcH linux ReposItory MANager"
arch=('any')

View File

@ -2,6 +2,7 @@
include = ahriman.ini.d
logging = ahriman.ini.d/logging.ini
database = /var/lib/ahriman/ahriman.db
suppress_http_log_errors = yes
[alpm]
database = /var/lib/pacman

View File

@ -245,6 +245,8 @@ def _set_package_add_parser(root: SubParserAction) -> argparse.ArgumentParser:
"5) and finally you can add package from AUR.",
formatter_class=_formatter)
parser.add_argument("package", help="package source (base name, path to local files, remote URL)", nargs="+")
parser.add_argument("--dependencies", help="process missing package dependencies",
action=argparse.BooleanOptionalAction, default=True)
parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
parser.add_argument("-n", "--now", help="run update function after", action="store_true")
parser.add_argument("-y", "--refresh", help="download fresh package databases from the mirror before actions, "
@ -252,7 +254,6 @@ def _set_package_add_parser(root: SubParserAction) -> argparse.ArgumentParser:
action="count", default=False)
parser.add_argument("-s", "--source", help="explicitly specify the package source for this command",
type=PackageSource, choices=enum_values(PackageSource), default=PackageSource.Auto)
parser.add_argument("--without-dependencies", help="do not add dependencies", action="store_true")
parser.set_defaults(handler=handlers.Add)
return parser
@ -472,7 +473,7 @@ def _set_repo_check_parser(root: SubParserAction) -> argparse.ArgumentParser:
parser.add_argument("-y", "--refresh", help="download fresh package databases from the mirror before actions, "
"-yy to force refresh even if up to date",
action="count", default=False)
parser.set_defaults(handler=handlers.Update, dry_run=True, aur=True, local=True, manual=False)
parser.set_defaults(handler=handlers.Update, dependencies=False, dry_run=True, aur=True, local=True, manual=False)
return parser
@ -492,6 +493,8 @@ def _set_repo_daemon_parser(root: SubParserAction) -> argparse.ArgumentParser:
parser.add_argument("-i", "--interval", help="interval between runs in seconds", type=int, default=60 * 60 * 12)
parser.add_argument("--aur", help="enable or disable checking for AUR updates",
action=argparse.BooleanOptionalAction, default=True)
parser.add_argument("--dependencies", help="process missing package dependencies",
action=argparse.BooleanOptionalAction, default=True)
parser.add_argument("--local", help="enable or disable checking of local packages for updates",
action=argparse.BooleanOptionalAction, default=True)
parser.add_argument("--manual", help="include or exclude manual updates",
@ -691,10 +694,12 @@ def _set_repo_update_parser(root: SubParserAction) -> argparse.ArgumentParser:
description="check for packages updates and run build process if requested",
formatter_class=_formatter)
parser.add_argument("package", help="filter check by package base", nargs="*")
parser.add_argument("--dry-run", help="just perform check for updates, same as check command", action="store_true")
parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
parser.add_argument("--aur", help="enable or disable checking for AUR updates",
action=argparse.BooleanOptionalAction, default=True)
parser.add_argument("--dependencies", help="process missing package dependencies",
action=argparse.BooleanOptionalAction, default=True)
parser.add_argument("--dry-run", help="just perform check for updates, same as check command", action="store_true")
parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
parser.add_argument("--local", help="enable or disable checking of local packages for updates",
action=argparse.BooleanOptionalAction, default=True)
parser.add_argument("--manual", help="include or exclude manual updates",
@ -750,6 +755,8 @@ def _set_service_config_parser(root: SubParserAction) -> argparse.ArgumentParser
parser = root.add_parser("service-config", aliases=["config", "repo-config"], help="dump configuration",
description="dump configuration for the specified architecture",
formatter_class=_formatter)
parser.add_argument("--secure", help="hide passwords and secrets from output",
action=argparse.BooleanOptionalAction, default=True)
parser.set_defaults(handler=handlers.Dump, lock=None, report=False, quiet=True, unsafe=True)
return parser

View File

@ -17,10 +17,11 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from typing import Set
from typing import Iterable, List, Set
from ahriman.application.application.application_packages import ApplicationPackages
from ahriman.application.application.application_repository import ApplicationRepository
from ahriman.models.package import Package
from ahriman.models.result import Result
@ -87,3 +88,39 @@ class Application(ApplicationPackages, ApplicationRepository):
directly as it will be called after on_start action
"""
self.repository.triggers.on_stop()
def with_dependencies(self, packages: List[Package], *, process_dependencies: bool) -> List[Package]:
"""
add missing dependencies to list of packages
Args:
packages(List[Package]): list of source packages of which dependencies have to be processed
process_dependencies(bool): if not set, dependencies will not be processed
"""
def missing_dependencies(source: Iterable[Package]) -> Set[str]:
# build initial list of dependencies
result = set()
for package in source:
result.update(package.depends_build)
# remove ones which are already well-known
result = result.difference(known_packages)
# remove ones which are in this list already
for package in source:
result = result.difference(package.packages_full)
return result
if not process_dependencies or not packages:
return packages
known_packages = self._known_packages()
with_dependencies = {package.base: package for package in packages}
while missing := missing_dependencies(with_dependencies.values()):
for package_name in missing:
package = Package.from_aur(package_name, self.repository.pacman)
with_dependencies[package.base] = package
return list(with_dependencies.values())
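A hedged usage sketch of the new helper, assuming an already initialized Application instance:

    # resolve the missing dependency closure before passing packages to update()
    packages = [Package.from_aur("ahriman", application.repository.pacman)]
    packages = application.with_dependencies(packages, process_dependencies=True)
    result = application.update(packages)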

View File

@ -21,8 +21,7 @@ import requests
import shutil
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Any, Iterable, Set
from typing import Any, Iterable
from ahriman.application.application.application_properties import ApplicationProperties
from ahriman.core.build_tools.sources import Sources
@ -48,24 +47,18 @@ class ApplicationPackages(ApplicationProperties):
dst = self.repository.paths.packages / local_path.name
shutil.copy(local_path, dst)
def _add_aur(self, source: str, known_packages: Set[str], without_dependencies: bool) -> None:
def _add_aur(self, source: str) -> None:
"""
add package from AUR
Args:
source(str): package base name
known_packages(Set[str]): list of packages which are known by the service
without_dependencies(bool): if set, dependency check will be disabled
"""
package = Package.from_aur(source, self.repository.pacman)
self.database.build_queue_insert(package)
self.database.remote_update(package)
with TemporaryDirectory(ignore_cleanup_errors=True) as dir_name, (local_dir := Path(dir_name)):
Sources.load(local_dir, package, self.database.patches_get(package.base), self.repository.paths)
self._process_dependencies(local_dir, known_packages, without_dependencies)
def _add_directory(self, source: str, *_: Any) -> None:
"""
add packages from directory
@ -77,25 +70,21 @@ class ApplicationPackages(ApplicationProperties):
for full_path in filter(package_like, local_dir.iterdir()):
self._add_archive(str(full_path))
def _add_local(self, source: str, known_packages: Set[str], without_dependencies: bool) -> None:
def _add_local(self, source: str) -> None:
"""
add package from local PKGBUILDs
Args:
source(str): path to directory with local source files
known_packages(Set[str]): list of packages which are known by the service
without_dependencies(bool): if set, dependency check will be disabled
"""
source_dir = Path(source)
package = Package.from_build(source_dir)
package = Package.from_build(source_dir, self.architecture)
cache_dir = self.repository.paths.cache_for(package.base)
shutil.copytree(source_dir, cache_dir) # copy package to store in caches
Sources.init(cache_dir) # we need to run init command in directory where we do have permissions
self.database.build_queue_insert(package)
self._process_dependencies(cache_dir, known_packages, without_dependencies)
def _add_remote(self, source: str, *_: Any) -> None:
"""
add package from remote sources (e.g. HTTP)
@ -121,50 +110,19 @@ class ApplicationPackages(ApplicationProperties):
package = Package.from_official(source, self.repository.pacman)
self.database.build_queue_insert(package)
self.database.remote_update(package)
# repository packages must not depend on unknown packages, thus we are not going to process dependencies
def _known_packages(self) -> Set[str]:
"""
load packages from repository and pacman repositories
Returns:
Set[str]: list of known packages
Raises:
NotImplementedError: not implemented method
"""
raise NotImplementedError
def _process_dependencies(self, local_dir: Path, known_packages: Set[str], without_dependencies: bool) -> None:
"""
process package dependencies
Args:
local_dir(Path): path to local package sources (i.e. cloned AUR repository)
known_packages(Set[str]): list of packages which are known by the service
without_dependencies(bool): if set, dependency check will be disabled
"""
if without_dependencies:
return
dependencies = Package.dependencies(local_dir)
self.add(dependencies.difference(known_packages), PackageSource.AUR, without_dependencies)
def add(self, names: Iterable[str], source: PackageSource, without_dependencies: bool) -> None:
def add(self, names: Iterable[str], source: PackageSource) -> None:
"""
add packages for the next build
Args:
names(Iterable[str]): list of package bases to add
source(PackageSource): package source to add
without_dependencies(bool): if set, dependency check will be disabled
"""
known_packages = self._known_packages() # speedup dependencies processing
for name in names:
resolved_source = source.resolve(name)
fn = getattr(self, f"_add_{resolved_source.value}")
fn(name, known_packages, without_dependencies)
fn(name)
def on_result(self, result: Result) -> None:
"""

View File

@ -111,7 +111,7 @@ class ApplicationRepository(ApplicationProperties):
def unknown_local(probe: Package) -> List[str]:
cache_dir = self.repository.paths.cache_for(probe.base)
local = Package.from_build(cache_dir)
local = Package.from_build(cache_dir, self.architecture)
packages = set(probe.packages.keys()).difference(local.packages.keys())
return list(packages)
@ -145,7 +145,7 @@ class ApplicationRepository(ApplicationProperties):
process_update(packages, build_result)
# process manual packages
tree = Tree.resolve(updates, self.repository.paths, self.database)
tree = Tree.resolve(updates)
for num, level in enumerate(tree):
self.logger.info("processing level #%i %s", num, [package.base for package in level])
build_result = self.repository.process_build(level)
@ -183,7 +183,7 @@ class ApplicationRepository(ApplicationProperties):
updated_packages = [package for _, package in sorted(updates.items())]
# reorder updates according to the dependency tree
tree = Tree.resolve(updated_packages, self.repository.paths, self.database)
tree = Tree.resolve(updated_packages)
for level in tree:
for package in level:
UpdatePrinter(package, local_versions.get(package.base)).print(

View File

@ -47,11 +47,12 @@ class Add(Handler):
application = Application(architecture, configuration,
report=report, unsafe=unsafe, refresh_pacman_database=args.refresh)
application.on_start()
application.add(args.package, args.source, args.without_dependencies)
application.add(args.package, args.source)
if not args.now:
return
packages = application.updates(args.package, aur=False, local=False, manual=True, vcs=False,
log_fn=application.logger.info)
packages = application.with_dependencies(packages, process_dependencies=args.dependencies)
result = application.update(packages)
Add.check_if_empty(args.exit_code, result.is_empty)

View File

@ -48,4 +48,4 @@ class Dump(Handler):
"""
dump = configuration.dump()
for section, values in sorted(dump.items()):
ConfigurationPrinter(section, values).print(verbose=False, separator=" = ")
ConfigurationPrinter(section, values).print(verbose=not args.secure, separator=" = ")

View File

@ -58,7 +58,7 @@ class Patch(Handler):
patch = Patch.patch_create_from_function(args.variable, args.patch)
Patch.patch_set_create(application, args.package, patch)
elif args.action == Action.Update and args.variable is None:
package_base, patch = Patch.patch_create_from_diff(args.package, args.track)
package_base, patch = Patch.patch_create_from_diff(args.package, architecture, args.track)
Patch.patch_set_create(application, package_base, patch)
elif args.action == Action.List:
Patch.patch_set_list(application, args.package, args.variable, args.exit_code)
@ -66,19 +66,20 @@ class Patch(Handler):
Patch.patch_set_remove(application, args.package, args.variable)
@staticmethod
def patch_create_from_diff(sources_dir: Path, track: List[str]) -> Tuple[str, PkgbuildPatch]:
def patch_create_from_diff(sources_dir: Path, architecture: str, track: List[str]) -> Tuple[str, PkgbuildPatch]:
"""
create PKGBUILD plain diff patches from sources directory
Args:
sources_dir(Path): path to directory with the package sources
architecture(str): repository architecture
track(List[str]): track files which match the glob before creating the patch
Returns:
Tuple[str, PkgbuildPatch]: package base and created PKGBUILD patch based on the diff from master HEAD
to current files
"""
package = Package.from_build(sources_dir)
package = Package.from_build(sources_dir, architecture)
patch = Sources.patch_create(sources_dir, *track)
return package.base, PkgbuildPatch(None, patch)

View File

@ -51,7 +51,7 @@ class RemoveUnknown(Handler):
if args.dry_run:
for package in sorted(unknown_packages):
StringPrinter(package).print(False)
StringPrinter(package).print(verbose=False)
return
application.remove(unknown_packages)

View File

@ -40,7 +40,7 @@ class Search(Handler):
"""
ALLOW_AUTO_ARCHITECTURE_RUN = False # it should be called only as "no-architecture"
SORT_FIELDS = {field.name for field in fields(AURPackage) if field.default_factory is not list}
SORT_FIELDS = {field.name for field in fields(AURPackage) if field.default_factory is not list} # type: ignore
@classmethod
def run(cls: Type[Handler], args: argparse.Namespace, architecture: str, configuration: Configuration, *,
@ -64,7 +64,7 @@ class Search(Handler):
for packages_list in (official_packages_list, aur_packages_list):
# keep sorting by packages source
for package in Search.sort(packages_list, args.sort_by):
AurPrinter(package).print(args.info)
AurPrinter(package).print(verbose=args.info)
@staticmethod
def sort(packages: Iterable[AURPackage], sort_by: str) -> List[AURPackage]:

View File

@ -53,7 +53,7 @@ class Status(Handler):
client = Application(architecture, configuration, report=True, unsafe=unsafe).repository.reporter
if args.ahriman:
service_status = client.get_internal()
StatusPrinter(service_status.status).print(args.info)
StatusPrinter(service_status.status).print(verbose=args.info)
if args.package:
packages: Iterable[Tuple[Package, BuildStatus]] = sum(
(client.get(base) for base in args.package),
@ -67,4 +67,4 @@ class Status(Handler):
filter_fn: Callable[[Tuple[Package, BuildStatus]], bool] =\
lambda item: args.status is None or item[1].status == args.status
for package, package_status in sorted(filter(filter_fn, packages), key=comparator):
PackagePrinter(package, package_status).print(args.info)
PackagePrinter(package, package_status).print(verbose=args.info)

View File

@ -51,6 +51,6 @@ class Structure(Handler):
application = Application(architecture, configuration, report=report, unsafe=unsafe)
packages = application.repository.packages()
tree = Tree.resolve(packages, application.repository.paths, application.database)
tree = Tree.resolve(packages)
for num, level in enumerate(tree):
TreePrinter(num, level).print(verbose=True, separator=" ")

View File

@ -53,6 +53,7 @@ class Update(Handler):
if args.dry_run:
return
packages = application.with_dependencies(packages, process_dependencies=args.dependencies)
result = application.update(packages)
Update.check_if_empty(args.exit_code, result.is_empty)

View File

@ -52,7 +52,7 @@ class Validate(Handler):
unsafe(bool): if set no user check will be performed before path creation
"""
schema = Validate.schema(architecture, configuration)
validator = Validator(instance=configuration, schema=schema)
validator = Validator(configuration=configuration, schema=schema)
if validator.validate(configuration.dump()):
return # no errors found

View File

@ -55,3 +55,7 @@ class Web(Handler):
application = setup_service(architecture, configuration, spawner)
run_server(application)
# terminate spawn process at the last
spawner.stop()
spawner.join()

View File

@ -68,7 +68,6 @@ class Lock(LazyLogging):
configuration(Configuration): configuration instance
"""
self.path = args.lock.with_stem(f"{args.lock.stem}_{architecture}") if args.lock is not None else None
print(self.path)
self.force = args.force
self.unsafe = args.unsafe

View File

@ -25,6 +25,7 @@ from typing import Any, Callable, Generator, Set
from ahriman.core.configuration import Configuration
from ahriman.core.log import LazyLogging
from ahriman.core.util import trim_package
from ahriman.models.repository_paths import RepositoryPaths
@ -175,8 +176,10 @@ class Pacman(LazyLogging):
result: Set[str] = set()
for database in self.handle.get_syncdbs():
for package in database.pkgcache:
result.add(package.name) # package itself
result.update(package.provides) # provides list for meta-packages
# package itself
result.add(package.name)
# provides list for meta-packages
result.update(trim_package(provides) for provides in package.provides)
return result

View File

@ -56,7 +56,7 @@ class Configuration(configparser.RawConfigParser):
architecture according to the merge rules. Moreover, the architecture names will be removed from section names.
In order to get current settings, the ``check_loaded`` method can be used. This method will raise an
``InitializeException`` in case if configuration was not yet loaded::
``InitializeError`` in case if configuration was not yet loaded::
>>> path, architecture = configuration.check_loaded()
"""
@ -165,7 +165,7 @@ class Configuration(configparser.RawConfigParser):
Tuple[Path, str]: configuration root path and architecture if loaded
Raises:
InitializeException: in case if architecture and/or path are not set
InitializeError: in case if architecture and/or path are not set
"""
if self.path is None or self.architecture is None:
raise InitializeError("Configuration path and/or architecture are not set")

View File

@ -47,6 +47,10 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
"required": True,
"path_exists": True,
},
"suppress_http_log_errors": {
"type": "boolean",
"coerce": "boolean",
}
},
},
"alpm": {
@ -60,6 +64,7 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
"mirror": {
"type": "string",
"required": True,
"is_url": [],
},
"repositories": {
"type": "list",
@ -105,9 +110,15 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
"client_secret": {
"type": "string",
},
"cookie_secret_key": {
"type": "string",
"minlength": 32,
"maxlength": 64, # we cannot verify maxlength, because base64 representation might be longer than bytes
},
"max_age": {
"type": "integer",
"coerce": "integer",
"min": 0,
},
"oauth_provider": {
"type": "string",
@ -155,6 +166,7 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
"vcs_allowed_age": {
"type": "integer",
"coerce": "integer",
"min": 0,
},
},
},
@ -197,6 +209,7 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
"schema": {
"address": {
"type": "string",
"is_url": ["http", "https"],
},
"debug": {
"type": "boolean",
@ -213,9 +226,11 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
},
"host": {
"type": "string",
"is_ip_address": ["localhost"],
},
"index_url": {
"type": "string",
"is_url": ["http", "https"],
},
"password": {
"type": "string",
@ -251,44 +266,4 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
},
},
},
"remote-pull": {
"type": "dict",
"schema": {
"target": {
"type": "list",
"coerce": "list",
"schema": {"type": "string"},
},
},
},
"remote-push": {
"type": "dict",
"schema": {
"target": {
"type": "list",
"coerce": "list",
"schema": {"type": "string"},
},
},
},
"report": {
"type": "dict",
"schema": {
"target": {
"type": "list",
"coerce": "list",
"schema": {"type": "string"},
},
},
},
"upload": {
"type": "dict",
"schema": {
"target": {
"type": "list",
"coerce": "list",
"schema": {"type": "string"},
},
},
},
}
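The added numeric and length constraints are stock cerberus rules; a small self-contained illustration with plain cerberus, using a callable coercer instead of the named coercers the service registers on its own validator:

from cerberus import Validator

schema = {
    "max_age": {"type": "integer", "coerce": int, "min": 0},
    "cookie_secret_key": {"type": "string", "minlength": 32, "maxlength": 64},
}
validator = Validator(schema)

print(validator.validate({"max_age": "3600"}))  # True, the string is coerced to a non-negative integer
print(validator.validate({"max_age": "-1"}))    # False, the coerced value violates the min rule
print(validator.errors)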

View File

@ -17,9 +17,12 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import ipaddress
from cerberus import TypeDefinition, Validator as RootValidator # type: ignore
from pathlib import Path
from typing import Any, List
from urllib.parse import urlparse
from ahriman.core.configuration import Configuration
@ -29,7 +32,7 @@ class Validator(RootValidator): # type: ignore
class which defines custom validation methods for the service configuration
Attributes:
instance(Configuration): configuration instance
configuration(Configuration): configuration instance
"""
types_mapping = RootValidator.types_mapping.copy()
@ -40,12 +43,12 @@ class Validator(RootValidator): # type: ignore
default constructor
Args:
instance(Configuration): configuration instance used for extraction
configuration(Configuration): configuration instance used for extraction
*args(Any): positional arguments to be passed to base validator
**kwargs(): keyword arguments to be passed to base validator
"""
RootValidator.__init__(self, *args, **kwargs)
self.instance: Configuration = kwargs["instance"]
self.configuration: Configuration = kwargs["configuration"]
def _normalize_coerce_absolute_path(self, value: str) -> Path:
"""
@ -57,7 +60,7 @@ class Validator(RootValidator): # type: ignore
Returns:
Path: value converted to path instance according to configuration rules
"""
converted: Path = self.instance.converters["path"](value)
converted: Path = self.configuration.converters["path"](value)
return converted
def _normalize_coerce_boolean(self, value: str) -> bool:
@ -71,7 +74,7 @@ class Validator(RootValidator): # type: ignore
bool: value converted to boolean according to configuration rules
"""
# pylint: disable=protected-access
converted: bool = self.instance._convert_to_boolean(value) # type: ignore
converted: bool = self.configuration._convert_to_boolean(value) # type: ignore
return converted
def _normalize_coerce_integer(self, value: str) -> int:
@ -97,9 +100,50 @@ class Validator(RootValidator): # type: ignore
Returns:
List[str]: value converted to string list instance according to configuration rules
"""
converted: List[str] = self.instance.converters["list"](value)
converted: List[str] = self.configuration.converters["list"](value)
return converted
def _validate_is_ip_address(self, constraint: List[str], field: str, value: str) -> None:
"""
check if the specified value is a valid IP address
Args:
constraint(List[str]): optional list of allowed special words (e.g. ``localhost``)
field(str): field name to be checked
value(str): value to be checked
Examples:
The rule's arguments are validated against this schema:
{"type": "list", "schema": {"type": "string"}}
"""
if value in constraint:
return
try:
ipaddress.ip_address(value)
except ValueError:
self._error(field, f"Value {value} must be valid IP address")
def _validate_is_url(self, constraint: List[str], field: str, value: str) -> None:
"""
check if the specified value is a valid url
Args:
constraint(List[str]): optional list of supported schemas. If empty, no schema validation will be performed
field(str): field name to be checked
value(str): value to be checked
Examples:
The rule's arguments are validated against this schema:
{"type": "list", "schema": {"type": "string"}}
"""
url = urlparse(value)  # it probably will never raise exceptions on parse
if not url.scheme:
self._error(field, f"Url scheme is not set for {value}")
if not url.netloc and url.scheme not in ("file",):
self._error(field, f"Location must be set for url {value} of scheme {url.scheme}")
if constraint and url.scheme not in constraint:
self._error(field, f"Url {value} scheme must be one of {constraint}")
def _validate_path_exists(self, constraint: bool, field: str, value: Path) -> None:
"""
check if path exists
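The new rules can also be exercised outside of the service; a rough standalone sketch of the is_url check on top of stock cerberus (the real validator additionally wires in configuration-aware coercers and the is_ip_address rule, both omitted here):

from typing import List
from urllib.parse import urlparse

from cerberus import Validator as RootValidator


class UrlValidator(RootValidator):
    def _validate_is_url(self, constraint: List[str], field: str, value: str) -> None:
        """
        check if the specified value is a valid url

        The rule's arguments are validated against this schema:
        {"type": "list", "schema": {"type": "string"}}
        """
        url = urlparse(value)
        if not url.scheme:
            self._error(field, f"Url scheme is not set for {value}")
        if constraint and url.scheme not in constraint:
            self._error(field, f"Url {value} scheme must be one of {constraint}")


validator = UrlValidator({"index_url": {"type": "string", "is_url": ["http", "https"]}})
print(validator.validate({"index_url": "https://example.org/repo"}))  # True
print(validator.validate({"index_url": "ftp://example.org/repo"}))    # False, scheme is not allowed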

View File

@ -1,47 +0,0 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from sqlite3 import Connection
from ahriman.core.configuration import Configuration
from ahriman.core.database.data.package_remotes import migrate_package_remotes
from ahriman.core.database.data.package_statuses import migrate_package_statuses
from ahriman.core.database.data.patches import migrate_patches
from ahriman.core.database.data.users import migrate_users_data
from ahriman.models.migration_result import MigrationResult
def migrate_data(result: MigrationResult, connection: Connection, configuration: Configuration) -> None:
"""
perform data migration
Args:
result(MigrationResult): result of the schema migration
connection(Connection): database connection
configuration(Configuration): configuration instance
"""
# initial data migration
repository_paths = configuration.repository_paths
if result.old_version <= 0:
migrate_package_statuses(connection, repository_paths)
migrate_patches(connection, repository_paths)
migrate_users_data(connection, configuration)
if result.old_version <= 1:
migrate_package_remotes(connection, repository_paths)

View File

@ -1,64 +0,0 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from sqlite3 import Connection
from ahriman.models.package_source import PackageSource
from ahriman.models.remote_source import RemoteSource
from ahriman.models.repository_paths import RepositoryPaths
__all__ = ["migrate_package_remotes"]
# pylint: disable=protected-access
def migrate_package_remotes(connection: Connection, paths: RepositoryPaths) -> None:
"""
perform migration for package remote sources
Args:
connection(Connection): database connection
paths(RepositoryPaths): repository paths instance
"""
from ahriman.core.database.operations import PackageOperations
def insert_remote(base: str, remote: RemoteSource) -> None:
connection.execute(
"""
update package_bases set
branch = :branch, git_url = :git_url, path = :path,
web_url = :web_url, source = :source
where package_base = :package_base
""",
dict(
package_base=base,
branch=remote.branch, git_url=remote.git_url, path=remote.path,
web_url=remote.web_url, source=remote.source
)
)
packages = PackageOperations._packages_get_select_package_bases(connection)
for package_base, package in packages.items():
local_cache = paths.cache_for(package_base)
if local_cache.exists() and not package.is_vcs:
continue # skip packages which are not VCS and with local cache
remote_source = RemoteSource.from_source(PackageSource.AUR, package_base, "aur")
if remote_source is None:
continue # should never happen
insert_remote(package_base, remote_source)

View File

@ -1,82 +0,0 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import json
from sqlite3 import Connection
from ahriman.models.build_status import BuildStatus
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
__all__ = ["migrate_package_statuses"]
def migrate_package_statuses(connection: Connection, paths: RepositoryPaths) -> None:
"""
perform migration for package statuses
Args:
connection(Connection): database connection
paths(RepositoryPaths): repository paths instance
"""
def insert_base(metadata: Package, last_status: BuildStatus) -> None:
connection.execute(
"""
insert into package_bases
(package_base, version, aur_url)
values
(:package_base, :version, :aur_url)
""",
dict(package_base=metadata.base, version=metadata.version, aur_url=""))
connection.execute(
"""
insert into package_statuses
(package_base, status, last_updated)
values
(:package_base, :status, :last_updated)""",
dict(package_base=metadata.base, status=last_status.status.value, last_updated=last_status.timestamp))
def insert_packages(metadata: Package) -> None:
package_list = []
for name, description in metadata.packages.items():
package_list.append(dict(package=name, package_base=metadata.base, **description.view()))
connection.executemany(
"""
insert into packages
(package, package_base, architecture, archive_size, build_date, depends, description,
filename, "groups", installed_size, licenses, provides, url)
values
(:package, :package_base, :architecture, :archive_size, :build_date, :depends, :description,
:filename, :groups, :installed_size, :licenses, :provides, :url)
""",
package_list)
cache_path = paths.root / "status_cache.json"
if not cache_path.is_file():
return # no file found
with cache_path.open() as cache:
dump = json.load(cache)
for item in dump.get("packages", []):
package = Package.from_json(item["package"])
status = BuildStatus.from_json(item["status"])
insert_base(package, status)
insert_packages(package)

View File

@ -1,47 +0,0 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from sqlite3 import Connection
from ahriman.models.repository_paths import RepositoryPaths
__all__ = ["migrate_patches"]
def migrate_patches(connection: Connection, paths: RepositoryPaths) -> None:
"""
perform migration for patches
Args:
connection(Connection): database connection
paths(RepositoryPaths): repository paths instance
"""
root = paths.root / "patches"
if not root.is_dir():
return # no directory found
for package in root.iterdir():
patch_path = package / "00-main.patch"
if not patch_path.is_file():
continue # not exist
content = patch_path.read_text(encoding="utf8")
connection.execute(
"""insert into patches (package_base, patch) values (:package_base, :patch)""",
{"package_base": package.name, "patch": content})

View File

@ -1,43 +0,0 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from sqlite3 import Connection
from ahriman.core.configuration import Configuration
__all__ = ["migrate_users_data"]
def migrate_users_data(connection: Connection, configuration: Configuration) -> None:
"""
perform migration for users
Args:
connection(Connection): database connection
configuration(Configuration): configuration instance
"""
for section in configuration.sections():
for option, value in configuration[section].items():
if not section.startswith("auth:"):
continue
access = section[5:]
connection.execute(
"""insert into users (username, access, password) values (:username, :access, :password)""",
{"username": option.lower(), "access": access, "password": value})

View File

@ -17,16 +17,13 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
from importlib import import_module
from pathlib import Path
from pkgutil import iter_modules
from sqlite3 import Connection
from typing import List, Type
from sqlite3 import Connection, Cursor
from typing import Callable, List
from ahriman.core.configuration import Configuration
from ahriman.core.database.data import migrate_data
from ahriman.core.log import LazyLogging
from ahriman.models.migration import Migration
from ahriman.models.migration_result import MigrationResult
@ -53,8 +50,8 @@ class Migrations(LazyLogging):
self.connection = connection
self.configuration = configuration
@classmethod
def migrate(cls: Type[Migrations], connection: Connection, configuration: Configuration) -> MigrationResult:
@staticmethod
def migrate(connection: Connection, configuration: Configuration) -> MigrationResult:
"""
perform migrations implicitly
@ -65,7 +62,26 @@ class Migrations(LazyLogging):
Returns:
MigrationResult: current schema version
"""
return cls(connection, configuration).run()
return Migrations(connection, configuration).run()
def migration(self, cursor: Cursor, migration: Migration) -> None:
"""
perform single migration
Args:
cursor(Cursor): connection cursor
migration(Migration): single migration to perform
"""
self.logger.info("applying table migration %s at index %s", migration.name, migration.index)
for statement in migration.steps:
cursor.execute(statement)
self.logger.info("table migration %s at index %s has been applied", migration.name, migration.index)
self.logger.info("perform data migration %s at index %s", migration.name, migration.index)
migration.migrate_data(self.connection, self.configuration)
self.logger.info(
"data migration %s at index %s has been performed",
migration.name, migration.index)
def migrations(self) -> List[Migration]:
"""
@ -81,9 +97,21 @@ class Migrations(LazyLogging):
for index, module_name in enumerate(sorted(modules)):
module = import_module(f"{__name__}.{module_name}")
steps: List[str] = getattr(module, "steps", [])
self.logger.debug("found migration %s at index %s with steps count %s", module_name, index, len(steps))
migrations.append(Migration(index=index, name=module_name, steps=steps))
migrate_data: Callable[[Connection, Configuration], None] = \
getattr(module, "migrate_data", lambda *args: None)
migrations.append(
Migration(
index=index,
name=module_name,
steps=steps,
migrate_data=migrate_data
)
)
return migrations
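With this loader a migration module only has to expose a steps list and, optionally, a migrate_data callback; modules without the callback fall back to the no-op lambda above. A hedged sketch of such a module (its name and the column it touches are invented):

# hypothetical file src/ahriman/core/database/migrations/m999_example.py
from sqlite3 import Connection

from ahriman.core.configuration import Configuration

__all__ = ["migrate_data", "steps"]


steps = [
    """
    alter table packages add column example_field text
    """,
]


def migrate_data(connection: Connection, configuration: Configuration) -> None:
    """
    optional data migration which runs right after the schema steps above
    """
    connection.execute("""update packages set example_field = :value""", {"value": "unset"})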
@ -110,13 +138,7 @@ class Migrations(LazyLogging):
try:
cursor.execute("begin exclusive")
for migration in migrations[current_version:]:
self.logger.info("applying migration %s at index %s", migration.name, migration.index)
for statement in migration.steps:
cursor.execute(statement)
self.logger.info("migration %s at index %s has been applied", migration.name, migration.index)
migrate_data(result, self.connection, self.configuration)
self.migration(cursor, migration)
cursor.execute(f"pragma user_version = {expected_version}") # no support for ? placeholders
except Exception:
self.logger.exception("migration failed with exception")

View File

@ -17,7 +17,17 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__all__ = ["steps"]
import json
from sqlite3 import Connection
from ahriman.core.configuration import Configuration
from ahriman.models.build_status import BuildStatus
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
__all__ = ["migrate_data", "steps"]
steps = [
@ -73,3 +83,109 @@ steps = [
)
""",
]
def migrate_data(connection: Connection, configuration: Configuration) -> None:
"""
perform data migration
Args:
connection(Connection): database connection
configuration(Configuration): configuration instance
"""
migrate_package_statuses(connection, configuration.repository_paths)
migrate_patches(connection, configuration.repository_paths)
migrate_users_data(connection, configuration)
def migrate_package_statuses(connection: Connection, paths: RepositoryPaths) -> None:
"""
perform migration for package statuses
Args:
connection(Connection): database connection
paths(RepositoryPaths): repository paths instance
"""
def insert_base(metadata: Package, last_status: BuildStatus) -> None:
connection.execute(
"""
insert into package_bases
(package_base, version, aur_url)
values
(:package_base, :version, :aur_url)
""",
{"package_base": metadata.base, "version": metadata.version, "aur_url": ""})
connection.execute(
"""
insert into package_statuses
(package_base, status, last_updated)
values
(:package_base, :status, :last_updated)""",
{"package_base": metadata.base, "status": last_status.status.value, "last_updated": last_status.timestamp})
def insert_packages(metadata: Package) -> None:
package_list = []
for name, description in metadata.packages.items():
package_list.append({"package": name, "package_base": metadata.base, **description.view()})
connection.executemany(
"""
insert into packages
(package, package_base, architecture, archive_size, build_date, depends, description,
filename, "groups", installed_size, licenses, provides, url)
values
(:package, :package_base, :architecture, :archive_size, :build_date, :depends, :description,
:filename, :groups, :installed_size, :licenses, :provides, :url)
""",
package_list)
cache_path = paths.root / "status_cache.json"
if not cache_path.is_file():
return # no file found
with cache_path.open() as cache:
dump = json.load(cache)
for item in dump.get("packages", []):
package = Package.from_json(item["package"])
status = BuildStatus.from_json(item["status"])
insert_base(package, status)
insert_packages(package)
def migrate_patches(connection: Connection, paths: RepositoryPaths) -> None:
"""
perform migration for patches
Args:
connection(Connection): database connection
paths(RepositoryPaths): repository paths instance
"""
root = paths.root / "patches"
if not root.is_dir():
return # no directory found
for package in root.iterdir():
patch_path = package / "00-main.patch"
if not patch_path.is_file():
continue  # does not exist
content = patch_path.read_text(encoding="utf8")
connection.execute(
"""insert into patches (package_base, patch) values (:package_base, :patch)""",
{"package_base": package.name, "patch": content})
def migrate_users_data(connection: Connection, configuration: Configuration) -> None:
"""
perform migration for users
Args:
connection(Connection): database connection
configuration(Configuration): configuration instance
"""
for section in configuration.sections():
for option, value in configuration[section].items():
if not section.startswith("auth:"):
continue
access = section[5:]
connection.execute(
"""insert into users (username, access, password) values (:username, :access, :password)""",
{"username": option.lower(), "access": access, "password": value})

View File

@ -17,7 +17,15 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__all__ = ["steps"]
from sqlite3 import Connection
from ahriman.core.configuration import Configuration
from ahriman.models.package_source import PackageSource
from ahriman.models.remote_source import RemoteSource
from ahriman.models.repository_paths import RepositoryPaths
__all__ = ["migrate_data", "steps"]
steps = [
@ -40,3 +48,51 @@ steps = [
alter table package_bases drop column aur_url
""",
]
def migrate_data(connection: Connection, configuration: Configuration) -> None:
"""
perform data migration
Args:
connection(Connection): database connection
configuration(Configuration): configuration instance
"""
migrate_package_remotes(connection, configuration.repository_paths)
# pylint: disable=protected-access
def migrate_package_remotes(connection: Connection, paths: RepositoryPaths) -> None:
"""
perform migration for package remote sources
Args:
connection(Connection): database connection
paths(RepositoryPaths): repository paths instance
"""
from ahriman.core.database.operations import PackageOperations
def insert_remote(base: str, remote: RemoteSource) -> None:
connection.execute(
"""
update package_bases set
branch = :branch, git_url = :git_url, path = :path,
web_url = :web_url, source = :source
where package_base = :package_base
""",
{
"package_base": base,
"branch": remote.branch, "git_url": remote.git_url, "path": remote.path,
"web_url": remote.web_url, "source": remote.source
}
)
packages = PackageOperations._packages_get_select_package_bases(connection)
for package_base, package in packages.items():
local_cache = paths.cache_for(package_base)
if local_cache.exists() and not package.is_vcs:
continue  # skip non-VCS packages which already have a local cache
remote_source = RemoteSource.from_source(PackageSource.AUR, package_base, "aur")
if remote_source is None:
continue # should never happen
insert_remote(package_base, remote_source)

View File

@ -0,0 +1,83 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from sqlite3 import Connection
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.core.util import package_like
from ahriman.models.package import Package
__all__ = ["migrate_data", "steps"]
steps = [
"""
alter table packages add column make_depends json
""",
"""
alter table packages add column opt_depends json
""",
]
def migrate_data(connection: Connection, configuration: Configuration) -> None:
"""
perform data migration
Args:
connection(Connection): database connection
configuration(Configuration): configuration instance
"""
migrate_package_depends(connection, configuration)
def migrate_package_depends(connection: Connection, configuration: Configuration) -> None:
"""
migrate package opt and make depends fields
Args:
connection(Connection): database connection
configuration(Configuration): configuration instance
"""
if not configuration.repository_paths.repository.is_dir():
return
_, architecture = configuration.check_loaded()
pacman = Pacman(architecture, configuration, refresh_database=False)
package_list = []
for full_path in filter(package_like, configuration.repository_paths.repository.iterdir()):
base = Package.from_archive(full_path, pacman, remote=None)
for package, description in base.packages.items():
package_list.append({
"make_depends": description.make_depends,
"opt_depends": description.opt_depends,
"package": package,
})
connection.executemany(
"""
update packages set
make_depends = :make_depends, opt_depends = :opt_depends
where package = :package
""",
package_list
)

View File

@ -0,0 +1,53 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__all__ = ["steps"]
steps = [
"""
alter table packages rename to packages_
""",
"""
create table packages (
package text not null,
package_base text not null,
architecture text not null,
archive_size integer,
build_date integer,
depends json,
description text,
filename text,
"groups" json,
installed_size integer,
licenses json,
provides json,
url text,
make_depends json,
opt_depends json,
unique (package, architecture)
)
""",
"""
insert into packages select * from packages_ where architecture is not null;
""",
"""
drop table packages_;
""",
]

View File

@ -71,12 +71,12 @@ class LogsOperations(Operations):
values
(:package_base, :process_id, :created, :record)
""",
dict(
package_base=log_record_id.package_base,
process_id=log_record_id.process_id,
created=created,
record=record
)
{
"package_base": log_record_id.package_base,
"process_id": log_record_id.process_id,
"created": created,
"record": record,
}
)
return self.with_connection(run, commit=True)

View File

@ -82,15 +82,15 @@ class PackageOperations(Operations):
on conflict (package_base) do update set
version = :version, branch = :branch, git_url = :git_url, path = :path, web_url = :web_url, source = :source
""",
dict(
package_base=package.base,
version=package.version,
branch=package.remote.branch if package.remote is not None else None,
git_url=package.remote.git_url if package.remote is not None else None,
path=package.remote.path if package.remote is not None else None,
web_url=package.remote.web_url if package.remote is not None else None,
source=package.remote.source.value if package.remote is not None else None,
)
{
"package_base": package.base,
"version": package.version,
"branch": package.remote.branch if package.remote is not None else None,
"git_url": package.remote.git_url if package.remote is not None else None,
"path": package.remote.path if package.remote is not None else None,
"web_url": package.remote.web_url if package.remote is not None else None,
"source": package.remote.source.value if package.remote is not None else None,
}
)
@staticmethod
@ -104,21 +104,26 @@ class PackageOperations(Operations):
"""
package_list = []
for name, description in package.packages.items():
package_list.append(dict(package=name, package_base=package.base, **description.view()))
if description.architecture is None:
continue # architecture is required
package_list.append({"package": name, "package_base": package.base, **description.view()})
connection.executemany(
"""
insert into packages
(package, package_base, architecture, archive_size,
build_date, depends, description, filename,
"groups", installed_size, licenses, provides, url)
"groups", installed_size, licenses, provides,
url, make_depends, opt_depends)
values
(:package, :package_base, :architecture, :archive_size,
:build_date, :depends, :description, :filename,
:groups, :installed_size, :licenses, :provides, :url)
:groups, :installed_size, :licenses, :provides,
:url, :make_depends, :opt_depends)
on conflict (package, architecture) do update set
package_base = :package_base, archive_size = :archive_size,
build_date = :build_date, depends = :depends, description = :description, filename = :filename,
"groups" = :groups, installed_size = :installed_size, licenses = :licenses, provides = :provides, url = :url
"groups" = :groups, installed_size = :installed_size, licenses = :licenses, provides = :provides,
url = :url, make_depends = :make_depends, opt_depends = :opt_depends
""",
package_list)
@ -140,7 +145,7 @@ class PackageOperations(Operations):
on conflict (package_base) do update set
status = :status, last_updated = :last_updated
""",
dict(package_base=package_base, status=status.status.value, last_updated=status.timestamp))
{"package_base": package_base, "status": status.status.value, "last_updated": status.timestamp})
@staticmethod
def _packages_get_select_package_bases(connection: Connection) -> Dict[str, Package]:

View File

@ -28,9 +28,18 @@ class ConfigurationPrinter(StringPrinter):
print content of the configuration section
Attributes:
HIDE_KEYS(List[str]): (class attribute) hide values for mentioned keys. This list must be used in order to hide
passwords from outputs
values(Dict[str, str]): configuration values dictionary
"""
HIDE_KEYS = [
"api_key", # telegram key
"client_secret", # oauth secret
"password", # generic password (github, email, web server, etc)
"secret_key", # aws secret key
]
def __init__(self, section: str, values: Dict[str, str]) -> None:
"""
default constructor
@ -50,6 +59,6 @@ class ConfigurationPrinter(StringPrinter):
List[Property]: list of content properties
"""
return [
Property(key, value, is_required=True)
Property(key, value, is_required=key not in self.HIDE_KEYS)
for key, value in sorted(self.values.items())
]

View File

@ -27,7 +27,7 @@ class Printer:
base class for formatters
"""
def print(self, verbose: bool, log_fn: Callable[[str], None] = print, separator: str = ": ") -> None:
def print(self, *, verbose: bool, log_fn: Callable[[str], None] = print, separator: str = ": ") -> None:
"""
print content
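Since verbose is now keyword-only, every call site has to spell the argument name; a tiny generic illustration of the bare * marker (the class below is just a stand-in, not the ahriman printer):

class DummyPrinter:
    def print(self, *, verbose: bool, separator: str = ": ") -> None:
        # parameters after the bare * can only be passed by keyword
        print(f"verbose{separator}{verbose}")


DummyPrinter().print(verbose=True)   # prints "verbose: True"
# DummyPrinter().print(True)         # TypeError: positional arguments are rejected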

View File

@ -33,6 +33,16 @@ class RemotePullTrigger(Trigger):
"""
CONFIGURATION_SCHEMA = {
"remote-pull": {
"type": "dict",
"schema": {
"target": {
"type": "list",
"coerce": "list",
"schema": {"type": "string"},
},
},
},
"gitremote": {
"type": "dict",
"schema": {

View File

@ -82,7 +82,10 @@ class RemotePush(LazyLogging):
Sources.fetch(package_target_dir, package.remote)
# ...and last, but not least, we remove the dot-git directory...
for git_file in package_target_dir.glob(".git*"):
shutil.rmtree(package_target_dir / git_file)
if git_file.is_file():
git_file.unlink()
else:
shutil.rmtree(git_file)
# ...copy all patches...
for patch in self.database.patches_get(package.base):
filename = f"ahriman-{package.base}.patch" if patch.key is None else f"ahriman-{patch.key}.patch"
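The previous implementation passed regular files such as .gitignore to shutil.rmtree, which only works on directories; a self-contained sketch of the fixed cleanup:

import shutil
from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as dir_name:
    package_target_dir = Path(dir_name)
    (package_target_dir / ".git").mkdir()
    (package_target_dir / ".gitignore").write_text("*.pkg.tar.zst\n")
    # remove every git related entry, whether it is a file or a directory
    for git_file in package_target_dir.glob(".git*"):
        if git_file.is_file():
            git_file.unlink()
        else:
            shutil.rmtree(git_file)
    assert not list(package_target_dir.glob(".git*"))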

View File

@ -38,6 +38,16 @@ class RemotePushTrigger(Trigger):
"""
CONFIGURATION_SCHEMA = {
"remote-push": {
"type": "dict",
"schema": {
"target": {
"type": "list",
"coerce": "list",
"schema": {"type": "string"},
},
},
},
"gitremote": {
"type": "dict",
"schema": {

View File

@ -31,22 +31,25 @@ class HttpLogHandler(logging.Handler):
Attributes:
reporter(Client): build status reporter instance
suppress_errors(bool): suppress logging errors (e.g. if no web server available)
"""
def __init__(self, configuration: Configuration, *, report: bool) -> None:
def __init__(self, configuration: Configuration, *, report: bool, suppress_errors: bool) -> None:
"""
default constructor
Args:
configuration(Configuration): configuration instance
report(bool): force enable or disable reporting
suppress_errors(bool): suppress logging errors (e.g. if no web server available)
"""
# we don't really care about those parameters because they will be handled by the reporter
logging.Handler.__init__(self)
# client has to be importer here because of circular imports
# client has to be imported here because of circular imports
from ahriman.core.status.client import Client
self.reporter = Client.load(configuration, report=report)
self.suppress_errors = suppress_errors
@classmethod
def load(cls, configuration: Configuration, *, report: bool) -> HttpLogHandler:
@ -62,7 +65,8 @@ class HttpLogHandler(logging.Handler):
if (handler := next((handler for handler in root.handlers if isinstance(handler, cls)), None)) is not None:
return handler # there is already registered instance
handler = cls(configuration, report=report)
suppress_errors = configuration.getboolean("settings", "suppress_http_log_errors", fallback=False)
handler = cls(configuration, report=report, suppress_errors=suppress_errors)
root.addHandler(handler)
return handler
@ -81,4 +85,6 @@ class HttpLogHandler(logging.Handler):
try:
self.reporter.logs(package_base, record)
except Exception:
if self.suppress_errors:
return
self.handleError(record)

View File

@ -35,6 +35,16 @@ class ReportTrigger(Trigger):
"""
CONFIGURATION_SCHEMA = {
"report": {
"type": "dict",
"schema": {
"target": {
"type": "list",
"coerce": "list",
"schema": {"type": "string"},
},
},
},
"console": {
"type": "dict",
"schema": {
@ -62,6 +72,7 @@ class ReportTrigger(Trigger):
},
"homepage": {
"type": "string",
"is_url": ["http", "https"],
},
"host": {
"type": "string",
@ -70,6 +81,7 @@ class ReportTrigger(Trigger):
"link_path": {
"type": "string",
"required": True,
"is_url": [],
},
"no_empty_report": {
"type": "boolean",
@ -82,6 +94,8 @@ class ReportTrigger(Trigger):
"type": "integer",
"coerce": "integer",
"required": True,
"min": 0,
"max": 65535,
},
"receivers": {
"type": "list",
@ -118,10 +132,12 @@ class ReportTrigger(Trigger):
},
"homepage": {
"type": "string",
"is_url": ["http", "https"],
},
"link_path": {
"type": "string",
"required": True,
"is_url": [],
},
"path": {
"type": "path",
@ -153,10 +169,12 @@ class ReportTrigger(Trigger):
},
"homepage": {
"type": "string",
"is_url": ["http", "https"],
},
"link_path": {
"type": "string",
"required": True,
"is_url": [],
},
"template_path": {
"type": "path",
@ -171,6 +189,7 @@ class ReportTrigger(Trigger):
"timeout": {
"type": "integer",
"coerce": "integer",
"min": 0,
},
},
},

View File

@ -98,7 +98,7 @@ class UpdateHandler(Cleaner):
with self.in_package_context(cache_dir.name):
try:
Sources.fetch(cache_dir, remote=None)
remote = Package.from_build(cache_dir)
remote = Package.from_build(cache_dir, self.architecture)
local = packages.get(remote.base)
if local is None:

View File

@ -60,7 +60,7 @@ class Spawn(Thread, LazyLogging):
self.lock = Lock()
self.active: Dict[str, Process] = {}
# stupid pylint does not know that it is possible
self.queue: Queue[Tuple[str, bool]] = Queue() # pylint: disable=unsubscriptable-object
self.queue: Queue[Tuple[str, bool] | None] = Queue() # pylint: disable=unsubscriptable-object
@staticmethod
def process(callback: Callable[[argparse.Namespace, str], bool], args: argparse.Namespace, architecture: str,
@ -78,55 +78,7 @@ class Spawn(Thread, LazyLogging):
result = callback(args, architecture)
queue.put((process_id, result))
def key_import(self, key: str, server: Optional[str]) -> None:
"""
import key to service cache
Args:
key(str): key to import
server(str): PGP key server
"""
kwargs = {} if server is None else {"key-server": server}
self.spawn_process("service-key-import", key, **kwargs)
def packages_add(self, packages: Iterable[str], *, now: bool) -> None:
"""
add packages
Args:
packages(Iterable[str]): packages list to add
now(bool): build packages now
"""
kwargs = {"source": PackageSource.AUR.value} # avoid abusing by building non-aur packages
if now:
kwargs["now"] = ""
self.spawn_process("package-add", *packages, **kwargs)
def packages_rebuild(self, depends_on: str) -> None:
"""
rebuild packages which depend on the specified package
Args:
depends_on(str): packages dependency
"""
self.spawn_process("repo-rebuild", **{"depends-on": depends_on})
def packages_remove(self, packages: Iterable[str]) -> None:
"""
remove packages
Args:
packages(Iterable[str]): packages list to remove
"""
self.spawn_process("package-remove", *packages)
def packages_update(self, ) -> None:
"""
run full repository update
"""
self.spawn_process("repo-update")
def spawn_process(self, command: str, *args: str, **kwargs: str) -> None:
def _spawn_process(self, command: str, *args: str, **kwargs: str) -> None:
"""
spawn external ahriman process with supplied arguments
@ -161,6 +113,54 @@ class Spawn(Thread, LazyLogging):
with self.lock:
self.active[process_id] = process
def key_import(self, key: str, server: Optional[str]) -> None:
"""
import key to service cache
Args:
key(str): key to import
server(str): PGP key server
"""
kwargs = {} if server is None else {"key-server": server}
self._spawn_process("service-key-import", key, **kwargs)
def packages_add(self, packages: Iterable[str], *, now: bool) -> None:
"""
add packages
Args:
packages(Iterable[str]): packages list to add
now(bool): build packages now
"""
kwargs = {"source": PackageSource.AUR.value} # avoid abusing by building non-aur packages
if now:
kwargs["now"] = ""
self._spawn_process("package-add", *packages, **kwargs)
def packages_rebuild(self, depends_on: str) -> None:
"""
rebuild packages which depend on the specified package
Args:
depends_on(str): packages dependency
"""
self._spawn_process("repo-rebuild", **{"depends-on": depends_on})
def packages_remove(self, packages: Iterable[str]) -> None:
"""
remove packages
Args:
packages(Iterable[str]): packages list to remove
"""
self._spawn_process("package-remove", *packages)
def packages_update(self, ) -> None:
"""
run full repository update
"""
self._spawn_process("repo-update")
def run(self) -> None:
"""
thread run method
@ -174,3 +174,9 @@ class Spawn(Thread, LazyLogging):
if process is not None:
process.terminate() # make sure lol
process.join()
def stop(self) -> None:
"""
gracefully terminate thread
"""
self.queue.put(None)
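The graceful stop relies on a None sentinel which run() consumes from the queue; a reduced self-contained model of that pattern (the real class additionally tracks and terminates spawned processes):

from queue import Queue
from threading import Thread
from typing import Optional, Tuple


class Consumer(Thread):
    def __init__(self) -> None:
        Thread.__init__(self)
        self.queue: "Queue[Optional[Tuple[str, bool]]]" = Queue()

    def run(self) -> None:
        # iter() with a sentinel stops as soon as None is read from the queue
        for process_id, result in iter(self.queue.get, None):
            print(f"process {process_id} finished with result {result}")

    def stop(self) -> None:
        self.queue.put(None)


consumer = Consumer()
consumer.start()
consumer.queue.put(("build-1", True))
consumer.stop()   # enqueue the sentinel...
consumer.join()   # ...and wait until the thread drains the queue and exits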

View File

@ -21,14 +21,9 @@ from __future__ import annotations
import itertools
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Callable, Iterable, List, Set, Tuple, Type
from typing import Callable, Iterable, List, Tuple
from ahriman.core.build_tools.sources import Sources
from ahriman.core.database import SQLite
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
class Leaf:
@ -40,16 +35,15 @@ class Leaf:
package(Package): leaf package properties
"""
def __init__(self, package: Package, dependencies: Set[str]) -> None:
def __init__(self, package: Package) -> None:
"""
default constructor
Args:
package(Package): package properties
dependencies(Set[str]): package dependencies
"""
self.package = package
self.dependencies = dependencies
self.dependencies = package.depends_build
@property
def items(self) -> Iterable[str]:
@ -61,24 +55,6 @@ class Leaf:
"""
return self.package.packages.keys()
@classmethod
def load(cls: Type[Leaf], package: Package, paths: RepositoryPaths, database: SQLite) -> Leaf:
"""
load leaf from package with dependencies
Args:
package(Package): package properties
paths(RepositoryPaths): repository paths instance
database(SQLite): database instance
Returns:
Leaf: loaded class
"""
with TemporaryDirectory(ignore_cleanup_errors=True) as dir_name, (clone_dir := Path(dir_name)):
Sources.load(clone_dir, package, database.patches_get(package.base), paths)
dependencies = Package.dependencies(clone_dir)
return cls(package, dependencies)
def is_dependency(self, packages: Iterable[Leaf]) -> bool:
"""
check if the package is dependency of any other package from list or not
@ -130,7 +106,7 @@ class Tree:
>>> repository = Repository.load("x86_64", configuration, database, report=True, unsafe=False)
>>> packages = repository.packages()
>>>
>>> tree = Tree.resolve(packages, configuration.repository_paths, database)
>>> tree = Tree.resolve(packages)
>>> for tree_level in tree:
>>> for package in tree_level:
>>> print(package.base)
@ -138,14 +114,8 @@ class Tree:
The direct constructor call is also possible but requires tree leaves to be instantiated in advance, e.g.::
>>> leaves = [Leaf.load(package, database) for package in packages]
>>> leaves = [Leaf(package) for package in packages]
>>> tree = Tree(leaves)
Using the default ``Leaf()`` method is possible, but not really recommended because it requires from the user to
build the dependency list by himself::
>>> leaf = Leaf(package, dependecies)
>>> tree = Tree([leaf])
"""
def __init__(self, leaves: List[Leaf]) -> None:
@ -157,23 +127,20 @@ class Tree:
"""
self.leaves = leaves
@classmethod
def resolve(cls: Type[Tree], packages: Iterable[Package], paths: RepositoryPaths,
database: SQLite) -> List[List[Package]]:
@staticmethod
def resolve(packages: Iterable[Package]) -> List[List[Package]]:
"""
resolve dependency tree
Args:
packages(Iterable[Package]): packages list
paths(RepositoryPaths): repository paths instance
database(SQLite): database instance
Returns:
List[List[Package]]: list of packages lists based on their dependencies
"""
leaves = [Leaf.load(package, paths, database) for package in packages]
tree = cls(leaves)
return tree.levels()
leaves = [Leaf(package) for package in packages]
instance = Tree(leaves)
return instance.levels()
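Conceptually the resolver now just layers packages by their depends_build sets; a rough standalone equivalent of the levelling step with invented package names (dependency cycles are not handled here):

from typing import Dict, List, Set

# package base -> build dependencies which are provided by other bases in the same set
depends_build: Dict[str, Set[str]] = {
    "python-aur": set(),
    "ahriman": {"python-aur"},
    "ahriman-web": {"ahriman"},
}

unprocessed = dict(depends_build)
levels: List[List[str]] = []
while unprocessed:
    # a package can be built once none of its dependencies are still waiting
    level = [base for base, depends in unprocessed.items() if not depends.intersection(unprocessed)]
    levels.append(sorted(level))
    for base in level:
        del unprocessed[base]

print(levels)  # [['python-aur'], ['ahriman'], ['ahriman-web']]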
def levels(self) -> List[List[Package]]:
"""

View File

@ -35,6 +35,16 @@ class UploadTrigger(Trigger):
"""
CONFIGURATION_SCHEMA = {
"upload": {
"type": "dict",
"schema": {
"target": {
"type": "list",
"coerce": "list",
"schema": {"type": "string"},
},
},
},
"github": {
"type": "dict",
"schema": {
@ -57,6 +67,7 @@ class UploadTrigger(Trigger):
"timeout": {
"type": "integer",
"coerce": "integer",
"min": 0,
},
"username": {
"type": "string",
@ -101,6 +112,7 @@ class UploadTrigger(Trigger):
"chunk_size": {
"type": "integer",
"coerce": "integer",
"min": 0,
},
"region": {
"type": "string",

View File

@ -35,7 +35,7 @@ from ahriman.models.repository_paths import RepositoryPaths
__all__ = ["check_output", "check_user", "enum_values", "exception_response_text", "filter_json", "full_version",
"package_like", "pretty_datetime", "pretty_size", "safe_filename", "utcnow", "walk"]
"package_like", "pretty_datetime", "pretty_size", "safe_filename", "trim_package", "utcnow", "walk"]
def check_output(*args: str, exception: Optional[Exception] = None, cwd: Optional[Path] = None,
@ -295,6 +295,23 @@ def safe_filename(source: str) -> str:
return re.sub(r"[^A-Za-z\d\-._~:\[\]@]", "-", source)
def trim_package(package_name: str) -> str:
"""
remove the version bound and description from a package name. Pacman allows specifying a version bound (=, <=, >= etc) for
packages in dependencies and also allows attaching a description (via :); this function removes those trailing parts and
returns the exact package name
Args:
package_name(str): source package name
Returns:
str: package name without description or version bound
"""
for symbol in ("<", "=", ">", ":"):
package_name = package_name.partition(symbol)[0]
return package_name
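A few illustrative calls, following the pacman syntax described above (package names are arbitrary):

>>> from ahriman.core.util import trim_package
>>> trim_package("python>=3.10")
'python'
>>> trim_package("java-environment=17")
'java-environment'
>>> trim_package("gcc-libs: required for plugin support")
'gcc-libs'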
def utcnow() -> datetime.datetime:
"""
get current time

View File

@ -45,6 +45,7 @@ class AURPackage:
popularity(float): package popularity
out_of_date(Optional[datetime.datetime]): package out of date timestamp if any
maintainer(Optional[str]): package maintainer
submitter(Optional[str]): package first submitter
first_submitted(datetime.datetime): timestamp of the first package submission
last_modified(datetime.datetime): timestamp of the last package submission
url_path(str): AUR package path
@ -89,6 +90,7 @@ class AURPackage:
url: Optional[str] = None
out_of_date: Optional[datetime.datetime] = None
maintainer: Optional[str] = None
submitter: Optional[str] = None
repository: str = "aur"
depends: List[str] = field(default_factory=list)
make_depends: List[str] = field(default_factory=list)
@ -140,6 +142,7 @@ class AURPackage:
url=package.url,
out_of_date=None,
maintainer=None,
submitter=None,
repository=package.db.name,
depends=package.depends,
make_depends=package.makedepends,
@ -178,6 +181,7 @@ class AURPackage:
dump["flag_date"],
"%Y-%m-%dT%H:%M:%S.%fZ") if dump["flag_date"] is not None else None,
maintainer=next(iter(dump["maintainers"]), None),
submitter=None,
repository=dump["repo"],
depends=dump["depends"],
make_depends=dump["makedepends"],

View File

@ -57,7 +57,8 @@ class InternalStatus:
InternalStatus: internal status
"""
counters = Counters.from_json(dump["packages"]) if "packages" in dump else Counters(total=0)
return cls(status=BuildStatus.from_json(dump.get("status", {})),
build_status = dump.get("status") or {}
return cls(status=BuildStatus.from_json(build_status),
architecture=dump.get("architecture"),
packages=counters,
repository=dump.get("repository"),
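The switch from dump.get("status", {}) to dump.get("status") or {} matters when the key is present but explicitly null; a quick comparison:

>>> dump = {"status": None}
>>> print(dump.get("status", {}))    # the default is only used when the key is missing
None
>>> print(dump.get("status") or {})  # also falls back when the stored value is None
{}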

View File

@ -18,7 +18,10 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from dataclasses import dataclass
from typing import List
from sqlite3 import Connection
from typing import Callable, List
from ahriman.core.configuration import Configuration
@dataclass(frozen=True, kw_only=True)
@ -30,8 +33,10 @@ class Migration:
index(int): migration position
name(str): migration name
steps(List[str]): migration steps
migrate_data(Callable[[Connection, Configuration], None]): data migration callback
"""
index: int
name: str
steps: List[str]
migrate_data: Callable[[Connection, Configuration], None]

View File

@ -17,7 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# pylint: disable=too-many-lines
# pylint: disable=too-many-lines,too-many-public-methods
from __future__ import annotations
import copy
@ -88,6 +88,36 @@ class Package(LazyLogging):
"""
return sorted(set(sum((package.depends for package in self.packages.values()), start=[])))
@property
def depends_build(self) -> Set[str]:
"""
get full list of external dependencies which have to be installed for the build process
Returns:
Set[str]: full dependencies list used by devtools
"""
return (set(self.depends) | set(self.depends_make)).difference(self.packages_full)
@property
def depends_make(self) -> List[str]:
"""
get package make dependencies
Returns:
List[str]: sum of make dependencies per each package
"""
return sorted(set(sum((package.make_depends for package in self.packages.values()), start=[])))
@property
def depends_opt(self) -> List[str]:
"""
get package optional dependencies
Returns:
List[str]: sum of optional dependencies per each package
"""
return sorted(set(sum((package.opt_depends for package in self.packages.values()), start=[])))
@property
def groups(self) -> List[str]:
"""
@ -133,6 +163,20 @@ class Package(LazyLogging):
"""
return sorted(set(sum((package.licenses for package in self.packages.values()), start=[])))
@property
def packages_full(self) -> List[str]:
"""
get full packages list including provides
Returns:
List[str]: full list of packages which this base contains
"""
packages = set()
for package, properties in self.packages.items():
packages.add(package)
packages.update(properties.provides)
return sorted(packages)
@classmethod
def from_archive(cls: Type[Package], path: Path, pacman: Pacman, remote: Optional[RemoteSource]) -> Package:
"""
@ -168,15 +212,16 @@ class Package(LazyLogging):
base=package.package_base,
version=package.version,
remote=remote,
packages={package.name: PackageDescription()})
packages={package.name: PackageDescription.from_aur(package)})
@classmethod
def from_build(cls: Type[Package], path: Path) -> Package:
def from_build(cls: Type[Package], path: Path, architecture: str) -> Package:
"""
construct package properties from sources directory
Args:
path(Path): path to package sources directory
architecture(str): load package for specific architecture
Returns:
Package: package properties
@ -188,7 +233,21 @@ class Package(LazyLogging):
srcinfo, errors = parse_srcinfo(srcinfo_source)
if errors:
raise PackageInfoError(errors)
packages = {key: PackageDescription() for key in srcinfo["packages"]}
def get_property(key: str, properties: Dict[str, Any], default: Any) -> Any:
return properties.get(key) or srcinfo.get(key) or default
def get_list(key: str, properties: Dict[str, Any]) -> Any:
return get_property(key, properties, []) + get_property(f"{key}_{architecture}", properties, [])
packages = {
package: PackageDescription(
depends=get_list("depends", properties),
make_depends=get_list("makedepends", properties),
opt_depends=get_list("optdepends", properties),
)
for package, properties in srcinfo["packages"].items()
}
version = full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"])
return cls(base=srcinfo["pkgbase"], version=version, remote=None, packages=packages)
@ -204,11 +263,12 @@ class Package(LazyLogging):
Returns:
Package: package properties
"""
packages_json = dump.get("packages") or {}
packages = {
key: PackageDescription.from_json(value)
for key, value in dump.get("packages", {}).items()
for key, value in packages_json.items()
}
remote = dump.get("remote", {})
remote = dump.get("remote") or {}
return cls(base=dump["base"], version=dump["version"], remote=RemoteSource.from_json(remote), packages=packages)
@classmethod
@ -230,43 +290,7 @@ class Package(LazyLogging):
base=package.package_base,
version=package.version,
remote=remote,
packages={package.name: PackageDescription()})
@staticmethod
def dependencies(path: Path) -> Set[str]:
"""
load dependencies from package sources
Args:
path(Path): path to package sources directory
Returns:
Set[str]: list of package dependencies including makedepends array, but excluding packages from this base
Raises:
InvalidPackageInfo: if there are parsing errors
"""
# additional function to remove versions from dependencies
def extract_packages(raw_packages_list: List[str]) -> Set[str]:
return {trim_version(package_name) for package_name in raw_packages_list}
def trim_version(package_name: str) -> str:
for symbol in ("<", "=", ">"):
package_name = package_name.split(symbol)[0]
return package_name
srcinfo_source = Package._check_output("makepkg", "--printsrcinfo", cwd=path)
srcinfo, errors = parse_srcinfo(srcinfo_source)
if errors:
raise PackageInfoError(errors)
makedepends = extract_packages(srcinfo.get("makedepends", []))
# sum over each package
depends = extract_packages(srcinfo.get("depends", []))
for package in srcinfo["packages"].values():
depends |= extract_packages(package.get("depends", []))
# we are not interested in dependencies inside pkgbase
packages = set(srcinfo["packages"].keys())
return (depends | makedepends) - packages
packages={package.name: PackageDescription.from_aur(package)})
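To make the new properties concrete: for a split package the build dependencies are the union of depends and makedepends minus everything the base itself provides. A sketch with invented values:

depends = {"python-aiohttp", "python-srcinfo"}
make_depends = {"python-build", "python-installer"}
# names and provides of the packages produced by this base
packages_full = {"ahriman", "ahriman-core", "ahriman-web"}

depends_build = (depends | make_depends) - packages_full
print(sorted(depends_build))
# ['python-aiohttp', 'python-build', 'python-installer', 'python-srcinfo']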
@staticmethod
def supported_architectures(path: Path) -> Set[str]:

View File

@ -24,7 +24,8 @@ from pathlib import Path
from pyalpm import Package # type: ignore
from typing import Any, Dict, List, Optional, Type
from ahriman.core.util import filter_json
from ahriman.core.util import filter_json, trim_package
from ahriman.models.aur_package import AURPackage
@dataclass(kw_only=True)
@ -37,6 +38,8 @@ class PackageDescription:
archive_size(Optional[int]): package archive size
build_date(Optional[int]): package build date
depends(List[str]): package dependencies list
opt_depends(List[str]): optional package dependencies list
make_depends(List[str]): package dependencies list used for building
description(Optional[str]): package description
filename(Optional[str]): package archive name
groups(List[str]): package groups
@ -67,6 +70,8 @@ class PackageDescription:
archive_size: Optional[int] = None
build_date: Optional[int] = None
depends: List[str] = field(default_factory=list)
make_depends: List[str] = field(default_factory=list)
opt_depends: List[str] = field(default_factory=list)
description: Optional[str] = None
filename: Optional[str] = None
groups: List[str] = field(default_factory=list)
@ -75,6 +80,14 @@ class PackageDescription:
provides: List[str] = field(default_factory=list)
url: Optional[str] = None
def __post_init__(self) -> None:
"""
update dependencies list accordingly
"""
self.depends = [trim_package(package) for package in self.depends]
self.opt_depends = [trim_package(package) for package in self.opt_depends]
self.make_depends = [trim_package(package) for package in self.make_depends]
@property
def filepath(self) -> Optional[Path]:
"""
@ -85,6 +98,27 @@ class PackageDescription:
"""
return Path(self.filename) if self.filename is not None else None
@classmethod
def from_aur(cls: Type[PackageDescription], package: AURPackage) -> PackageDescription:
"""
construct properties from AUR package model
Args:
package(AURPackage): AUR package model
Returns:
PackageDescription: package properties based on source AUR package
"""
return cls(
depends=package.depends,
make_depends=package.make_depends,
opt_depends=package.opt_depends,
description=package.description,
licenses=package.license,
provides=package.provides,
url=package.url,
)
@classmethod
def from_json(cls: Type[PackageDescription], dump: Dict[str, Any]) -> PackageDescription:
"""
@ -117,13 +151,16 @@ class PackageDescription:
archive_size=package.size,
build_date=package.builddate,
depends=package.depends,
make_depends=package.makedepends,
opt_depends=package.optdepends,
description=package.desc,
filename=path.name,
groups=package.groups,
installed_size=package.isize,
licenses=package.licenses,
provides=package.provides,
url=package.url)
url=package.url,
)
def view(self) -> Dict[str, Any]:
"""

View File

@ -1,102 +0,0 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
import time
from dataclasses import dataclass
from typing import Optional, Type
@dataclass(frozen=True)
class UserIdentity:
"""
user identity used inside web service
Attributes:
username(str): username
expire_at(int): identity expiration timestamp
"""
username: str
expire_at: int
@classmethod
def from_identity(cls: Type[UserIdentity], identity: str) -> Optional[UserIdentity]:
"""
parse identity into object
Args:
identity(str): identity from session data
Returns:
Optional[UserIdentity]: user identity object if it can be parsed and not expired and None otherwise
"""
try:
username, expire_at = identity.split()
user = cls(username, int(expire_at))
return None if user.is_expired() else user
except ValueError:
return None
@classmethod
def from_username(cls: Type[UserIdentity], username: Optional[str], max_age: int) -> Optional[UserIdentity]:
"""
generate identity from username
Args:
username(Optional[str]): username
max_age(int): time to expire, seconds
Returns:
Optional[UserIdentity]: constructed identity object
"""
return cls(username, cls.expire_when(max_age)) if username is not None else None
@staticmethod
def expire_when(max_age: int) -> int:
"""
generate expiration time using delta
Args:
max_age(int): time delta to generate. Must be usually TTE
Returns:
int: expiration timestamp
"""
return int(time.time()) + max_age
def is_expired(self) -> bool:
"""
compare timestamp with current timestamp and return True in case if identity is expired
Returns:
bool: True in case if identity is expired and False otherwise
"""
return self.expire_when(0) > self.expire_at
def to_identity(self) -> str:
"""
convert object to identity representation
Returns:
str: web service identity
"""
return f"{self.username} {self.expire_at}"

View File

@ -17,4 +17,4 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__version__ = "2.6.1"
__version__ = "2.7.1"

View File

@ -18,7 +18,6 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import aiohttp_security # type: ignore
import base64
import socket
import types
@ -32,12 +31,12 @@ from cryptography import fernet
from typing import Optional
from ahriman.core.auth import Auth
from ahriman.core.configuration import Configuration
from ahriman.models.user_access import UserAccess
from ahriman.models.user_identity import UserIdentity
from ahriman.web.middlewares import HandlerType, MiddlewareType
__all__ = ["AuthorizationPolicy", "auth_handler", "setup_auth"]
__all__ = ["AuthorizationPolicy", "auth_handler", "cookie_secret_key", "setup_auth"]
class AuthorizationPolicy(aiohttp_security.AbstractAuthorizationPolicy): # type: ignore
@ -67,10 +66,7 @@ class AuthorizationPolicy(aiohttp_security.AbstractAuthorizationPolicy): # type
Returns:
Optional[str]: user identity (username) in case if user exists and None otherwise
"""
user = UserIdentity.from_identity(identity)
if user is None:
return None
return user.username if await self.validator.known_username(user.username) else None
return identity if await self.validator.known_username(identity) else None
async def permits(self, identity: str, permission: UserAccess, context: Optional[str] = None) -> bool:
"""
@ -84,10 +80,7 @@ class AuthorizationPolicy(aiohttp_security.AbstractAuthorizationPolicy): # type
Returns:
bool: True in case if user is allowed to perform this request and False otherwise
"""
user = UserIdentity.from_identity(identity)
if user is None:
return False
return await self.validator.verify_access(user.username, permission, context)
return await self.validator.verify_access(identity, permission, context)
def auth_handler(allow_read_only: bool) -> MiddlewareType:
@ -125,19 +118,36 @@ def auth_handler(allow_read_only: bool) -> MiddlewareType:
return handle
def setup_auth(application: web.Application, validator: Auth) -> web.Application:
def cookie_secret_key(configuration: Configuration) -> fernet.Fernet:
"""
extract cookie secret key from configuration if set or generate new one
Args:
configuration(Configuration): configuration instance
Returns:
fernet.Fernet: fernet key instance
"""
if (secret_key := configuration.get("auth", "cookie_secret_key", fallback=None)) is not None:
return fernet.Fernet(secret_key)
secret_key = fernet.Fernet.generate_key()
return fernet.Fernet(secret_key)
def setup_auth(application: web.Application, configuration: Configuration, validator: Auth) -> web.Application:
"""
setup authorization policies for the application
Args:
application(web.Application): web application instance
configuration(Configuration): configuration instance
validator(Auth): authorization module instance
Returns:
web.Application: configured web application
"""
fernet_key = fernet.Fernet.generate_key()
secret_key = base64.urlsafe_b64decode(fernet_key)
secret_key = cookie_secret_key(configuration)
storage = EncryptedCookieStorage(secret_key, cookie_name="API_SESSION", max_age=validator.max_age)
setup_session(application, storage)
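As a side note, a value for the new cookie_secret_key option can be produced with the same cryptography primitives this middleware uses; a minimal sketch (where exactly the option lives in the configuration file is not shown here):

from cryptography.fernet import Fernet

key = Fernet.generate_key()      # urlsafe base64-encoded 32-byte key
print(key.decode("utf8"))        # this value can be stored as cookie_secret_key
Fernet(key)                      # round-trip check: the stored value is accepted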

View File

@ -21,7 +21,6 @@ from aiohttp.web import HTTPFound, HTTPMethodNotAllowed, HTTPUnauthorized
from ahriman.core.auth.helpers import remember
from ahriman.models.user_access import UserAccess
from ahriman.models.user_identity import UserIdentity
from ahriman.web.views.base import BaseView
@ -64,10 +63,9 @@ class LoginView(BaseView):
raise HTTPFound(oauth_provider.get_oauth_url())
response = HTTPFound("/")
username = await oauth_provider.get_oauth_username(code)
identity = UserIdentity.from_username(username, self.validator.max_age)
if identity is not None and await self.validator.known_username(username):
await remember(self.request, response, identity.to_identity())
identity = await oauth_provider.get_oauth_username(code)
if identity is not None and await self.validator.known_username(identity):
await remember(self.request, response, identity)
raise response
raise HTTPUnauthorized()
@ -111,12 +109,11 @@ class LoginView(BaseView):
302: Found
"""
data = await self.extract_data()
username = data.get("username")
identity = data.get("username")
response = HTTPFound("/")
identity = UserIdentity.from_username(username, self.validator.max_age)
if identity is not None and await self.validator.check_credentials(username, data.get("password")):
await remember(self.request, response, identity.to_identity())
if identity is not None and await self.validator.check_credentials(identity, data.get("password")):
await remember(self.request, response, identity)
raise response
raise HTTPUnauthorized()

View File

@ -90,7 +90,7 @@ async def on_startup(application: web.Application) -> None:
application(web.Application): web application instance
Raises:
InitializeException: in case if matched could not be loaded
InitializeError: in case if matched could not be loaded
"""
application.logger.info("server started")
try:
@ -115,7 +115,7 @@ def run_server(application: web.Application) -> None:
port = configuration.getint("web", "port")
unix_socket = create_socket(configuration, application)
web.run_app(application, host=host, port=port, sock=unix_socket, handle_signals=False,
web.run_app(application, host=host, port=port, sock=unix_socket, handle_signals=True,
access_log=logging.getLogger("http"), access_log_class=FilteredAccessLogger)
@ -168,6 +168,6 @@ def setup_service(architecture: str, configuration: Configuration, spawner: Spaw
validator = application["validator"] = Auth.load(configuration, database)
if validator.enabled:
from ahriman.web.middlewares.auth_handler import setup_auth
setup_auth(application, validator)
setup_auth(application, configuration, validator)
return application
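A minimal standalone aiohttp sketch (not ahriman's actual setup) showing why handle_signals=True matters here: SIGINT/SIGTERM are caught by run_app and on_shutdown callbacks get a chance to run, which is what lets the socket and queue be released gracefully:

from aiohttp import web

async def handle(request: web.Request) -> web.Response:
    return web.Response(text="ok")

async def on_shutdown(application: web.Application) -> None:
    application.logger.info("server terminated")   # cleanup would go here

app = web.Application()
app.router.add_get("/", handle)
app.on_shutdown.append(on_shutdown)
web.run_app(app, host="127.0.0.1", port=8080, handle_signals=True)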

View File

@ -1,4 +1,5 @@
from pytest_mock import MockerFixture
from unittest.mock import MagicMock, call as MockCall
from ahriman.application.application import Application
from ahriman.models.package import Package
@ -44,3 +45,55 @@ def test_on_stop(application: Application, mocker: MockerFixture) -> None:
application.on_stop()
triggers_mock.assert_called_once_with()
def test_with_dependencies(application: Application, package_ahriman: Package, package_python_schedule: Package,
mocker: MockerFixture) -> None:
"""
must append list of missing dependencies
"""
def create_package_mock(package_base) -> MagicMock:
mock = MagicMock()
mock.base = package_base
mock.depends_build = []
mock.packages_full = [package_base]
return mock
package_python_schedule.packages = {
package_python_schedule.base: package_python_schedule.packages[package_python_schedule.base]
}
package_ahriman.packages[package_ahriman.base].depends = ["devtools", "python", package_python_schedule.base]
package_ahriman.packages[package_ahriman.base].make_depends = ["python-build", "python-installer"]
packages = {
package_ahriman.base: package_ahriman,
package_python_schedule.base: package_python_schedule,
"python": create_package_mock("python"),
"python-installer": create_package_mock("python-installer"),
}
package_mock = mocker.patch("ahriman.models.package.Package.from_aur", side_effect=lambda p, _: packages[p])
packages_mock = mocker.patch("ahriman.application.application.Application._known_packages",
return_value=["devtools", "python-build"])
result = application.with_dependencies([package_ahriman], process_dependencies=True)
assert {package.base: package for package in result} == packages
package_mock.assert_has_calls([
MockCall(package_python_schedule.base, application.repository.pacman),
MockCall("python", application.repository.pacman),
MockCall("python-installer", application.repository.pacman),
], any_order=True)
packages_mock.assert_called_once_with()
def test_with_dependencies_skip(application: Application, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip processing of dependencies
"""
packages_mock = mocker.patch("ahriman.application.application.Application._known_packages")
assert application.with_dependencies([package_ahriman], process_dependencies=False) == [package_ahriman]
packages_mock.assert_not_called()
assert application.with_dependencies([], process_dependencies=True) == []
packages_mock.assert_not_called()
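The dependency walk exercised by these tests can be summarised by the following standalone sketch (simplified, not ahriman's actual implementation; the fetch callback stands in for Package.from_aur and the package names are taken from the fixtures above):

from dataclasses import dataclass, field
from typing import Callable, Dict, List, Set

@dataclass
class Pkg:
    base: str
    depends: List[str] = field(default_factory=list)
    make_depends: List[str] = field(default_factory=list)

def with_dependencies(requested: List[Pkg], known: Set[str],
                      fetch: Callable[[str], Pkg]) -> Dict[str, Pkg]:
    result: Dict[str, Pkg] = {package.base: package for package in requested}
    while True:
        missing = {
            dependency
            for package in result.values()
            for dependency in package.depends + package.make_depends
            if dependency not in known and dependency not in result
        }
        if not missing:
            return result
        for dependency in missing:
            result[dependency] = fetch(dependency)   # ahriman pulls these from AUR

packages = {
    "ahriman": Pkg("ahriman", depends=["devtools", "python"], make_depends=["python-build"]),
    "python": Pkg("python"),
    "python-build": Pkg("python-build"),
}
# "devtools" is already known to the repository, so only python and python-build get pulled in
print(sorted(with_dependencies([packages["ahriman"]], {"devtools"}, packages.__getitem__)))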

View File

@ -2,7 +2,7 @@ import pytest
from pathlib import Path
from pytest_mock import MockerFixture
from unittest.mock import MagicMock
from unittest.mock import MagicMock, call as MockCall
from ahriman.application.application.application_packages import ApplicationPackages
from ahriman.models.package import Package
@ -29,19 +29,10 @@ def test_add_aur(application_packages: ApplicationPackages, package_ahriman: Pac
must add package from AUR
"""
mocker.patch("ahriman.models.package.Package.from_aur", return_value=package_ahriman)
load_mock = mocker.patch("ahriman.core.build_tools.sources.Sources.load")
dependencies_mock = mocker.patch(
"ahriman.application.application.application_packages.ApplicationPackages._process_dependencies")
build_queue_mock = mocker.patch("ahriman.core.database.SQLite.build_queue_insert")
update_remote_mock = mocker.patch("ahriman.core.database.SQLite.remote_update")
application_packages._add_aur(package_ahriman.base, set(), False)
load_mock.assert_called_once_with(
pytest.helpers.anyvar(int),
package_ahriman,
pytest.helpers.anyvar(int),
application_packages.repository.paths)
dependencies_mock.assert_called_once_with(pytest.helpers.anyvar(int), set(), False)
application_packages._add_aur(package_ahriman.base)
build_queue_mock.assert_called_once_with(package_ahriman)
update_remote_mock.assert_called_once_with(package_ahriman)
@ -70,15 +61,12 @@ def test_add_local(application_packages: ApplicationPackages, package_ahriman: P
mocker.patch("ahriman.models.package.Package.from_build", return_value=package_ahriman)
init_mock = mocker.patch("ahriman.core.build_tools.sources.Sources.init")
copytree_mock = mocker.patch("shutil.copytree")
dependencies_mock = mocker.patch(
"ahriman.application.application.application_packages.ApplicationPackages._process_dependencies")
build_queue_mock = mocker.patch("ahriman.core.database.SQLite.build_queue_insert")
application_packages._add_local(package_ahriman.base, set(), False)
application_packages._add_local(package_ahriman.base)
copytree_mock.assert_called_once_with(
Path(package_ahriman.base), application_packages.repository.paths.cache_for(package_ahriman.base))
init_mock.assert_called_once_with(application_packages.repository.paths.cache_for(package_ahriman.base))
dependencies_mock.assert_called_once_with(pytest.helpers.anyvar(int), set(), False)
build_queue_mock.assert_called_once_with(package_ahriman)
@ -113,65 +101,15 @@ def test_add_repository(application_packages: ApplicationPackages, package_ahrim
update_remote_mock.assert_called_once_with(package_ahriman)
def test_known_packages(application_packages: ApplicationPackages) -> None:
"""
must raise NotImplemented for missing known_packages method
"""
with pytest.raises(NotImplementedError):
application_packages._known_packages()
def test_process_dependencies(application_packages: ApplicationPackages, mocker: MockerFixture) -> None:
"""
must process dependencies addition
"""
missing = {"python"}
path = Path("local")
dependencies_mock = mocker.patch("ahriman.models.package.Package.dependencies", return_value=missing)
add_mock = mocker.patch("ahriman.application.application.application_packages.ApplicationPackages.add")
application_packages._process_dependencies(path, set(), False)
dependencies_mock.assert_called_once_with(path)
add_mock.assert_called_once_with(missing, PackageSource.AUR, False)
def test_process_dependencies_missing(application_packages: ApplicationPackages, mocker: MockerFixture) -> None:
"""
must process dependencies addition only for missing packages
"""
path = Path("local")
dependencies_mock = mocker.patch("ahriman.models.package.Package.dependencies",
return_value={"python", "python-aiohttp"})
add_mock = mocker.patch("ahriman.application.application.application_packages.ApplicationPackages.add")
application_packages._process_dependencies(path, {"python"}, False)
dependencies_mock.assert_called_once_with(path)
add_mock.assert_called_once_with({"python-aiohttp"}, PackageSource.AUR, False)
def test_process_dependencies_skip(application_packages: ApplicationPackages, mocker: MockerFixture) -> None:
"""
must skip dependencies processing
"""
dependencies_mock = mocker.patch("ahriman.models.package.Package.dependencies")
add_mock = mocker.patch("ahriman.application.application.application_packages.ApplicationPackages.add")
application_packages._process_dependencies(Path("local"), set(), True)
dependencies_mock.assert_not_called()
add_mock.assert_not_called()
def test_add_add_archive(application_packages: ApplicationPackages, package_ahriman: Package,
mocker: MockerFixture) -> None:
"""
must add package from archive via add function
"""
mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._known_packages",
return_value=set())
add_mock = mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._add_archive")
application_packages.add([package_ahriman.base], PackageSource.Archive, False)
add_mock.assert_called_once_with(package_ahriman.base, set(), False)
application_packages.add([package_ahriman.base], PackageSource.Archive)
add_mock.assert_called_once_with(package_ahriman.base)
def test_add_add_aur(
@ -181,12 +119,10 @@ def test_add_add_aur(
"""
must add package from AUR via add function
"""
mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._known_packages",
return_value=set())
add_mock = mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._add_aur")
application_packages.add([package_ahriman.base], PackageSource.AUR, True)
add_mock.assert_called_once_with(package_ahriman.base, set(), True)
application_packages.add([package_ahriman.base], PackageSource.AUR)
add_mock.assert_called_once_with(package_ahriman.base)
def test_add_add_directory(application_packages: ApplicationPackages, package_ahriman: Package,
@ -194,12 +130,10 @@ def test_add_add_directory(application_packages: ApplicationPackages, package_ah
"""
must add packages from directory via add function
"""
mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._known_packages",
return_value=set())
add_mock = mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._add_directory")
application_packages.add([package_ahriman.base], PackageSource.Directory, False)
add_mock.assert_called_once_with(package_ahriman.base, set(), False)
application_packages.add([package_ahriman.base], PackageSource.Directory)
add_mock.assert_called_once_with(package_ahriman.base)
def test_add_add_local(application_packages: ApplicationPackages, package_ahriman: Package,
@ -207,12 +141,10 @@ def test_add_add_local(application_packages: ApplicationPackages, package_ahrima
"""
must add package from local sources via add function
"""
mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._known_packages",
return_value=set())
add_mock = mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._add_local")
application_packages.add([package_ahriman.base], PackageSource.Local, False)
add_mock.assert_called_once_with(package_ahriman.base, set(), False)
application_packages.add([package_ahriman.base], PackageSource.Local)
add_mock.assert_called_once_with(package_ahriman.base)
def test_add_add_remote(application_packages: ApplicationPackages, package_description_ahriman: PackageDescription,
@ -220,13 +152,11 @@ def test_add_add_remote(application_packages: ApplicationPackages, package_descr
"""
must add package from remote source via add function
"""
mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._known_packages",
return_value=set())
add_mock = mocker.patch("ahriman.application.application.application_packages.ApplicationPackages._add_remote")
url = f"https://host/{package_description_ahriman.filename}"
application_packages.add([url], PackageSource.Remote, False)
add_mock.assert_called_once_with(url, set(), False)
application_packages.add([url], PackageSource.Remote)
add_mock.assert_called_once_with(url)
def test_on_result(application_packages: ApplicationPackages) -> None:

View File

@ -161,7 +161,7 @@ def test_update(application_repository: ApplicationRepository, package_ahriman:
must process package updates
"""
paths = [package.filepath for package in package_ahriman.packages.values()]
tree = Tree([Leaf(package_ahriman, set())])
tree = Tree([Leaf(package_ahriman)])
mocker.patch("ahriman.core.tree.Tree.resolve", return_value=tree.levels())
mocker.patch("ahriman.core.repository.repository.Repository.packages_built", return_value=paths)
@ -181,7 +181,7 @@ def test_update_empty(application_repository: ApplicationRepository, package_ahr
"""
must skip updating repository if no packages supplied
"""
tree = Tree([Leaf(package_ahriman, set())])
tree = Tree([Leaf(package_ahriman)])
mocker.patch("ahriman.core.tree.Tree.resolve", return_value=tree.levels())
mocker.patch("ahriman.core.repository.repository.Repository.packages_built", return_value=[])
@ -197,7 +197,7 @@ def test_updates_all(application_repository: ApplicationRepository, package_ahri
"""
must get updates for all
"""
tree = Tree([Leaf(package_ahriman, set())])
tree = Tree([Leaf(package_ahriman)])
mocker.patch("ahriman.core.tree.Tree.resolve", return_value=tree.levels())
mocker.patch("ahriman.core.repository.repository.Repository.packages", return_value=[])

View File

@ -26,7 +26,7 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
args.now = False
args.refresh = 0
args.source = PackageSource.Auto
args.without_dependencies = False
args.dependencies = True
return args
@ -38,10 +38,12 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
args = _default_args(args)
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
application_mock = mocker.patch("ahriman.application.application.Application.add")
dependencies_mock = mocker.patch("ahriman.application.application.Application.with_dependencies")
on_start_mock = mocker.patch("ahriman.application.application.Application.on_start")
Add.run(args, "x86_64", configuration, report=False, unsafe=False)
application_mock.assert_called_once_with(args.package, args.source, args.without_dependencies)
application_mock.assert_called_once_with(args.package, args.source)
dependencies_mock.assert_not_called()
on_start_mock.assert_called_once_with()
@ -59,11 +61,14 @@ def test_run_with_updates(args: argparse.Namespace, configuration: Configuration
application_mock = mocker.patch("ahriman.application.application.Application.update", return_value=result)
check_mock = mocker.patch("ahriman.application.handlers.Handler.check_if_empty")
updates_mock = mocker.patch("ahriman.application.application.Application.updates", return_value=[package_ahriman])
dependencies_mock = mocker.patch("ahriman.application.application.Application.with_dependencies",
return_value=[package_ahriman])
Add.run(args, "x86_64", configuration, report=False, unsafe=False)
updates_mock.assert_called_once_with(args.package, aur=False, local=False, manual=True, vcs=False,
log_fn=pytest.helpers.anyvar(int))
application_mock.assert_called_once_with([package_ahriman])
dependencies_mock.assert_called_once_with([package_ahriman], process_dependencies=args.dependencies)
check_mock.assert_called_once_with(False, False)
@ -78,6 +83,7 @@ def test_run_empty_exception(args: argparse.Namespace, configuration: Configurat
mocker.patch("ahriman.application.application.Application.add")
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
mocker.patch("ahriman.application.application.Application.update", return_value=Result())
mocker.patch("ahriman.application.application.Application.with_dependencies")
mocker.patch("ahriman.application.application.Application.updates")
check_mock = mocker.patch("ahriman.application.handlers.Handler.check_if_empty")

View File

@ -6,10 +6,25 @@ from ahriman.application.handlers import Dump
from ahriman.core.configuration import Configuration
def _default_args(args: argparse.Namespace) -> argparse.Namespace:
"""
default arguments for these test cases
Args:
args(argparse.Namespace): command line arguments fixture
Returns:
argparse.Namespace: generated arguments for these test cases
"""
args.secure = True
return args
def test_run(args: argparse.Namespace, configuration: Configuration, mocker: MockerFixture) -> None:
"""
must run command
"""
args = _default_args(args)
print_mock = mocker.patch("ahriman.core.formatters.Printer.print")
application_mock = mocker.patch("ahriman.core.configuration.Configuration.dump",
return_value=configuration.dump())

View File

@ -45,7 +45,7 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
application_mock = mocker.patch("ahriman.application.handlers.Patch.patch_set_create")
Patch.run(args, "x86_64", configuration, report=False, unsafe=False)
patch_mock.assert_called_once_with(args.package, args.track)
patch_mock.assert_called_once_with(args.package, "x86_64", args.track)
application_mock.assert_called_once_with(pytest.helpers.anyvar(int), args.package, PkgbuildPatch(None, "patch"))
@ -108,8 +108,8 @@ def test_patch_create_from_diff(package_ahriman: Package, mocker: MockerFixture)
package_mock = mocker.patch("ahriman.models.package.Package.from_build", return_value=package_ahriman)
sources_mock = mocker.patch("ahriman.core.build_tools.sources.Sources.patch_create", return_value=patch.value)
assert Patch.patch_create_from_diff(path, ["*.diff"]) == (package_ahriman.base, patch)
package_mock.assert_called_once_with(path)
assert Patch.patch_create_from_diff(path, "x86_64", ["*.diff"]) == (package_ahriman.base, patch)
package_mock.assert_called_once_with(path, "x86_64")
sources_mock.assert_called_once_with(path, "*.diff")

View File

@ -56,4 +56,4 @@ def test_run_dry_run(args: argparse.Namespace, configuration: Configuration, rep
RemoveUnknown.run(args, "x86_64", configuration, report=False, unsafe=False)
application_mock.assert_called_once_with()
remove_mock.assert_not_called()
print_mock.assert_called_once_with(False)
print_mock.assert_called_once_with(verbose=False)

View File

@ -46,7 +46,7 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
aur_search_mock.assert_called_once_with("ahriman", pacman=pytest.helpers.anyvar(int))
official_search_mock.assert_called_once_with("ahriman", pacman=pytest.helpers.anyvar(int))
check_mock.assert_called_once_with(False, False)
print_mock.assert_has_calls([MockCall(False), MockCall(False)])
print_mock.assert_has_calls([MockCall(verbose=False), MockCall(verbose=False)])
def test_run_empty_exception(args: argparse.Namespace, configuration: Configuration, repository: Repository,
@ -131,9 +131,10 @@ def test_disallow_auto_architecture_run() -> None:
assert not Search.ALLOW_AUTO_ARCHITECTURE_RUN
def test_sort_fields() -> None:
def test_sort_fields(aur_package_ahriman: AURPackage) -> None:
"""
must store valid field list which are allowed to be used for sorting
"""
expected = {field.name for field in dataclasses.fields(AURPackage)}
assert all(field in expected for field in Search.SORT_FIELDS)
assert all(not isinstance(getattr(aur_package_ahriman, field), list) for field in Search.SORT_FIELDS)

View File

@ -47,7 +47,7 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
application_mock.assert_called_once_with()
packages_mock.assert_called_once_with(None)
check_mock.assert_called_once_with(False, False)
print_mock.assert_has_calls([MockCall(False) for _ in range(3)])
print_mock.assert_has_calls([MockCall(verbose=False) for _ in range(3)])
def test_run_empty_exception(args: argparse.Namespace, configuration: Configuration, repository: Repository,
@ -79,7 +79,7 @@ def test_run_verbose(args: argparse.Namespace, configuration: Configuration, rep
print_mock = mocker.patch("ahriman.core.formatters.Printer.print")
Status.run(args, "x86_64", configuration, report=False, unsafe=False)
print_mock.assert_has_calls([MockCall(True) for _ in range(2)])
print_mock.assert_has_calls([MockCall(verbose=True) for _ in range(2)])
def test_run_with_package_filter(args: argparse.Namespace, configuration: Configuration, repository: Repository,
@ -111,7 +111,7 @@ def test_run_by_status(args: argparse.Namespace, configuration: Configuration, r
print_mock = mocker.patch("ahriman.core.formatters.Printer.print")
Status.run(args, "x86_64", configuration, report=False, unsafe=False)
print_mock.assert_has_calls([MockCall(False) for _ in range(2)])
print_mock.assert_has_calls([MockCall(verbose=False) for _ in range(2)])
def test_imply_with_report(args: argparse.Namespace, configuration: Configuration, database: SQLite,

View File

@ -1,5 +1,4 @@
import argparse
import pytest
from pytest_mock import MockerFixture
@ -20,7 +19,7 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
print_mock = mocker.patch("ahriman.core.formatters.Printer.print")
Structure.run(args, "x86_64", configuration, report=False, unsafe=False)
application_mock.assert_called_once_with([package_ahriman], repository.paths, pytest.helpers.anyvar(int))
application_mock.assert_called_once_with([package_ahriman])
print_mock.assert_called_once_with(verbose=True, separator=" ")

View File

@ -23,6 +23,7 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
argparse.Namespace: generated arguments for these test cases
"""
args.package = []
args.dependencies = True
args.dry_run = False
args.exit_code = False
args.aur = True
@ -44,6 +45,8 @@ def test_run(args: argparse.Namespace, package_ahriman: Package, configuration:
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
application_mock = mocker.patch("ahriman.application.application.Application.update", return_value=result)
check_mock = mocker.patch("ahriman.application.handlers.Handler.check_if_empty")
dependencies_mock = mocker.patch("ahriman.application.application.Application.with_dependencies",
return_value=[package_ahriman])
updates_mock = mocker.patch("ahriman.application.application.Application.updates", return_value=[package_ahriman])
on_start_mock = mocker.patch("ahriman.application.application.Application.on_start")
@ -51,6 +54,7 @@ def test_run(args: argparse.Namespace, package_ahriman: Package, configuration:
application_mock.assert_called_once_with([package_ahriman])
updates_mock.assert_called_once_with(args.package, aur=args.aur, local=args.local, manual=args.manual, vcs=args.vcs,
log_fn=pytest.helpers.anyvar(int))
dependencies_mock.assert_called_once_with([package_ahriman], process_dependencies=args.dependencies)
check_mock.assert_has_calls([MockCall(False, False), MockCall(False, False)])
on_start_mock.assert_called_once_with()
@ -81,6 +85,7 @@ def test_run_update_empty_exception(args: argparse.Namespace, package_ahriman: P
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
mocker.patch("ahriman.application.application.Application.update", return_value=Result())
mocker.patch("ahriman.application.application.Application.updates", return_value=[package_ahriman])
mocker.patch("ahriman.application.application.Application.with_dependencies", return_value=[package_ahriman])
check_mock = mocker.patch("ahriman.application.handlers.Handler.check_if_empty")
Update.run(args, "x86_64", configuration, report=False, unsafe=False)

View File

@ -62,9 +62,11 @@ def test_schema(configuration: Configuration) -> None:
assert schema.pop("email")
assert schema.pop("github")
assert schema.pop("html")
assert schema.pop("report")
assert schema.pop("rsync")
assert schema.pop("s3")
assert schema.pop("telegram")
assert schema.pop("upload")
assert schema == CONFIGURATION_SCHEMA

View File

@ -28,14 +28,19 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
must run command
"""
args = _default_args(args)
mocker.patch("ahriman.core.spawn.Spawn.start")
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
setup_mock = mocker.patch("ahriman.web.web.setup_service")
run_mock = mocker.patch("ahriman.web.web.run_server")
start_mock = mocker.patch("ahriman.core.spawn.Spawn.start")
stop_mock = mocker.patch("ahriman.core.spawn.Spawn.stop")
join_mock = mocker.patch("ahriman.core.spawn.Spawn.join")
Web.run(args, "x86_64", configuration, report=False, unsafe=False)
setup_mock.assert_called_once_with("x86_64", configuration, pytest.helpers.anyvar(int))
run_mock.assert_called_once_with(pytest.helpers.anyvar(int))
start_mock.assert_called_once_with()
stop_mock.assert_called_once_with()
join_mock.assert_called_once_with()
def test_disallow_auto_architecture_run() -> None:

View File

@ -342,9 +342,10 @@ def test_subparsers_repo_backup_architecture(parser: argparse.ArgumentParser) ->
def test_subparsers_repo_check(parser: argparse.ArgumentParser) -> None:
"""
repo-check command must imply dry-run, aur and manual
repo-check command must imply dependencies, dry-run, aur and manual
"""
args = parser.parse_args(["repo-check"])
assert not args.dependencies
assert args.dry_run
assert args.aur
assert not args.manual

View File

@ -98,29 +98,37 @@ def aur_package_ahriman() -> AURPackage:
AURPackage: AUR package test instance
"""
return AURPackage(
id=1009791,
id=1197565,
name="ahriman",
package_base_id=165427,
package_base="ahriman",
version="1.7.0-1",
version="2.6.0-1",
description="ArcH linux ReposItory MANager",
num_votes=0,
popularity=0,
first_submitted=datetime.datetime.utcfromtimestamp(1618008285),
last_modified=datetime.datetime.utcfromtimestamp(1640473871),
last_modified=datetime.datetime.utcfromtimestamp(1673826351),
url_path="/cgit/aur.git/snapshot/ahriman.tar.gz",
url="https://github.com/arcan1s/ahriman",
out_of_date=None,
maintainer="arcanis",
submitter="arcanis",
depends=[
"devtools",
"git",
"pyalpm",
"python-aur",
"python-cerberus",
"python-inflection",
"python-passlib",
"python-requests",
"python-setuptools",
"python-srcinfo",
],
make_depends=["python-pip"],
make_depends=[
"python-build",
"python-installer",
"python-wheel",
],
opt_depends=[
"breezy",
"darcs",
@ -133,6 +141,7 @@ def aur_package_ahriman() -> AURPackage:
"python-aiohttp-session",
"python-boto3",
"python-cryptography",
"python-requests-unixsocket",
"python-jinja",
"rsync",
"subversion",
@ -251,7 +260,7 @@ def package_ahriman(package_description_ahriman: PackageDescription, remote_sour
packages = {"ahriman": package_description_ahriman}
return Package(
base="ahriman",
version="1.7.0-1",
version="2.6.0-1",
remote=remote_source,
packages=packages)
@ -297,12 +306,37 @@ def package_description_ahriman() -> PackageDescription:
"devtools",
"git",
"pyalpm",
"python-aur",
"python-cerberus",
"python-inflection",
"python-passlib",
"python-requests",
"python-setuptools",
"python-srcinfo",
],
make_depends=[
"python-build",
"python-installer",
"python-wheel",
],
opt_depends=[
"breezy",
"darcs",
"mercurial",
"python-aioauth-client",
"python-aiohttp",
"python-aiohttp-debugtoolbar",
"python-aiohttp-jinja2",
"python-aiohttp-security",
"python-aiohttp-session",
"python-boto3",
"python-cryptography",
"python-requests-unixsocket",
"python-jinja",
"rsync",
"subversion",
],
description="ArcH linux ReposItory MANager",
filename="ahriman-1.7.0-1-any.pkg.tar.zst",
filename="ahriman-2.6.0-1-any.pkg.tar.zst",
groups=[],
installed_size=4200000,
licenses=["GPL3"],

View File

@ -207,3 +207,4 @@ def test_packages_with_provides(pacman: Pacman) -> None:
package list must contain provides packages
"""
assert "sh" in pacman.packages()
assert "mysql" in pacman.packages() # mariadb

View File

@ -16,4 +16,4 @@ def validator(configuration: Configuration) -> Validator:
Returns:
Validator: validator test instance
"""
return Validator(instance=configuration, schema=CONFIGURATION_SCHEMA)
return Validator(configuration=configuration, schema=CONFIGURATION_SCHEMA)

View File

@ -1,6 +1,6 @@
from pathlib import Path
from pytest_mock import MockerFixture
from unittest.mock import MagicMock
from unittest.mock import MagicMock, call as MockCall
from ahriman.core.configuration.validator import Validator
@ -18,7 +18,7 @@ def test_normalize_coerce_absolute_path(validator: Validator) -> None:
must convert string value to path by using configuration converters
"""
convert_mock = MagicMock()
validator.instance.converters["path"] = convert_mock
validator.configuration.converters["path"] = convert_mock
validator._normalize_coerce_absolute_path("value")
convert_mock.assert_called_once_with("value")
@ -46,12 +46,56 @@ def test_normalize_coerce_list(validator: Validator) -> None:
must convert string value to list by using configuration converters
"""
convert_mock = MagicMock()
validator.instance.converters["list"] = convert_mock
validator.configuration.converters["list"] = convert_mock
validator._normalize_coerce_list("value")
convert_mock.assert_called_once_with("value")
def test_validate_is_ip_address(validator: Validator, mocker: MockerFixture) -> None:
"""
must validate addresses correctly
"""
error_mock = mocker.patch("ahriman.core.configuration.validator.Validator._error")
validator._validate_is_ip_address(["localhost"], "field", "localhost")
validator._validate_is_ip_address([], "field", "localhost")
validator._validate_is_ip_address([], "field", "127.0.0.1")
validator._validate_is_ip_address([], "field", "::")
validator._validate_is_ip_address([], "field", "0.0.0.0")
validator._validate_is_ip_address([], "field", "random string")
error_mock.assert_has_calls([
MockCall("field", "Value localhost must be valid IP address"),
MockCall("field", "Value random string must be valid IP address"),
])
def test_validate_is_url(validator: Validator, mocker: MockerFixture) -> None:
"""
must validate url correctly
"""
error_mock = mocker.patch("ahriman.core.configuration.validator.Validator._error")
validator._validate_is_url([], "field", "http://example.com")
validator._validate_is_url([], "field", "https://example.com")
validator._validate_is_url([], "field", "file:///tmp")
validator._validate_is_url(["http", "https"], "field", "file:///tmp")
validator._validate_is_url([], "field", "http:///path")
validator._validate_is_url([], "field", "random string")
error_mock.assert_has_calls([
MockCall("field", "Url file:///tmp scheme must be one of ['http', 'https']"),
MockCall("field", "Location must be set for url http:///path of scheme http"),
MockCall("field", "Url scheme is not set for random string"),
])
def test_validate_path_exists(validator: Validator, mocker: MockerFixture) -> None:
"""
must validate that paths exists
@ -67,4 +111,6 @@ def test_validate_path_exists(validator: Validator, mocker: MockerFixture) -> No
mocker.patch("pathlib.Path.exists", return_value=True)
validator._validate_path_exists(True, "field", Path("3"))
error_mock.assert_called_once_with("field", "Path 2 must exist")
error_mock.assert_has_calls([
MockCall("field", "Path 2 must exist"),
])
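For illustration, a cerberus rule with the behaviour these tests expect could look roughly like this (the real ahriman validator may differ; the error message follows the test expectations above):

import ipaddress
from typing import List
from cerberus import Validator  # type: ignore

class AddressValidator(Validator):
    def _validate_is_ip_address(self, constraint: List[str], field: str, value: str) -> None:
        """Check that value is a valid IP address.

        The rule's arguments are validated against this schema:
        {'type': 'list', 'schema': {'type': 'string'}}
        """
        if value in constraint:   # e.g. "localhost" can be explicitly allowed
            return
        try:
            ipaddress.ip_address(value)
        except ValueError:
            self._error(field, f"Value {value} must be valid IP address")

schema = {"address": {"type": "string", "is_ip_address": ["localhost"]}}
print(AddressValidator(schema).validate({"address": "0.0.0.0"}))        # True
print(AddressValidator(schema).validate({"address": "random string"}))  # False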

View File

@ -20,7 +20,7 @@ def leaf_ahriman(package_ahriman: Package) -> Leaf:
Returns:
Leaf: tree leaf test instance
"""
return Leaf(package_ahriman, set())
return Leaf(package_ahriman)
@pytest.fixture
@ -34,7 +34,7 @@ def leaf_python_schedule(package_python_schedule: Package) -> Leaf:
Returns:
Leaf: tree leaf test instance
"""
return Leaf(package_python_schedule, set())
return Leaf(package_python_schedule)
@pytest.fixture

View File

@ -1,47 +0,0 @@
from pytest_mock import MockerFixture
from sqlite3 import Connection
from ahriman.core.configuration import Configuration
from ahriman.core.database.data import migrate_data
from ahriman.models.migration_result import MigrationResult
from ahriman.models.repository_paths import RepositoryPaths
def test_migrate_data_initial(connection: Connection, configuration: Configuration,
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must perform initial migration
"""
packages = mocker.patch("ahriman.core.database.data.migrate_package_statuses")
patches = mocker.patch("ahriman.core.database.data.migrate_patches")
users = mocker.patch("ahriman.core.database.data.migrate_users_data")
remotes = mocker.patch("ahriman.core.database.data.migrate_package_remotes")
migrate_data(MigrationResult(old_version=0, new_version=900), connection, configuration)
packages.assert_called_once_with(connection, repository_paths)
patches.assert_called_once_with(connection, repository_paths)
users.assert_called_once_with(connection, configuration)
remotes.assert_called_once_with(connection, repository_paths)
def test_migrate_data_remotes(connection: Connection, configuration: Configuration,
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must perform initial migration
"""
remotes = mocker.patch("ahriman.core.database.data.migrate_package_remotes")
migrate_data(MigrationResult(old_version=1, new_version=900), connection, configuration)
remotes.assert_called_once_with(connection, repository_paths)
def test_migrate_data_skip(connection: Connection, configuration: Configuration, mocker: MockerFixture) -> None:
"""
must not migrate data if version is up-to-date
"""
packages = mocker.patch("ahriman.core.database.data.migrate_package_statuses")
users = mocker.patch("ahriman.core.database.data.migrate_users_data")
migrate_data(MigrationResult(old_version=900, new_version=900), connection, configuration)
packages.assert_not_called()
users.assert_not_called()

View File

@ -1,66 +0,0 @@
import pytest
from pytest_mock import MockerFixture
from sqlite3 import Connection
from ahriman.core.database.data import migrate_package_remotes
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
def test_migrate_package_remotes(package_ahriman: Package, connection: Connection, repository_paths: RepositoryPaths,
mocker: MockerFixture) -> None:
"""
must put package remotes to database
"""
mocker.patch(
"ahriman.core.database.operations.PackageOperations._packages_get_select_package_bases",
return_value={package_ahriman.base: package_ahriman})
mocker.patch("pathlib.Path.exists", return_value=False)
migrate_package_remotes(connection, repository_paths)
connection.execute.assert_called_once_with(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int))
def test_migrate_package_remotes_has_local(package_ahriman: Package, connection: Connection,
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must skip processing for packages which have local cache
"""
mocker.patch(
"ahriman.core.database.operations.PackageOperations._packages_get_select_package_bases",
return_value={package_ahriman.base: package_ahriman})
mocker.patch("pathlib.Path.exists", return_value=True)
migrate_package_remotes(connection, repository_paths)
connection.execute.assert_not_called()
def test_migrate_package_remotes_vcs(package_ahriman: Package, connection: Connection,
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must process VCS packages with local cache
"""
mocker.patch(
"ahriman.core.database.operations.PackageOperations._packages_get_select_package_bases",
return_value={package_ahriman.base: package_ahriman})
mocker.patch("pathlib.Path.exists", return_value=True)
mocker.patch.object(Package, "is_vcs", True)
migrate_package_remotes(connection, repository_paths)
connection.execute.assert_called_once_with(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int))
def test_migrate_package_remotes_no_remotes(package_ahriman: Package, connection: Connection,
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must skip processing in case if no remotes generated (should never happen)
"""
mocker.patch(
"ahriman.core.database.operations.PackageOperations._packages_get_select_package_bases",
return_value={package_ahriman.base: package_ahriman})
mocker.patch("pathlib.Path.exists", return_value=False)
mocker.patch("ahriman.models.remote_source.RemoteSource.from_source", return_value=None)
migrate_package_remotes(connection, repository_paths)
connection.execute.assert_not_called()

View File

@ -1,39 +0,0 @@
import pytest
from pytest_mock import MockerFixture
from sqlite3 import Connection
from unittest.mock import call as MockCall
from ahriman.core.database.data import migrate_package_statuses
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
def test_migrate_package_statuses(connection: Connection, package_ahriman: Package, repository_paths: RepositoryPaths,
mocker: MockerFixture) -> None:
"""
must migrate packages to database
"""
response = {"packages": [pytest.helpers.get_package_status_extended(package_ahriman)]}
mocker.patch("pathlib.Path.is_file", return_value=True)
mocker.patch("pathlib.Path.open")
mocker.patch("json.load", return_value=response)
migrate_package_statuses(connection, repository_paths)
connection.execute.assert_has_calls([
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
])
connection.executemany.assert_has_calls([
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
])
def test_migrate_package_statuses_skip(connection: Connection, repository_paths: RepositoryPaths,
mocker: MockerFixture) -> None:
"""
must skip packages migration if no cache file found
"""
mocker.patch("pathlib.Path.is_file", return_value=False)
migrate_package_statuses(connection, repository_paths)

View File

@ -1,52 +0,0 @@
import pytest
from pathlib import Path
from pytest_mock import MockerFixture
from sqlite3 import Connection
from ahriman.core.database.data import migrate_patches
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
def test_migrate_patches(connection: Connection, repository_paths: RepositoryPaths,
package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must perform migration for patches
"""
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("pathlib.Path.is_file", return_value=True)
iterdir_mock = mocker.patch("pathlib.Path.iterdir", return_value=[Path(package_ahriman.base)])
read_mock = mocker.patch("pathlib.Path.read_text", return_value="patch")
migrate_patches(connection, repository_paths)
iterdir_mock.assert_called_once_with()
read_mock.assert_called_once_with(encoding="utf8")
connection.execute.assert_called_once_with(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int))
def test_migrate_patches_skip(connection: Connection, repository_paths: RepositoryPaths,
mocker: MockerFixture) -> None:
"""
must skip patches migration in case if no root found
"""
mocker.patch("pathlib.Path.is_dir", return_value=False)
iterdir_mock = mocker.patch("pathlib.Path.iterdir")
migrate_patches(connection, repository_paths)
iterdir_mock.assert_not_called()
def test_migrate_patches_no_patch(connection: Connection, repository_paths: RepositoryPaths,
package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip package if no match found
"""
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("pathlib.Path.is_file", return_value=False)
iterdir_mock = mocker.patch("pathlib.Path.iterdir", return_value=[Path(package_ahriman.base)])
read_mock = mocker.patch("pathlib.Path.read_text")
migrate_patches(connection, repository_paths)
iterdir_mock.assert_called_once_with()
read_mock.assert_not_called()

View File

@ -1,21 +0,0 @@
import pytest
from sqlite3 import Connection
from unittest.mock import call as MockCall
from ahriman.core.configuration import Configuration
from ahriman.core.database.data import migrate_users_data
def test_migrate_users_data(connection: Connection, configuration: Configuration) -> None:
"""
must put users to database
"""
configuration.set_option("auth:read", "user1", "password1")
configuration.set_option("auth:write", "user2", "password2")
migrate_users_data(connection, configuration)
connection.execute.assert_has_calls([
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
])

View File

@ -1,4 +1,15 @@
from ahriman.core.database.migrations.m000_initial import steps
import pytest
from pathlib import Path
from pytest_mock import MockerFixture
from sqlite3 import Connection
from unittest.mock import call as MockCall
from ahriman.core.configuration import Configuration
from ahriman.core.database.migrations.m000_initial import migrate_data, migrate_package_statuses, migrate_patches, \
migrate_users_data, steps
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
def test_migration_initial() -> None:
@ -6,3 +17,104 @@ def test_migration_initial() -> None:
migration must not be empty
"""
assert steps
def test_migrate_data(connection: Connection, configuration: Configuration, mocker: MockerFixture) -> None:
"""
must perform data migration
"""
packages = mocker.patch("ahriman.core.database.migrations.m000_initial.migrate_package_statuses")
patches = mocker.patch("ahriman.core.database.migrations.m000_initial.migrate_patches")
users = mocker.patch("ahriman.core.database.migrations.m000_initial.migrate_users_data")
migrate_data(connection, configuration)
packages.assert_called_once_with(connection, configuration.repository_paths)
patches.assert_called_once_with(connection, configuration.repository_paths)
users.assert_called_once_with(connection, configuration)
def test_migrate_package_statuses(connection: Connection, package_ahriman: Package, repository_paths: RepositoryPaths,
mocker: MockerFixture) -> None:
"""
must migrate packages to database
"""
response = {"packages": [pytest.helpers.get_package_status_extended(package_ahriman)]}
mocker.patch("pathlib.Path.is_file", return_value=True)
mocker.patch("pathlib.Path.open")
mocker.patch("json.load", return_value=response)
migrate_package_statuses(connection, repository_paths)
connection.execute.assert_has_calls([
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
])
connection.executemany.assert_has_calls([
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
])
def test_migrate_package_statuses_skip(connection: Connection, repository_paths: RepositoryPaths,
mocker: MockerFixture) -> None:
"""
must skip packages migration if no cache file found
"""
mocker.patch("pathlib.Path.is_file", return_value=False)
migrate_package_statuses(connection, repository_paths)
def test_migrate_patches(connection: Connection, repository_paths: RepositoryPaths,
package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must perform migration for patches
"""
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("pathlib.Path.is_file", return_value=True)
iterdir_mock = mocker.patch("pathlib.Path.iterdir", return_value=[Path(package_ahriman.base)])
read_mock = mocker.patch("pathlib.Path.read_text", return_value="patch")
migrate_patches(connection, repository_paths)
iterdir_mock.assert_called_once_with()
read_mock.assert_called_once_with(encoding="utf8")
connection.execute.assert_called_once_with(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int))
def test_migrate_patches_skip(connection: Connection, repository_paths: RepositoryPaths,
mocker: MockerFixture) -> None:
"""
must skip patches migration in case if no root found
"""
mocker.patch("pathlib.Path.is_dir", return_value=False)
iterdir_mock = mocker.patch("pathlib.Path.iterdir")
migrate_patches(connection, repository_paths)
iterdir_mock.assert_not_called()
def test_migrate_patches_no_patch(connection: Connection, repository_paths: RepositoryPaths,
package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip package if no match found
"""
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("pathlib.Path.is_file", return_value=False)
iterdir_mock = mocker.patch("pathlib.Path.iterdir", return_value=[Path(package_ahriman.base)])
read_mock = mocker.patch("pathlib.Path.read_text")
migrate_patches(connection, repository_paths)
iterdir_mock.assert_called_once_with()
read_mock.assert_not_called()
def test_migrate_users_data(connection: Connection, configuration: Configuration) -> None:
"""
must put users to database
"""
configuration.set_option("auth:read", "user1", "password1")
configuration.set_option("auth:write", "user2", "password2")
migrate_users_data(connection, configuration)
connection.execute.assert_has_calls([
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
MockCall(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int)),
])

View File

@ -1,4 +1,12 @@
from ahriman.core.database.migrations.m001_package_source import steps
import pytest
from sqlite3 import Connection
from pytest_mock import MockerFixture
from ahriman.core.configuration import Configuration
from ahriman.core.database.migrations.m001_package_source import migrate_data, migrate_package_remotes, steps
from ahriman.models.package import Package
from ahriman.models.repository_paths import RepositoryPaths
def test_migration_package_source() -> None:
@ -6,3 +14,70 @@ def test_migration_package_source() -> None:
migration must not be empty
"""
assert steps
def test_migrate_data(connection: Connection, configuration: Configuration, mocker: MockerFixture) -> None:
"""
must perform data migration
"""
remotes = mocker.patch("ahriman.core.database.migrations.m001_package_source.migrate_package_remotes")
migrate_data(connection, configuration)
remotes.assert_called_once_with(connection, configuration.repository_paths)
def test_migrate_package_remotes(package_ahriman: Package, connection: Connection, repository_paths: RepositoryPaths,
mocker: MockerFixture) -> None:
"""
must put package remotes to database
"""
mocker.patch(
"ahriman.core.database.operations.PackageOperations._packages_get_select_package_bases",
return_value={package_ahriman.base: package_ahriman})
mocker.patch("pathlib.Path.exists", return_value=False)
migrate_package_remotes(connection, repository_paths)
connection.execute.assert_called_once_with(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int))
def test_migrate_package_remotes_has_local(package_ahriman: Package, connection: Connection,
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must skip processing for packages which have local cache
"""
mocker.patch(
"ahriman.core.database.operations.PackageOperations._packages_get_select_package_bases",
return_value={package_ahriman.base: package_ahriman})
mocker.patch("pathlib.Path.exists", return_value=True)
migrate_package_remotes(connection, repository_paths)
connection.execute.assert_not_called()
def test_migrate_package_remotes_vcs(package_ahriman: Package, connection: Connection,
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must process VCS packages with local cache
"""
mocker.patch(
"ahriman.core.database.operations.PackageOperations._packages_get_select_package_bases",
return_value={package_ahriman.base: package_ahriman})
mocker.patch("pathlib.Path.exists", return_value=True)
mocker.patch.object(Package, "is_vcs", True)
migrate_package_remotes(connection, repository_paths)
connection.execute.assert_called_once_with(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int))
def test_migrate_package_remotes_no_remotes(package_ahriman: Package, connection: Connection,
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must skip processing in case if no remotes generated (should never happen)
"""
mocker.patch(
"ahriman.core.database.operations.PackageOperations._packages_get_select_package_bases",
return_value={package_ahriman.base: package_ahriman})
mocker.patch("pathlib.Path.exists", return_value=False)
mocker.patch("ahriman.models.remote_source.RemoteSource.from_source", return_value=None)
migrate_package_remotes(connection, repository_paths)
connection.execute.assert_not_called()

View File

@ -0,0 +1,53 @@
import pytest
from pytest_mock import MockerFixture
from sqlite3 import Connection
from ahriman.core.configuration import Configuration
from ahriman.core.database.migrations.m005_make_opt_depends import migrate_data, migrate_package_depends, steps
from ahriman.models.package import Package
def test_migration_make_opt_depends() -> None:
"""
migration must not be empty
"""
assert steps
def test_migrate_data(connection: Connection, configuration: Configuration, mocker: MockerFixture) -> None:
"""
must perform data migration
"""
depends_mock = mocker.patch("ahriman.core.database.migrations.m005_make_opt_depends.migrate_package_depends")
migrate_data(connection, configuration)
depends_mock.assert_called_once_with(connection, configuration)
def test_migrate_package_depends(connection: Connection, configuration: Configuration, package_ahriman: Package,
mocker: MockerFixture) -> None:
"""
must update make and opt depends list
"""
mocker.patch("pathlib.Path.is_dir", return_value=True)
mocker.patch("pathlib.Path.iterdir", return_value=[package_ahriman.packages[package_ahriman.base].filepath])
package_mock = mocker.patch("ahriman.models.package.Package.from_archive", return_value=package_ahriman)
migrate_package_depends(connection, configuration)
package_mock.assert_called_once_with(
package_ahriman.packages[package_ahriman.base].filepath, pytest.helpers.anyvar(int), remote=None)
connection.executemany.assert_called_once_with(pytest.helpers.anyvar(str, strict=True), [{
"make_depends": package_ahriman.packages[package_ahriman.base].make_depends,
"opt_depends": package_ahriman.packages[package_ahriman.base].opt_depends,
"package": package_ahriman.base,
}])
def test_migrate_package_depends_skip(connection: Connection, configuration: Configuration,
mocker: MockerFixture) -> None:
"""
must skip update make and opt depends list if no repository directory found
"""
mocker.patch("pathlib.Path.is_dir", return_value=False)
migrate_package_depends(connection, configuration)
connection.executemany.assert_not_called()

View File

@ -0,0 +1,8 @@
from ahriman.core.database.migrations.m006_packages_architecture_required import steps
def test_migration_logs() -> None:
"""
migration must not be empty
"""
assert steps

View File

@ -19,6 +19,19 @@ def test_migrate(connection: Connection, configuration: Configuration, mocker: M
run_mock.assert_called_once_with()
def test_migration(migrations: Migrations, connection: Connection) -> None:
"""
must perform single migration
"""
migrate_data_mock = MagicMock()
cursor = MagicMock()
migration = Migration(index=0, name="test", steps=["select 1"], migrate_data=migrate_data_mock)
migrations.migration(cursor, migration)
cursor.execute.assert_called_once_with("select 1")
migrate_data_mock.assert_called_once_with(migrations.connection, migrations.configuration)
def test_migrations(migrations: Migrations) -> None:
"""
must retrieve migrations
@ -40,25 +53,23 @@ def test_run(migrations: Migrations, mocker: MockerFixture) -> None:
"""
must run migration
"""
migration = Migration(index=0, name="test", steps=["select 1"], migrate_data=MagicMock())
cursor = MagicMock()
mocker.patch("ahriman.core.database.migrations.Migrations.user_version", return_value=0)
mocker.patch("ahriman.core.database.migrations.Migrations.migrations",
return_value=[Migration(index=0, name="test", steps=["select 1"])])
mocker.patch("ahriman.core.database.migrations.Migrations.migrations", return_value=[migration])
migrations.connection.cursor.return_value = cursor
migration_mock = mocker.patch("ahriman.core.database.migrations.Migrations.migration")
validate_mock = mocker.patch("ahriman.models.migration_result.MigrationResult.validate")
migrate_data_mock = mocker.patch("ahriman.core.database.migrations.migrate_data")
migrations.run()
validate_mock.assert_called_once_with()
cursor.execute.assert_has_calls([
MockCall("begin exclusive"),
MockCall("select 1"),
MockCall("pragma user_version = 1"),
MockCall("commit"),
])
cursor.close.assert_called_once_with()
migrate_data_mock.assert_called_once_with(
MigrationResult(old_version=0, new_version=1), migrations.connection, migrations.configuration)
migration_mock.assert_called_once_with(cursor, migration)
def test_run_migration_exception(migrations: Migrations, mocker: MockerFixture) -> None:
@ -69,7 +80,7 @@ def test_run_migration_exception(migrations: Migrations, mocker: MockerFixture)
mocker.patch("logging.Logger.info", side_effect=Exception())
mocker.patch("ahriman.core.database.migrations.Migrations.user_version", return_value=0)
mocker.patch("ahriman.core.database.migrations.Migrations.migrations",
return_value=[Migration(index=0, name="test", steps=["select 1"])])
return_value=[Migration(index=0, name="test", steps=["select 1"], migrate_data=MagicMock())])
mocker.patch("ahriman.models.migration_result.MigrationResult.validate")
migrations.connection.cursor.return_value = cursor
@@ -90,7 +101,7 @@ def test_run_sql_exception(migrations: Migrations, mocker: MockerFixture) -> Non
cursor.execute.side_effect = Exception()
mocker.patch("ahriman.core.database.migrations.Migrations.user_version", return_value=0)
mocker.patch("ahriman.core.database.migrations.Migrations.migrations",
return_value=[Migration(index=0, name="test", steps=["select 1"])])
return_value=[Migration(index=0, name="test", steps=["select 1"], migrate_data=MagicMock())])
mocker.patch("ahriman.models.migration_result.MigrationResult.validate")
migrations.connection.cursor.return_value = cursor

View File

@@ -48,6 +48,16 @@ def test_package_update_insert_packages(database: SQLite, connection: Connection
connection.executemany(pytest.helpers.anyvar(str, strict=True), pytest.helpers.anyvar(int))
def test_package_update_insert_packages_no_arch(database: SQLite, connection: Connection,
package_ahriman: Package) -> None:
"""
must skip package insertion if no package architecture is set
"""
package_ahriman.packages[package_ahriman.base].architecture = None
database._package_update_insert_packages(connection, package_ahriman)
connection.executemany(pytest.helpers.anyvar(str, strict=True), [])
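For context, a minimal sketch of the filtering this test expects from _package_update_insert_packages: packages without an architecture are dropped before the insert. The method is shown standalone, and the SQL and column set below are trimmed assumptions:

from sqlite3 import Connection

from ahriman.models.package import Package


def _package_update_insert_packages(self, connection: Connection, package: Package) -> None:
    package_list = []
    for name, description in package.packages.items():
        if description.architecture is None:
            continue  # incomplete entry, e.g. a package which is only queued for build
        package_list.append({
            "package": name,
            "package_base": package.base,
            "architecture": description.architecture,
        })
    connection.executemany(
        """insert or replace into packages (package, package_base, architecture)
           values (:package, :package_base, :architecture)""",
        package_list)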
def test_package_update_insert_status(database: SQLite, connection: Connection, package_ahriman: Package) -> None:
"""
must insert single package status

View File

@@ -10,10 +10,13 @@ def test_properties(configuration_printer: ConfigurationPrinter) -> None:
def test_properties_required(configuration_printer: ConfigurationPrinter) -> None:
"""
must return all properties as required
must return all safe properties as required
"""
assert all(prop.is_required for prop in configuration_printer.properties())
configuration_printer.values = {"password": "pa55w0rd"}
assert all(not prop.is_required for prop in configuration_printer.properties())
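A minimal sketch of the behaviour asserted here, assuming a Property model with an is_required flag and a printer exposing a values mapping; the exact set of keywords treated as secrets is a guess based on the "hide passwords and secrets" commit:

from ahriman.models.property import Property

# values whose key contains any of these keywords are hidden unless explicitly requested
SECRET_KEYWORDS = {"api_key", "password", "secret"}


def properties(self) -> list[Property]:  # method of ConfigurationPrinter, shown standalone
    return [
        Property(name, value, is_required=not any(keyword in name for keyword in SECRET_KEYWORDS))
        for name, value in sorted(self.values.items())
    ]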
def test_title(configuration_printer: ConfigurationPrinter) -> None:
"""

View File

@@ -22,8 +22,13 @@ def test_package_update(database: SQLite, configuration: Configuration, package_
patch2 = PkgbuildPatch("key", "value")
local = Path("local")
glob_mock = mocker.patch("pathlib.Path.glob", return_value=[".git", ".gitignore"])
mocker.patch(
"pathlib.Path.is_file",
autospec=True,
side_effect=lambda p: True if p == Path(".gitignore") else False)
glob_mock = mocker.patch("pathlib.Path.glob", return_value=[Path(".git"), Path(".gitignore")])
rmtree_mock = mocker.patch("shutil.rmtree")
unlink_mock = mocker.patch("pathlib.Path.unlink")
fetch_mock = mocker.patch("ahriman.core.build_tools.sources.Sources.fetch")
patches_mock = mocker.patch("ahriman.core.database.SQLite.patches_get", return_value=[patch1, patch2])
patches_write_mock = mocker.patch("ahriman.models.pkgbuild_patch.PkgbuildPatch.write")
@@ -33,9 +38,9 @@ def test_package_update(database: SQLite, configuration: Configuration, package_
glob_mock.assert_called_once_with(".git*")
rmtree_mock.assert_has_calls([
MockCall(local / package_ahriman.base, ignore_errors=True),
MockCall(local / package_ahriman.base / ".git"),
MockCall(local / package_ahriman.base / ".gitignore"),
MockCall(Path(".git")),
])
unlink_mock.assert_called_once_with()
fetch_mock.assert_called_once_with(pytest.helpers.anyvar(int), package_ahriman.remote)
patches_mock.assert_called_once_with(package_ahriman.base)
patches_write_mock.assert_has_calls([

Some files were not shown because too many files have changed in this diff
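Finally, a hedged reconstruction of the .git* cleanup step covered by the last excerpt: regular files such as .gitignore are unlinked, while the .git directory is removed recursively. The function and variable names below are illustrative only, not the actual remote push trigger code:

import shutil
from pathlib import Path


def _clear_git_files(package_target_dir: Path) -> None:
    # drop local git service entries so that they do not leak into the remote repository
    for entry in package_target_dir.glob(".git*"):
        if entry.is_file():
            entry.unlink()
        else:
            shutil.rmtree(entry)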