Mirror of https://github.com/arcan1s/ahriman.git, synced 2025-04-24 15:27:17 +00:00
Remote call trigger support (#105)
* add support of remote task tracking
* add remote call trigger implementation
* docs update
* add cross-service upload
* add notes about user
* add more ability to control upload
* multipart upload with signatures as well as safe file save
* configuration reference update
* rename watcher methods
* erase logs based on current package version (old implementation has used process id instead, but it leads to log removal in case of remote process trigger)
* add --server flag for setup command
* restore behavior of the httploghandler
parent 5b172ad20b
commit ad1c0051c4
@@ -11,6 +11,7 @@ ENV AHRIMAN_PACKAGER="ahriman bot <ahriman@example.com>"
 ENV AHRIMAN_PACMAN_MIRROR=""
 ENV AHRIMAN_PORT=""
 ENV AHRIMAN_REPOSITORY="aur-clone"
+ENV AHRIMAN_REPOSITORY_SERVER=""
 ENV AHRIMAN_REPOSITORY_ROOT="/var/lib/ahriman/ahriman"
 ENV AHRIMAN_UNIX_SOCKET=""
 ENV AHRIMAN_USER="ahriman"
@@ -43,6 +43,9 @@ fi
 if [ -n "$AHRIMAN_PORT" ]; then
     AHRIMAN_SETUP_ARGS+=("--web-port" "$AHRIMAN_PORT")
 fi
+if [ -n "$AHRIMAN_REPOSITORY_SERVER" ]; then
+    AHRIMAN_SETUP_ARGS+=("--server" "$AHRIMAN_REPOSITORY_SERVER")
+fi
 if [ -n "$AHRIMAN_UNIX_SOCKET" ]; then
     AHRIMAN_SETUP_ARGS+=("--web-unix-socket" "$AHRIMAN_UNIX_SOCKET")
 fi
@@ -20,6 +20,14 @@ ahriman.core.configuration.schema module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.core.configuration.shell\_interpolator module
+-----------------------------------------------------
+
+.. automodule:: ahriman.core.configuration.shell_interpolator
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.core.configuration.validator module
 -------------------------------------------
 
@@ -84,6 +84,14 @@ ahriman.core.database.migrations.m009\_local\_source module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.core.database.migrations.m010\_version\_based\_logs\_removal module
+---------------------------------------------------------------------------
+
+.. automodule:: ahriman.core.database.migrations.m010_version_based_logs_removal
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 Module contents
 ---------------
 
@@ -36,6 +36,14 @@ ahriman.core.report.jinja\_template module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.core.report.remote\_call module
+---------------------------------------
+
+.. automodule:: ahriman.core.report.remote_call
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.core.report.report module
 ---------------------------------
 
@@ -20,6 +20,14 @@ ahriman.core.upload.http\_upload module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.core.upload.remote\_service module
+------------------------------------------
+
+.. automodule:: ahriman.core.upload.remote_service
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.core.upload.rsync module
 --------------------------------
 
@@ -220,6 +220,14 @@ ahriman.models.user\_access module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.models.waiter module
+----------------------------
+
+.. automodule:: ahriman.models.waiter
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 Module contents
 ---------------
 
@@ -36,6 +36,14 @@ ahriman.web.schemas.error\_schema module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.web.schemas.file\_schema module
+---------------------------------------
+
+.. automodule:: ahriman.web.schemas.file_schema
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.web.schemas.internal\_status\_schema module
 ---------------------------------------------------
 
@@ -132,6 +140,22 @@ ahriman.web.schemas.pgp\_key\_schema module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.web.schemas.process\_id\_schema module
+----------------------------------------------
+
+.. automodule:: ahriman.web.schemas.process_id_schema
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
+ahriman.web.schemas.process\_schema module
+------------------------------------------
+
+.. automodule:: ahriman.web.schemas.process_schema
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.web.schemas.remote\_schema module
 -----------------------------------------
 
@@ -156,6 +180,14 @@ ahriman.web.schemas.status\_schema module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.web.schemas.update\_flags\_schema module
+------------------------------------------------
+
+.. automodule:: ahriman.web.schemas.update_flags_schema
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 Module contents
 ---------------
 
@@ -20,6 +20,14 @@ ahriman.web.views.service.pgp module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.web.views.service.process module
+----------------------------------------
+
+.. automodule:: ahriman.web.views.service.process
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.web.views.service.rebuild module
 ----------------------------------------
 
@@ -60,6 +68,14 @@ ahriman.web.views.service.update module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.web.views.service.upload module
+---------------------------------------
+
+.. automodule:: ahriman.web.views.service.upload
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 Module contents
 ---------------
 
@@ -106,8 +106,10 @@ Web server settings. If any of ``host``/``port`` is not set, web integration wil
 * ``debug`` - enable debug toolbar, boolean, optional, default ``no``.
 * ``debug_check_host`` - check hosts to access debug toolbar, boolean, optional, default ``no``.
 * ``debug_allowed_hosts`` - allowed hosts to get access to debug toolbar, space separated list of string, optional.
+* ``enable_archive_upload`` - allow to upload packages via HTTP (i.e. call of ``/api/v1/service/upload`` uri), boolean, optional, default ``no``.
 * ``host`` - host to bind, string, optional.
 * ``index_url`` - full url of the repository index page, string, optional.
+* ``max_body_size`` - max body size in bytes to be validated for archive upload, integer, optional. If not set, validation will be disabled.
 * ``password`` - password to authorize in web service in order to update service status, string, required in case if authorization enabled.
 * ``port`` - port to bind, int, optional.
 * ``static_path`` - path to directory with static files, string, required.
@@ -115,6 +117,7 @@ Web server settings. If any of ``host``/``port`` is not set, web integration wil
 * ``unix_socket`` - path to the listening unix socket, string, optional. If set, server will create the socket on the specified address which can (and will) be used by application. Note, that unlike usual host/port configuration, unix socket allows to perform requests without authorization.
 * ``unix_socket_unsafe`` - set unsafe (o+w) permissions to unix socket, boolean, optional, default ``yes``. This option is enabled by default, because it is supposed that unix socket is created in safe environment (only web service is supposed to be used in unsafe), but it can be disabled by configuration.
 * ``username`` - username to authorize in web service in order to update service status, string, required in case if authorization enabled.
+* ``wait_timeout`` - wait timeout in seconds, maximum amount of time to be waited before lock will be free, int, optional.
 
 ``keyring`` group
 --------------------
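Taken together, the ``web`` options added in this hunk could be combined into a single section roughly like the sketch below; the values are illustrative only and are not defaults introduced by this commit:

    [web]
    enable_archive_upload = yes
    max_body_size = 104857600
    wait_timeout = 0
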
@@ -246,6 +249,17 @@ Section name must be either ``html`` (plus optional architecture name, e.g. ``ht
 * ``path`` - path to html report file, string, required.
 * ``template_path`` - path to Jinja2 template, string, required.
 
+``remote-call`` type
+^^^^^^^^^^^^^^^^^^^^
+
+Section name must be either ``remote-call`` (plus optional architecture name, e.g. ``remote-call:x86_64``) or random name with ``type`` set.
+
+* ``type`` - type of the report, string, optional, must be set to ``remote-call`` if exists.
+* ``aur`` - check for AUR packages updates, boolean, optional, default ``no``.
+* ``local`` - check for local packages updates, boolean, optional, default ``no``.
+* ``manual`` - update manually built packages, boolean, optional, default ``no``.
+* ``wait_timeout`` - maximum amount of time in seconds to be waited before remote process will be terminated, int, optional, default ``-1``.
+
 ``telegram`` type
 ^^^^^^^^^^^^^^^^^
 
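A minimal report section using the new ``remote-call`` type might look like the following sketch (illustrative values; the ``Distributed builds`` FAQ section added later in this commit shows the full workflow):

    [report]
    target = remote-call

    [remote-call]
    manual = yes
    wait_timeout = 0
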
@@ -291,6 +305,13 @@ This feature requires Github key creation (see below). Section name must be eith
 * ``timeout`` - HTTP request timeout in seconds, int, optional, default is ``30``.
 * ``username`` - Github authorization user, string, required. Basically the same as ``owner``.
 
+``remote-service`` type
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Section name must be either ``remote-service`` (plus optional architecture name, e.g. ``remote-service:x86_64``) or random name with ``type`` set.
+
+* ``type`` - type of the report, string, optional, must be set to ``remote-service`` if exists.
+
 ``rsync`` type
 ^^^^^^^^^^^^^^
 
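Correspondingly, a worker node that should push built archives to the master node would enable the new upload target roughly as in this sketch (based on the FAQ example added by this commit):

    [upload]
    target = remote-service

    [remote-service]
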
docs/faq.rst (199 lines changed)
@@ -396,6 +396,7 @@ The following environment variables are supported:
 * ``AHRIMAN_PACMAN_MIRROR`` - override pacman mirror server if set.
 * ``AHRIMAN_PORT`` - HTTP server port if any, default is empty.
 * ``AHRIMAN_REPOSITORY`` - repository name, default is ``aur-clone``.
+* ``AHRIMAN_REPOSITORY_SERVER`` - optional override for the repository url. Useful if you would like to download packages from remote instead of local filesystem.
 * ``AHRIMAN_REPOSITORY_ROOT`` - repository root. Because of filesystem rights it is required to override default repository root. By default, it uses ``ahriman`` directory inside ahriman's home, which can be passed as mount volume.
 * ``AHRIMAN_UNIX_SOCKET`` - full path to unix socket which is used by web server, default is empty. Note that more likely you would like to put it inside ``AHRIMAN_REPOSITORY_ROOT`` directory (e.g. ``/var/lib/ahriman/ahriman/ahriman-web.sock``) or to ``/tmp``.
 * ``AHRIMAN_USER`` - ahriman user, usually must not be overwritten, default is ``ahriman``.
@@ -722,7 +723,6 @@ How to post build report to telegram
 #.
    Optionally (if you want to post message in chat):
 
-
 #. Create telegram channel.
 #. Invite your bot into the channel.
 #. Make your channel public
@@ -753,6 +753,203 @@ If you did everything fine you should receive the message with the next update.
 
 (replace ``${CHAT_ID}`` and ``${API_KEY}`` with the values from configuration).
 
+Distributed builds
+------------------
+
+The service allows to run build on multiple machines and collect packages on main node. There are multiple ways to achieve it, this section describes officially supported methods.
+
+Remote synchronization and remote server call
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This setup requires at least two instances of the service:
+
+#. Web service (with opt-in authorization enabled), later will be referenced as ``master`` node.
+#. Application instances responsible for build, later will be referenced as ``worker`` nodes.
+
+In this example the following settings are assumed:
+
+* Repository architecture is ``x86_64``.
+* Master node address is ``master.example.com``.
+
+Master node configuration
+"""""""""""""""""""""""""
+
+The only requirements for the master node is that API must be available for worker nodes to call (e.g. port must be exposed to internet, or local network in case of VPN, etc) and file upload must be enabled:
+
+.. code-block:: ini
+
+   [web]
+   enable_archive_upload = yes
+
+In addition, the following settings are recommended for the master node:
+
+*
+   As it has been mentioned above, it is recommended to enable authentication (see `How to enable basic authorization`_) and create system user which will be used later. Later this user (if any) will be referenced as ``worker-user``.
+
+*
+   In order to be able to spawn multiple processes at the same time, wait timeout must be configured:
+
+   .. code-block:: ini
+
+      [web]
+      wait_timeout = 0
+
+Worker nodes configuration
+""""""""""""""""""""""""""
+
+#.
+   First of all, in this setup you need to split your repository into chunks manually, e.g. if you have repository on master node with packages ``A``, ``B`` and ``C``, you need to split them between all available workers, as example:
+
+   * Worker #1: ``A``.
+   * Worker #2: ``B`` and ``C``.
+
+#.
+   Each worker must be configured to upload files to master node:
+
+   .. code-block:: ini
+
+      [upload]
+      target = remote-service
+
+      [remote-service]
+
+#.
+   Worker must be configured to access web on master node:
+
+   .. code-block:: ini
+
+      [web]
+      address = master.example.com
+      username = worker-user
+      password = very-secure-password
+
+   As it has been mentioned above, ``web.address`` must be available for workers. In case if unix socket is used, it can be passed as ``web.unix_socket`` variable as usual. Optional ``web.username``/``web.password`` can be supplied in case if authentication was enabled on master node.
+
+#.
+   Each worker must call master node on success:
+
+   .. code-block:: ini
+
+      [report]
+      target = remote-call
+
+      [remote-call]
+      manual = yes
+
+   After success synchronization (see above), the built packages will be put into directory, from which they will be read during manual update, thus ``remote-call.manual`` flag is required.
+
+#.
+   Change order of trigger runs. This step is required, because by default the report trigger is called before the upload trigger and we would like to achieve the opposite:
+
+   .. code-block:: ini
+
+      [build]
+      triggers = ahriman.core.gitremote.RemotePullTrigger ahriman.core.upload.UploadTrigger ahriman.core.report.ReportTrigger ahriman.core.gitremote.RemotePushTrigger
+
+In addition, the following settings are recommended for workers:
+
+*
+   You might want to wait until report trigger will be completed; in this case the following option must be set:
+
+   .. code-block:: ini
+
+      [remote-call]
+      wait_timeout = 0
+
+Dependency management
+"""""""""""""""""""""
+
+By default worker nodes don't know anything about master nodes packages, thus it will try to build each dependency by its own. However, using ``AHRIMAN_REPOSITORY_SERVER`` docker variable (or ``--server`` flag for setup command), it is possible to specify address of the master node for devtools configuration.
+
+Repository and packages signing
+"""""""""""""""""""""""""""""""
+
+You can sign packages on worker nodes and then signatures will be synced to master node. In order to do so, you need to configure worker node as following, e.g.:
+
+.. code-block:: ini
+
+   [sign]
+   target = package
+   key = 8BE91E5A773FB48AC05CC1EDBED105AED6246B39
+
+Note, however, that in this case, signatures will not be validated on master node and just will be copied to repository tree.
+
+If you would like to sign only database files (aka repository sign), it has to be configured on master node only as usual, e.g.:
+
+.. code-block:: ini
+
+   [sign]
+   target = repository
+   key = 8BE91E5A773FB48AC05CC1EDBED105AED6246B39
+
+Double node minimal docker example
+""""""""""""""""""""""""""""""""""
+
+Master node config (``master.ini``) as:
+
+.. code-block:: ini
+
+   [auth]
+   target = mapping
+
+   [web]
+   enable_archive_upload = yes
+   wait_timeout = 0
+
+Command to run master node:
+
+.. code-block:: shell
+
+   docker run --privileged -p 8080:8080 -e AHRIMAN_PORT=8080 -v master.ini:/etc/ahriman.ini.d/overrides.ini arcan1s/ahriman:latest web
+
+The user ``worker-user`` has been created additionally. Worker node config (``worker.ini``) as:
+
+.. code-block:: ini
+
+   [web]
+   address = http://172.17.0.1:8080
+   username = worker-user
+   password = very-secure-password
+
+   [upload]
+   target = remote-service
+
+   [remote-service]
+
+   [report]
+   target = remote-call
+
+   [remote-call]
+   manual = yes
+   wait_timeout = 0
+
+   [build]
+   triggers = ahriman.core.gitremote.RemotePullTrigger ahriman.core.upload.UploadTrigger ahriman.core.report.ReportTrigger ahriman.core.gitremote.RemotePushTrigger
+
+The address above (``http://172.17.0.1:8080``) is something available for worker container.
+
+Command to run worker node:
+
+.. code-block:: shell
+
+   docker run --privileged -v worker.ini:/etc/ahriman.ini.d/overrides.ini -it arcan1s/ahriman:latest package-add ahriman --now
+
+The command above will successfully build ``ahriman`` package, upload it on master node and, finally, will update master node repository.
+
+Addition of new package and repository update
+"""""""""""""""""""""""""""""""""""""""""""""
+
+Just run on worker command as usual, the built packages will be automatically uploaded to master node. Note that automatic update process must be disabled on master node.
+
+Package removal
+"""""""""""""""
+
+This action must be done in two steps:
+
+#. Remove package on worker.
+#. Remove package on master node.
+
 Maintenance packages
 --------------------
 
@@ -2,7 +2,7 @@
 
 _shtab_ahriman_subparsers=('aur-search' 'search' 'help' 'help-commands-unsafe' 'help-updates' 'help-version' 'version' 'package-add' 'add' 'package-update' 'package-remove' 'remove' 'package-status' 'status' 'package-status-remove' 'package-status-update' 'status-update' 'patch-add' 'patch-list' 'patch-remove' 'patch-set-add' 'repo-backup' 'repo-check' 'check' 'repo-create-keyring' 'repo-create-mirrorlist' 'repo-daemon' 'daemon' 'repo-rebuild' 'rebuild' 'repo-remove-unknown' 'remove-unknown' 'repo-report' 'report' 'repo-restore' 'repo-sign' 'sign' 'repo-status-update' 'repo-sync' 'sync' 'repo-tree' 'repo-triggers' 'repo-update' 'update' 'service-clean' 'clean' 'repo-clean' 'service-config' 'config' 'repo-config' 'service-config-validate' 'config-validate' 'repo-config-validate' 'service-key-import' 'key-import' 'service-setup' 'init' 'repo-init' 'repo-setup' 'setup' 'service-shell' 'shell' 'user-add' 'user-list' 'user-remove' 'web')
 
-_shtab_ahriman_option_strings=('-h' '--help' '-a' '--architecture' '-c' '--configuration' '--force' '-l' '--lock' '--log-handler' '--report' '--no-report' '-q' '--quiet' '--unsafe' '-V' '--version')
+_shtab_ahriman_option_strings=('-h' '--help' '-a' '--architecture' '-c' '--configuration' '--force' '-l' '--lock' '--log-handler' '--report' '--no-report' '-q' '--quiet' '--unsafe' '--wait-timeout' '-V' '--version')
 _shtab_ahriman_aur_search_option_strings=('-h' '--help' '-e' '--exit-code' '--info' '--no-info' '--sort-by')
 _shtab_ahriman_search_option_strings=('-h' '--help' '-e' '--exit-code' '--info' '--no-info' '--sort-by')
 _shtab_ahriman_help_option_strings=('-h' '--help')
@@ -58,11 +58,11 @@ _shtab_ahriman_config_validate_option_strings=('-h' '--help' '-e' '--exit-code')
 _shtab_ahriman_repo_config_validate_option_strings=('-h' '--help' '-e' '--exit-code')
 _shtab_ahriman_service_key_import_option_strings=('-h' '--help' '--key-server')
 _shtab_ahriman_key_import_option_strings=('-h' '--help' '--key-server')
-_shtab_ahriman_service_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
-_shtab_ahriman_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
-_shtab_ahriman_repo_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
-_shtab_ahriman_repo_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
-_shtab_ahriman_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
+_shtab_ahriman_service_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
+_shtab_ahriman_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
+_shtab_ahriman_repo_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
+_shtab_ahriman_repo_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
+_shtab_ahriman_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
 _shtab_ahriman_service_shell_option_strings=('-h' '--help')
 _shtab_ahriman_shell_option_strings=('-h' '--help')
 _shtab_ahriman_user_add_option_strings=('-h' '--help' '--key' '--packager' '-p' '--password' '-r' '--role')
@@ -1,9 +1,9 @@
-.TH AHRIMAN "1" "2023\-08\-07" "ahriman" "Generated Python Manual"
+.TH AHRIMAN "1" "2023\-08\-19" "ahriman" "Generated Python Manual"
 .SH NAME
 ahriman
 .SH SYNOPSIS
 .B ahriman
-[-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--log-handler {console,syslog,journald}] [--report | --no-report] [-q] [--unsafe] [-V] {aur-search,search,help,help-commands-unsafe,help-updates,help-version,version,package-add,add,package-update,package-remove,remove,package-status,status,package-status-remove,package-status-update,status-update,patch-add,patch-list,patch-remove,patch-set-add,repo-backup,repo-check,check,repo-create-keyring,repo-create-mirrorlist,repo-daemon,daemon,repo-rebuild,rebuild,repo-remove-unknown,remove-unknown,repo-report,report,repo-restore,repo-sign,sign,repo-status-update,repo-sync,sync,repo-tree,repo-triggers,repo-update,update,service-clean,clean,repo-clean,service-config,config,repo-config,service-config-validate,config-validate,repo-config-validate,service-key-import,key-import,service-setup,init,repo-init,repo-setup,setup,service-shell,shell,user-add,user-list,user-remove,web} ...
+[-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--log-handler {console,syslog,journald}] [--report | --no-report] [-q] [--unsafe] [--wait-timeout WAIT_TIMEOUT] [-V] {aur-search,search,help,help-commands-unsafe,help-updates,help-version,version,package-add,add,package-update,package-remove,remove,package-status,status,package-status-remove,package-status-update,status-update,patch-add,patch-list,patch-remove,patch-set-add,repo-backup,repo-check,check,repo-create-keyring,repo-create-mirrorlist,repo-daemon,daemon,repo-rebuild,rebuild,repo-remove-unknown,remove-unknown,repo-report,report,repo-restore,repo-sign,sign,repo-status-update,repo-sync,sync,repo-tree,repo-triggers,repo-update,update,service-clean,clean,repo-clean,service-config,config,repo-config,service-config-validate,config-validate,repo-config-validate,service-key-import,key-import,service-setup,init,repo-init,repo-setup,setup,service-shell,shell,user-add,user-list,user-remove,web} ...
 .SH DESCRIPTION
 ArcH linux ReposItory MANager
 
@@ -40,6 +40,11 @@ force disable any logging
 \fB\-\-unsafe\fR
 allow to run ahriman as non\-ahriman user. Some actions might be unavailable
 
+.TP
+\fB\-\-wait\-timeout\fR \fI\,WAIT_TIMEOUT\/\fR
+wait for lock to be free. Negative value will lead to immediate application run even if there is lock file. In case of
+zero value, tthe application will wait infinitely
+
 .TP
 \fB\-V\fR, \fB\-\-version\fR
 show program's version number and exit
@@ -684,7 +689,7 @@ key server for key import
 usage: ahriman service\-setup [\-h] [\-\-build\-as\-user BUILD_AS_USER] [\-\-build\-command BUILD_COMMAND]
                              [\-\-from\-configuration FROM_CONFIGURATION] [\-\-generate\-salt | \-\-no\-generate\-salt]
                              [\-\-makeflags\-jobs | \-\-no\-makeflags\-jobs] [\-\-mirror MIRROR] [\-\-multilib | \-\-no\-multilib]
-                             \-\-packager PACKAGER \-\-repository REPOSITORY [\-\-sign\-key SIGN_KEY]
+                             \-\-packager PACKAGER \-\-repository REPOSITORY [\-\-server SERVER] [\-\-sign\-key SIGN_KEY]
                              [\-\-sign\-target {disabled,packages,repository}] [\-\-web\-port WEB_PORT]
                              [\-\-web\-unix\-socket WEB_UNIX_SOCKET]
 
@@ -727,6 +732,10 @@ packager name and email
 \fB\-\-repository\fR \fI\,REPOSITORY\/\fR
 repository name
 
+.TP
+\fB\-\-server\fR \fI\,SERVER\/\fR
+server to be used for devtools. If none set, local files will be used
+
 .TP
 \fB\-\-sign\-key\fR \fI\,SIGN_KEY\/\fR
 sign key id
@@ -85,6 +85,7 @@ _shtab_ahriman_options=(
   {--report,--no-report}"[force enable or disable reporting to web service (default\: True)]:report:"
   {-q,--quiet}"[force disable any logging (default\: False)]"
   "--unsafe[allow to run ahriman as non-ahriman user. Some actions might be unavailable (default\: False)]"
+  "--wait-timeout[wait for lock to be free. Negative value will lead to immediate application run even if there is lock file. In case of zero value, tthe application will wait infinitely (default\: -1)]:wait_timeout:"
   "(- : *)"{-V,--version}"[show program\'s version number and exit]"
 )
 
@@ -176,6 +177,7 @@ _shtab_ahriman_init_options=(
   {--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
   "--packager[packager name and email (default\: None)]:packager:"
   "--repository[repository name (default\: None)]:repository:"
+  "--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
   "--sign-key[sign key id (default\: None)]:sign_key:"
   "*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
   "--web-port[port of the web service (default\: None)]:web_port:"
@@ -346,6 +348,7 @@ _shtab_ahriman_repo_init_options=(
   {--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
   "--packager[packager name and email (default\: None)]:packager:"
   "--repository[repository name (default\: None)]:repository:"
+  "--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
   "--sign-key[sign key id (default\: None)]:sign_key:"
   "*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
   "--web-port[port of the web service (default\: None)]:web_port:"
@@ -389,6 +392,7 @@ _shtab_ahriman_repo_setup_options=(
   {--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
   "--packager[packager name and email (default\: None)]:packager:"
   "--repository[repository name (default\: None)]:repository:"
+  "--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
   "--sign-key[sign key id (default\: None)]:sign_key:"
   "*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
   "--web-port[port of the web service (default\: None)]:web_port:"
@@ -481,6 +485,7 @@ _shtab_ahriman_service_setup_options=(
   {--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
   "--packager[packager name and email (default\: None)]:packager:"
   "--repository[repository name (default\: None)]:repository:"
+  "--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
   "--sign-key[sign key id (default\: None)]:sign_key:"
   "*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
   "--web-port[port of the web service (default\: None)]:web_port:"
@@ -503,6 +508,7 @@ _shtab_ahriman_setup_options=(
   {--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
   "--packager[packager name and email (default\: None)]:packager:"
   "--repository[repository name (default\: None)]:repository:"
+  "--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
   "--sign-key[sign key id (default\: None)]:sign_key:"
   "*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
   "--web-port[port of the web service (default\: None)]:web_port:"
|
@ -84,6 +84,10 @@ def _parser() -> argparse.ArgumentParser:
|
|||||||
parser.add_argument("-q", "--quiet", help="force disable any logging", action="store_true")
|
parser.add_argument("-q", "--quiet", help="force disable any logging", action="store_true")
|
||||||
parser.add_argument("--unsafe", help="allow to run ahriman as non-ahriman user. Some actions might be unavailable",
|
parser.add_argument("--unsafe", help="allow to run ahriman as non-ahriman user. Some actions might be unavailable",
|
||||||
action="store_true")
|
action="store_true")
|
||||||
|
parser.add_argument("--wait-timeout", help="wait for lock to be free. Negative value will lead to "
|
||||||
|
"immediate application run even if there is lock file. "
|
||||||
|
"In case of zero value, tthe application will wait infinitely",
|
||||||
|
type=int, default=-1)
|
||||||
parser.add_argument("-V", "--version", action="version", version=__version__)
|
parser.add_argument("-V", "--version", action="version", version=__version__)
|
||||||
|
|
||||||
subparsers = parser.add_subparsers(title="command", help="command to run", dest="command", required=True)
|
subparsers = parser.add_subparsers(title="command", help="command to run", dest="command", required=True)
|
||||||
@@ -889,6 +893,7 @@ def _set_service_setup_parser(root: SubParserAction) -> argparse.ArgumentParser:
                         action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("--packager", help="packager name and email", required=True)
     parser.add_argument("--repository", help="repository name", required=True)
+    parser.add_argument("--server", help="server to be used for devtools. If none set, local files will be used")
     parser.add_argument("--sign-key", help="sign key id")
     parser.add_argument("--sign-target", help="sign options", action="append",
                         type=SignSettings.from_option, choices=enum_values(SignSettings))
@@ -63,8 +63,9 @@ class Setup(Handler):
 
         Setup.configuration_create_makepkg(args.packager, args.makeflags_jobs, application.repository.paths)
         Setup.executable_create(application.repository.paths, args.build_command, architecture)
+        repository_server = f"file://{application.repository.paths.repository}" if args.server is None else args.server
         Setup.configuration_create_devtools(args.build_command, architecture, args.from_configuration, args.mirror,
-                                            args.multilib, args.repository, application.repository.paths)
+                                            args.multilib, args.repository, repository_server)
         Setup.configuration_create_sudo(application.repository.paths, args.build_command, architecture)
 
         application.repository.repo.init()
@@ -134,7 +135,7 @@ class Setup(Handler):
 
     @staticmethod
     def configuration_create_devtools(prefix: str, architecture: str, source: Path, mirror: str | None,
-                                      multilib: bool, repository: str, paths: RepositoryPaths) -> None:
+                                      multilib: bool, repository: str, repository_server: str) -> None:
         """
         create configuration for devtools based on ``source`` configuration
 
@@ -148,7 +149,7 @@ class Setup(Handler):
             mirror(str | None): link to package server mirror
             multilib(bool): add or do not multilib repository to the configuration
             repository(str): repository name
-            paths(RepositoryPaths): repository paths instance
+            repository_server(str): url of the repository
         """
         # allow_no_value=True is required because pacman uses boolean configuration in which just keys present
         # (e.g. NoProgressBar) which will lead to exception
@@ -178,7 +179,7 @@ class Setup(Handler):
 
         # add repository itself
         configuration.set_option(repository, "SigLevel", "Never")  # we don't care
-        configuration.set_option(repository, "Server", f"file://{paths.repository}")
+        configuration.set_option(repository, "Server", repository_server)
 
         target = source.parent / f"{prefix}-{architecture}.conf"
         with target.open("w") as devtools_configuration:
@@ -50,14 +50,14 @@ class Status(Handler):
         # we are using reporter here
         client = Application(architecture, configuration, report=True).repository.reporter
         if args.ahriman:
-            service_status = client.get_internal()
+            service_status = client.status_get()
             StatusPrinter(service_status.status).print(verbose=args.info)
         if args.package:
             packages: list[tuple[Package, BuildStatus]] = sum(
-                (client.get(base) for base in args.package),
+                (client.package_get(base) for base in args.package),
                 start=[])
         else:
-            packages = client.get(None)
+            packages = client.package_get(None)
 
         Status.check_if_empty(args.exit_code, not packages)
 
@@ -49,10 +49,10 @@ class StatusUpdate(Handler):
         if args.action == Action.Update and args.package:
             # update packages statuses
             for package in args.package:
-                client.update(package, args.status)
+                client.package_update(package, args.status)
         elif args.action == Action.Update:
             # update service status
-            client.update_self(args.status)
+            client.status_update(args.status)
         elif args.action == Action.Remove:
             for package in args.package:
-                client.remove(package)
+                client.package_remove(package)
@@ -33,7 +33,6 @@ class Web(Handler):
 
     ALLOW_AUTO_ARCHITECTURE_RUN = False
     ALLOW_MULTI_ARCHITECTURE_RUN = False  # required to be able to spawn external processes
-    COMMAND_ARGS_WHITELIST = ["force", "log_handler", ""]
 
     @classmethod
     def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
@@ -89,3 +88,7 @@ class Web(Handler):
             yield "--quiet"
         if args.unsafe:
             yield "--unsafe"
+
+        # arguments from configuration
+        if (wait_timeout := configuration.getint("web", "wait_timeout", fallback=None)) is not None:
+            yield from ["--wait-timeout", str(wait_timeout)]
@@ -19,6 +19,7 @@
 #
 import argparse
 
+from pathlib import Path
 from types import TracebackType
 from typing import Literal, Self
 
@@ -29,6 +30,7 @@ from ahriman.core.log import LazyLogging
 from ahriman.core.status.client import Client
 from ahriman.core.util import check_user
 from ahriman.models.build_status import BuildStatusEnum
+from ahriman.models.waiter import Waiter
 
 
 class Lock(LazyLogging):
@@ -41,6 +43,7 @@ class Lock(LazyLogging):
         reporter(Client): build status reporter instance
         paths(RepositoryPaths): repository paths instance
         unsafe(bool): skip user check
+        wait_timeout(int): wait in seconds until lock will free
 
     Examples:
         Instance of this class except for controlling file-based lock is also required for basic applications checks.
@@ -65,9 +68,11 @@ class Lock(LazyLogging):
             architecture(str): repository architecture
             configuration(Configuration): configuration instance
         """
-        self.path = args.lock.with_stem(f"{args.lock.stem}_{architecture}") if args.lock is not None else None
-        self.force = args.force
-        self.unsafe = args.unsafe
+        self.path: Path | None = \
+            args.lock.with_stem(f"{args.lock.stem}_{architecture}") if args.lock is not None else None
+        self.force: bool = args.force
+        self.unsafe: bool = args.unsafe
+        self.wait_timeout: int = args.wait_timeout
 
         self.paths = configuration.repository_paths
         self.reporter = Client.load(configuration, report=args.report)
|
|||||||
"""
|
"""
|
||||||
check web server version
|
check web server version
|
||||||
"""
|
"""
|
||||||
status = self.reporter.get_internal()
|
status = self.reporter.status_get()
|
||||||
if status.version is not None and status.version != __version__:
|
if status.version is not None and status.version != __version__:
|
||||||
self.logger.warning("status watcher version mismatch, our %s, their %s",
|
self.logger.warning("status watcher version mismatch, our %s, their %s",
|
||||||
__version__, status.version)
|
__version__, status.version)
|
||||||
@ -110,6 +115,19 @@ class Lock(LazyLogging):
|
|||||||
except FileExistsError:
|
except FileExistsError:
|
||||||
raise DuplicateRunError()
|
raise DuplicateRunError()
|
||||||
|
|
||||||
|
def watch(self) -> None:
|
||||||
|
"""
|
||||||
|
watch until lock disappear
|
||||||
|
"""
|
||||||
|
# there are reasons why we are not using inotify here. First of all, if we would use it, it would bring to
|
||||||
|
# race conditions because multiple processes will be notified in the same time. Secondly, it is good library,
|
||||||
|
# but platform-specific, and we only need to check if file exists
|
||||||
|
if self.path is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
waiter = Waiter(self.wait_timeout)
|
||||||
|
waiter.wait(self.path.is_file)
|
||||||
|
|
||||||
def __enter__(self) -> Self:
|
def __enter__(self) -> Self:
|
||||||
"""
|
"""
|
||||||
default workflow is the following:
|
default workflow is the following:
|
||||||
@@ -117,16 +135,18 @@ class Lock(LazyLogging):
         1. Check user UID
         2. Check if there is lock file
         3. Check web status watcher status
-        4. Create lock file and directory tree
-        5. Report to status page if enabled
+        4. Wait for lock file to be free
+        5. Create lock file and directory tree
+        6. Report to status page if enabled
 
         Returns:
             Self: always instance of self
         """
         self.check_user()
         self.check_version()
+        self.watch()
         self.create()
-        self.reporter.update_self(BuildStatusEnum.Building)
+        self.reporter.status_update(BuildStatusEnum.Building)
         return self
 
     def __exit__(self, exc_type: type[Exception] | None, exc_val: Exception | None,
|
|||||||
"""
|
"""
|
||||||
self.clear()
|
self.clear()
|
||||||
status = BuildStatusEnum.Success if exc_val is None else BuildStatusEnum.Failed
|
status = BuildStatusEnum.Success if exc_val is None else BuildStatusEnum.Failed
|
||||||
self.reporter.update_self(status)
|
self.reporter.status_update(status)
|
||||||
return False
|
return False
|
||||||
|
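Taken together, the updated workflow makes the lock a plain context manager; a hedged usage sketch (the args namespace contents and the body call are illustrative, only the attribute names used by the constructor above are assumed):

# args is an argparse.Namespace providing lock, force, unsafe, wait_timeout and report
with Lock(args, "x86_64", configuration):
    # on enter: user check, version check, waiting for a concurrent run,
    # lock file creation and the "Building" self-status report
    run_repository_update()  # placeholder for the actual work
# on exit: the lock file is removed and Success/Failed is reported automatically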
@ -228,6 +228,10 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
|||||||
"coerce": "list",
|
"coerce": "list",
|
||||||
"schema": {"type": "string"},
|
"schema": {"type": "string"},
|
||||||
},
|
},
|
||||||
|
"enable_archive_upload": {
|
||||||
|
"type": "boolean",
|
||||||
|
"coerce": "boolean",
|
||||||
|
},
|
||||||
"host": {
|
"host": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"is_ip_address": ["localhost"],
|
"is_ip_address": ["localhost"],
|
||||||
@ -236,6 +240,11 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
|||||||
"type": "string",
|
"type": "string",
|
||||||
"is_url": ["http", "https"],
|
"is_url": ["http", "https"],
|
||||||
},
|
},
|
||||||
|
"max_body_size": {
|
||||||
|
"type": "integer",
|
||||||
|
"coerce": "integer",
|
||||||
|
"min": 0,
|
||||||
|
},
|
||||||
"password": {
|
"password": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
},
|
},
|
||||||
@ -268,6 +277,10 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
|||||||
"username": {
|
"username": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
},
|
},
|
||||||
|
"wait_timeout": {
|
||||||
|
"type": "integer",
|
||||||
|
"coerce": "integer",
|
||||||
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
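The new schema keys above most likely belong to the web service section, given the host, address and username keys shown around them; a configuration excerpt of that shape could look roughly like this, with illustrative values only:

[web]
host = 127.0.0.1
enable_archive_upload = yes
max_body_size = 104857600
wait_timeout = 30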
@ -0,0 +1,36 @@
|
|||||||
|
#
|
||||||
|
# Copyright (c) 2021-2023 ahriman team.
|
||||||
|
#
|
||||||
|
# This file is part of ahriman
|
||||||
|
# (see https://github.com/arcan1s/ahriman).
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
__all__ = ["steps"]
|
||||||
|
|
||||||
|
|
||||||
|
steps = [
|
||||||
|
"""
|
||||||
|
drop index logs_package_base_process_id
|
||||||
|
""",
|
||||||
|
"""
|
||||||
|
alter table logs drop column process_id
|
||||||
|
""",
|
||||||
|
"""
|
||||||
|
alter table logs add column version text not null default ''
|
||||||
|
""",
|
||||||
|
"""
|
||||||
|
create index logs_package_base_version on logs (package_base, version)
|
||||||
|
""",
|
||||||
|
]
|
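Each migration module only exposes a list of SQL statements; a minimal sketch of applying such steps to a SQLite database (simplified runner, the database path is illustrative; note that "alter table ... drop column" requires SQLite 3.35 or newer):

import sqlite3

connection = sqlite3.connect("/var/lib/ahriman/ahriman/ahriman.db")
try:
    for statement in steps:  # ``steps`` as defined above
        connection.execute(statement)
    connection.commit()
finally:
    connection.close()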
@ -66,13 +66,13 @@ class LogsOperations(Operations):
|
|||||||
connection.execute(
|
connection.execute(
|
||||||
"""
|
"""
|
||||||
insert into logs
|
insert into logs
|
||||||
(package_base, process_id, created, record)
|
(package_base, version, created, record)
|
||||||
values
|
values
|
||||||
(:package_base, :process_id, :created, :record)
|
(:package_base, :version, :created, :record)
|
||||||
""",
|
""",
|
||||||
{
|
{
|
||||||
"package_base": log_record_id.package_base,
|
"package_base": log_record_id.package_base,
|
||||||
"process_id": log_record_id.process_id,
|
"version": log_record_id.version,
|
||||||
"created": created,
|
"created": created,
|
||||||
"record": record,
|
"record": record,
|
||||||
}
|
}
|
||||||
@ -80,22 +80,22 @@ class LogsOperations(Operations):
|
|||||||
|
|
||||||
return self.with_connection(run, commit=True)
|
return self.with_connection(run, commit=True)
|
||||||
|
|
||||||
def logs_remove(self, package_base: str, current_process_id: int | None) -> None:
|
def logs_remove(self, package_base: str, version: str | None) -> None:
|
||||||
"""
|
"""
|
||||||
remove log records for the specified package
|
remove log records for the specified package
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_base(str): package base to remove logs
|
package_base(str): package base to remove logs
|
||||||
current_process_id(int | None): current process id. If set it will remove only logs belonging to another
|
version(str | None): package version. If set, only logs belonging to another
|
||||||
process
|
version will be removed
|
||||||
"""
|
"""
|
||||||
def run(connection: Connection) -> None:
|
def run(connection: Connection) -> None:
|
||||||
connection.execute(
|
connection.execute(
|
||||||
"""
|
"""
|
||||||
delete from logs
|
delete from logs
|
||||||
where package_base = :package_base and (:process_id is null or process_id <> :process_id)
|
where package_base = :package_base and (:version is null or version <> :version)
|
||||||
""",
|
""",
|
||||||
{"package_base": package_base, "process_id": current_process_id}
|
{"package_base": package_base, "version": version}
|
||||||
)
|
)
|
||||||
|
|
||||||
return self.with_connection(run, commit=True)
|
return self.with_connection(run, commit=True)
|
||||||
|
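Reading the delete statement above: when a version is supplied, only records of other versions are dropped, and passing None wipes every record of the package base. For instance:

# keep only the log records which belong to the currently known version
database.logs_remove("ahriman", "2.9.0-1")   # version is illustrative

# remove every log record of the package base (used on package removal)
database.logs_remove("ahriman", None)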
@ -30,13 +30,14 @@ class FilteredAccessLogger(AccessLogger):
|
|||||||
LOG_PATH_REGEX(re.Pattern): (class attribute) regex for logs uri
|
LOG_PATH_REGEX(re.Pattern): (class attribute) regex for logs uri
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# official packages have only ``[A-Za-z0-9_.+-]`` regex
|
LOG_PATH_REGEX = re.compile(r"^/api/v1/packages/[^/]+/logs$")
|
||||||
LOG_PATH_REGEX = re.compile(r"^/api/v1/packages/[A-Za-z0-9_.+%-]+/logs$")
|
# technically process id is uuid, but we might change it later
|
||||||
|
PROCESS_PATH_REGEX = re.compile(r"^/api/v1/service/process/[^/]+$")
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def is_logs_post(request: BaseRequest) -> bool:
|
def is_logs_post(request: BaseRequest) -> bool:
|
||||||
"""
|
"""
|
||||||
check if request looks lie logs posting
|
check if request looks like logs posting
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request(BaseRequest): http request descriptor
|
request(BaseRequest): http request descriptor
|
||||||
@ -46,6 +47,19 @@ class FilteredAccessLogger(AccessLogger):
|
|||||||
"""
|
"""
|
||||||
return request.method == "POST" and FilteredAccessLogger.LOG_PATH_REGEX.match(request.path) is not None
|
return request.method == "POST" and FilteredAccessLogger.LOG_PATH_REGEX.match(request.path) is not None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_process_get(request: BaseRequest) -> bool:
|
||||||
|
"""
|
||||||
|
check if request looks like process status request
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request(BaseRequest): http request descriptor
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True in case if request looks like process status request and False otherwise
|
||||||
|
"""
|
||||||
|
return request.method == "GET" and FilteredAccessLogger.PROCESS_PATH_REGEX.match(request.path) is not None
|
||||||
|
|
||||||
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
||||||
"""
|
"""
|
||||||
access log with enabled filter by request path
|
access log with enabled filter by request path
|
||||||
@ -55,6 +69,7 @@ class FilteredAccessLogger(AccessLogger):
|
|||||||
response(StreamResponse): streaming response object
|
response(StreamResponse): streaming response object
|
||||||
time(float):
|
time(float):
|
||||||
"""
|
"""
|
||||||
if self.is_logs_post(request):
|
if self.is_logs_post(request) \
|
||||||
|
or self.is_process_get(request):
|
||||||
return
|
return
|
||||||
AccessLogger.log(self, request, response, time)
|
AccessLogger.log(self, request, response, time)
|
||||||
|
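A quick illustration of what the two patterns above filter out of the access log (the behaviour follows directly from the regular expressions):

import re

LOG_PATH_REGEX = re.compile(r"^/api/v1/packages/[^/]+/logs$")
PROCESS_PATH_REGEX = re.compile(r"^/api/v1/service/process/[^/]+$")

assert LOG_PATH_REGEX.match("/api/v1/packages/ahriman/logs") is not None
assert PROCESS_PATH_REGEX.match("/api/v1/service/process/4a0c14b8") is not None
assert PROCESS_PATH_REGEX.match("/api/v1/service/process/4a0c14b8/logs") is None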
@ -81,12 +81,12 @@ class HttpLogHandler(logging.Handler):
|
|||||||
Args:
|
Args:
|
||||||
record(logging.LogRecord): log record to log
|
record(logging.LogRecord): log record to log
|
||||||
"""
|
"""
|
||||||
package_base = getattr(record, "package_base", None)
|
log_record_id = getattr(record, "package_id", None)
|
||||||
if package_base is None:
|
if log_record_id is None:
|
||||||
return # in case if no package base supplied we need just skip log message
|
return # in case if no package base supplied we need just skip log message
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self.reporter.logs(package_base, record)
|
self.reporter.package_logs(log_record_id, record)
|
||||||
except Exception:
|
except Exception:
|
||||||
if self.suppress_errors:
|
if self.suppress_errors:
|
||||||
return
|
return
|
||||||
|
@ -24,6 +24,8 @@ from collections.abc import Generator
|
|||||||
from functools import cached_property
|
from functools import cached_property
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
from ahriman.models.log_record_id import LogRecordId
|
||||||
|
|
||||||
|
|
||||||
class LazyLogging:
|
class LazyLogging:
|
||||||
"""
|
"""
|
||||||
@ -60,38 +62,40 @@ class LazyLogging:
|
|||||||
logging.setLogRecordFactory(logging.LogRecord)
|
logging.setLogRecordFactory(logging.LogRecord)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _package_logger_set(package_base: str) -> None:
|
def _package_logger_set(package_base: str, version: str | None) -> None:
|
||||||
"""
|
"""
|
||||||
set package base as extra info to the logger
|
set package base as extra info to the logger
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_base(str): package base
|
package_base(str): package base
|
||||||
|
version(str | None): package version if available
|
||||||
"""
|
"""
|
||||||
current_factory = logging.getLogRecordFactory()
|
current_factory = logging.getLogRecordFactory()
|
||||||
|
|
||||||
def package_record_factory(*args: Any, **kwargs: Any) -> logging.LogRecord:
|
def package_record_factory(*args: Any, **kwargs: Any) -> logging.LogRecord:
|
||||||
record = current_factory(*args, **kwargs)
|
record = current_factory(*args, **kwargs)
|
||||||
record.package_base = package_base
|
record.package_id = LogRecordId(package_base, version or "")
|
||||||
return record
|
return record
|
||||||
|
|
||||||
logging.setLogRecordFactory(package_record_factory)
|
logging.setLogRecordFactory(package_record_factory)
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def in_package_context(self, package_base: str) -> Generator[None, None, None]:
|
def in_package_context(self, package_base: str, version: str | None) -> Generator[None, None, None]:
|
||||||
"""
|
"""
|
||||||
execute function while setting package context
|
execute function while setting package context
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_base(str): package base to set context in
|
package_base(str): package base to set context in
|
||||||
|
version(str | None): package version if available
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
This function is designed to be called as context manager with ``package_base`` argument, e.g.:
|
This function is designed to be called as context manager with ``package_base`` argument, e.g.:
|
||||||
|
|
||||||
>>> with self.in_package_context(package.base):
|
>>> with self.in_package_context(package.base, package.version):
|
||||||
>>> build_package(package)
|
>>> build_package(package)
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
self._package_logger_set(package_base)
|
self._package_logger_set(package_base, version)
|
||||||
yield
|
yield
|
||||||
finally:
|
finally:
|
||||||
self._package_logger_reset()
|
self._package_logger_reset()
|
||||||
|
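Stripped of the class plumbing, the package context boils down to swapping the logging record factory; a condensed sketch of the mechanism (the package base and version values are examples):

import logging

from ahriman.models.log_record_id import LogRecordId

default_factory = logging.getLogRecordFactory()

def package_record_factory(*args, **kwargs):
    record = default_factory(*args, **kwargs)
    record.package_id = LogRecordId("ahriman", "2.9.0-1")  # example base and version
    return record

logging.setLogRecordFactory(package_record_factory)
try:
    # every record emitted here carries package_id, which HttpLogHandler.emit() reads
    logging.getLogger("build").info("building package")
finally:
    logging.setLogRecordFactory(default_factory)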
119 lines added in new file src/ahriman/core/report/remote_call.py
@ -0,0 +1,119 @@
|
|||||||
|
#
|
||||||
|
# Copyright (c) 2021-2023 ahriman team.
|
||||||
|
#
|
||||||
|
# This file is part of ahriman
|
||||||
|
# (see https://github.com/arcan1s/ahriman).
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from ahriman.core.configuration import Configuration
|
||||||
|
from ahriman.core.report.report import Report
|
||||||
|
from ahriman.core.status.web_client import WebClient
|
||||||
|
from ahriman.models.package import Package
|
||||||
|
from ahriman.models.result import Result
|
||||||
|
from ahriman.models.waiter import Waiter
|
||||||
|
|
||||||
|
|
||||||
|
class RemoteCall(Report):
|
||||||
|
"""
|
||||||
|
trigger implementation which calls the remote service to run an update
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
client(WebClient): web client instance
|
||||||
|
update_aur(bool): check for AUR updates
|
||||||
|
update_local(bool): check for local packages update
|
||||||
|
update_manual(bool): check for manually built packages
|
||||||
|
wait_timeout(int): timeout in seconds to wait for the external process
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, architecture: str, configuration: Configuration, section: str) -> None:
|
||||||
|
"""
|
||||||
|
default constructor
|
||||||
|
|
||||||
|
Args:
|
||||||
|
architecture(str): repository architecture
|
||||||
|
configuration(Configuration): configuration instance
|
||||||
|
section(str): settings section name
|
||||||
|
"""
|
||||||
|
Report.__init__(self, architecture, configuration)
|
||||||
|
|
||||||
|
self.client = WebClient(configuration)
|
||||||
|
|
||||||
|
self.update_aur = configuration.getboolean(section, "aur", fallback=False)
|
||||||
|
self.update_local = configuration.getboolean(section, "local", fallback=False)
|
||||||
|
self.update_manual = configuration.getboolean(section, "manual", fallback=False)
|
||||||
|
|
||||||
|
self.wait_timeout = configuration.getint(section, "wait_timeout", fallback=-1)
|
||||||
|
|
||||||
|
def generate(self, packages: list[Package], result: Result) -> None:
|
||||||
|
"""
|
||||||
|
generate report for the specified packages
|
||||||
|
|
||||||
|
Args:
|
||||||
|
packages(list[Package]): list of packages to generate report
|
||||||
|
result(Result): build result
|
||||||
|
"""
|
||||||
|
process_id = self.remote_update()
|
||||||
|
self.remote_wait(process_id)
|
||||||
|
|
||||||
|
def is_process_alive(self, process_id: str) -> bool:
|
||||||
|
"""
|
||||||
|
check if process is alive
|
||||||
|
|
||||||
|
Args:
|
||||||
|
process_id(str): remote process id
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True in case if remote process is alive and False otherwise
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
response = self.client.make_request("GET", f"/api/v1/service/process/{process_id}")
|
||||||
|
except requests.RequestException as e:
|
||||||
|
if e.response is not None and e.response.status_code == 404:
|
||||||
|
return False
|
||||||
|
raise
|
||||||
|
|
||||||
|
response_json = response.json()
|
||||||
|
is_alive: bool = response_json["is_alive"]
|
||||||
|
|
||||||
|
return is_alive
|
||||||
|
|
||||||
|
def remote_update(self) -> str:
|
||||||
|
"""
|
||||||
|
call remote server for update
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: remote process id
|
||||||
|
"""
|
||||||
|
response = self.client.make_request("POST", "/api/v1/service/update", json={
|
||||||
|
"aur": self.update_aur,
|
||||||
|
"local": self.update_local,
|
||||||
|
"manual": self.update_manual,
|
||||||
|
})
|
||||||
|
response_json = response.json()
|
||||||
|
|
||||||
|
process_id: str = response_json["process_id"]
|
||||||
|
return process_id
|
||||||
|
|
||||||
|
def remote_wait(self, process_id: str) -> None:
|
||||||
|
"""
|
||||||
|
wait for remote process termination
|
||||||
|
|
||||||
|
Args:
|
||||||
|
process_id(str): remote process id
|
||||||
|
"""
|
||||||
|
waiter = Waiter(self.wait_timeout)
|
||||||
|
waiter.wait(self.is_process_alive, process_id)
|
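Putting the new trigger together: generate() posts an update request and then polls the process endpoint until the remote run is gone; a hedged usage sketch (the section name and the empty package list are illustrative):

# configuration is an initialized Configuration instance
report = RemoteCall("x86_64", configuration, "remote-call")

process_id = report.remote_update()   # POST /api/v1/service/update
report.remote_wait(process_id)        # poll GET /api/v1/service/process/{id}

# equivalent single call, as the trigger machinery would do it
report.generate([], Result())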
@ -93,6 +93,9 @@ class Report(LazyLogging):
|
|||||||
if provider == ReportSettings.Telegram:
|
if provider == ReportSettings.Telegram:
|
||||||
from ahriman.core.report.telegram import Telegram
|
from ahriman.core.report.telegram import Telegram
|
||||||
return Telegram(architecture, configuration, section)
|
return Telegram(architecture, configuration, section)
|
||||||
|
if provider == ReportSettings.RemoteCall:
|
||||||
|
from ahriman.core.report.remote_call import RemoteCall
|
||||||
|
return RemoteCall(architecture, configuration, section)
|
||||||
return Report(architecture, configuration) # should never happen
|
return Report(architecture, configuration) # should never happen
|
||||||
|
|
||||||
def generate(self, packages: list[Package], result: Result) -> None:
|
def generate(self, packages: list[Package], result: Result) -> None:
|
||||||
|
@ -191,6 +191,31 @@ class ReportTrigger(Trigger):
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
"remote-call": {
|
||||||
|
"type": "dict",
|
||||||
|
"schema": {
|
||||||
|
"type": {
|
||||||
|
"type": "string",
|
||||||
|
"allowed": ["ahriman", "remote-call"],
|
||||||
|
},
|
||||||
|
"aur": {
|
||||||
|
"type": "boolean",
|
||||||
|
"coerce": "boolean",
|
||||||
|
},
|
||||||
|
"local": {
|
||||||
|
"type": "boolean",
|
||||||
|
"coerce": "boolean",
|
||||||
|
},
|
||||||
|
"manual": {
|
||||||
|
"type": "boolean",
|
||||||
|
"coerce": "boolean",
|
||||||
|
},
|
||||||
|
"wait_timeout": {
|
||||||
|
"type": "integer",
|
||||||
|
"coerce": "integer",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
def __init__(self, architecture: str, configuration: Configuration) -> None:
|
def __init__(self, architecture: str, configuration: Configuration) -> None:
|
||||||
|
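Following the schema keys above, a report target for the remote call trigger could be configured roughly as below; the section wiring and the values are illustrative, only the key names come from the schema:

[report]
target = remote-call

[remote-call]
type = remote-call
aur = yes
local = yes
manual = no
wait_timeout = 0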
@ -93,7 +93,8 @@ class Executor(Cleaner):
|
|||||||
|
|
||||||
result = Result()
|
result = Result()
|
||||||
for single in updates:
|
for single in updates:
|
||||||
with self.in_package_context(single.base), TemporaryDirectory(ignore_cleanup_errors=True) as dir_name:
|
with self.in_package_context(single.base, local_versions.get(single.base)), \
|
||||||
|
TemporaryDirectory(ignore_cleanup_errors=True) as dir_name:
|
||||||
try:
|
try:
|
||||||
packager = self.packager(packagers, single.base)
|
packager = self.packager(packagers, single.base)
|
||||||
build_single(single, Path(dir_name), packager.packager_id)
|
build_single(single, Path(dir_name), packager.packager_id)
|
||||||
@ -121,7 +122,7 @@ class Executor(Cleaner):
|
|||||||
self.database.build_queue_clear(package_base)
|
self.database.build_queue_clear(package_base)
|
||||||
self.database.patches_remove(package_base, [])
|
self.database.patches_remove(package_base, [])
|
||||||
self.database.logs_remove(package_base, None)
|
self.database.logs_remove(package_base, None)
|
||||||
self.reporter.remove(package_base) # we only update status page in case of base removal
|
self.reporter.package_remove(package_base) # we only update status page in case of base removal
|
||||||
except Exception:
|
except Exception:
|
||||||
self.logger.exception("could not remove base %s", package_base)
|
self.logger.exception("could not remove base %s", package_base)
|
||||||
|
|
||||||
@ -201,14 +202,16 @@ class Executor(Cleaner):
|
|||||||
package_path = self.paths.repository / safe_filename(name)
|
package_path = self.paths.repository / safe_filename(name)
|
||||||
self.repo.add(package_path)
|
self.repo.add(package_path)
|
||||||
|
|
||||||
current_packages = self.packages()
|
current_packages = {package.base: package for package in self.packages()}
|
||||||
|
local_versions = {package_base: package.version for package_base, package in current_packages.items()}
|
||||||
|
|
||||||
removed_packages: list[str] = [] # list of packages which have been removed from the base
|
removed_packages: list[str] = [] # list of packages which have been removed from the base
|
||||||
updates = self.load_archives(packages)
|
updates = self.load_archives(packages)
|
||||||
packagers = packagers or Packagers()
|
packagers = packagers or Packagers()
|
||||||
|
|
||||||
result = Result()
|
result = Result()
|
||||||
for local in updates:
|
for local in updates:
|
||||||
with self.in_package_context(local.base):
|
with self.in_package_context(local.base, local_versions.get(local.base)):
|
||||||
try:
|
try:
|
||||||
packager = self.packager(packagers, local.base)
|
packager = self.packager(packagers, local.base)
|
||||||
|
|
||||||
@ -218,12 +221,9 @@ class Executor(Cleaner):
|
|||||||
self.reporter.set_success(local)
|
self.reporter.set_success(local)
|
||||||
result.add_success(local)
|
result.add_success(local)
|
||||||
|
|
||||||
current_package_archives = {
|
current_package_archives: set[str] = set()
|
||||||
package
|
if local.base in current_packages:
|
||||||
for current in current_packages
|
current_package_archives = set(current_packages[local.base].packages.keys())
|
||||||
if current.base == local.base
|
|
||||||
for package in current.packages
|
|
||||||
}
|
|
||||||
removed_packages.extend(current_package_archives.difference(local.packages))
|
removed_packages.extend(current_package_archives.difference(local.packages))
|
||||||
except Exception:
|
except Exception:
|
||||||
self.reporter.set_failed(local.base)
|
self.reporter.set_failed(local.base)
|
||||||
|
@ -66,10 +66,11 @@ class UpdateHandler(Cleaner):
|
|||||||
continue
|
continue
|
||||||
raise UnknownPackageError(package.base)
|
raise UnknownPackageError(package.base)
|
||||||
|
|
||||||
result: list[Package] = []
|
local_versions = {package.base: package.version for package in self.packages()}
|
||||||
|
|
||||||
|
result: list[Package] = []
|
||||||
for local in self.packages():
|
for local in self.packages():
|
||||||
with self.in_package_context(local.base):
|
with self.in_package_context(local.base, local_versions.get(local.base)):
|
||||||
if not local.remote.is_remote:
|
if not local.remote.is_remote:
|
||||||
continue # avoid checking local packages
|
continue # avoid checking local packages
|
||||||
if local.base in self.ignore_list:
|
if local.base in self.ignore_list:
|
||||||
@ -102,11 +103,12 @@ class UpdateHandler(Cleaner):
|
|||||||
Returns:
|
Returns:
|
||||||
list[Package]: list of local packages which are out-of-dated
|
list[Package]: list of local packages which are out-of-dated
|
||||||
"""
|
"""
|
||||||
result: list[Package] = []
|
|
||||||
packages = {local.base: local for local in self.packages()}
|
packages = {local.base: local for local in self.packages()}
|
||||||
|
local_versions = {package_base: package.version for package_base, package in packages.items()}
|
||||||
|
|
||||||
|
result: list[Package] = []
|
||||||
for cache_dir in self.paths.cache.iterdir():
|
for cache_dir in self.paths.cache.iterdir():
|
||||||
with self.in_package_context(cache_dir.name):
|
with self.in_package_context(cache_dir.name, local_versions.get(cache_dir.name)):
|
||||||
try:
|
try:
|
||||||
source = RemoteSource(
|
source = RemoteSource(
|
||||||
source=PackageSource.Local,
|
source=PackageSource.Local,
|
||||||
|
@ -101,6 +101,19 @@ class GPG(LazyLogging):
|
|||||||
default_key = configuration.get("sign", "key") if targets else None
|
default_key = configuration.get("sign", "key") if targets else None
|
||||||
return targets, default_key
|
return targets, default_key
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def signature(filepath: Path) -> Path:
|
||||||
|
"""
|
||||||
|
generate signature name for the file
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filepath(Path): path to the file which will be signed
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path: path to signature file
|
||||||
|
"""
|
||||||
|
return filepath.parent / f"{filepath.name}.sig"
|
||||||
|
|
||||||
def key_download(self, server: str, key: str) -> str:
|
def key_download(self, server: str, key: str) -> str:
|
||||||
"""
|
"""
|
||||||
download key from public PGP server
|
download key from public PGP server
|
||||||
@ -179,11 +192,11 @@ class GPG(LazyLogging):
|
|||||||
*GPG.sign_command(path, key),
|
*GPG.sign_command(path, key),
|
||||||
exception=BuildError(path.name),
|
exception=BuildError(path.name),
|
||||||
logger=self.logger)
|
logger=self.logger)
|
||||||
return [path, path.parent / f"{path.name}.sig"]
|
return [path, self.signature(path)]
|
||||||
|
|
||||||
def process_sign_package(self, path: Path, packager_key: str | None) -> list[Path]:
|
def process_sign_package(self, path: Path, packager_key: str | None) -> list[Path]:
|
||||||
"""
|
"""
|
||||||
sign package if required by configuration
|
sign package if required by configuration and signature doesn't exist
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
path(Path): path to file to sign
|
path(Path): path to file to sign
|
||||||
@ -192,6 +205,10 @@ class GPG(LazyLogging):
|
|||||||
Returns:
|
Returns:
|
||||||
list[Path]: list of generated files including original file
|
list[Path]: list of generated files including original file
|
||||||
"""
|
"""
|
||||||
|
if (signature := self.signature(path)).is_file():
|
||||||
|
# the file was already signed before, just use its signature
|
||||||
|
return [path, signature]
|
||||||
|
|
||||||
if SignSettings.Packages not in self.targets:
|
if SignSettings.Packages not in self.targets:
|
||||||
return [path]
|
return [path]
|
||||||
|
|
||||||
|
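The signature() helper simply places the detached signature next to the original file, which is what lets process_sign_package() short-circuit when a signature was uploaded together with the package:

from pathlib import Path

package = Path("/var/lib/ahriman/repository/x86_64/demo-1.0.0-1-any.pkg.tar.zst")  # illustrative
assert GPG.signature(package) == package.parent / f"{package.name}.sig"

# if that ".sig" file already exists, process_sign_package() returns both paths
# without calling gpg again; otherwise the normal signing flow applies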
@ -20,6 +20,7 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import time
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from collections.abc import Callable, Iterable
|
from collections.abc import Callable, Iterable
|
||||||
@ -38,7 +39,7 @@ class Spawn(Thread, LazyLogging):
|
|||||||
active(dict[str, Process]): map of active child processes required to avoid zombies
|
active(dict[str, Process]): map of active child processes required to avoid zombies
|
||||||
architecture(str): repository architecture
|
architecture(str): repository architecture
|
||||||
command_arguments(list[str]): base command line arguments
|
command_arguments(list[str]): base command line arguments
|
||||||
queue(Queue[tuple[str, bool]]): multiprocessing queue to read updates from processes
|
queue(Queue[tuple[str, bool, int]]): multiprocessing queue to read updates from processes
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, args_parser: argparse.ArgumentParser, architecture: str, command_arguments: list[str]) -> None:
|
def __init__(self, args_parser: argparse.ArgumentParser, architecture: str, command_arguments: list[str]) -> None:
|
||||||
@ -59,11 +60,25 @@ class Spawn(Thread, LazyLogging):
|
|||||||
self.lock = Lock()
|
self.lock = Lock()
|
||||||
self.active: dict[str, Process] = {}
|
self.active: dict[str, Process] = {}
|
||||||
# stupid pylint does not know that it is possible
|
# stupid pylint does not know that it is possible
|
||||||
self.queue: Queue[tuple[str, bool] | None] = Queue() # pylint: disable=unsubscriptable-object
|
self.queue: Queue[tuple[str, bool, int] | None] = Queue() # pylint: disable=unsubscriptable-object
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def boolean_action_argument(name: str, value: bool) -> str:
|
||||||
|
"""
|
||||||
|
convert option of given name with value to boolean action argument
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name(str): command line argument name
|
||||||
|
value(bool): command line argument value
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: ``name`` itself if ``value`` is True and the ``no-`` prefixed flag otherwise
|
||||||
|
"""
|
||||||
|
return name if value else f"no-{name}"
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def process(callback: Callable[[argparse.Namespace, str], bool], args: argparse.Namespace, architecture: str,
|
def process(callback: Callable[[argparse.Namespace, str], bool], args: argparse.Namespace, architecture: str,
|
||||||
process_id: str, queue: Queue[tuple[str, bool]]) -> None: # pylint: disable=unsubscriptable-object
|
process_id: str, queue: Queue[tuple[str, bool, int]]) -> None: # pylint: disable=unsubscriptable-object
|
||||||
"""
|
"""
|
||||||
helper to run external process
|
helper to run external process
|
||||||
|
|
||||||
@ -72,12 +87,17 @@ class Spawn(Thread, LazyLogging):
|
|||||||
args(argparse.Namespace): command line arguments
|
args(argparse.Namespace): command line arguments
|
||||||
architecture(str): repository architecture
|
architecture(str): repository architecture
|
||||||
process_id(str): process unique identifier
|
process_id(str): process unique identifier
|
||||||
queue(Queue[tuple[str, bool]]): output queue
|
queue(Queue[tuple[str, bool, int]]): output queue
|
||||||
"""
|
"""
|
||||||
|
start_time = time.monotonic()
|
||||||
result = callback(args, architecture)
|
result = callback(args, architecture)
|
||||||
queue.put((process_id, result))
|
stop_time = time.monotonic()
|
||||||
|
|
||||||
def _spawn_process(self, command: str, *args: str, **kwargs: str | None) -> None:
|
consumed_time = int(1000 * (stop_time - start_time))
|
||||||
|
|
||||||
|
queue.put((process_id, result, consumed_time))
|
||||||
|
|
||||||
|
def _spawn_process(self, command: str, *args: str, **kwargs: str | None) -> str:
|
||||||
"""
|
"""
|
||||||
spawn external ahriman process with supplied arguments
|
spawn external ahriman process with supplied arguments
|
||||||
|
|
||||||
@ -85,6 +105,9 @@ class Spawn(Thread, LazyLogging):
|
|||||||
command(str): subcommand to run
|
command(str): subcommand to run
|
||||||
*args(str): positional command arguments
|
*args(str): positional command arguments
|
||||||
**kwargs(str): named command arguments
|
**kwargs(str): named command arguments
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: spawned process id
|
||||||
"""
|
"""
|
||||||
# default arguments
|
# default arguments
|
||||||
arguments = self.command_arguments[:]
|
arguments = self.command_arguments[:]
|
||||||
@ -111,19 +134,36 @@ class Spawn(Thread, LazyLogging):
|
|||||||
|
|
||||||
with self.lock:
|
with self.lock:
|
||||||
self.active[process_id] = process
|
self.active[process_id] = process
|
||||||
|
return process_id
|
||||||
|
|
||||||
def key_import(self, key: str, server: str | None) -> None:
|
def has_process(self, process_id: str) -> bool:
|
||||||
|
"""
|
||||||
|
check if given process is alive
|
||||||
|
|
||||||
|
Args:
|
||||||
|
process_id(str): process id to be checked as returned by ``Spawn._spawn_process``
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True in case if process still counts as active and False otherwise
|
||||||
|
"""
|
||||||
|
with self.lock:
|
||||||
|
return process_id in self.active
|
||||||
|
|
||||||
|
def key_import(self, key: str, server: str | None) -> str:
|
||||||
"""
|
"""
|
||||||
import key to service cache
|
import key to service cache
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
key(str): key to import
|
key(str): key to import
|
||||||
server(str | None): PGP key server
|
server(str | None): PGP key server
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: spawned process id
|
||||||
"""
|
"""
|
||||||
kwargs = {} if server is None else {"key-server": server}
|
kwargs = {} if server is None else {"key-server": server}
|
||||||
self._spawn_process("service-key-import", key, **kwargs)
|
return self._spawn_process("service-key-import", key, **kwargs)
|
||||||
|
|
||||||
def packages_add(self, packages: Iterable[str], username: str | None, *, now: bool) -> None:
|
def packages_add(self, packages: Iterable[str], username: str | None, *, now: bool) -> str:
|
||||||
"""
|
"""
|
||||||
add packages
|
add packages
|
||||||
|
|
||||||
@ -131,48 +171,69 @@ class Spawn(Thread, LazyLogging):
|
|||||||
packages(Iterable[str]): packages list to add
|
packages(Iterable[str]): packages list to add
|
||||||
username(str | None): optional override of username for build process
|
username(str | None): optional override of username for build process
|
||||||
now(bool): build packages now
|
now(bool): build packages now
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: spawned process id
|
||||||
"""
|
"""
|
||||||
kwargs = {"username": username}
|
kwargs = {"username": username}
|
||||||
if now:
|
if now:
|
||||||
kwargs["now"] = ""
|
kwargs["now"] = ""
|
||||||
self._spawn_process("package-add", *packages, **kwargs)
|
return self._spawn_process("package-add", *packages, **kwargs)
|
||||||
|
|
||||||
def packages_rebuild(self, depends_on: str, username: str | None) -> None:
|
def packages_rebuild(self, depends_on: str, username: str | None) -> str:
|
||||||
"""
|
"""
|
||||||
rebuild packages which depend on the specified package
|
rebuild packages which depend on the specified package
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
depends_on(str): packages dependency
|
depends_on(str): packages dependency
|
||||||
username(str | None): optional override of username for build process
|
username(str | None): optional override of username for build process
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: spawned process id
|
||||||
"""
|
"""
|
||||||
kwargs = {"depends-on": depends_on, "username": username}
|
kwargs = {"depends-on": depends_on, "username": username}
|
||||||
self._spawn_process("repo-rebuild", **kwargs)
|
return self._spawn_process("repo-rebuild", **kwargs)
|
||||||
|
|
||||||
def packages_remove(self, packages: Iterable[str]) -> None:
|
def packages_remove(self, packages: Iterable[str]) -> str:
|
||||||
"""
|
"""
|
||||||
remove packages
|
remove packages
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
packages(Iterable[str]): packages list to remove
|
packages(Iterable[str]): packages list to remove
|
||||||
"""
|
|
||||||
self._spawn_process("package-remove", *packages)
|
|
||||||
|
|
||||||
def packages_update(self, username: str | None) -> None:
|
Returns:
|
||||||
|
str: spawned process id
|
||||||
|
"""
|
||||||
|
return self._spawn_process("package-remove", *packages)
|
||||||
|
|
||||||
|
def packages_update(self, username: str | None, *, aur: bool, local: bool, manual: bool) -> str:
|
||||||
"""
|
"""
|
||||||
run full repository update
|
run full repository update
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
username(str | None): optional override of username for build process
|
username(str | None): optional override of username for build process
|
||||||
|
aur(bool): check for aur updates
|
||||||
|
local(bool): check for local packages updates
|
||||||
|
manual(bool): check for manual packages
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: spawned process id
|
||||||
"""
|
"""
|
||||||
kwargs = {"username": username}
|
kwargs = {
|
||||||
self._spawn_process("repo-update", **kwargs)
|
"username": username,
|
||||||
|
self.boolean_action_argument("aur", aur): "",
|
||||||
|
self.boolean_action_argument("local", local): "",
|
||||||
|
self.boolean_action_argument("manual", manual): "",
|
||||||
|
}
|
||||||
|
return self._spawn_process("repo-update", **kwargs)
|
||||||
|
|
||||||
def run(self) -> None:
|
def run(self) -> None:
|
||||||
"""
|
"""
|
||||||
thread run method
|
thread run method
|
||||||
"""
|
"""
|
||||||
for process_id, status in iter(self.queue.get, None):
|
for process_id, status, consumed_time in iter(self.queue.get, None):
|
||||||
self.logger.info("process %s has been terminated with status %s", process_id, status)
|
self.logger.info("process %s has been terminated with status %s, consumed time %s",
|
||||||
|
process_id, status, consumed_time / 1000)
|
||||||
|
|
||||||
with self.lock:
|
with self.lock:
|
||||||
process = self.active.pop(process_id, None)
|
process = self.active.pop(process_id, None)
|
||||||
|
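boolean_action_argument() maps a boolean onto either the positive or the ``no-`` prefixed flag name; packages_update() then feeds those names into the spawned command line (presumably rendered as --aur / --no-local style options by the argument serialization, which is not shown here):

assert Spawn.boolean_action_argument("aur", True) == "aur"
assert Spawn.boolean_action_argument("local", False) == "no-local"

# hence packages_update(None, aur=True, local=False, manual=True) requests a
# repo-update run with aur and manual checks enabled and local checks disabled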
@ -24,6 +24,7 @@ import logging
|
|||||||
from ahriman.core.configuration import Configuration
|
from ahriman.core.configuration import Configuration
|
||||||
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
|
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
|
||||||
from ahriman.models.internal_status import InternalStatus
|
from ahriman.models.internal_status import InternalStatus
|
||||||
|
from ahriman.models.log_record_id import LogRecordId
|
||||||
from ahriman.models.package import Package
|
from ahriman.models.package import Package
|
||||||
|
|
||||||
|
|
||||||
@ -60,7 +61,7 @@ class Client:
|
|||||||
return WebClient(configuration)
|
return WebClient(configuration)
|
||||||
return Client()
|
return Client()
|
||||||
|
|
||||||
def add(self, package: Package, status: BuildStatusEnum) -> None:
|
def package_add(self, package: Package, status: BuildStatusEnum) -> None:
|
||||||
"""
|
"""
|
||||||
add new package with status
|
add new package with status
|
||||||
|
|
||||||
@ -69,7 +70,7 @@ class Client:
|
|||||||
status(BuildStatusEnum): current package build status
|
status(BuildStatusEnum): current package build status
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
|
def package_get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
|
||||||
"""
|
"""
|
||||||
get package status
|
get package status
|
||||||
|
|
||||||
@ -82,25 +83,16 @@ class Client:
|
|||||||
del package_base
|
del package_base
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def get_internal(self) -> InternalStatus:
|
def package_logs(self, log_record_id: LogRecordId, record: logging.LogRecord) -> None:
|
||||||
"""
|
|
||||||
get internal service status
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
InternalStatus: current internal (web) service status
|
|
||||||
"""
|
|
||||||
return InternalStatus(status=BuildStatus())
|
|
||||||
|
|
||||||
def logs(self, package_base: str, record: logging.LogRecord) -> None:
|
|
||||||
"""
|
"""
|
||||||
post log record
|
post log record
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_base(str) package base
|
log_record_id(LogRecordId): log record id
|
||||||
record(logging.LogRecord): log record to post to api
|
record(logging.LogRecord): log record to post to api
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def remove(self, package_base: str) -> None:
|
def package_remove(self, package_base: str) -> None:
|
||||||
"""
|
"""
|
||||||
remove packages from watcher
|
remove packages from watcher
|
||||||
|
|
||||||
@ -108,7 +100,7 @@ class Client:
|
|||||||
package_base(str): package base to remove
|
package_base(str): package base to remove
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def update(self, package_base: str, status: BuildStatusEnum) -> None:
|
def package_update(self, package_base: str, status: BuildStatusEnum) -> None:
|
||||||
"""
|
"""
|
||||||
update package build status. Unlike ``add`` it does not update package properties
|
update package build status. Unlike ``package_add`` it does not update package properties
|
||||||
|
|
||||||
@ -117,14 +109,6 @@ class Client:
|
|||||||
status(BuildStatusEnum): current package build status
|
status(BuildStatusEnum): current package build status
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def update_self(self, status: BuildStatusEnum) -> None:
|
|
||||||
"""
|
|
||||||
update ahriman status itself
|
|
||||||
|
|
||||||
Args:
|
|
||||||
status(BuildStatusEnum): current ahriman status
|
|
||||||
"""
|
|
||||||
|
|
||||||
def set_building(self, package_base: str) -> None:
|
def set_building(self, package_base: str) -> None:
|
||||||
"""
|
"""
|
||||||
set package status to building
|
set package status to building
|
||||||
@ -132,7 +116,7 @@ class Client:
|
|||||||
Args:
|
Args:
|
||||||
package_base(str): package base to update
|
package_base(str): package base to update
|
||||||
"""
|
"""
|
||||||
return self.update(package_base, BuildStatusEnum.Building)
|
return self.package_update(package_base, BuildStatusEnum.Building)
|
||||||
|
|
||||||
def set_failed(self, package_base: str) -> None:
|
def set_failed(self, package_base: str) -> None:
|
||||||
"""
|
"""
|
||||||
@ -141,7 +125,7 @@ class Client:
|
|||||||
Args:
|
Args:
|
||||||
package_base(str): package base to update
|
package_base(str): package base to update
|
||||||
"""
|
"""
|
||||||
return self.update(package_base, BuildStatusEnum.Failed)
|
return self.package_update(package_base, BuildStatusEnum.Failed)
|
||||||
|
|
||||||
def set_pending(self, package_base: str) -> None:
|
def set_pending(self, package_base: str) -> None:
|
||||||
"""
|
"""
|
||||||
@ -150,7 +134,7 @@ class Client:
|
|||||||
Args:
|
Args:
|
||||||
package_base(str): package base to update
|
package_base(str): package base to update
|
||||||
"""
|
"""
|
||||||
return self.update(package_base, BuildStatusEnum.Pending)
|
return self.package_update(package_base, BuildStatusEnum.Pending)
|
||||||
|
|
||||||
def set_success(self, package: Package) -> None:
|
def set_success(self, package: Package) -> None:
|
||||||
"""
|
"""
|
||||||
@ -159,7 +143,7 @@ class Client:
|
|||||||
Args:
|
Args:
|
||||||
package(Package): current package properties
|
package(Package): current package properties
|
||||||
"""
|
"""
|
||||||
return self.add(package, BuildStatusEnum.Success)
|
return self.package_add(package, BuildStatusEnum.Success)
|
||||||
|
|
||||||
def set_unknown(self, package: Package) -> None:
|
def set_unknown(self, package: Package) -> None:
|
||||||
"""
|
"""
|
||||||
@ -168,4 +152,21 @@ class Client:
|
|||||||
Args:
|
Args:
|
||||||
package(Package): current package properties
|
package(Package): current package properties
|
||||||
"""
|
"""
|
||||||
return self.add(package, BuildStatusEnum.Unknown)
|
return self.package_add(package, BuildStatusEnum.Unknown)
|
||||||
|
|
||||||
|
def status_get(self) -> InternalStatus:
|
||||||
|
"""
|
||||||
|
get internal service status
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
InternalStatus: current internal (web) service status
|
||||||
|
"""
|
||||||
|
return InternalStatus(status=BuildStatus())
|
||||||
|
|
||||||
|
def status_update(self, status: BuildStatusEnum) -> None:
|
||||||
|
"""
|
||||||
|
update ahriman status itself
|
||||||
|
|
||||||
|
Args:
|
||||||
|
status(BuildStatusEnum): current ahriman status
|
||||||
|
"""
|
||||||
|
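Since the base Client is now a set of no-op package_*/status_* hooks, a custom reporter only overrides what it cares about; a toy sketch using the renamed interface (the class name and print output are illustrative):

from ahriman.core.status.client import Client
from ahriman.models.build_status import BuildStatusEnum
from ahriman.models.package import Package


class PrintingClient(Client):
    """illustrative reporter which just prints package status transitions"""

    def package_add(self, package: Package, status: BuildStatusEnum) -> None:
        print(f"{package.base}: added with status {status.value}")

    def package_update(self, package_base: str, status: BuildStatusEnum) -> None:
        print(f"{package_base}: now {status.value}")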
@ -17,8 +17,6 @@
|
|||||||
# You should have received a copy of the GNU General Public License
|
# You should have received a copy of the GNU General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
#
|
#
|
||||||
import os
|
|
||||||
|
|
||||||
from ahriman.core.configuration import Configuration
|
from ahriman.core.configuration import Configuration
|
||||||
from ahriman.core.database import SQLite
|
from ahriman.core.database import SQLite
|
||||||
from ahriman.core.exceptions import UnknownPackageError
|
from ahriman.core.exceptions import UnknownPackageError
|
||||||
@ -59,7 +57,7 @@ class Watcher(LazyLogging):
|
|||||||
self.status = BuildStatus()
|
self.status = BuildStatus()
|
||||||
|
|
||||||
# special variables for updating logs
|
# special variables for updating logs
|
||||||
self._last_log_record_id = LogRecordId("", os.getpid())
|
self._last_log_record_id = LogRecordId("", "")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def packages(self) -> list[tuple[Package, BuildStatus]]:
|
def packages(self) -> list[tuple[Package, BuildStatus]]:
|
||||||
@ -71,36 +69,6 @@ class Watcher(LazyLogging):
|
|||||||
"""
|
"""
|
||||||
return list(self.known.values())
|
return list(self.known.values())
|
||||||
|
|
||||||
def get(self, package_base: str) -> tuple[Package, BuildStatus]:
|
|
||||||
"""
|
|
||||||
get current package base build status
|
|
||||||
|
|
||||||
Args:
|
|
||||||
package_base(str): package base
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
tuple[Package, BuildStatus]: package and its status
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
UnknownPackage: if no package found
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
return self.known[package_base]
|
|
||||||
except KeyError:
|
|
||||||
raise UnknownPackageError(package_base)
|
|
||||||
|
|
||||||
def get_logs(self, package_base: str) -> str:
|
|
||||||
"""
|
|
||||||
extract logs for the package base
|
|
||||||
|
|
||||||
Args:
|
|
||||||
package_base(str): package base
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str: package logs
|
|
||||||
"""
|
|
||||||
return self.database.logs_get(package_base)
|
|
||||||
|
|
||||||
def load(self) -> None:
|
def load(self) -> None:
|
||||||
"""
|
"""
|
||||||
load packages from local repository. In case if last status is known, it will use it
|
load packages from local repository. In case if last status is known, it will use it
|
||||||
@ -117,7 +85,62 @@ class Watcher(LazyLogging):
|
|||||||
if package.base in self.known:
|
if package.base in self.known:
|
||||||
self.known[package.base] = (package, status)
|
self.known[package.base] = (package, status)
|
||||||
|
|
||||||
def remove(self, package_base: str) -> None:
|
def logs_get(self, package_base: str) -> str:
|
||||||
|
"""
|
||||||
|
extract logs for the package base
|
||||||
|
|
||||||
|
Args:
|
||||||
|
package_base(str): package base
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: package logs
|
||||||
|
"""
|
||||||
|
return self.database.logs_get(package_base)
|
||||||
|
|
||||||
|
def logs_remove(self, package_base: str, version: str | None) -> None:
|
||||||
|
"""
|
||||||
|
remove package related logs
|
||||||
|
|
||||||
|
Args:
|
||||||
|
package_base(str): package base
|
||||||
|
version(str | None): package version
|
||||||
|
"""
|
||||||
|
self.database.logs_remove(package_base, version)
|
||||||
|
|
||||||
|
def logs_update(self, log_record_id: LogRecordId, created: float, record: str) -> None:
|
||||||
|
"""
|
||||||
|
insert new log record into the database
|
||||||
|
|
||||||
|
Args:
|
||||||
|
log_record_id(LogRecordId): log record id
|
||||||
|
created(float): log record creation timestamp
|
||||||
|
record(str): log record
|
||||||
|
"""
|
||||||
|
if self._last_log_record_id != log_record_id:
|
||||||
|
# there is new log record, so we remove old ones
|
||||||
|
self.logs_remove(log_record_id.package_base, log_record_id.version)
|
||||||
|
self._last_log_record_id = log_record_id
|
||||||
|
self.database.logs_insert(log_record_id, created, record)
|
||||||
|
|
||||||
|
def package_get(self, package_base: str) -> tuple[Package, BuildStatus]:
|
||||||
|
"""
|
||||||
|
get current package base build status
|
||||||
|
|
||||||
|
Args:
|
||||||
|
package_base(str): package base
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
tuple[Package, BuildStatus]: package and its status
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
UnknownPackageError: if no package found
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return self.known[package_base]
|
||||||
|
except KeyError:
|
||||||
|
raise UnknownPackageError(package_base)
|
||||||
|
|
||||||
|
def package_remove(self, package_base: str) -> None:
|
||||||
"""
|
"""
|
||||||
remove package base from known list if any
|
remove package base from known list if any
|
||||||
|
|
||||||
@ -126,19 +149,9 @@ class Watcher(LazyLogging):
|
|||||||
"""
|
"""
|
||||||
self.known.pop(package_base, None)
|
self.known.pop(package_base, None)
|
||||||
self.database.package_remove(package_base)
|
self.database.package_remove(package_base)
|
||||||
self.remove_logs(package_base, None)
|
self.logs_remove(package_base, None)
|
||||||
|
|
||||||
def remove_logs(self, package_base: str, current_process_id: int | None) -> None:
|
def package_update(self, package_base: str, status: BuildStatusEnum, package: Package | None) -> None:
|
||||||
"""
|
|
||||||
remove package related logs
|
|
||||||
|
|
||||||
Args:
|
|
||||||
package_base(str): package base
|
|
||||||
current_process_id(int | None): current process id
|
|
||||||
"""
|
|
||||||
self.database.logs_remove(package_base, current_process_id)
|
|
||||||
|
|
||||||
def update(self, package_base: str, status: BuildStatusEnum, package: Package | None) -> None:
|
|
||||||
"""
|
"""
|
||||||
update package status and description
|
update package status and description
|
||||||
|
|
||||||
@ -159,22 +172,7 @@ class Watcher(LazyLogging):
|
|||||||
self.known[package_base] = (package, full_status)
|
self.known[package_base] = (package, full_status)
|
||||||
self.database.package_update(package, full_status)
|
self.database.package_update(package, full_status)
|
||||||
|
|
||||||
def update_logs(self, log_record_id: LogRecordId, created: float, record: str) -> None:
|
def status_update(self, status: BuildStatusEnum) -> None:
|
||||||
"""
|
|
||||||
make new log record into database
|
|
||||||
|
|
||||||
Args:
|
|
||||||
log_record_id(LogRecordId): log record id
|
|
||||||
created(float): log created record
|
|
||||||
record(str): log record
|
|
||||||
"""
|
|
||||||
if self._last_log_record_id != log_record_id:
|
|
||||||
# there is new log record, so we remove old ones
|
|
||||||
self.remove_logs(log_record_id.package_base, log_record_id.process_id)
|
|
||||||
self._last_log_record_id = log_record_id
|
|
||||||
self.database.logs_insert(log_record_id, created, record)
|
|
||||||
|
|
||||||
def update_self(self, status: BuildStatusEnum) -> None:
|
|
||||||
"""
|
"""
|
||||||
update service status
|
update service status
|
||||||
|
|
||||||
|
@ -21,7 +21,8 @@ import contextlib
|
|||||||
import logging
|
import logging
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
from collections.abc import Generator
|
from functools import cached_property
|
||||||
|
from typing import Any, IO, Literal
|
||||||
from urllib.parse import quote_plus as urlencode
|
from urllib.parse import quote_plus as urlencode
|
||||||
|
|
||||||
from ahriman import __version__
|
from ahriman import __version__
|
||||||
@ -31,10 +32,15 @@ from ahriman.core.status.client import Client
|
|||||||
from ahriman.core.util import exception_response_text
|
from ahriman.core.util import exception_response_text
|
||||||
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
|
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
|
||||||
from ahriman.models.internal_status import InternalStatus
|
from ahriman.models.internal_status import InternalStatus
|
||||||
|
from ahriman.models.log_record_id import LogRecordId
|
||||||
from ahriman.models.package import Package
|
from ahriman.models.package import Package
|
||||||
from ahriman.models.user import User
|
from ahriman.models.user import User
|
||||||
|
|
||||||
|
|
||||||
|
# filename, file, content-type, headers
|
||||||
|
MultipartType = tuple[str, IO[bytes], str, dict[str, str]]
|
||||||
|
|
||||||
|
|
||||||
class WebClient(Client, LazyLogging):
|
class WebClient(Client, LazyLogging):
|
||||||
"""
|
"""
|
||||||
build status reporter web client
|
build status reporter web client
|
||||||
@ -43,8 +49,12 @@ class WebClient(Client, LazyLogging):
|
|||||||
address(str): address of the web service
|
address(str): address of the web service
|
||||||
suppress_errors(bool): suppress logging errors (e.g. if no web server available)
|
suppress_errors(bool): suppress logging errors (e.g. if no web server available)
|
||||||
user(User | None): web service user descriptor
|
user(User | None): web service user descriptor
|
||||||
|
use_unix_socket(bool): use unix socket for connection or not
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
_login_url = "/api/v1/login"
|
||||||
|
_status_url = "/api/v1/status"
|
||||||
|
|
||||||
def __init__(self, configuration: Configuration) -> None:
|
def __init__(self, configuration: Configuration) -> None:
|
||||||
"""
|
"""
|
||||||
default constructor
|
default constructor
|
||||||
@ -52,33 +62,49 @@ class WebClient(Client, LazyLogging):
|
|||||||
Args:
|
Args:
|
||||||
configuration(Configuration): configuration instance
|
configuration(Configuration): configuration instance
|
||||||
"""
|
"""
|
||||||
self.address, use_unix_socket = self.parse_address(configuration)
|
self.address, self.use_unix_socket = self.parse_address(configuration)
|
||||||
self.user = User.from_option(
|
self.user = User.from_option(
|
||||||
configuration.get("web", "username", fallback=None),
|
configuration.get("web", "username", fallback=None),
|
||||||
configuration.get("web", "password", fallback=None))
|
configuration.get("web", "password", fallback=None))
|
||||||
self.suppress_errors = configuration.getboolean("settings", "suppress_http_log_errors", fallback=False)
|
self.suppress_errors = configuration.getboolean("settings", "suppress_http_log_errors", fallback=False)
|
||||||
|
|
||||||
self.__session = self._create_session(use_unix_socket=use_unix_socket)
|
@cached_property
|
||||||
|
def session(self) -> requests.Session:
|
||||||
@property
|
|
||||||
def _login_url(self) -> str:
|
|
||||||
"""
|
"""
|
||||||
get url for the login api
|
get or create session
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: full url for web service to log in
|
requests.Session: created session object
|
||||||
"""
|
"""
|
||||||
return f"{self.address}/api/v1/login"
|
return self._create_session(use_unix_socket=self.use_unix_socket)
|
||||||
|
|
||||||
@property
|
@staticmethod
|
||||||
def _status_url(self) -> str:
|
def _logs_url(package_base: str) -> str:
|
||||||
"""
|
"""
|
||||||
get url for the status api
|
get url for the logs api
|
||||||
|
|
||||||
|
Args:
|
||||||
|
package_base(str): package base
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: full url for web service for status
|
str: full url for web service for logs
|
||||||
"""
|
"""
|
||||||
return f"{self.address}/api/v1/status"
|
return f"/api/v1/packages/{package_base}/logs"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _package_url(package_base: str = "") -> str:
|
||||||
|
"""
|
||||||
|
url generator
|
||||||
|
|
||||||
|
Args:
|
||||||
|
package_base(str, optional): package base to generate url (Default value = "")
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: full url of web service for specific package base
|
||||||
|
"""
|
||||||
|
# in case if unix socket is used we need to normalize url
|
||||||
|
suffix = f"/{package_base}" if package_base else ""
|
||||||
|
return f"/api/v1/packages{suffix}"
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def parse_address(configuration: Configuration) -> tuple[str, bool]:
|
def parse_address(configuration: Configuration) -> tuple[str, bool]:
|
||||||
@ -102,32 +128,6 @@ class WebClient(Client, LazyLogging):
|
|||||||
address = f"http://{host}:{port}"
|
address = f"http://{host}:{port}"
|
||||||
return address, False
|
return address, False
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def __get_session(self, session: requests.Session | None = None) -> Generator[requests.Session, None, None]:
|
|
||||||
"""
|
|
||||||
execute request and handle exceptions
|
|
||||||
|
|
||||||
Args:
|
|
||||||
session(requests.Session | None, optional): session to be used or stored instance property otherwise
|
|
||||||
(Default value = None)
|
|
||||||
|
|
||||||
Yields:
|
|
||||||
requests.Session: session for requests
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
if session is not None:
|
|
||||||
yield session # use session from arguments
|
|
||||||
else:
|
|
||||||
yield self.__session # use instance generated session
|
|
||||||
except requests.RequestException as e:
|
|
||||||
if self.suppress_errors:
|
|
||||||
return
|
|
||||||
self.logger.exception("could not perform http request: %s", exception_response_text(e))
|
|
||||||
except Exception:
|
|
||||||
if self.suppress_errors:
|
|
||||||
return
|
|
||||||
self.logger.exception("could not perform http request")
|
|
||||||
|
|
||||||
def _create_session(self, *, use_unix_socket: bool) -> requests.Session:
|
def _create_session(self, *, use_unix_socket: bool) -> requests.Session:
|
||||||
"""
|
"""
|
||||||
generate new request session
|
generate new request session
|
||||||
@ -164,38 +164,51 @@ class WebClient(Client, LazyLogging):
|
|||||||
"username": self.user.username,
|
"username": self.user.username,
|
||||||
"password": self.user.password
|
"password": self.user.password
|
||||||
}
|
}
|
||||||
|
with contextlib.suppress(Exception):
|
||||||
|
self.make_request("POST", self._login_url, json=payload, session=session)
|
||||||
|
|
||||||
with self.__get_session(session):
|
def make_request(self, method: Literal["DELETE", "GET", "POST"], url: str, *,
|
||||||
response = session.post(self._login_url, json=payload)
|
params: list[tuple[str, str]] | None = None,
|
||||||
|
json: dict[str, Any] | None = None,
|
||||||
|
files: dict[str, MultipartType] | None = None,
|
||||||
|
session: requests.Session | None = None,
|
||||||
|
suppress_errors: bool | None = None) -> requests.Response:
|
||||||
|
"""
|
||||||
|
perform request with specified parameters
|
||||||
|
|
||||||
|
Args:
|
||||||
|
method(Literal["DELETE", "GET", "POST"]): HTTP method to call
|
||||||
|
url(str): remote url to call
|
||||||
|
params(list[tuple[str, str]] | None, optional): request query parameters (Default value = None)
|
||||||
|
json(dict[str, Any] | None, optional): request json parameters (Default value = None)
|
||||||
|
files(dict[str, MultipartType] | None, optional): multipart upload (Default value = None)
|
||||||
|
session(requests.Session | None, optional): session object if any (Default value = None)
|
||||||
|
suppress_errors(bool | None, optional): suppress logging errors (e.g. if no web server available). If none
|
||||||
|
set, the instance-wide value will be used (Default value = None)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
requests.Response: response object
|
||||||
|
"""
|
||||||
|
# defaults
|
||||||
|
if suppress_errors is None:
|
||||||
|
suppress_errors = self.suppress_errors
|
||||||
|
if session is None:
|
||||||
|
session = self.session
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = session.request(method, f"{self.address}{url}", params=params, json=json, files=files)
|
||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
|
return response
|
||||||
|
except requests.RequestException as e:
|
||||||
|
if not suppress_errors:
|
||||||
|
self.logger.exception("could not perform http request: %s", exception_response_text(e))
|
||||||
|
raise
|
||||||
|
except Exception:
|
||||||
|
if not suppress_errors:
|
||||||
|
self.logger.exception("could not perform http request")
|
||||||
|
raise
|
||||||
|
|
||||||
def _logs_url(self, package_base: str) -> str:
|
def package_add(self, package: Package, status: BuildStatusEnum) -> None:
|
||||||
"""
|
|
||||||
get url for the logs api
|
|
||||||
|
|
||||||
Args:
|
|
||||||
package_base(str): package base
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str: full url for web service for logs
|
|
||||||
"""
|
|
||||||
return f"{self.address}/api/v1/packages/{package_base}/logs"
|
|
||||||
|
|
||||||
def _package_url(self, package_base: str = "") -> str:
|
|
||||||
"""
|
|
||||||
url generator
|
|
||||||
|
|
||||||
Args:
|
|
||||||
package_base(str, optional): package base to generate url (Default value = "")
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str: full url of web service for specific package base
|
|
||||||
"""
|
|
||||||
# in case if unix socket is used we need to normalize url
|
|
||||||
suffix = f"/{package_base}" if package_base else ""
|
|
||||||
return f"{self.address}/api/v1/packages{suffix}"
|
|
||||||
|
|
||||||
def add(self, package: Package, status: BuildStatusEnum) -> None:
|
|
||||||
"""
|
"""
|
||||||
add new package with status
|
add new package with status
|
||||||
|
|
||||||
@ -207,12 +220,10 @@ class WebClient(Client, LazyLogging):
|
|||||||
"status": status.value,
|
"status": status.value,
|
||||||
"package": package.view()
|
"package": package.view()
|
||||||
}
|
}
|
||||||
|
with contextlib.suppress(Exception):
|
||||||
|
self.make_request("POST", self._package_url(package.base), json=payload)
|
||||||
|
|
||||||
with self.__get_session() as session:
|
def package_get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
|
||||||
response = session.post(self._package_url(package.base), json=payload)
|
|
||||||
response.raise_for_status()
|
|
||||||
|
|
||||||
def get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
|
|
||||||
"""
|
"""
|
||||||
get package status
|
get package status
|
||||||
|
|
||||||
@ -222,66 +233,47 @@ class WebClient(Client, LazyLogging):
|
|||||||
Returns:
|
Returns:
|
||||||
list[tuple[Package, BuildStatus]]: list of current package description and status if it has been found
|
list[tuple[Package, BuildStatus]]: list of current package description and status if it has been found
|
||||||
"""
|
"""
|
||||||
with self.__get_session() as session:
|
with contextlib.suppress(Exception):
|
||||||
response = session.get(self._package_url(package_base or ""))
|
response = self.make_request("GET", self._package_url(package_base or ""))
|
||||||
response.raise_for_status()
|
response_json = response.json()
|
||||||
|
|
||||||
status_json = response.json()
|
|
||||||
return [
|
return [
|
||||||
(Package.from_json(package["package"]), BuildStatus.from_json(package["status"]))
|
(Package.from_json(package["package"]), BuildStatus.from_json(package["status"]))
|
||||||
for package in status_json
|
for package in response_json
|
||||||
]
|
]
|
||||||
|
|
||||||
# noinspection PyUnreachableCode
|
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def get_internal(self) -> InternalStatus:
|
def package_logs(self, log_record_id: LogRecordId, record: logging.LogRecord) -> None:
|
||||||
"""
|
|
||||||
get internal service status
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
InternalStatus: current internal (web) service status
|
|
||||||
"""
|
|
||||||
with self.__get_session() as session:
|
|
||||||
response = session.get(self._status_url)
|
|
||||||
response.raise_for_status()
|
|
||||||
|
|
||||||
status_json = response.json()
|
|
||||||
return InternalStatus.from_json(status_json)
|
|
||||||
|
|
||||||
# noinspection PyUnreachableCode
|
|
||||||
return InternalStatus(status=BuildStatus())
|
|
||||||
|
|
||||||
def logs(self, package_base: str, record: logging.LogRecord) -> None:
|
|
||||||
"""
|
"""
|
||||||
post log record
|
post log record
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_base(str) package base
|
log_record_id(LogRecordId): log record id
|
||||||
record(logging.LogRecord): log record to post to api
|
record(logging.LogRecord): log record to post to api
|
||||||
"""
|
"""
|
||||||
payload = {
|
payload = {
|
||||||
"created": record.created,
|
"created": record.created,
|
||||||
"message": record.getMessage(),
|
"message": record.getMessage(),
|
||||||
"process_id": record.process,
|
"version": log_record_id.version,
|
||||||
}
|
}
|
||||||
|
|
||||||
# in this method exception has to be handled outside in logger handler
|
# this is special case, because we would like to do not suppress exception here
|
||||||
response = self.__session.post(self._logs_url(package_base), json=payload)
|
# in case of exception raised it will be handled by upstream HttpLogHandler
|
||||||
response.raise_for_status()
|
# In the other hand, we force to suppress all http logs here to avoid cyclic reporting
|
||||||
|
self.make_request("POST", self._logs_url(log_record_id.package_base), json=payload, suppress_errors=True)
|
||||||
|
|
||||||
def remove(self, package_base: str) -> None:
|
def package_remove(self, package_base: str) -> None:
|
||||||
"""
|
"""
|
||||||
remove packages from watcher
|
remove packages from watcher
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_base(str): basename to remove
|
package_base(str): basename to remove
|
||||||
"""
|
"""
|
||||||
with self.__get_session() as session:
|
with contextlib.suppress(Exception):
|
||||||
response = session.delete(self._package_url(package_base))
|
self.make_request("DELETE", self._package_url(package_base))
|
||||||
response.raise_for_status()
|
|
||||||
|
|
||||||
def update(self, package_base: str, status: BuildStatusEnum) -> None:
|
def package_update(self, package_base: str, status: BuildStatusEnum) -> None:
|
||||||
"""
|
"""
|
||||||
update package build status. Unlike ``add`` it does not update package properties
|
update package build status. Unlike ``add`` it does not update package properties
|
||||||
|
|
||||||
@ -290,12 +282,25 @@ class WebClient(Client, LazyLogging):
|
|||||||
status(BuildStatusEnum): current package build status
|
status(BuildStatusEnum): current package build status
|
||||||
"""
|
"""
|
||||||
payload = {"status": status.value}
|
payload = {"status": status.value}
|
||||||
|
with contextlib.suppress(Exception):
|
||||||
|
self.make_request("POST", self._package_url(package_base), json=payload)
|
||||||
|
|
||||||
with self.__get_session() as session:
|
def status_get(self) -> InternalStatus:
|
||||||
response = session.post(self._package_url(package_base), json=payload)
|
"""
|
||||||
response.raise_for_status()
|
get internal service status
|
||||||
|
|
||||||
def update_self(self, status: BuildStatusEnum) -> None:
|
Returns:
|
||||||
|
InternalStatus: current internal (web) service status
|
||||||
|
"""
|
||||||
|
with contextlib.suppress(Exception):
|
||||||
|
response = self.make_request("GET", self._status_url)
|
||||||
|
response_json = response.json()
|
||||||
|
|
||||||
|
return InternalStatus.from_json(response_json)
|
||||||
|
|
||||||
|
return InternalStatus(status=BuildStatus())
|
||||||
|
|
||||||
|
def status_update(self, status: BuildStatusEnum) -> None:
|
||||||
"""
|
"""
|
||||||
update ahriman status itself
|
update ahriman status itself
|
||||||
|
|
||||||
@ -303,7 +308,5 @@ class WebClient(Client, LazyLogging):
|
|||||||
status(BuildStatusEnum): current ahriman status
|
status(BuildStatusEnum): current ahriman status
|
||||||
"""
|
"""
|
||||||
payload = {"status": status.value}
|
payload = {"status": status.value}
|
||||||
|
with contextlib.suppress(Exception):
|
||||||
with self.__get_session() as session:
|
self.make_request("POST", self._status_url, json=payload)
|
||||||
response = session.post(self._status_url, json=payload)
|
|
||||||
response.raise_for_status()
|
|
||||||
|
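The unified ``make_request`` helper above replaces the per-method ``__get_session`` wrapper; a minimal usage sketch (not part of the change, endpoint and payload values are illustrative only):

    # errors are logged and re-raised unless suppressed via settings or per call
    client = WebClient(configuration)
    response = client.make_request("GET", "/api/v1/status")
    internal_status = response.json()

    # callers that must never fail wrap the call, as the package_* methods above do
    with contextlib.suppress(Exception):
        client.make_request("POST", "/api/v1/status", json={"status": "building"})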
@@ -20,6 +20,7 @@
import hashlib
import requests

+from functools import cached_property
from pathlib import Path
from typing import Any

@@ -52,6 +53,16 @@ class HttpUpload(Upload):
        self.auth = (password, username) if password and username else None
        self.timeout = configuration.getint(section, "timeout", fallback=30)

+    @cached_property
+    def session(self) -> requests.Session:
+        """
+        get or create session
+
+        Returns:
+            request.Session: created session object
+        """
+        return requests.Session()
+
    @staticmethod
    def calculate_hash(path: Path) -> str:
        """
@@ -110,7 +121,7 @@ class HttpUpload(Upload):
            requests.Response: request response object
        """
        try:
-            response = requests.request(method, url, auth=self.auth, timeout=self.timeout, **kwargs)
+            response = self.session.request(method, url, auth=self.auth, timeout=self.timeout, **kwargs)
            response.raise_for_status()
        except requests.HTTPError as e:
            self.logger.exception("could not perform %s request to %s: %s", method, url, exception_response_text(e))
src/ahriman/core/upload/remote_service.py (new file, 105 lines)
@@ -0,0 +1,105 @@
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import requests

from functools import cached_property
from pathlib import Path

from ahriman.core.configuration import Configuration
from ahriman.core.sign.gpg import GPG
from ahriman.core.status.web_client import MultipartType, WebClient
from ahriman.core.upload.http_upload import HttpUpload
from ahriman.models.package import Package


class RemoteService(HttpUpload):
    """
    upload files to another server instance

    Attributes:
        client(WebClient): web client instance
    """

    def __init__(self, architecture: str, configuration: Configuration, section: str) -> None:
        """
        default constructor

        Args:
            architecture(str): repository architecture
            configuration(Configuration): configuration instance
            section(str): settings section name
        """
        HttpUpload.__init__(self, architecture, configuration, section)
        self.client = WebClient(configuration)

    @cached_property
    def session(self) -> requests.Session:
        """
        get or create session

        Returns:
            request.Session: created session object
        """
        return self.client.session

    def package_upload(self, path: Path, package: Package) -> None:
        """
        upload single package to remote

        Args:
            path(Path): local path to sync
            package(Package): package to upload
        """
        def upload(package_path: Path, signature_path: Path | None) -> None:
            files: dict[str, MultipartType] = {}

            try:
                # package part always persists
                files["package"] = package_path.name, package_path.open("rb"), "application/octet-stream", {}
                # signature part is optional
                if signature_path is not None:
                    files["signature"] = signature_path.name, signature_path.open("rb"), "application/octet-stream", {}

                self._request("POST", f"{self.client.address}/api/v1/service/upload", files=files)
            finally:
                for _, fd, _, _ in files.values():
                    fd.close()

        for key, descriptor in package.packages.items():
            if descriptor.filename is None:
                self.logger.warning("package %s of %s doesn't have filename set", key, package.base)
                continue

            archive = path / descriptor.filename
            maybe_signature_path = GPG.signature(archive)
            signature = maybe_signature_path if maybe_signature_path.is_file() else None

            upload(archive, signature)

    def sync(self, path: Path, built_packages: list[Package]) -> None:
        """
        sync data to remote server

        Args:
            path(Path): local path to sync
            built_packages(list[Package]): list of packages which has just been built
        """
        for package in built_packages:
            self.package_upload(path, package)
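For reference, the multipart request which ``RemoteService.package_upload`` builds can be reproduced by hand; a sketch assuming a reachable web service (the address and file names are made up, while the ``package``/``signature`` field names and the endpoint come from the code above):

    import requests

    address = "http://127.0.0.1:8080"  # assumption
    with open("demo-1-1-x86_64.pkg.tar.zst", "rb") as package, \
            open("demo-1-1-x86_64.pkg.tar.zst.sig", "rb") as signature:
        requests.post(f"{address}/api/v1/service/upload", files={
            "package": ("demo-1-1-x86_64.pkg.tar.zst", package, "application/octet-stream", {}),
            "signature": ("demo-1-1-x86_64.pkg.tar.zst.sig", signature, "application/octet-stream", {}),
        }).raise_for_status()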
@@ -90,6 +90,9 @@ class Upload(LazyLogging):
        if provider == UploadSettings.Github:
            from ahriman.core.upload.github import Github
            return Github(architecture, configuration, section)
+        if provider == UploadSettings.RemoteService:
+            from ahriman.core.upload.remote_service import RemoteService
+            return RemoteService(architecture, configuration, section)
        return Upload(architecture, configuration)  # should never happen

    def run(self, path: Path, built_packages: list[Package]) -> None:
@@ -92,6 +92,15 @@ class UploadTrigger(Trigger):
                },
            },
        },
+        "remote-service": {
+            "type": "dict",
+            "schema": {
+                "type": {
+                    "type": "string",
+                    "allowed": ["ahriman", "remote-service"],
+                },
+            },
+        },
        "s3": {
            "type": "dict",
            "schema": {
|
|||||||
while selector.get_map(): # while there are unread selectors, keep reading
|
while selector.get_map(): # while there are unread selectors, keep reading
|
||||||
result.extend(poll(selector))
|
result.extend(poll(selector))
|
||||||
|
|
||||||
process.terminate() # make sure that process is terminated
|
|
||||||
status_code = process.wait()
|
status_code = process.wait()
|
||||||
if status_code != 0:
|
if status_code != 0:
|
||||||
if exception is not None:
|
if exception is not None:
|
||||||
@ -280,7 +279,7 @@ def package_like(filename: Path) -> bool:
|
|||||||
bool: True in case if name contains ``.pkg.`` and not signature, False otherwise
|
bool: True in case if name contains ``.pkg.`` and not signature, False otherwise
|
||||||
"""
|
"""
|
||||||
name = filename.name
|
name = filename.name
|
||||||
return ".pkg." in name and not name.endswith(".sig")
|
return not name.startswith(".") and ".pkg." in name and not name.endswith(".sig")
|
||||||
|
|
||||||
|
|
||||||
def parse_version(version: str) -> tuple[str | None, str, str]:
|
def parse_version(version: str) -> tuple[str | None, str, str]:
|
||||||
|
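The additional ``startswith`` guard makes ``package_like`` skip hidden files; a quick illustration of the new behaviour (file names are made up):

    from pathlib import Path

    assert package_like(Path("demo-1-1-x86_64.pkg.tar.zst"))
    assert not package_like(Path("demo-1-1-x86_64.pkg.tar.zst.sig"))  # signature file
    assert not package_like(Path(".demo-1-1-x86_64.pkg.tar.zst"))     # hidden file is now rejected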
@@ -27,8 +27,8 @@ class LogRecordId:

    Attributes:
        package_base(str): package base for which log record belongs
-        process_id(int): process id from which log record was emitted
+        version(str): package version for which log record belongs
    """

    package_base: str
-    process_id: int
+    version: str
@@ -32,6 +32,7 @@ class ReportSettings(str, Enum):
        Email(ReportSettings): (class attribute) email report generation
        Console(ReportSettings): (class attribute) print result to console
        Telegram(ReportSettings): (class attribute) markdown report to telegram channel
+        RemoteCall(ReportSettings): (class attribute) remote ahriman server call
    """

    Disabled = "disabled"  # for testing purpose
@@ -39,6 +40,7 @@ class ReportSettings(str, Enum):
    Email = "email"
    Console = "console"
    Telegram = "telegram"
+    RemoteCall = "remote-call"

    @staticmethod
    def from_option(value: str) -> ReportSettings:
@@ -59,4 +61,6 @@ class ReportSettings(str, Enum):
            return ReportSettings.Console
        if value.lower() in ("telegram",):
            return ReportSettings.Telegram
+        if value.lower() in ("ahriman", "remote-call",):
+            return ReportSettings.RemoteCall
        return ReportSettings.Disabled
@@ -31,12 +31,14 @@ class UploadSettings(str, Enum):
        Rsync(UploadSettings): (class attribute) sync via rsync
        S3(UploadSettings): (class attribute) sync to Amazon S3
        Github(UploadSettings): (class attribute) sync to github releases page
+        RemoteService(UploadSettings): (class attribute) sync to another ahriman instance
    """

    Disabled = "disabled"  # for testing purpose
    Rsync = "rsync"
    S3 = "s3"
    Github = "github"
+    RemoteService = "remote-service"

    @staticmethod
    def from_option(value: str) -> UploadSettings:
@@ -55,4 +57,6 @@ class UploadSettings(str, Enum):
            return UploadSettings.S3
        if value.lower() in ("github",):
            return UploadSettings.Github
+        if value.lower() in ("ahriman", "remote-service",):
+            return UploadSettings.RemoteService
        return UploadSettings.Disabled
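Both enumerations accept either the generic ``ahriman`` alias or the explicit option name, and anything unknown falls back to ``Disabled``; a small check of the mapping (illustrative only):

    from ahriman.models.report_settings import ReportSettings
    from ahriman.models.upload_settings import UploadSettings

    assert ReportSettings.from_option("remote-call") == ReportSettings.RemoteCall
    assert UploadSettings.from_option("AHRIMAN") == UploadSettings.RemoteService  # case-insensitive
    assert UploadSettings.from_option("unknown") == UploadSettings.Disabled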
src/ahriman/models/waiter.py (new file, 72 lines)
@@ -0,0 +1,72 @@
# Copyright (c) 2021-2023 ahriman team; part of ahriman (https://github.com/arcan1s/ahriman), GPL-3.0-or-later
import time

from collections.abc import Callable
from dataclasses import dataclass, field
from typing import ParamSpec


Params = ParamSpec("Params")


@dataclass(frozen=True)
class Waiter:
    """
    simple waiter implementation

    Attributes:
        interval(int): interval in seconds between checks
        start_time(float): monotonic time of the waiter start. More likely must not be assigned explicitly
        wait_timeout(int): timeout in seconds to wait for. Negative value will result in immediate exit. Zero value
            means infinite timeout
    """

    wait_timeout: int
    start_time: float = field(default_factory=time.monotonic, kw_only=True)
    interval: int = field(default=10, kw_only=True)

    def is_timed_out(self) -> bool:
        """
        check if timer is out

        Returns:
            bool: True in case current monotonic time is more than ``Waiter.start_time`` and
            ``Waiter.wait_timeout`` doesn't equal to 0
        """
        since_start: float = time.monotonic() - self.start_time
        return self.wait_timeout != 0 and since_start > self.wait_timeout

    def wait(self, in_progress: Callable[Params, bool], *args: Params.args, **kwargs: Params.kwargs) -> float:
        """
        wait until requirements are not met

        Args:
            in_progress(Callable[Params, bool]): function to check if timer should wait for another cycle
            *args(Params.args): positional arguments for check call
            **kwargs(Params.kwargs): keyword arguments for check call

        Returns:
            float: consumed time in seconds
        """
        while not self.is_timed_out() and in_progress(*args, **kwargs):
            time.sleep(self.interval)

        return time.monotonic() - self.start_time
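A usage sketch for the new model (the checked callable is hypothetical): wait at most an hour, polling every ten seconds, where ``wait_timeout=0`` would mean wait forever and a negative value exits immediately.

    from ahriman.models.waiter import Waiter

    waiter = Waiter(wait_timeout=3600, interval=10)
    # in_progress is any callable returning True while waiting should continue,
    # e.g. a check against the /api/v1/service/process/{process_id} endpoint
    consumed = waiter.wait(remote_process_is_alive)  # remote_process_is_alive is hypothetical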
@@ -25,11 +25,13 @@ from ahriman.web.views.api.swagger import SwaggerView
from ahriman.web.views.index import IndexView
from ahriman.web.views.service.add import AddView
from ahriman.web.views.service.pgp import PGPView
+from ahriman.web.views.service.process import ProcessView
from ahriman.web.views.service.rebuild import RebuildView
from ahriman.web.views.service.remove import RemoveView
from ahriman.web.views.service.request import RequestView
from ahriman.web.views.service.search import SearchView
from ahriman.web.views.service.update import UpdateView
+from ahriman.web.views.service.upload import UploadView
from ahriman.web.views.status.logs import LogsView
from ahriman.web.views.status.package import PackageView
from ahriman.web.views.status.packages import PackagesView
@@ -60,10 +62,12 @@ def setup_routes(application: Application, static_path: Path) -> None:
    application.router.add_view("/api/v1/service/add", AddView)
    application.router.add_view("/api/v1/service/pgp", PGPView)
    application.router.add_view("/api/v1/service/rebuild", RebuildView)
+    application.router.add_view("/api/v1/service/process/{process_id}", ProcessView)
    application.router.add_view("/api/v1/service/remove", RemoveView)
    application.router.add_view("/api/v1/service/request", RequestView)
    application.router.add_view("/api/v1/service/search", SearchView)
    application.router.add_view("/api/v1/service/update", UpdateView)
+    application.router.add_view("/api/v1/service/upload", UploadView)

    application.router.add_view("/api/v1/packages", PackagesView)
    application.router.add_view("/api/v1/packages/{package}", PackageView)
@@ -21,6 +21,7 @@ from ahriman.web.schemas.aur_package_schema import AURPackageSchema
from ahriman.web.schemas.auth_schema import AuthSchema
from ahriman.web.schemas.counters_schema import CountersSchema
from ahriman.web.schemas.error_schema import ErrorSchema
+from ahriman.web.schemas.file_schema import FileSchema
from ahriman.web.schemas.internal_status_schema import InternalStatusSchema
from ahriman.web.schemas.log_schema import LogSchema
from ahriman.web.schemas.login_schema import LoginSchema
@@ -33,6 +34,9 @@ from ahriman.web.schemas.package_schema import PackageSchema
from ahriman.web.schemas.package_status_schema import PackageStatusSimplifiedSchema, PackageStatusSchema
from ahriman.web.schemas.pgp_key_id_schema import PGPKeyIdSchema
from ahriman.web.schemas.pgp_key_schema import PGPKeySchema
+from ahriman.web.schemas.process_id_schema import ProcessIdSchema
+from ahriman.web.schemas.process_schema import ProcessSchema
from ahriman.web.schemas.remote_schema import RemoteSchema
from ahriman.web.schemas.search_schema import SearchSchema
from ahriman.web.schemas.status_schema import StatusSchema
+from ahriman.web.schemas.update_flags_schema import UpdateFlagsSchema
src/ahriman/web/schemas/file_schema.py (new file, 30 lines)
@@ -0,0 +1,30 @@
# Copyright (c) 2021-2023 ahriman team; part of ahriman (https://github.com/arcan1s/ahriman), GPL-3.0-or-later
from marshmallow import Schema, fields


class FileSchema(Schema):
    """
    request file upload schema
    """

    archive = fields.Field(required=True, metadata={
        "description": "Package archive to be uploaded",
    })
@@ -19,6 +19,8 @@
#
from marshmallow import Schema, fields

+from ahriman import __version__
+

class LogSchema(Schema):
    """
@@ -29,9 +31,9 @@ class LogSchema(Schema):
        "description": "Log record timestamp",
        "example": 1680537091.233495,
    })
-    process_id = fields.Integer(required=True, metadata={
-        "description": "Current process id",
-        "example": 42,
+    version = fields.Integer(required=True, metadata={
+        "description": "Package version to tag",
+        "example": __version__,
    })
    message = fields.String(required=True, metadata={
        "description": "Log message",
src/ahriman/web/schemas/process_id_schema.py (new file, 31 lines)
@@ -0,0 +1,31 @@
# Copyright (c) 2021-2023 ahriman team; part of ahriman (https://github.com/arcan1s/ahriman), GPL-3.0-or-later
from marshmallow import Schema, fields


class ProcessIdSchema(Schema):
    """
    request and response spawned process id schema
    """

    process_id = fields.String(required=True, metadata={
        "description": "Spawned process unique ID",
        "example": "ff456814-5669-4de6-9143-44dbf6f68607",
    })
src/ahriman/web/schemas/process_schema.py (new file, 30 lines)
@@ -0,0 +1,30 @@
# Copyright (c) 2021-2023 ahriman team; part of ahriman (https://github.com/arcan1s/ahriman), GPL-3.0-or-later
from marshmallow import Schema, fields


class ProcessSchema(Schema):
    """
    process status response schema
    """

    is_alive = fields.Bool(required=True, metadata={
        "description": "Is process alive or not",
    })
src/ahriman/web/schemas/update_flags_schema.py (new file, 36 lines)
@@ -0,0 +1,36 @@
# Copyright (c) 2021-2023 ahriman team; part of ahriman (https://github.com/arcan1s/ahriman), GPL-3.0-or-later
from marshmallow import Schema, fields


class UpdateFlagsSchema(Schema):
    """
    update flags request schema
    """

    aur = fields.Bool(dump_default=True, metadata={
        "description": "Check AUR for updates",
    })
    local = fields.Bool(dump_default=True, metadata={
        "description": "Check local packages for updates",
    })
    manual = fields.Bool(dump_default=True, metadata={
        "description": "Check manually built packages",
    })
@@ -43,7 +43,7 @@ class SwaggerView(BaseView):
            Response: 200 with json api specification
        """
        spec = self.request.app["swagger_dict"]
-        is_body_parameter: Callable[[dict[str, str]], bool] = lambda p: p["in"] == "body"
+        is_body_parameter: Callable[[dict[str, str]], bool] = lambda p: p["in"] == "body" or p["in"] == "formData"

        # special workaround because it writes request body to parameters section
        paths = spec["paths"]
@@ -56,11 +56,14 @@ class SwaggerView(BaseView):
            if not body:
                continue  # there were no ``body`` parameters found

+            schema = next(iter(body))
+            content_type = "multipart/form-data" if schema["in"] == "formData" else "application/json"
+
            # there should be only one body parameters
            method["requestBody"] = {
                "content": {
-                    "application/json": {
-                        "schema": next(iter(body))["schema"]
+                    content_type: {
+                        "schema": schema["schema"]
                    }
                }
            }
@@ -19,10 +19,10 @@
#
import aiohttp_apispec  # type: ignore[import]

-from aiohttp.web import HTTPBadRequest, HTTPNoContent
+from aiohttp.web import HTTPBadRequest, Response, json_response

from ahriman.models.user_access import UserAccess
-from ahriman.web.schemas import AuthSchema, ErrorSchema, PackageNamesSchema
+from ahriman.web.schemas import AuthSchema, ErrorSchema, PackageNamesSchema, ProcessIdSchema
from ahriman.web.views.base import BaseView


@@ -41,7 +41,7 @@ class AddView(BaseView):
        summary="Add new package",
        description="Add new package(s) from AUR",
        responses={
-            204: {"description": "Success response"},
+            200: {"description": "Success response", "schema": ProcessIdSchema},
            400: {"description": "Bad data is supplied", "schema": ErrorSchema},
            401: {"description": "Authorization required", "schema": ErrorSchema},
            403: {"description": "Access is forbidden", "schema": ErrorSchema},
@@ -51,13 +51,15 @@ class AddView(BaseView):
    )
    @aiohttp_apispec.cookies_schema(AuthSchema)
    @aiohttp_apispec.json_schema(PackageNamesSchema)
-    async def post(self) -> None:
+    async def post(self) -> Response:
        """
        add new package

+        Returns:
+            Response: 200 with spawned process id
+
        Raises:
            HTTPBadRequest: if bad data is supplied
-            HTTPNoContent: in case of success response
        """
        try:
            data = await self.extract_data(["packages"])
@@ -66,6 +68,6 @@ class AddView(BaseView):
            raise HTTPBadRequest(reason=str(e))

        username = await self.username()
-        self.spawner.packages_add(packages, username, now=True)
+        process_id = self.spawner.packages_add(packages, username, now=True)

-        raise HTTPNoContent()
+        return json_response({"process_id": process_id})
@@ -19,10 +19,10 @@
#
import aiohttp_apispec  # type: ignore[import]

-from aiohttp.web import HTTPBadRequest, HTTPNoContent, HTTPNotFound, Response, json_response
+from aiohttp.web import HTTPBadRequest, HTTPNotFound, Response, json_response

from ahriman.models.user_access import UserAccess
-from ahriman.web.schemas import AuthSchema, ErrorSchema, PGPKeyIdSchema, PGPKeySchema
+from ahriman.web.schemas import AuthSchema, ErrorSchema, PGPKeyIdSchema, PGPKeySchema, ProcessIdSchema
from ahriman.web.views.base import BaseView


@@ -83,7 +83,7 @@ class PGPView(BaseView):
        summary="Fetch PGP key",
        description="Fetch PGP key from the key server",
        responses={
-            204: {"description": "Success response"},
+            200: {"description": "Success response", "schema": ProcessIdSchema},
            400: {"description": "Bad data is supplied", "schema": ErrorSchema},
            401: {"description": "Authorization required", "schema": ErrorSchema},
            403: {"description": "Access is forbidden", "schema": ErrorSchema},
@@ -93,13 +93,15 @@ class PGPView(BaseView):
    )
    @aiohttp_apispec.cookies_schema(AuthSchema)
    @aiohttp_apispec.json_schema(PGPKeyIdSchema)
-    async def post(self) -> None:
+    async def post(self) -> Response:
        """
        store key to the local service environment

+        Returns:
+            Response: 200 with spawned process id
+
        Raises:
            HTTPBadRequest: if bad data is supplied
-            HTTPNoContent: in case of success response
        """
        data = await self.extract_data()

@@ -108,6 +110,6 @@ class PGPView(BaseView):
        except Exception as e:
            raise HTTPBadRequest(reason=str(e))

-        self.spawner.key_import(key, data.get("server"))
+        process_id = self.spawner.key_import(key, data.get("server"))

-        raise HTTPNoContent()
+        return json_response({"process_id": process_id})
src/ahriman/web/views/service/process.py (new file, 74 lines)
@@ -0,0 +1,74 @@
# Copyright (c) 2021-2023 ahriman team; part of ahriman (https://github.com/arcan1s/ahriman), GPL-3.0-or-later
import aiohttp_apispec  # type: ignore[import]

from aiohttp.web import HTTPNotFound, Response, json_response

from ahriman.models.user_access import UserAccess
from ahriman.web.schemas import AuthSchema, ErrorSchema, ProcessIdSchema, ProcessSchema
from ahriman.web.views.base import BaseView


class ProcessView(BaseView):
    """
    Process information web view

    Attributes:
        GET_PERMISSION(UserAccess): (class attribute) get permissions of self
    """

    GET_PERMISSION = UserAccess.Reporter

    @aiohttp_apispec.docs(
        tags=["Actions"],
        summary="Get process",
        description="Get process information",
        responses={
            200: {"description": "Success response", "schema": ProcessSchema},
            401: {"description": "Authorization required", "schema": ErrorSchema},
            403: {"description": "Access is forbidden", "schema": ErrorSchema},
            404: {"description": "Not found", "schema": ErrorSchema},
            500: {"description": "Internal server error", "schema": ErrorSchema},
        },
        security=[{"token": [GET_PERMISSION]}],
    )
    @aiohttp_apispec.cookies_schema(AuthSchema)
    @aiohttp_apispec.match_info_schema(ProcessIdSchema)
    async def get(self) -> Response:
        """
        get spawned process status

        Returns:
            Response: 200 with process information

        Raises:
            HTTPNotFound: if no process found
        """
        process_id = self.request.match_info["process_id"]

        is_alive = self.spawner.has_process(process_id)
        if not is_alive:
            raise HTTPNotFound(reason=f"No process {process_id} found")

        response = {
            "is_alive": is_alive,
        }

        return json_response(response)
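Combined with the process id returned by the service views, this endpoint lets a remote caller poll until a spawned task disappears; a client-side sketch (address, package name and polling interval are assumptions):

    import time
    import requests

    address = "http://127.0.0.1:8080"  # assumption
    with requests.Session() as session:
        process_id = session.post(f"{address}/api/v1/service/add",
                                  json={"packages": ["ahriman"]}).json()["process_id"]
        # 200 with is_alive while the process runs, 404 once it is gone
        while session.get(f"{address}/api/v1/service/process/{process_id}").status_code == 200:
            time.sleep(10)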
@@ -19,10 +19,10 @@
#
import aiohttp_apispec  # type: ignore[import]

-from aiohttp.web import HTTPBadRequest, HTTPNoContent
+from aiohttp.web import HTTPBadRequest, Response, json_response

from ahriman.models.user_access import UserAccess
-from ahriman.web.schemas import AuthSchema, ErrorSchema, PackageNamesSchema
+from ahriman.web.schemas import AuthSchema, ErrorSchema, PackageNamesSchema, ProcessIdSchema
from ahriman.web.views.base import BaseView


@@ -41,7 +41,7 @@ class RebuildView(BaseView):
        summary="Rebuild packages",
        description="Rebuild packages which depend on specified one",
        responses={
-            204: {"description": "Success response"},
+            200: {"description": "Success response", "schema": ProcessIdSchema},
            400: {"description": "Bad data is supplied", "schema": ErrorSchema},
            401: {"description": "Authorization required", "schema": ErrorSchema},
            403: {"description": "Access is forbidden", "schema": ErrorSchema},
@@ -51,13 +51,15 @@ class RebuildView(BaseView):
    )
    @aiohttp_apispec.cookies_schema(AuthSchema)
    @aiohttp_apispec.json_schema(PackageNamesSchema)
-    async def post(self) -> None:
+    async def post(self) -> Response:
        """
        rebuild packages based on their dependency

+        Returns:
+            Response: 200 with spawned process id
+
        Raises:
            HTTPBadRequest: if bad data is supplied
-            HTTPNoContent: in case of success response
        """
        try:
            data = await self.extract_data(["packages"])
@@ -67,6 +69,6 @@ class RebuildView(BaseView):
            raise HTTPBadRequest(reason=str(e))

        username = await self.username()
-        self.spawner.packages_rebuild(depends_on, username)
+        process_id = self.spawner.packages_rebuild(depends_on, username)

-        raise HTTPNoContent()
+        return json_response({"process_id": process_id})
@@ -19,10 +19,10 @@
#
import aiohttp_apispec  # type: ignore[import]

-from aiohttp.web import HTTPBadRequest, HTTPNoContent
+from aiohttp.web import HTTPBadRequest, Response, json_response

from ahriman.models.user_access import UserAccess
-from ahriman.web.schemas import AuthSchema, ErrorSchema, PackageNamesSchema
+from ahriman.web.schemas import AuthSchema, ErrorSchema, PackageNamesSchema, ProcessIdSchema
from ahriman.web.views.base import BaseView


@@ -41,7 +41,7 @@ class RemoveView(BaseView):
        summary="Remove packages",
        description="Remove specified packages from the repository",
        responses={
-            204: {"description": "Success response"},
+            200: {"description": "Success response", "schema": ProcessIdSchema},
            400: {"description": "Bad data is supplied", "schema": ErrorSchema},
            401: {"description": "Authorization required", "schema": ErrorSchema},
            403: {"description": "Access is forbidden", "schema": ErrorSchema},
@@ -51,13 +51,15 @@ class RemoveView(BaseView):
    )
    @aiohttp_apispec.cookies_schema(AuthSchema)
    @aiohttp_apispec.json_schema(PackageNamesSchema)
-    async def post(self) -> None:
+    async def post(self) -> Response:
        """
        remove existing packages

+        Returns:
+            Response: 200 with spawned process id
+
        Raises:
            HTTPBadRequest: if bad data is supplied
-            HTTPNoContent: in case of success response
        """
        try:
            data = await self.extract_data(["packages"])
@@ -65,6 +67,6 @@ class RemoveView(BaseView):
        except Exception as e:
            raise HTTPBadRequest(reason=str(e))

-        self.spawner.packages_remove(packages)
+        process_id = self.spawner.packages_remove(packages)

-        raise HTTPNoContent()
+        return json_response({"process_id": process_id})
@@ -19,10 +19,10 @@
#
import aiohttp_apispec  # type: ignore[import]

-from aiohttp.web import HTTPBadRequest, HTTPNoContent
+from aiohttp.web import HTTPBadRequest, Response, json_response

from ahriman.models.user_access import UserAccess
-from ahriman.web.schemas import AuthSchema, ErrorSchema, PackageNamesSchema
+from ahriman.web.schemas import AuthSchema, ErrorSchema, PackageNamesSchema, ProcessIdSchema
from ahriman.web.views.base import BaseView


@@ -41,7 +41,7 @@ class RequestView(BaseView):
        summary="Request new package",
        description="Request new package(s) to be added from AUR",
        responses={
-            204: {"description": "Success response"},
+            200: {"description": "Success response", "schema": ProcessIdSchema},
            400: {"description": "Bad data is supplied", "schema": ErrorSchema},
            401: {"description": "Authorization required", "schema": ErrorSchema},
            403: {"description": "Access is forbidden", "schema": ErrorSchema},
@@ -51,13 +51,15 @@ class RequestView(BaseView):
    )
    @aiohttp_apispec.cookies_schema(AuthSchema)
    @aiohttp_apispec.json_schema(PackageNamesSchema)
-    async def post(self) -> None:
+    async def post(self) -> Response:
        """
        request to add new package

+        Returns:
+            Response: 200 with spawned process id
+
        Raises:
            HTTPBadRequest: if bad data is supplied
-            HTTPNoContent: in case of success response
        """
        try:
            data = await self.extract_data(["packages"])
@@ -66,6 +68,6 @@ class RequestView(BaseView):
            raise HTTPBadRequest(reason=str(e))

        username = await self.username()
-        self.spawner.packages_add(packages, username, now=False)
+        process_id = self.spawner.packages_add(packages, username, now=False)

-        raise HTTPNoContent()
+        return json_response({"process_id": process_id})
@@ -19,10 +19,10 @@
 #
 import aiohttp_apispec  # type: ignore[import]
 
-from aiohttp.web import HTTPNoContent
+from aiohttp.web import HTTPBadRequest, Response, json_response
 
 from ahriman.models.user_access import UserAccess
-from ahriman.web.schemas import AuthSchema, ErrorSchema
+from ahriman.web.schemas import AuthSchema, ErrorSchema, ProcessIdSchema, UpdateFlagsSchema
 from ahriman.web.views.base import BaseView
 
 
@@ -41,7 +41,8 @@ class UpdateView(BaseView):
         summary="Update packages",
         description="Run repository update process",
         responses={
-            204: {"description": "Success response"},
+            200: {"description": "Success response", "schema": ProcessIdSchema},
+            400: {"description": "Bad data is supplied", "schema": ErrorSchema},
             401: {"description": "Authorization required", "schema": ErrorSchema},
             403: {"description": "Access is forbidden", "schema": ErrorSchema},
             500: {"description": "Internal server error", "schema": ErrorSchema},
@@ -49,14 +50,28 @@ class UpdateView(BaseView):
         security=[{"token": [POST_PERMISSION]}],
     )
     @aiohttp_apispec.cookies_schema(AuthSchema)
-    async def post(self) -> None:
+    @aiohttp_apispec.json_schema(UpdateFlagsSchema)
+    async def post(self) -> Response:
         """
         run repository update. No parameters supported here
 
-        Raises:
-            HTTPNoContent: in case of success response
-        """
-        username = await self.username()
-        self.spawner.packages_update(username)
+        Returns:
+            Response: 200 with spawned process id
 
-        raise HTTPNoContent()
+        Raises:
+            HTTPBadRequest: if bad data is supplied
+        """
+        try:
+            data = await self.extract_data()
+        except Exception as e:
+            raise HTTPBadRequest(reason=str(e))
+
+        username = await self.username()
+        process_id = self.spawner.packages_update(
+            username,
+            aur=data.get("aur", True),
+            local=data.get("local", True),
+            manual=data.get("manual", True),
+        )
+
+        return json_response({"process_id": process_id})
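Note on usage: with this change the update endpoint returns the identifier of the spawned background process instead of an empty 204 response. A minimal client sketch (not part of the commit; the host, port and lack of authentication are assumptions) that triggers an update and inspects the process could look like this; the /api/v1/service/update and /api/v1/service/process/{id} paths match the ones exercised by the tests later in this commit.

import requests

# hypothetical server address; adjust to your ahriman web service
BASE_URL = "http://localhost:8080"

# request a repository update; the flags mirror the UpdateFlagsSchema fields
response = requests.post(f"{BASE_URL}/api/v1/service/update",
                         json={"aur": True, "local": True, "manual": True})
response.raise_for_status()
process_id = response.json()["process_id"]

# the spawned process can then be looked up by its identifier
status = requests.get(f"{BASE_URL}/api/v1/service/process/{process_id}")
print(status.json())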
src/ahriman/web/views/service/upload.py (new file, 144 lines)
@@ -0,0 +1,144 @@
+#
+# Copyright (c) 2021-2023 ahriman team.
+#
+# This file is part of ahriman
+# (see https://github.com/arcan1s/ahriman).
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+import aiohttp_apispec  # type: ignore[import]
+import shutil
+
+from aiohttp import BodyPartReader
+from aiohttp.web import HTTPBadRequest, HTTPCreated, HTTPNotFound
+from pathlib import Path
+from tempfile import NamedTemporaryFile
+
+from ahriman.models.user_access import UserAccess
+from ahriman.web.schemas import AuthSchema, ErrorSchema, FileSchema
+from ahriman.web.views.base import BaseView
+
+
+class UploadView(BaseView):
+    """
+    upload file to repository
+
+    Attributes:
+        POST_PERMISSION(UserAccess): (class attribute) post permissions of self
+    """
+
+    POST_PERMISSION = UserAccess.Full
+
+    @staticmethod
+    async def save_file(part: BodyPartReader, target: Path, *, max_body_size: int | None = None) -> tuple[str, Path]:
+        """
+        save file to local cache
+
+        Args:
+            part(BodyPartReader): multipart part to be saved
+            target(Path): path to directory to which file should be saved
+            max_body_size(int | None, optional): max body size in bytes (Default value = None)
+
+        Returns:
+            tuple[str, Path]: map of received filename to its local path
+
+        Raises:
+            HTTPBadRequest: if bad data is supplied
+        """
+        archive_name = part.filename
+        if archive_name is None:
+            raise HTTPBadRequest(reason="Filename must be set")
+        # some magic inside. We would like to make sure that passed filename is filename
+        # without slashes, dots, etc
+        if Path(archive_name).resolve().name != archive_name:
+            raise HTTPBadRequest(reason="Filename must be valid archive name")
+
+        current_size = 0
+
+        # in order to handle errors automatically we create temporary file for long operation (transfer)
+        # and then copy it to valid location
+        with NamedTemporaryFile() as cache:
+            while True:
+                chunk = await part.read_chunk()
+                if not chunk:
+                    break
+
+                current_size += len(chunk)
+                if max_body_size is not None and current_size > max_body_size:
+                    raise HTTPBadRequest(reason="Body part is too large")
+
+                cache.write(chunk)
+
+            cache.seek(0)  # reset file position
+
+            # and now copy temporary file to target location as hidden file
+            # we put it as hidden in order to make sure that it will not be handled during some random process
+            temporary_output = target / f".{archive_name}"
+            with temporary_output.open("wb") as archive:
+                shutil.copyfileobj(cache, archive)
+
+            return archive_name, temporary_output
+
+    @aiohttp_apispec.docs(
+        tags=["Actions"],
+        summary="Upload package",
+        description="Upload package to local filesystem",
+        responses={
+            201: {"description": "Success response"},
+            400: {"description": "Bad data is supplied", "schema": ErrorSchema},
+            401: {"description": "Authorization required", "schema": ErrorSchema},
+            403: {"description": "Access is forbidden", "schema": ErrorSchema},
+            404: {"description": "Not found", "schema": ErrorSchema},
+            500: {"description": "Internal server error", "schema": ErrorSchema},
+        },
+        security=[{"token": [POST_PERMISSION]}],
+    )
+    @aiohttp_apispec.cookies_schema(AuthSchema)
+    @aiohttp_apispec.form_schema(FileSchema)
+    async def post(self) -> None:
+        """
+        upload file from another instance to the server
+
+        Raises:
+            HTTPBadRequest: if bad data is supplied
+            HTTPCreated: on success response
+        """
+        if not self.configuration.getboolean("web", "enable_archive_upload", fallback=False):
+            raise HTTPNotFound()
+
+        try:
+            reader = await self.request.multipart()
+        except Exception as e:
+            raise HTTPBadRequest(reason=str(e))
+
+        max_body_size = self.configuration.getint("web", "max_body_size", fallback=None)
+        target = self.configuration.repository_paths.packages
+
+        files = []
+        while (part := await reader.next()) is not None:
+            if not isinstance(part, BodyPartReader):
+                raise HTTPBadRequest(reason="Invalid multipart message received")
+
+            if part.name not in ("package", "signature"):
+                raise HTTPBadRequest(reason="Multipart field isn't package or signature")
+
+            files.append(await self.save_file(part, target, max_body_size=max_body_size))
+
+        # and now we can rename files, which is relatively fast operation
+        # it is probably good way to call lock here, however
+        for filename, current_location in files:
+            target_location = current_location.parent / filename
+            current_location.rename(target_location)
+
+        raise HTTPCreated()
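Note on usage: a rough client-side counterpart of the new UploadView is sketched below (an illustration only: the exact route is not shown in this hunk and is assumed here, authentication is omitted, and the file names are hypothetical). The handler only accepts multipart fields named "package" and "signature", and the whole feature has to be enabled via the web.enable_archive_upload option first.

import requests

BASE_URL = "http://localhost:8080"  # hypothetical server address
UPLOAD_URL = f"{BASE_URL}/api/v1/service/upload"  # assumed route for UploadView

# field names must be "package" or "signature", otherwise the view raises HTTPBadRequest
with open("ahriman-1.0.0-1-any.pkg.tar.zst", "rb") as package, \
        open("ahriman-1.0.0-1-any.pkg.tar.zst.sig", "rb") as signature:
    response = requests.post(UPLOAD_URL, files={
        "package": package,
        "signature": signature,
    })
response.raise_for_status()  # 201 Created on success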
@@ -63,7 +63,7 @@ class LogsView(BaseView):
             HTTPNoContent: on success response
         """
         package_base = self.request.match_info["package"]
-        self.service.remove_logs(package_base, None)
+        self.service.logs_remove(package_base, None)
 
         raise HTTPNoContent()
 
@@ -95,10 +95,10 @@ class LogsView(BaseView):
         package_base = self.request.match_info["package"]
 
         try:
-            _, status = self.service.get(package_base)
+            _, status = self.service.package_get(package_base)
         except UnknownPackageError:
             raise HTTPNotFound()
-        logs = self.service.get_logs(package_base)
+        logs = self.service.logs_get(package_base)
 
         response = {
             "package_base": package_base,
@@ -137,10 +137,10 @@ class LogsView(BaseView):
         try:
             created = data["created"]
             record = data["message"]
-            process_id = data["process_id"]
+            version = data["version"]
         except Exception as e:
             raise HTTPBadRequest(reason=str(e))
 
-        self.service.update_logs(LogRecordId(package_base, process_id), created, record)
+        self.service.logs_update(LogRecordId(package_base, version), created, record)
 
         raise HTTPNoContent()
 
@@ -64,7 +64,7 @@ class PackageView(BaseView):
             HTTPNoContent: on success response
         """
         package_base = self.request.match_info["package"]
-        self.service.remove(package_base)
+        self.service.package_remove(package_base)
 
         raise HTTPNoContent()
 
@@ -96,7 +96,7 @@ class PackageView(BaseView):
         package_base = self.request.match_info["package"]
 
         try:
-            package, status = self.service.get(package_base)
+            package, status = self.service.package_get(package_base)
         except UnknownPackageError:
             raise HTTPNotFound()
 
@@ -142,7 +142,7 @@ class PackageView(BaseView):
             raise HTTPBadRequest(reason=str(e))
 
         try:
-            self.service.update(package_base, status, package)
+            self.service.package_update(package_base, status, package)
         except UnknownPackageError:
             raise HTTPBadRequest(reason=f"Package {package_base} is unknown, but no package body set")
 
@@ -102,6 +102,6 @@ class StatusView(BaseView):
         except Exception as e:
             raise HTTPBadRequest(reason=str(e))
 
-        self.service.update_self(status)
+        self.service.status_update(status)
 
         raise HTTPNoContent()
@@ -39,7 +39,7 @@ def args() -> argparse.Namespace:
     Returns:
         argparse.Namespace: command line arguments test instance
     """
-    return argparse.Namespace(architecture=None, lock=None, force=False, unsafe=False, report=False)
+    return argparse.Namespace(architecture=None, lock=None, force=False, unsafe=False, report=False, wait_timeout=-1)
 
 
 @pytest.fixture
@@ -32,6 +32,7 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
     args.multilib = True
     args.packager = "John Doe <john@doe.com>"
     args.repository = "aur-clone"
+    args.server = None
     args.sign_key = "key"
     args.sign_target = [SignSettings.Packages]
     args.web_port = 8080
@@ -57,13 +58,34 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
     ahriman_configuration_mock.assert_called_once_with(args, "x86_64", args.repository, configuration)
     devtools_configuration_mock.assert_called_once_with(
         args.build_command, "x86_64", args.from_configuration, args.mirror, args.multilib, args.repository,
-        repository_paths)
+        f"file://{repository_paths.repository}")
     makepkg_configuration_mock.assert_called_once_with(args.packager, args.makeflags_jobs, repository_paths)
     sudo_configuration_mock.assert_called_once_with(repository_paths, args.build_command, "x86_64")
     executable_mock.assert_called_once_with(repository_paths, args.build_command, "x86_64")
     init_mock.assert_called_once_with()
 
 
+def test_run_with_server(args: argparse.Namespace, configuration: Configuration, repository: Repository,
+                         mocker: MockerFixture) -> None:
+    """
+    must run command with server specified
+    """
+    args = _default_args(args)
+    args.server = "server"
+    mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
+    mocker.patch("ahriman.application.handlers.Setup.configuration_create_ahriman")
+    mocker.patch("ahriman.application.handlers.Setup.configuration_create_makepkg")
+    mocker.patch("ahriman.application.handlers.Setup.configuration_create_sudo")
+    mocker.patch("ahriman.application.handlers.Setup.executable_create")
+    mocker.patch("ahriman.core.alpm.repo.Repo.init")
+    devtools_configuration_mock = mocker.patch("ahriman.application.handlers.Setup.configuration_create_devtools")
+
+    Setup.run(args, "x86_64", configuration, report=False)
+    devtools_configuration_mock.assert_called_once_with(
+        args.build_command, "x86_64", args.from_configuration, args.mirror, args.multilib, args.repository,
+        "server")
+
+
 def test_build_command(args: argparse.Namespace) -> None:
     """
     must generate correct build command name
@@ -120,8 +142,7 @@ def test_configuration_create_ahriman_no_multilib(args: argparse.Namespace, conf
     ])  # non-strict check called intentionally
 
 
-def test_configuration_create_devtools(args: argparse.Namespace, repository_paths: RepositoryPaths,
-                                       mocker: MockerFixture) -> None:
+def test_configuration_create_devtools(args: argparse.Namespace, mocker: MockerFixture) -> None:
     """
     must create configuration for the devtools
     """
@@ -132,13 +153,12 @@ def test_configuration_create_devtools(args: argparse.Namespace, repository_path
     write_mock = mocker.patch("ahriman.core.configuration.Configuration.write")
 
     Setup.configuration_create_devtools(args.build_command, "x86_64", args.from_configuration,
-                                        None, args.multilib, args.repository, repository_paths)
+                                        None, args.multilib, args.repository, "server")
     add_section_mock.assert_has_calls([MockCall("multilib"), MockCall(args.repository)])
     write_mock.assert_called_once_with(pytest.helpers.anyvar(int))
 
 
-def test_configuration_create_devtools_mirror(args: argparse.Namespace, repository_paths: RepositoryPaths,
-                                              mocker: MockerFixture) -> None:
+def test_configuration_create_devtools_mirror(args: argparse.Namespace, mocker: MockerFixture) -> None:
     """
     must create configuration for the devtools with mirror set explicitly
     """
@@ -157,14 +177,13 @@ def test_configuration_create_devtools_mirror(args: argparse.Namespace, reposito
     set_option_mock = mocker.patch("ahriman.core.configuration.Configuration.set_option")
 
     Setup.configuration_create_devtools(args.build_command, "x86_64", args.from_configuration,
-                                        args.mirror, False, args.repository, repository_paths)
+                                        args.mirror, False, args.repository, "server")
     get_mock.assert_has_calls([MockCall("core", "Include", fallback=None), MockCall("extra", "Include", fallback=None)])
     remove_option_mock.assert_called_once_with("core", "Include")
     set_option_mock.assert_has_calls([MockCall("core", "Server", args.mirror)])  # non-strict check called intentionally
 
 
-def test_configuration_create_devtools_no_multilib(args: argparse.Namespace, repository_paths: RepositoryPaths,
-                                                   mocker: MockerFixture) -> None:
+def test_configuration_create_devtools_no_multilib(args: argparse.Namespace, mocker: MockerFixture) -> None:
     """
     must create configuration for the devtools without multilib
     """
@@ -174,7 +193,7 @@ def test_configuration_create_devtools_no_multilib(args: argparse.Namespace, rep
     write_mock = mocker.patch("ahriman.core.configuration.Configuration.write")
 
     Setup.configuration_create_devtools(args.build_command, "x86_64", args.from_configuration,
-                                        None, False, args.repository, repository_paths)
+                                        None, False, args.repository, "server")
     write_mock.assert_called_once_with(pytest.helpers.anyvar(int))
 
 
@@ -36,8 +36,8 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
     """
     args = _default_args(args)
     mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
-    application_mock = mocker.patch("ahriman.core.status.client.Client.get_internal")
-    packages_mock = mocker.patch("ahriman.core.status.client.Client.get",
+    application_mock = mocker.patch("ahriman.core.status.client.Client.status_get")
+    packages_mock = mocker.patch("ahriman.core.status.client.Client.package_get",
                                  return_value=[(package_ahriman, BuildStatus(BuildStatusEnum.Success)),
                                                (package_python_schedule, BuildStatus(BuildStatusEnum.Failed))])
     check_mock = mocker.patch("ahriman.application.handlers.Handler.check_if_empty")
@@ -58,8 +58,8 @@ def test_run_empty_exception(args: argparse.Namespace, configuration: Configurat
     args = _default_args(args)
     args.exit_code = True
     mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
-    mocker.patch("ahriman.core.status.client.Client.get_internal")
-    mocker.patch("ahriman.core.status.client.Client.get", return_value=[])
+    mocker.patch("ahriman.core.status.client.Client.status_get")
+    mocker.patch("ahriman.core.status.client.Client.package_get", return_value=[])
     check_mock = mocker.patch("ahriman.application.handlers.Handler.check_if_empty")
 
     Status.run(args, "x86_64", configuration, report=False)
@@ -74,7 +74,7 @@ def test_run_verbose(args: argparse.Namespace, configuration: Configuration, rep
     args = _default_args(args)
     args.info = True
     mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
-    mocker.patch("ahriman.core.status.client.Client.get",
+    mocker.patch("ahriman.core.status.client.Client.package_get",
                  return_value=[(package_ahriman, BuildStatus(BuildStatusEnum.Success))])
     print_mock = mocker.patch("ahriman.core.formatters.Printer.print")
 
@@ -90,7 +90,7 @@ def test_run_with_package_filter(args: argparse.Namespace, configuration: Config
     args = _default_args(args)
     args.package = [package_ahriman.base]
     mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
-    packages_mock = mocker.patch("ahriman.core.status.client.Client.get",
+    packages_mock = mocker.patch("ahriman.core.status.client.Client.package_get",
                                  return_value=[(package_ahriman, BuildStatus(BuildStatusEnum.Success))])
 
     Status.run(args, "x86_64", configuration, report=False)
@@ -104,7 +104,7 @@ def test_run_by_status(args: argparse.Namespace, configuration: Configuration, r
     """
     args = _default_args(args)
     args.status = BuildStatusEnum.Failed
-    mocker.patch("ahriman.core.status.client.Client.get",
+    mocker.patch("ahriman.core.status.client.Client.package_get",
                  return_value=[(package_ahriman, BuildStatus(BuildStatusEnum.Success)),
                                (package_python_schedule, BuildStatus(BuildStatusEnum.Failed))])
     mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
@@ -34,7 +34,7 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
     """
     args = _default_args(args)
     mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
-    update_self_mock = mocker.patch("ahriman.core.status.client.Client.update_self")
+    update_self_mock = mocker.patch("ahriman.core.status.client.Client.status_update")
 
     StatusUpdate.run(args, "x86_64", configuration, report=False)
     update_self_mock.assert_called_once_with(args.status)
@@ -48,7 +48,7 @@ def test_run_packages(args: argparse.Namespace, configuration: Configuration, re
     args = _default_args(args)
     args.package = [package_ahriman.base]
     mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
-    update_mock = mocker.patch("ahriman.core.status.client.Client.update")
+    update_mock = mocker.patch("ahriman.core.status.client.Client.package_update")
 
     StatusUpdate.run(args, "x86_64", configuration, report=False)
     update_mock.assert_called_once_with(package_ahriman.base, args.status)
@@ -63,7 +63,7 @@ def test_run_remove(args: argparse.Namespace, configuration: Configuration, repo
     args.package = [package_ahriman.base]
     args.action = Action.Remove
     mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
-    update_mock = mocker.patch("ahriman.core.status.client.Client.remove")
+    update_mock = mocker.patch("ahriman.core.status.client.Client.package_remove")
 
     StatusUpdate.run(args, "x86_64", configuration, report=False)
     update_mock.assert_called_once_with(package_ahriman.base)
@@ -67,8 +67,10 @@ def test_schema(configuration: Configuration) -> None:
     assert schema.pop("keyring-generator")
     assert schema.pop("mirrorlist")
     assert schema.pop("mirrorlist-generator")
+    assert schema.pop("remote-call")
     assert schema.pop("remote-pull")
     assert schema.pop("remote-push")
+    assert schema.pop("remote-service")
     assert schema.pop("report")
     assert schema.pop("rsync")
     assert schema.pop("s3")
@@ -77,6 +77,10 @@ def test_extract_arguments(args: argparse.Namespace, configuration: Configuratio
     expected.extend(["--unsafe"])
     assert list(Web.extract_arguments(probe, "x86_64", configuration)) == expected
 
+    configuration.set_option("web", "wait_timeout", "60")
+    expected.extend(["--wait-timeout", "60"])
+    assert list(Web.extract_arguments(probe, "x86_64", configuration)) == expected
+
 
 def test_extract_arguments_full(parser: argparse.ArgumentParser, configuration: Configuration):
     """
@@ -91,6 +95,7 @@ def test_extract_arguments_full(parser: argparse.ArgumentParser, configuration:
         value = action.const or \
             next(iter(action.choices or []), None) or \
             (not action.default if isinstance(action.default, bool) else None) or \
+            (42 if action.type == int else None) or \
             "random string"
         if action.type is not None:
             value = action.type(value)
@@ -47,6 +47,16 @@ def test_parser_option_log_handler(parser: argparse.ArgumentParser) -> None:
     assert isinstance(args.log_handler, LogHandler)
 
 
+def test_parser_option_wait_timeout(parser: argparse.ArgumentParser) -> None:
+    """
+    must convert wait-timeout option to int instance
+    """
+    args = parser.parse_args(["service-config"])
+    assert isinstance(args.wait_timeout, int)
+    args = parser.parse_args(["--wait-timeout", "60", "service-config"])
+    assert isinstance(args.wait_timeout, int)
+
+
 def test_multiple_architectures(parser: argparse.ArgumentParser) -> None:
     """
     must accept multiple architectures
@@ -32,7 +32,7 @@ def test_check_version(lock: Lock, mocker: MockerFixture) -> None:
     """
     must check version correctly
     """
-    mocker.patch("ahriman.core.status.client.Client.get_internal",
+    mocker.patch("ahriman.core.status.client.Client.status_get",
                  return_value=InternalStatus(status=BuildStatus(), version=__version__))
     logging_mock = mocker.patch("logging.Logger.warning")
 
@@ -44,7 +44,7 @@ def test_check_version_mismatch(lock: Lock, mocker: MockerFixture) -> None:
     """
     must check mismatched version correctly
     """
-    mocker.patch("ahriman.core.status.client.Client.get_internal",
+    mocker.patch("ahriman.core.status.client.Client.status_get",
                  return_value=InternalStatus(status=BuildStatus(), version="version"))
     logging_mock = mocker.patch("logging.Logger.warning")
 
@@ -154,15 +154,35 @@ def test_create_unsafe(lock: Lock) -> None:
     lock.path.unlink()
 
 
+def test_watch(lock: Lock, mocker: MockerFixture) -> None:
+    """
+    must check if lock file exists
+    """
+    wait_mock = mocker.patch("ahriman.models.waiter.Waiter.wait")
+    lock.path = Path(tempfile.mktemp())  # nosec
+
+    lock.watch()
+    wait_mock.assert_called_once_with(lock.path.is_file)
+
+
+def test_watch_skip(lock: Lock, mocker: MockerFixture) -> None:
+    """
+    must skip watch on empty path
+    """
+    mocker.patch("pathlib.Path.is_file", return_value=True)
+    lock.watch()
+
+
 def test_enter(lock: Lock, mocker: MockerFixture) -> None:
     """
     must process with context manager
     """
     check_user_mock = mocker.patch("ahriman.application.lock.Lock.check_user")
     check_version_mock = mocker.patch("ahriman.application.lock.Lock.check_version")
+    watch_mock = mocker.patch("ahriman.application.lock.Lock.watch")
     clear_mock = mocker.patch("ahriman.application.lock.Lock.clear")
     create_mock = mocker.patch("ahriman.application.lock.Lock.create")
-    update_status_mock = mocker.patch("ahriman.core.status.client.Client.update_self")
+    update_status_mock = mocker.patch("ahriman.core.status.client.Client.status_update")
 
     with lock:
         pass
@@ -170,6 +190,7 @@ def test_enter(lock: Lock, mocker: MockerFixture) -> None:
     clear_mock.assert_called_once_with()
     create_mock.assert_called_once_with()
     check_version_mock.assert_called_once_with()
+    watch_mock.assert_called_once_with()
     update_status_mock.assert_has_calls([MockCall(BuildStatusEnum.Building), MockCall(BuildStatusEnum.Success)])
 
 
@@ -180,7 +201,7 @@ def test_exit_with_exception(lock: Lock, mocker: MockerFixture) -> None:
     mocker.patch("ahriman.application.lock.Lock.check_user")
     mocker.patch("ahriman.application.lock.Lock.clear")
     mocker.patch("ahriman.application.lock.Lock.create")
-    update_status_mock = mocker.patch("ahriman.core.status.client.Client.update_self")
+    update_status_mock = mocker.patch("ahriman.core.status.client.Client.status_update")
 
     with pytest.raises(Exception):
         with lock:
@@ -1,7 +1,7 @@
 from ahriman.core.database.migrations.m009_local_source import steps
 
 
-def test_migration_packagers() -> None:
+def test_migration_local_source() -> None:
     """
     migration must not be empty
     """
@@ -0,0 +1,8 @@
+from ahriman.core.database.migrations.m010_version_based_logs_removal import steps
+
+
+def test_migration_version_based_logs_removal() -> None:
+    """
+    migration must not be empty
+    """
+    assert steps
@@ -8,11 +8,11 @@ def test_logs_insert_remove_process(database: SQLite, package_ahriman: Package,
     """
     must clear process specific package logs
     """
-    database.logs_insert(LogRecordId(package_ahriman.base, 1), 42.0, "message 1")
-    database.logs_insert(LogRecordId(package_ahriman.base, 2), 43.0, "message 2")
-    database.logs_insert(LogRecordId(package_python_schedule.base, 1), 42.0, "message 3")
+    database.logs_insert(LogRecordId(package_ahriman.base, "1"), 42.0, "message 1")
+    database.logs_insert(LogRecordId(package_ahriman.base, "2"), 43.0, "message 2")
+    database.logs_insert(LogRecordId(package_python_schedule.base, "1"), 42.0, "message 3")
 
-    database.logs_remove(package_ahriman.base, 1)
+    database.logs_remove(package_ahriman.base, "1")
     assert database.logs_get(package_ahriman.base) == "[1970-01-01 00:00:42] message 1"
     assert database.logs_get(package_python_schedule.base) == "[1970-01-01 00:00:42] message 3"
 
@@ -21,9 +21,9 @@ def test_logs_insert_remove_full(database: SQLite, package_ahriman: Package, pac
     """
     must clear full package logs
     """
-    database.logs_insert(LogRecordId(package_ahriman.base, 1), 42.0, "message 1")
-    database.logs_insert(LogRecordId(package_ahriman.base, 2), 43.0, "message 2")
-    database.logs_insert(LogRecordId(package_python_schedule.base, 1), 42.0, "message 3")
+    database.logs_insert(LogRecordId(package_ahriman.base, "1"), 42.0, "message 1")
+    database.logs_insert(LogRecordId(package_ahriman.base, "2"), 43.0, "message 2")
+    database.logs_insert(LogRecordId(package_python_schedule.base, "1"), 42.0, "message 3")
 
     database.logs_remove(package_ahriman.base, None)
     assert not database.logs_get(package_ahriman.base)
@@ -34,6 +34,6 @@ def test_logs_insert_get(database: SQLite, package_ahriman: Package) -> None:
     """
     must insert and get package logs
     """
-    database.logs_insert(LogRecordId(package_ahriman.base, 1), 43.0, "message 2")
-    database.logs_insert(LogRecordId(package_ahriman.base, 1), 42.0, "message 1")
+    database.logs_insert(LogRecordId(package_ahriman.base, "1"), 43.0, "message 2")
+    database.logs_insert(LogRecordId(package_ahriman.base, "1"), 42.0, "message 1")
     assert database.logs_get(package_ahriman.base) == "[1970-01-01 00:00:42] message 1\n[1970-01-01 00:00:43] message 2"
 
@@ -43,6 +43,33 @@ def test_is_logs_post() -> None:
     assert not FilteredAccessLogger.is_logs_post(request)
 
 
+def test_is_process_get() -> None:
+    """
+    must correctly define if request belongs to process get
+    """
+    request = MagicMock()
+
+    request.method = "GET"
+    request.path = "/api/v1/service/process/e7d67119-264a-48f4-b7e4-07bc96a7de00"
+    assert FilteredAccessLogger.is_process_get(request)
+
+    request.method = "POST"
+    request.path = "/api/v1/service/process/e7d67119-264a-48f4-b7e4-07bc96a7de00"
+    assert not FilteredAccessLogger.is_process_get(request)
+
+    request.method = "GET"
+    request.path = "/api/v1/service/process/e7d67119-264a-48f4-b7e4-07bc96a7de00/some/random/path"
+    assert not FilteredAccessLogger.is_process_get(request)
+
+    request.method = "GET"
+    request.path = "/api/v1/service/process"
+    assert not FilteredAccessLogger.is_process_get(request)
+
+    request.method = "GET"
+    request.path = "/api/v1/service/process/"
+    assert not FilteredAccessLogger.is_process_get(request)
+
+
 def test_log(filtered_access_logger: FilteredAccessLogger, mocker: MockerFixture) -> None:
     """
     must emit log record
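Note: the expectations in test_is_process_get above imply that only GET requests to exactly one process resource are filtered from the access log. One possible matcher satisfying those cases (an illustration only, not the project's actual implementation) is a single anchored regular expression:

import re

# assumed pattern: exactly one path component after /process, no trailing segments
PROCESS_PATH = re.compile(r"^/api/v1/service/process/[^/]+$")

def is_process_get(method: str, path: str) -> bool:
    """return True for GET requests to a single process resource"""
    return method == "GET" and PROCESS_PATH.match(path) is not None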
@@ -4,6 +4,7 @@ from pytest_mock import MockerFixture
 
 from ahriman.core.configuration import Configuration
 from ahriman.core.log.http_log_handler import HttpLogHandler
+from ahriman.models.log_record_id import LogRecordId
 from ahriman.models.package import Package
 
 
@@ -39,13 +40,13 @@ def test_emit(configuration: Configuration, log_record: logging.LogRecord, packa
     """
     must emit log record to reporter
     """
-    log_record.package_base = package_ahriman.base
-    log_mock = mocker.patch("ahriman.core.status.client.Client.logs")
+    log_record_id = log_record.package_id = LogRecordId(package_ahriman.base, package_ahriman.version)
+    log_mock = mocker.patch("ahriman.core.status.client.Client.package_logs")
 
     handler = HttpLogHandler(configuration, report=False, suppress_errors=False)
 
     handler.emit(log_record)
-    log_mock.assert_called_once_with(package_ahriman.base, log_record)
+    log_mock.assert_called_once_with(log_record_id, log_record)
 
 
 def test_emit_failed(configuration: Configuration, log_record: logging.LogRecord, package_ahriman: Package,
@@ -53,8 +54,8 @@ def test_emit_failed(configuration: Configuration, log_record: logging.LogRecord
     """
     must call handle error on exception
     """
-    log_record.package_base = package_ahriman.base
-    mocker.patch("ahriman.core.status.client.Client.logs", side_effect=Exception())
+    log_record.package_id = LogRecordId(package_ahriman.base, package_ahriman.version)
+    mocker.patch("ahriman.core.status.client.Client.package_logs", side_effect=Exception())
     handle_error_mock = mocker.patch("logging.Handler.handleError")
     handler = HttpLogHandler(configuration, report=False, suppress_errors=False)
 
@@ -67,8 +68,8 @@ def test_emit_suppress_failed(configuration: Configuration, log_record: logging.
     """
     must not call handle error on exception if suppress flag is set
     """
-    log_record.package_base = package_ahriman.base
-    mocker.patch("ahriman.core.status.client.Client.logs", side_effect=Exception())
+    log_record.package_id = LogRecordId(package_ahriman.base, package_ahriman.version)
+    mocker.patch("ahriman.core.status.client.Client.package_logs", side_effect=Exception())
     handle_error_mock = mocker.patch("logging.Handler.handleError")
     handler = HttpLogHandler(configuration, report=False, suppress_errors=True)
 
@@ -80,7 +81,7 @@ def test_emit_skip(configuration: Configuration, log_record: logging.LogRecord,
     """
     must skip log record posting if no package base set
    """
-    log_mock = mocker.patch("ahriman.core.status.client.Client.logs")
+    log_mock = mocker.patch("ahriman.core.status.client.Client.package_logs")
     handler = HttpLogHandler(configuration, report=False, suppress_errors=False)
 
     handler.emit(log_record)
@@ -5,6 +5,7 @@ from pytest_mock import MockerFixture
 
 from ahriman.core.alpm.repo import Repo
 from ahriman.core.database import SQLite
+from ahriman.models.log_record_id import LogRecordId
 from ahriman.models.package import Package
 
 
@@ -20,16 +21,16 @@ def test_package_logger_set_reset(database: SQLite) -> None:
     """
     must set and reset package base attribute
     """
-    package_base = "package base"
+    log_record_id = LogRecordId("base", "version")
 
-    database._package_logger_set(package_base)
+    database._package_logger_set(log_record_id.package_base, log_record_id.version)
     record = logging.makeLogRecord({})
-    assert record.package_base == package_base
+    assert record.package_id == log_record_id
 
     database._package_logger_reset()
     record = logging.makeLogRecord({})
     with pytest.raises(AttributeError):
-        record.package_base
+        record.package_id
 
 
 def test_in_package_context(database: SQLite, package_ahriman: Package, mocker: MockerFixture) -> None:
@@ -39,10 +40,24 @@ def test_in_package_context(database: SQLite, package_ahriman: Package, mocker:
     set_mock = mocker.patch("ahriman.core.log.LazyLogging._package_logger_set")
     reset_mock = mocker.patch("ahriman.core.log.LazyLogging._package_logger_reset")
 
-    with database.in_package_context(package_ahriman.base):
+    with database.in_package_context(package_ahriman.base, package_ahriman.version):
         pass
 
-    set_mock.assert_called_once_with(package_ahriman.base)
+    set_mock.assert_called_once_with(package_ahriman.base, package_ahriman.version)
+    reset_mock.assert_called_once_with()
+
+
+def test_in_package_context_empty_version(database: SQLite, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must set package log context
+    """
+    set_mock = mocker.patch("ahriman.core.log.LazyLogging._package_logger_set")
+    reset_mock = mocker.patch("ahriman.core.log.LazyLogging._package_logger_reset")
+
+    with database.in_package_context(package_ahriman.base, None):
+        pass
+
+    set_mock.assert_called_once_with(package_ahriman.base, None)
     reset_mock.assert_called_once_with()
 
 
@@ -54,7 +69,7 @@ def test_in_package_context_failed(database: SQLite, package_ahriman: Package, m
     reset_mock = mocker.patch("ahriman.core.log.LazyLogging._package_logger_reset")
 
     with pytest.raises(Exception):
-        with database.in_package_context(package_ahriman.base):
+        with database.in_package_context(package_ahriman.base, ""):
            raise Exception()
 
     reset_mock.assert_called_once_with()
tests/ahriman/core/report/conftest.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+import pytest
+
+from ahriman.core.configuration import Configuration
+from ahriman.core.report.remote_call import RemoteCall
+
+
+@pytest.fixture
+def remote_call(configuration: Configuration) -> RemoteCall:
+    """
+    fixture for remote update trigger
+
+    Args:
+        configuration(Configuration): configuration fixture
+
+    Returns:
+        RemoteCall: remote update trigger test instance
+    """
+    configuration.set_option("web", "host", "localhost")
+    configuration.set_option("web", "port", "8080")
+    return RemoteCall("x86_64", configuration, "remote-call")
tests/ahriman/core/report/test_remote_call.py (new file, 95 lines)
@@ -0,0 +1,95 @@
+import pytest
+import requests
+
+from pytest_mock import MockerFixture
+
+from ahriman.core.report.remote_call import RemoteCall
+from ahriman.models.result import Result
+
+
+def test_generate(remote_call: RemoteCall, mocker: MockerFixture) -> None:
+    """
+    must correctly call client
+    """
+    update_mock = mocker.patch("ahriman.core.report.remote_call.RemoteCall.remote_update", return_value="id")
+    wait_mock = mocker.patch("ahriman.core.report.remote_call.RemoteCall.remote_wait")
+
+    remote_call.generate([], Result())
+    update_mock.assert_called_once_with()
+    wait_mock.assert_called_once_with("id")
+
+
+def test_is_process_alive(remote_call: RemoteCall, mocker: MockerFixture) -> None:
+    """
+    must correctly define if process is alive
+    """
+    response_obj = requests.Response()
+    response_obj._content = """{"is_alive": true}""".encode("utf8")
+    response_obj.status_code = 200
+
+    request_mock = mocker.patch("ahriman.core.status.web_client.WebClient.make_request", return_value=response_obj)
+
+    assert remote_call.is_process_alive("id")
+    request_mock.assert_called_once_with("GET", "/api/v1/service/process/id")
+
+
+def test_is_process_alive_unknown(remote_call: RemoteCall, mocker: MockerFixture) -> None:
+    """
+    must correctly define if process is unknown
+    """
+    response = requests.Response()
+    response.status_code = 404
+    mocker.patch("ahriman.core.status.web_client.WebClient.make_request",
+                 side_effect=requests.RequestException(response=response))
+
+    assert not remote_call.is_process_alive("id")
+
+
+def test_is_process_alive_error(remote_call: RemoteCall, mocker: MockerFixture) -> None:
+    """
+    must reraise exception on process request
+    """
+    mocker.patch("ahriman.core.status.web_client.WebClient.make_request", side_effect=Exception)
+
+    with pytest.raises(Exception):
+        remote_call.is_process_alive("id")
+
+
+def test_is_process_alive_http_error(remote_call: RemoteCall, mocker: MockerFixture) -> None:
+    """
+    must reraise http exception on process request
+    """
+    response = requests.Response()
+    response.status_code = 500
+    mocker.patch("ahriman.core.status.web_client.WebClient.make_request",
+                 side_effect=requests.RequestException(response=response))
+
+    with pytest.raises(requests.RequestException):
+        remote_call.is_process_alive("id")
+
+
+def test_remote_update(remote_call: RemoteCall, mocker: MockerFixture) -> None:
+    """
+    must call remote server for update process
+    """
+    response_obj = requests.Response()
+    response_obj._content = """{"process_id": "id"}""".encode("utf8")
+    response_obj.status_code = 200
+
+    request_mock = mocker.patch("ahriman.core.status.web_client.WebClient.make_request", return_value=response_obj)
+
+    assert remote_call.remote_update() == "id"
+    request_mock.assert_called_once_with("POST", "/api/v1/service/update", json={
+        "aur": False,
+        "local": False,
+        "manual": True,
+    })
+
+
+def test_remote_wait(remote_call: RemoteCall, mocker: MockerFixture) -> None:
+    """
+    must wait for remote process to success
+    """
+    wait_mock = mocker.patch("ahriman.models.waiter.Waiter.wait")
+    remote_call.remote_wait("id")
+    wait_mock.assert_called_once_with(pytest.helpers.anyvar(int), "id")
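Note: the tests above outline the RemoteCall flow: remote_update starts an update on the remote instance and returns a process identifier, and remote_wait then polls the process endpoint until the update is no longer alive. A simplified standalone version of that waiting loop (a sketch with assumed timeout handling and server address, not the trigger's actual code) might be:

import time
import requests

BASE_URL = "http://localhost:8080"  # hypothetical remote ahriman instance

def wait_for_process(process_id: str, wait_timeout: float = 300.0, interval: float = 10.0) -> None:
    """poll the process endpoint until the remote update has finished"""
    deadline = time.monotonic() + wait_timeout
    while time.monotonic() < deadline:
        response = requests.get(f"{BASE_URL}/api/v1/service/process/{process_id}")
        if response.status_code == 404:  # process is unknown, nothing to wait for
            return
        response.raise_for_status()
        if not response.json()["is_alive"]:
            return
        time.sleep(interval)
    raise TimeoutError(f"process {process_id} did not finish in {wait_timeout} seconds")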
@ -24,6 +24,7 @@ def test_report_dummy(configuration: Configuration, result: Result, mocker: Mock
|
|||||||
"""
|
"""
|
||||||
mocker.patch("ahriman.models.report_settings.ReportSettings.from_option", return_value=ReportSettings.Disabled)
|
mocker.patch("ahriman.models.report_settings.ReportSettings.from_option", return_value=ReportSettings.Disabled)
|
||||||
report_mock = mocker.patch("ahriman.core.report.report.Report.generate")
|
report_mock = mocker.patch("ahriman.core.report.report.Report.generate")
|
||||||
|
|
||||||
Report.load("x86_64", configuration, "disabled").run(result, [])
|
Report.load("x86_64", configuration, "disabled").run(result, [])
|
||||||
report_mock.assert_called_once_with([], result)
|
report_mock.assert_called_once_with([], result)
|
||||||
|
|
||||||
@ -55,6 +56,18 @@ def test_report_html(configuration: Configuration, result: Result, mocker: Mocke
|
|||||||
report_mock.assert_called_once_with([], result)
|
report_mock.assert_called_once_with([], result)
|
||||||
|
|
||||||
|
|
||||||
|
def test_report_remote_call(configuration: Configuration, result: Result, mocker: MockerFixture) -> None:
|
||||||
|
"""
|
||||||
|
must instantiate remote call trigger
|
||||||
|
"""
|
||||||
|
configuration.set_option("web", "host", "localhost")
|
||||||
|
configuration.set_option("web", "port", "8080")
|
||||||
|
report_mock = mocker.patch("ahriman.core.report.remote_call.RemoteCall.generate")
|
||||||
|
|
||||||
|
Report.load("x86_64", configuration, "remote-call").run(result, [])
|
||||||
|
report_mock.assert_called_once_with([], result)
|
||||||
|
|
||||||
|
|
||||||
def test_report_telegram(configuration: Configuration, result: Result, mocker: MockerFixture) -> None:
|
def test_report_telegram(configuration: Configuration, result: Result, mocker: MockerFixture) -> None:
|
||||||
"""
|
"""
|
||||||
must generate telegram report
|
must generate telegram report
|
||||||
|
@@ -85,7 +85,7 @@ def test_process_remove_base(executor: Executor, package_ahriman: Package, mocke
     build_queue_mock = mocker.patch("ahriman.core.database.SQLite.build_queue_clear")
     patches_mock = mocker.patch("ahriman.core.database.SQLite.patches_remove")
     logs_mock = mocker.patch("ahriman.core.database.SQLite.logs_remove")
-    status_client_mock = mocker.patch("ahriman.core.status.client.Client.remove")
+    status_client_mock = mocker.patch("ahriman.core.status.client.Client.package_remove")
 
     executor.process_remove([package_ahriman.base])
     # must remove via alpm wrapper
@@ -106,7 +106,7 @@ def test_process_remove_base_multiple(executor: Executor, package_python_schedul
     """
     mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
     repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
-    status_client_mock = mocker.patch("ahriman.core.status.client.Client.remove")
+    status_client_mock = mocker.patch("ahriman.core.status.client.Client.package_remove")
 
     executor.process_remove([package_python_schedule.base])
     # must remove via alpm wrapper
@@ -125,7 +125,7 @@ def test_process_remove_base_single(executor: Executor, package_python_schedule:
     """
     mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
     repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
-    status_client_mock = mocker.patch("ahriman.core.status.client.Client.remove")
+    status_client_mock = mocker.patch("ahriman.core.status.client.Client.package_remove")
 
     executor.process_remove(["python2-schedule"])
     # must remove via alpm wrapper
@@ -171,7 +171,7 @@ def test_process_remove_unknown(executor: Executor, package_ahriman: Package, mo
     """
     mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[])
     repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
-    status_client_mock = mocker.patch("ahriman.core.status.client.Client.remove")
+    status_client_mock = mocker.patch("ahriman.core.status.client.Client.package_remove")
 
     executor.process_remove([package_ahriman.base])
     repo_remove_mock.assert_not_called()
@@ -76,6 +76,13 @@ def test_sign_options(configuration: Configuration) -> None:
     assert default_key == "default-key"
 
 
+def test_signature() -> None:
+    """
+    must correctly generate the signature path
+    """
+    assert GPG.signature(Path("path") / "to" / "package.tar.xz") == Path("path") / "to" / "package.tar.xz.sig"
+
+
 def test_key_download(gpg: GPG, mocker: MockerFixture) -> None:
     """
     must download the key from public server
@@ -222,6 +229,18 @@ def test_process_sign_package_skip_4(gpg: GPG, mocker: MockerFixture) -> None:
     process_mock.assert_not_called()
 
 
+def test_process_sign_package_skip_already_signed(gpg_with_key: GPG, mocker: MockerFixture) -> None:
+    """
+    must not sign package if it was already signed
+    """
+    result = [Path("a"), Path("a.sig")]
+    mocker.patch("pathlib.Path.is_file", return_value=True)
+    process_mock = mocker.patch("ahriman.core.sign.gpg.GPG.process")
+
+    assert gpg_with_key.process_sign_package(Path("a"), gpg_with_key.default_key) == result
+    process_mock.assert_not_called()
+
+
 def test_process_sign_repository_1(gpg_with_key: GPG, mocker: MockerFixture) -> None:
     """
     must sign repository
@@ -7,6 +7,7 @@ from ahriman.core.status.client import Client
 from ahriman.core.status.web_client import WebClient
 from ahriman.models.build_status import BuildStatus, BuildStatusEnum
 from ahriman.models.internal_status import InternalStatus
+from ahriman.models.log_record_id import LogRecordId
 from ahriman.models.package import Package
 
 
@@ -51,64 +52,47 @@ def test_load_full_client_from_unix_socket(configuration: Configuration) -> None
     assert isinstance(Client.load(configuration, report=True), WebClient)
 
 
-def test_add(client: Client, package_ahriman: Package) -> None:
+def test_package_add(client: Client, package_ahriman: Package) -> None:
     """
     must process package addition without errors
     """
-    client.add(package_ahriman, BuildStatusEnum.Unknown)
+    client.package_add(package_ahriman, BuildStatusEnum.Unknown)
 
 
-def test_get(client: Client, package_ahriman: Package) -> None:
+def test_package_get(client: Client, package_ahriman: Package) -> None:
     """
     must return empty package list
     """
-    assert client.get(package_ahriman.base) == []
-    assert client.get(None) == []
+    assert client.package_get(package_ahriman.base) == []
+    assert client.package_get(None) == []
 
 
-def test_get_internal(client: Client) -> None:
-    """
-    must return dummy status for web service
-    """
-    actual = client.get_internal()
-    expected = InternalStatus(status=BuildStatus(timestamp=actual.status.timestamp))
-
-    assert actual == expected
-
-
-def test_log(client: Client, package_ahriman: Package, log_record: logging.LogRecord) -> None:
+def test_package_logs(client: Client, package_ahriman: Package, log_record: logging.LogRecord) -> None:
     """
     must process log record without errors
     """
-    client.logs(package_ahriman.base, log_record)
+    client.package_logs(LogRecordId(package_ahriman.base, package_ahriman.version), log_record)
 
 
-def test_remove(client: Client, package_ahriman: Package) -> None:
+def test_package_remove(client: Client, package_ahriman: Package) -> None:
     """
     must process remove without errors
     """
-    client.remove(package_ahriman.base)
+    client.package_remove(package_ahriman.base)
 
 
-def test_update(client: Client, package_ahriman: Package) -> None:
+def test_package_update(client: Client, package_ahriman: Package) -> None:
     """
     must update package status without errors
     """
-    client.update(package_ahriman.base, BuildStatusEnum.Unknown)
+    client.package_update(package_ahriman.base, BuildStatusEnum.Unknown)
 
 
-def test_update_self(client: Client) -> None:
-    """
-    must update self status without errors
-    """
-    client.update_self(BuildStatusEnum.Unknown)
-
-
 def test_set_building(client: Client, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must set building status to the package
     """
-    update_mock = mocker.patch("ahriman.core.status.client.Client.update")
+    update_mock = mocker.patch("ahriman.core.status.client.Client.package_update")
     client.set_building(package_ahriman.base)
 
     update_mock.assert_called_once_with(package_ahriman.base, BuildStatusEnum.Building)
@@ -118,7 +102,7 @@ def test_set_failed(client: Client, package_ahriman: Package, mocker: MockerFixt
     """
     must set failed status to the package
     """
-    update_mock = mocker.patch("ahriman.core.status.client.Client.update")
+    update_mock = mocker.patch("ahriman.core.status.client.Client.package_update")
     client.set_failed(package_ahriman.base)
 
     update_mock.assert_called_once_with(package_ahriman.base, BuildStatusEnum.Failed)
@@ -128,7 +112,7 @@ def test_set_pending(client: Client, package_ahriman: Package, mocker: MockerFix
     """
     must set building status to the package
     """
-    update_mock = mocker.patch("ahriman.core.status.client.Client.update")
+    update_mock = mocker.patch("ahriman.core.status.client.Client.package_update")
     client.set_pending(package_ahriman.base)
 
     update_mock.assert_called_once_with(package_ahriman.base, BuildStatusEnum.Pending)
@@ -138,7 +122,7 @@ def test_set_success(client: Client, package_ahriman: Package, mocker: MockerFix
     """
     must set success status to the package
     """
-    add_mock = mocker.patch("ahriman.core.status.client.Client.add")
+    add_mock = mocker.patch("ahriman.core.status.client.Client.package_add")
     client.set_success(package_ahriman)
 
     add_mock.assert_called_once_with(package_ahriman, BuildStatusEnum.Success)
@@ -148,7 +132,24 @@ def test_set_unknown(client: Client, package_ahriman: Package, mocker: MockerFix
     """
     must add new package with unknown status
     """
-    add_mock = mocker.patch("ahriman.core.status.client.Client.add")
+    add_mock = mocker.patch("ahriman.core.status.client.Client.package_add")
     client.set_unknown(package_ahriman)
 
     add_mock.assert_called_once_with(package_ahriman, BuildStatusEnum.Unknown)
+
+
+def test_status_get(client: Client) -> None:
+    """
+    must return dummy status for web service
+    """
+    actual = client.status_get()
+    expected = InternalStatus(status=BuildStatus(timestamp=actual.status.timestamp))
+
+    assert actual == expected
+
+
+def test_status_update(client: Client) -> None:
+    """
+    must update self status without errors
+    """
+    client.status_update(BuildStatusEnum.Unknown)
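Note: the renamed client surface exercised above groups package-level calls under a package_ prefix and service-level calls under status_, while the set_* helpers remain thin wrappers. A hedged usage sketch based only on the assertions in these tests (the report_build helper is illustrative, not part of the change):

    from ahriman.core.status.client import Client

    def report_build(client: Client, package, success: bool) -> None:
        # set_success wraps package_add, set_failed wraps package_update,
        # exactly as the mocks above assert
        if success:
            client.set_success(package)
        else:
            client.set_failed(package.base)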
@@ -22,33 +22,6 @@ def test_force_no_report(configuration: Configuration, database: SQLite, mocker:
     load_mock.assert_called_once_with("x86_64", configuration, database, report=False)
 
 
-def test_get(watcher: Watcher, package_ahriman: Package) -> None:
-    """
-    must return package status
-    """
-    watcher.known = {package_ahriman.base: (package_ahriman, BuildStatus())}
-    package, status = watcher.get(package_ahriman.base)
-    assert package == package_ahriman
-    assert status.status == BuildStatusEnum.Unknown
-
-
-def test_get_failed(watcher: Watcher, package_ahriman: Package) -> None:
-    """
-    must fail on unknown package
-    """
-    with pytest.raises(UnknownPackageError):
-        watcher.get(package_ahriman.base)
-
-
-def test_get_logs(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must return package logs
-    """
-    logs_mock = mocker.patch("ahriman.core.database.SQLite.logs_get")
-    watcher.get_logs(package_ahriman.base)
-    logs_mock.assert_called_once_with(package_ahriman.base)
-
-
 def test_load(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must correctly load packages
@@ -77,109 +50,136 @@ def test_load_known(watcher: Watcher, package_ahriman: Package, mocker: MockerFi
     assert status.status == BuildStatusEnum.Success
 
 
-def test_remove(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_logs_get(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must return package logs
+    """
+    logs_mock = mocker.patch("ahriman.core.database.SQLite.logs_get")
+    watcher.logs_get(package_ahriman.base)
+    logs_mock.assert_called_once_with(package_ahriman.base)
+
+
+def test_logs_remove(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must remove package logs
+    """
+    logs_mock = mocker.patch("ahriman.core.database.SQLite.logs_remove")
+    watcher.logs_remove(package_ahriman.base, "42")
+    logs_mock.assert_called_once_with(package_ahriman.base, "42")
+
+
+def test_logs_update_new(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must create package logs record for new package
+    """
+    delete_mock = mocker.patch("ahriman.core.status.watcher.Watcher.logs_remove")
+    insert_mock = mocker.patch("ahriman.core.database.SQLite.logs_insert")
+
+    log_record_id = LogRecordId(package_ahriman.base, watcher._last_log_record_id.version)
+    assert watcher._last_log_record_id != log_record_id
+
+    watcher.logs_update(log_record_id, 42.01, "log record")
+    delete_mock.assert_called_once_with(package_ahriman.base, log_record_id.version)
+    insert_mock.assert_called_once_with(log_record_id, 42.01, "log record")
+
+    assert watcher._last_log_record_id == log_record_id
+
+
+def test_logs_update_update(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must create package logs record for current package
+    """
+    delete_mock = mocker.patch("ahriman.core.status.watcher.Watcher.logs_remove")
+    insert_mock = mocker.patch("ahriman.core.database.SQLite.logs_insert")
+
+    log_record_id = LogRecordId(package_ahriman.base, watcher._last_log_record_id.version)
+    watcher._last_log_record_id = log_record_id
+
+    watcher.logs_update(log_record_id, 42.01, "log record")
+    delete_mock.assert_not_called()
+    insert_mock.assert_called_once_with(log_record_id, 42.01, "log record")
+
+
+def test_package_get(watcher: Watcher, package_ahriman: Package) -> None:
+    """
+    must return package status
+    """
+    watcher.known = {package_ahriman.base: (package_ahriman, BuildStatus())}
+    package, status = watcher.package_get(package_ahriman.base)
+    assert package == package_ahriman
+    assert status.status == BuildStatusEnum.Unknown
+
+
+def test_package_get_failed(watcher: Watcher, package_ahriman: Package) -> None:
+    """
+    must fail on unknown package
+    """
+    with pytest.raises(UnknownPackageError):
+        watcher.package_get(package_ahriman.base)
+
+
+def test_package_remove(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must remove package base
     """
     cache_mock = mocker.patch("ahriman.core.database.SQLite.package_remove")
-    logs_mock = mocker.patch("ahriman.core.status.watcher.Watcher.remove_logs")
+    logs_mock = mocker.patch("ahriman.core.status.watcher.Watcher.logs_remove")
     watcher.known = {package_ahriman.base: (package_ahriman, BuildStatus())}
 
-    watcher.remove(package_ahriman.base)
+    watcher.package_remove(package_ahriman.base)
     assert not watcher.known
     cache_mock.assert_called_once_with(package_ahriman.base)
     logs_mock.assert_called_once_with(package_ahriman.base, None)
 
 
-def test_remove_logs(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must remove package logs
-    """
-    logs_mock = mocker.patch("ahriman.core.database.SQLite.logs_remove")
-    watcher.remove_logs(package_ahriman.base, 42)
-    logs_mock.assert_called_once_with(package_ahriman.base, 42)
-
-
-def test_remove_unknown(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_remove_unknown(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must not fail on unknown base removal
     """
     cache_mock = mocker.patch("ahriman.core.database.SQLite.package_remove")
 
-    watcher.remove(package_ahriman.base)
+    watcher.package_remove(package_ahriman.base)
     cache_mock.assert_called_once_with(package_ahriman.base)
 
 
-def test_update(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_update(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must update package status
     """
     cache_mock = mocker.patch("ahriman.core.database.SQLite.package_update")
 
-    watcher.update(package_ahriman.base, BuildStatusEnum.Unknown, package_ahriman)
+    watcher.package_update(package_ahriman.base, BuildStatusEnum.Unknown, package_ahriman)
     cache_mock.assert_called_once_with(package_ahriman, pytest.helpers.anyvar(int))
     package, status = watcher.known[package_ahriman.base]
     assert package == package_ahriman
     assert status.status == BuildStatusEnum.Unknown
 
 
-def test_update_ping(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_update_ping(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must update package status only for known package
     """
     cache_mock = mocker.patch("ahriman.core.database.SQLite.package_update")
     watcher.known = {package_ahriman.base: (package_ahriman, BuildStatus())}
 
-    watcher.update(package_ahriman.base, BuildStatusEnum.Success, None)
+    watcher.package_update(package_ahriman.base, BuildStatusEnum.Success, None)
     cache_mock.assert_called_once_with(package_ahriman, pytest.helpers.anyvar(int))
     package, status = watcher.known[package_ahriman.base]
     assert package == package_ahriman
     assert status.status == BuildStatusEnum.Success
 
 
-def test_update_unknown(watcher: Watcher, package_ahriman: Package) -> None:
+def test_package_update_unknown(watcher: Watcher, package_ahriman: Package) -> None:
     """
     must fail on unknown package status update only
     """
     with pytest.raises(UnknownPackageError):
-        watcher.update(package_ahriman.base, BuildStatusEnum.Unknown, None)
+        watcher.package_update(package_ahriman.base, BuildStatusEnum.Unknown, None)
 
 
-def test_update_logs_new(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must create package logs record for new package
-    """
-    delete_mock = mocker.patch("ahriman.core.status.watcher.Watcher.remove_logs")
-    insert_mock = mocker.patch("ahriman.core.database.SQLite.logs_insert")
-
-    log_record_id = LogRecordId(package_ahriman.base, watcher._last_log_record_id.process_id)
-    assert watcher._last_log_record_id != log_record_id
-
-    watcher.update_logs(log_record_id, 42.01, "log record")
-    delete_mock.assert_called_once_with(package_ahriman.base, log_record_id.process_id)
-    insert_mock.assert_called_once_with(log_record_id, 42.01, "log record")
-
-    assert watcher._last_log_record_id == log_record_id
-
-
-def test_update_logs_update(watcher: Watcher, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must create package logs record for current package
-    """
-    delete_mock = mocker.patch("ahriman.core.status.watcher.Watcher.remove_logs")
-    insert_mock = mocker.patch("ahriman.core.database.SQLite.logs_insert")
-
-    log_record_id = LogRecordId(package_ahriman.base, watcher._last_log_record_id.process_id)
-    watcher._last_log_record_id = log_record_id
-
-    watcher.update_logs(log_record_id, 42.01, "log record")
-    delete_mock.assert_not_called()
-    insert_mock.assert_called_once_with(log_record_id, 42.01, "log record")
-
-
-def test_update_self(watcher: Watcher) -> None:
+def test_status_update(watcher: Watcher) -> None:
     """
     must update service status
     """
-    watcher.update_self(BuildStatusEnum.Success)
+    watcher.status_update(BuildStatusEnum.Success)
     assert watcher.status.status == BuildStatusEnum.Success
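Note: the two logs_update tests above encode the version-based log removal this commit introduces (the old code keyed removal on process id). A hedged reconstruction of that behaviour, assuming LogRecordId exposes package_base/version and the watcher keeps a database handle (attribute names are assumptions, not confirmed by the diff):

    from ahriman.models.log_record_id import LogRecordId

    def logs_update_sketch(watcher, log_record_id: LogRecordId, created: float, message: str) -> None:
        # a new (package_base, version) pair drops previously stored records first;
        # further records for the same pair are simply appended
        if watcher._last_log_record_id != log_record_id:
            watcher.logs_remove(log_record_id.package_base, log_record_id.version)
            watcher._last_log_record_id = log_record_id
        watcher.database.logs_insert(log_record_id, created, message)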
@@ -5,12 +5,13 @@ import requests
 import requests_unixsocket
 
 from pytest_mock import MockerFixture
-from requests import Response
+from unittest.mock import call as MockCall
 
 from ahriman.core.configuration import Configuration
 from ahriman.core.status.web_client import WebClient
 from ahriman.models.build_status import BuildStatus, BuildStatusEnum
 from ahriman.models.internal_status import InternalStatus
+from ahriman.models.log_record_id import LogRecordId
 from ahriman.models.package import Package
 from ahriman.models.user import User
 
@@ -19,7 +20,6 @@ def test_login_url(web_client: WebClient) -> None:
     """
     must generate login url correctly
     """
-    assert web_client._login_url.startswith(web_client.address)
     assert web_client._login_url.endswith("/api/v1/login")
 
 
@@ -27,10 +27,24 @@ def test_status_url(web_client: WebClient) -> None:
     """
     must generate package status url correctly
     """
-    assert web_client._status_url.startswith(web_client.address)
     assert web_client._status_url.endswith("/api/v1/status")
 
 
+def test_logs_url(web_client: WebClient, package_ahriman: Package) -> None:
+    """
+    must generate logs url correctly
+    """
+    assert web_client._logs_url(package_ahriman.base).endswith(f"/api/v1/packages/{package_ahriman.base}/logs")
+
+
+def test_package_url(web_client: WebClient, package_ahriman: Package) -> None:
+    """
+    must generate package status url correctly
+    """
+    assert web_client._package_url("").endswith("/api/v1/packages")
+    assert web_client._package_url(package_ahriman.base).endswith(f"/api/v1/packages/{package_ahriman.base}")
+
+
 def test_parse_address(configuration: Configuration) -> None:
     """
     must extract address correctly
@@ -74,14 +88,15 @@ def test_login(web_client: WebClient, user: User, mocker: MockerFixture) -> None
     must login user
     """
     web_client.user = user
-    requests_mock = mocker.patch("requests.Session.post")
+    requests_mock = mocker.patch("requests.Session.request")
     payload = {
         "username": user.username,
         "password": user.password
     }
 
     web_client._login(requests.Session())
-    requests_mock.assert_called_once_with(pytest.helpers.anyvar(str, True), json=payload)
+    requests_mock.assert_called_once_with("POST", pytest.helpers.anyvar(str, True),
+                                          params=None, json=payload, files=None)
 
 
 def test_login_failed(web_client: WebClient, user: User, mocker: MockerFixture) -> None:
@@ -89,7 +104,7 @@ def test_login_failed(web_client: WebClient, user: User, mocker: MockerFixture)
     must suppress any exception happened during login
     """
     web_client.user = user
-    mocker.patch("requests.Session.post", side_effect=Exception())
+    mocker.patch("requests.Session.request", side_effect=Exception())
     web_client._login(requests.Session())
 
 
@@ -98,7 +113,7 @@ def test_login_failed_http_error(web_client: WebClient, user: User, mocker: Mock
     must suppress HTTP exception happened during login
     """
     web_client.user = user
-    mocker.patch("requests.Session.post", side_effect=requests.exceptions.HTTPError())
+    mocker.patch("requests.Session.request", side_effect=requests.exceptions.HTTPError())
     web_client._login(requests.Session())
 
 
@@ -106,265 +121,310 @@ def test_login_skip(web_client: WebClient, mocker: MockerFixture) -> None:
     """
     must skip login if no user set
     """
-    requests_mock = mocker.patch("requests.Session.post")
+    requests_mock = mocker.patch("requests.Session.request")
     web_client._login(requests.Session())
     requests_mock.assert_not_called()
 
 
-def test_logs_url(web_client: WebClient, package_ahriman: Package) -> None:
+def test_make_request(web_client: WebClient, mocker: MockerFixture) -> None:
     """
-    must generate logs url correctly
+    must make HTTP request
     """
-    assert web_client._logs_url(package_ahriman.base).startswith(web_client.address)
-    assert web_client._logs_url(package_ahriman.base).endswith(f"/api/v1/packages/{package_ahriman.base}/logs")
+    request_mock = mocker.patch("requests.Session.request")
+
+    assert web_client.make_request("GET", "/url1") is not None
+    assert web_client.make_request("GET", "/url2", params=[("param", "value")]) is not None
+
+    assert web_client.make_request("POST", "/url3") is not None
+    assert web_client.make_request("POST", "/url4", json={"param": "value"}) is not None
+    # we don't want to put full descriptor here
+    assert web_client.make_request("POST", "/url5", files={"file": "tuple"}) is not None
+
+    assert web_client.make_request("DELETE", "/url6") is not None
+
+    request_mock.assert_has_calls([
+        MockCall("GET", f"{web_client.address}/url1", params=None, json=None, files=None),
+        MockCall().raise_for_status(),
+        MockCall("GET", f"{web_client.address}/url2", params=[("param", "value")], json=None, files=None),
+        MockCall().raise_for_status(),
+        MockCall("POST", f"{web_client.address}/url3", params=None, json=None, files=None),
+        MockCall().raise_for_status(),
+        MockCall("POST", f"{web_client.address}/url4", params=None, json={"param": "value"}, files=None),
+        MockCall().raise_for_status(),
+        MockCall("POST", f"{web_client.address}/url5", params=None, json=None, files={"file": "tuple"}),
+        MockCall().raise_for_status(),
+        MockCall("DELETE", f"{web_client.address}/url6", params=None, json=None, files=None),
+        MockCall().raise_for_status(),
+    ])
 
 
-def test_package_url(web_client: WebClient, package_ahriman: Package) -> None:
+def test_make_request_failed(web_client: WebClient, mocker: MockerFixture) -> None:
     """
-    must generate package status url correctly
+    must make HTTP request
     """
-    assert web_client._package_url("").startswith(web_client.address)
-    assert web_client._package_url("").endswith(f"/api/v1/packages")
-    assert web_client._package_url(package_ahriman.base).startswith(web_client.address)
-    assert web_client._package_url(package_ahriman.base).endswith(f"/api/v1/packages/{package_ahriman.base}")
+    mocker.patch("requests.Session.request", side_effect=Exception())
+    with pytest.raises(Exception):
+        web_client.make_request("GET", "url")
 
 
-def test_add(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_add(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must process package addition
     """
-    requests_mock = mocker.patch("requests.Session.post")
+    requests_mock = mocker.patch("requests.Session.request")
     payload = pytest.helpers.get_package_status(package_ahriman)
 
-    web_client.add(package_ahriman, BuildStatusEnum.Unknown)
-    requests_mock.assert_called_once_with(pytest.helpers.anyvar(str, True), json=payload)
+    web_client.package_add(package_ahriman, BuildStatusEnum.Unknown)
+    requests_mock.assert_called_once_with("POST", pytest.helpers.anyvar(str, True),
+                                          params=None, json=payload, files=None)
 
 
-def test_add_failed(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_add_failed(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must suppress any exception happened during addition
     """
-    mocker.patch("requests.Session.post", side_effect=Exception())
-    web_client.add(package_ahriman, BuildStatusEnum.Unknown)
+    mocker.patch("requests.Session.request", side_effect=Exception())
+    web_client.package_add(package_ahriman, BuildStatusEnum.Unknown)
 
 
-def test_add_failed_http_error(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_add_failed_http_error(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must suppress HTTP exception happened during addition
     """
-    mocker.patch("requests.Session.post", side_effect=requests.exceptions.HTTPError())
-    web_client.add(package_ahriman, BuildStatusEnum.Unknown)
+    mocker.patch("requests.Session.request", side_effect=requests.exceptions.HTTPError())
+    web_client.package_add(package_ahriman, BuildStatusEnum.Unknown)
 
 
-def test_add_failed_suppress(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_add_failed_suppress(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must suppress any exception happened during addition and don't log
     """
     web_client.suppress_errors = True
-    mocker.patch("requests.Session.post", side_effect=Exception())
+    mocker.patch("requests.Session.request", side_effect=Exception())
     logging_mock = mocker.patch("logging.exception")
 
-    web_client.add(package_ahriman, BuildStatusEnum.Unknown)
+    web_client.package_add(package_ahriman, BuildStatusEnum.Unknown)
     logging_mock.assert_not_called()
 
 
-def test_add_failed_http_error_suppress(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_add_failed_http_error_suppress(web_client: WebClient, package_ahriman: Package,
+                                                mocker: MockerFixture) -> None:
     """
     must suppress HTTP exception happened during addition and don't log
     """
     web_client.suppress_errors = True
-    mocker.patch("requests.Session.post", side_effect=requests.exceptions.HTTPError())
+    mocker.patch("requests.Session.request", side_effect=requests.exceptions.HTTPError())
    logging_mock = mocker.patch("logging.exception")
 
-    web_client.add(package_ahriman, BuildStatusEnum.Unknown)
+    web_client.package_add(package_ahriman, BuildStatusEnum.Unknown)
     logging_mock.assert_not_called()
 
 
-def test_get_all(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_get_all(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must return all packages status
     """
     response = [pytest.helpers.get_package_status_extended(package_ahriman)]
-    response_obj = Response()
+    response_obj = requests.Response()
     response_obj._content = json.dumps(response).encode("utf8")
     response_obj.status_code = 200
 
-    requests_mock = mocker.patch("requests.Session.get", return_value=response_obj)
+    requests_mock = mocker.patch("requests.Session.request", return_value=response_obj)
 
-    result = web_client.get(None)
-    requests_mock.assert_called_once_with(web_client._package_url())
+    result = web_client.package_get(None)
+    requests_mock.assert_called_once_with("GET", f"{web_client.address}{web_client._package_url()}",
+                                          params=None, json=None, files=None)
     assert len(result) == len(response)
     assert (package_ahriman, BuildStatusEnum.Unknown) in [(package, status.status) for package, status in result]
 
 
-def test_get_failed(web_client: WebClient, mocker: MockerFixture) -> None:
+def test_package_get_failed(web_client: WebClient, mocker: MockerFixture) -> None:
     """
     must suppress any exception happened during status getting
     """
-    mocker.patch("requests.Session.get", side_effect=Exception())
-    assert web_client.get(None) == []
+    mocker.patch("requests.Session.request", side_effect=Exception())
+    assert web_client.package_get(None) == []
 
 
-def test_get_failed_http_error(web_client: WebClient, mocker: MockerFixture) -> None:
+def test_package_get_failed_http_error(web_client: WebClient, mocker: MockerFixture) -> None:
     """
     must suppress HTTP exception happened during status getting
     """
-    mocker.patch("requests.Session.get", side_effect=requests.exceptions.HTTPError())
-    assert web_client.get(None) == []
+    mocker.patch("requests.Session.request", side_effect=requests.exceptions.HTTPError())
+    assert web_client.package_get(None) == []
 
 
-def test_get_single(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+def test_package_get_single(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must return single package status
     """
     response = [pytest.helpers.get_package_status_extended(package_ahriman)]
-    response_obj = Response()
+    response_obj = requests.Response()
     response_obj._content = json.dumps(response).encode("utf8")
     response_obj.status_code = 200
 
-    requests_mock = mocker.patch("requests.Session.get", return_value=response_obj)
+    requests_mock = mocker.patch("requests.Session.request", return_value=response_obj)
 
-    result = web_client.get(package_ahriman.base)
-    requests_mock.assert_called_once_with(web_client._package_url(package_ahriman.base))
+    result = web_client.package_get(package_ahriman.base)
+    requests_mock.assert_called_once_with("GET",
+                                          f"{web_client.address}{web_client._package_url(package_ahriman.base)}",
+                                          params=None, json=None, files=None)
     assert len(result) == len(response)
     assert (package_ahriman, BuildStatusEnum.Unknown) in [(package, status.status) for package, status in result]
 
 
-def test_get_internal(web_client: WebClient, mocker: MockerFixture) -> None:
+def test_package_logs(web_client: WebClient, log_record: logging.LogRecord, package_ahriman: Package,
+                      mocker: MockerFixture) -> None:
+    """
+    must process log record
+    """
+    requests_mock = mocker.patch("requests.Session.request")
+    payload = {
+        "created": log_record.created,
+        "message": log_record.getMessage(),
+        "version": package_ahriman.version,
+    }
+
+    web_client.package_logs(LogRecordId(package_ahriman.base, package_ahriman.version), log_record)
+    requests_mock.assert_called_once_with("POST", pytest.helpers.anyvar(str, True),
+                                          params=None, json=payload, files=None)
+
+
+def test_package_logs_failed(web_client: WebClient, log_record: logging.LogRecord, package_ahriman: Package,
+                             mocker: MockerFixture) -> None:
+    """
+    must pass exception during log post
+    """
+    mocker.patch("requests.Session.request", side_effect=Exception())
+    log_record.package_base = package_ahriman.base
+    with pytest.raises(Exception):
+        web_client.package_logs(LogRecordId(package_ahriman.base, package_ahriman.version), log_record)
+
+
+def test_package_logs_failed_http_error(web_client: WebClient, log_record: logging.LogRecord, package_ahriman: Package,
+                                        mocker: MockerFixture) -> None:
+    """
+    must pass exception during log post
+    """
+    mocker.patch("requests.Session.request", side_effect=requests.exceptions.HTTPError())
+    log_record.package_base = package_ahriman.base
+    with pytest.raises(Exception):
+        web_client.package_logs(LogRecordId(package_ahriman.base, package_ahriman.version), log_record)
+
+
+def test_package_remove(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must process package removal
+    """
+    requests_mock = mocker.patch("requests.Session.request")
+
+    web_client.package_remove(package_ahriman.base)
+    requests_mock.assert_called_once_with("DELETE", pytest.helpers.anyvar(str, True),
+                                          params=None, json=None, files=None)
+
+
+def test_package_remove_failed(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must suppress any exception happened during removal
+    """
+    mocker.patch("requests.Session.request", side_effect=Exception())
+    web_client.package_remove(package_ahriman.base)
+
+
+def test_package_remove_failed_http_error(web_client: WebClient, package_ahriman: Package,
+                                          mocker: MockerFixture) -> None:
+    """
+    must suppress HTTP exception happened during removal
+    """
+    mocker.patch("requests.Session.request", side_effect=requests.exceptions.HTTPError())
+    web_client.package_remove(package_ahriman.base)
+
+
+def test_package_update(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must process package update
+    """
+    requests_mock = mocker.patch("requests.Session.request")
+
+    web_client.package_update(package_ahriman.base, BuildStatusEnum.Unknown)
+    requests_mock.assert_called_once_with("POST", pytest.helpers.anyvar(str, True), params=None, json={
+        "status": BuildStatusEnum.Unknown.value
+    }, files=None)
+
+
+def test_package_update_failed(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must suppress any exception happened during update
+    """
+    mocker.patch("requests.Session.request", side_effect=Exception())
+    web_client.package_update(package_ahriman.base, BuildStatusEnum.Unknown)
+
+
+def test_package_update_failed_http_error(web_client: WebClient, package_ahriman: Package,
+                                          mocker: MockerFixture) -> None:
+    """
+    must suppress HTTP exception happened during update
+    """
+    mocker.patch("requests.Session.request", side_effect=requests.exceptions.HTTPError())
+    web_client.package_update(package_ahriman.base, BuildStatusEnum.Unknown)
+
+
+def test_status_get(web_client: WebClient, mocker: MockerFixture) -> None:
     """
     must return web service status
     """
     status = InternalStatus(status=BuildStatus(), architecture="x86_64")
-    response_obj = Response()
+    response_obj = requests.Response()
     response_obj._content = json.dumps(status.view()).encode("utf8")
     response_obj.status_code = 200
 
-    requests_mock = mocker.patch("requests.Session.get", return_value=response_obj)
+    requests_mock = mocker.patch("requests.Session.request", return_value=response_obj)
 
-    result = web_client.get_internal()
-    requests_mock.assert_called_once_with(web_client._status_url)
+    result = web_client.status_get()
+    requests_mock.assert_called_once_with("GET", f"{web_client.address}{web_client._status_url}",
+                                          params=None, json=None, files=None)
     assert result.architecture == "x86_64"
 
 
-def test_get_internal_failed(web_client: WebClient, mocker: MockerFixture) -> None:
+def test_status_get_failed(web_client: WebClient, mocker: MockerFixture) -> None:
     """
     must suppress any exception happened during web service status getting
     """
-    mocker.patch("requests.Session.get", side_effect=Exception())
-    assert web_client.get_internal().architecture is None
+    mocker.patch("requests.Session.request", side_effect=Exception())
+    assert web_client.status_get().architecture is None
 
 
-def test_get_internal_failed_http_error(web_client: WebClient, mocker: MockerFixture) -> None:
+def test_status_get_failed_http_error(web_client: WebClient, mocker: MockerFixture) -> None:
     """
     must suppress HTTP exception happened during web service status getting
     """
-    mocker.patch("requests.Session.get", side_effect=requests.exceptions.HTTPError())
-    assert web_client.get_internal().architecture is None
+    mocker.patch("requests.Session.request", side_effect=requests.exceptions.HTTPError())
+    assert web_client.status_get().architecture is None
 
 
-def test_logs(web_client: WebClient, log_record: logging.LogRecord, package_ahriman: Package,
-              mocker: MockerFixture) -> None:
-    """
-    must process log record
-    """
-    requests_mock = mocker.patch("requests.Session.post")
-    payload = {
-        "created": log_record.created,
-        "message": log_record.getMessage(),
-        "process_id": log_record.process,
-    }
-
-    web_client.logs(package_ahriman.base, log_record)
-    requests_mock.assert_called_once_with(pytest.helpers.anyvar(str, True), json=payload)
-
-
-def test_log_failed(web_client: WebClient, log_record: logging.LogRecord, package_ahriman: Package,
-                    mocker: MockerFixture) -> None:
-    """
-    must pass exception during log post
-    """
-    mocker.patch("requests.Session.post", side_effect=Exception())
-    log_record.package_base = package_ahriman.base
-    with pytest.raises(Exception):
-        web_client.logs(package_ahriman.base, log_record)
-
-
-def test_remove(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must process package removal
-    """
-    requests_mock = mocker.patch("requests.Session.delete")
-
-    web_client.remove(package_ahriman.base)
-    requests_mock.assert_called_once_with(pytest.helpers.anyvar(str, True))
-
-
-def test_remove_failed(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must suppress any exception happened during removal
-    """
-    mocker.patch("requests.Session.delete", side_effect=Exception())
-    web_client.remove(package_ahriman.base)
-
-
-def test_remove_failed_http_error(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must suppress HTTP exception happened during removal
-    """
-    mocker.patch("requests.Session.delete", side_effect=requests.exceptions.HTTPError())
-    web_client.remove(package_ahriman.base)
-
-
-def test_update(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must process package update
-    """
-    requests_mock = mocker.patch("requests.Session.post")
-
-    web_client.update(package_ahriman.base, BuildStatusEnum.Unknown)
-    requests_mock.assert_called_once_with(pytest.helpers.anyvar(str, True), json={
-        "status": BuildStatusEnum.Unknown.value})
-
-
-def test_update_failed(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must suppress any exception happened during update
-    """
-    mocker.patch("requests.Session.post", side_effect=Exception())
-    web_client.update(package_ahriman.base, BuildStatusEnum.Unknown)
-
-
-def test_update_failed_http_error(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
-    """
-    must suppress HTTP exception happened during update
-    """
-    mocker.patch("requests.Session.post", side_effect=requests.exceptions.HTTPError())
-    web_client.update(package_ahriman.base, BuildStatusEnum.Unknown)
-
-
-def test_update_self(web_client: WebClient, mocker: MockerFixture) -> None:
+def test_status_update(web_client: WebClient, mocker: MockerFixture) -> None:
     """
     must process service update
     """
-    requests_mock = mocker.patch("requests.Session.post")
+    requests_mock = mocker.patch("requests.Session.request")
 
-    web_client.update_self(BuildStatusEnum.Unknown)
-    requests_mock.assert_called_once_with(pytest.helpers.anyvar(str, True), json={
-        "status": BuildStatusEnum.Unknown.value})
+    web_client.status_update(BuildStatusEnum.Unknown)
+    requests_mock.assert_called_once_with("POST", pytest.helpers.anyvar(str, True), params=None, json={
+        "status": BuildStatusEnum.Unknown.value
+    }, files=None)
 
 
-def test_update_self_failed(web_client: WebClient, mocker: MockerFixture) -> None:
+def test_status_update_self_failed(web_client: WebClient, mocker: MockerFixture) -> None:
     """
     must suppress any exception happened during service update
     """
-    mocker.patch("requests.Session.post", side_effect=Exception())
-    web_client.update_self(BuildStatusEnum.Unknown)
+    mocker.patch("requests.Session.request", side_effect=Exception())
+    web_client.status_update(BuildStatusEnum.Unknown)
 
 
-def test_update_self_failed_http_error(web_client: WebClient, mocker: MockerFixture) -> None:
+def test_status_update_failed_http_error(web_client: WebClient, mocker: MockerFixture) -> None:
     """
     must suppress HTTP exception happened during service update
     """
-    mocker.patch("requests.Session.post", side_effect=requests.exceptions.HTTPError())
-    web_client.update_self(BuildStatusEnum.Unknown)
+    mocker.patch("requests.Session.request", side_effect=requests.exceptions.HTTPError())
+    web_client.status_update(BuildStatusEnum.Unknown)
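Note: every web client call above now funnels through a single make_request entry point instead of per-verb Session.get/post/delete helpers. A hedged sketch of the wrapper shape implied by the assertions (standalone illustration, not the project's actual implementation):

    import requests

    def make_request_sketch(session: requests.Session, address: str, method: str, path: str,
                            params=None, json=None, files=None) -> requests.Response:
        # one code path for GET/POST/DELETE, as the MockCall sequence asserts:
        # Session.request(method, address + path, ...) followed by raise_for_status()
        response = session.request(method, f"{address}{path}", params=params, json=json, files=files)
        response.raise_for_status()
        return response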
@ -1,9 +1,17 @@
|
|||||||
from pytest_mock import MockerFixture
|
from pytest_mock import MockerFixture
|
||||||
from unittest.mock import MagicMock
|
from unittest.mock import MagicMock, call as MockCall
|
||||||
|
|
||||||
from ahriman.core.spawn import Spawn
|
from ahriman.core.spawn import Spawn
|
||||||
|
|
||||||
|
|
||||||
|
def test_boolean_action_argument() -> None:
|
||||||
|
"""
|
||||||
|
must correctly convert argument to boolean flag
|
||||||
|
"""
|
||||||
|
assert Spawn.boolean_action_argument("option", True) == "option"
|
||||||
|
assert Spawn.boolean_action_argument("option", False) == "no-option"
|
||||||
|
|
||||||
|
|
||||||
def test_process(spawner: Spawn) -> None:
|
def test_process(spawner: Spawn) -> None:
|
||||||
"""
|
"""
|
||||||
must process external process run correctly
|
must process external process run correctly
|
||||||
@@ -15,9 +23,10 @@ def test_process(spawner: Spawn) -> None:
     spawner.process(callback, args, spawner.architecture, "id", spawner.queue)

     callback.assert_called_once_with(args, spawner.architecture)
-    (uuid, status) = spawner.queue.get()
+    (uuid, status, time) = spawner.queue.get()
     assert uuid == "id"
     assert status
+    assert time >= 0
     assert spawner.queue.empty()


@@ -30,9 +39,10 @@ def test_process_error(spawner: Spawn) -> None:
     spawner.process(callback, MagicMock(), spawner.architecture, "id", spawner.queue)

-    (uuid, status) = spawner.queue.get()
+    (uuid, status, time) = spawner.queue.get()
     assert uuid == "id"
     assert not status
+    assert time >= 0
     assert spawner.queue.empty()

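Both process tests now unpack a three-element tuple from the queue, so the worker evidently reports how long the callback ran in addition to its identifier and status. A rough sketch of that queue protocol, assuming the elapsed time is taken from a monotonic clock (the actual Spawn.process may differ in details):

import time
from typing import Any, Callable


def process(callback: Callable[[Any, str], bool], args: Any, architecture: str,
            process_id: str, queue: Any) -> None:
    # queue is the multiprocessing.Queue shared with the parent Spawn thread
    start_time = time.monotonic()
    status = callback(args, architecture)  # True on success, False on failure
    consumed_time = time.monotonic() - start_time
    queue.put((process_id, status, consumed_time))  # three fields now, time is always >= 0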
@@ -42,7 +52,7 @@ def test_spawn_process(spawner: Spawn, mocker: MockerFixture) -> None:
     """
     start_mock = mocker.patch("multiprocessing.Process.start")

-    spawner._spawn_process("add", "ahriman", now="", maybe="?", none=None)
+    assert spawner._spawn_process("add", "ahriman", now="", maybe="?", none=None)
     start_mock.assert_called_once_with()
     spawner.args_parser.parse_args.assert_called_once_with(
         spawner.command_arguments + [
@@ -51,12 +61,22 @@ def test_spawn_process(spawner: Spawn, mocker: MockerFixture) -> None:
     )


+def test_has_process(spawner: Spawn) -> None:
+    """
+    must correctly determine if there is a process
+    """
+    assert not spawner.has_process("id")
+
+    spawner.active["id"] = MagicMock()
+    assert spawner.has_process("id")
+
+
 def test_key_import(spawner: Spawn, mocker: MockerFixture) -> None:
     """
     must call key import
     """
     spawn_mock = mocker.patch("ahriman.core.spawn.Spawn._spawn_process")
-    spawner.key_import("0xdeadbeaf", None)
+    assert spawner.key_import("0xdeadbeaf", None)
     spawn_mock.assert_called_once_with("service-key-import", "0xdeadbeaf")

@@ -65,7 +85,7 @@ def test_key_import_with_server(spawner: Spawn, mocker: MockerFixture) -> None:
     must call key import with server specified
     """
     spawn_mock = mocker.patch("ahriman.core.spawn.Spawn._spawn_process")
-    spawner.key_import("0xdeadbeaf", "keyserver.ubuntu.com")
+    assert spawner.key_import("0xdeadbeaf", "keyserver.ubuntu.com")
     spawn_mock.assert_called_once_with("service-key-import", "0xdeadbeaf", **{"key-server": "keyserver.ubuntu.com"})


@@ -74,7 +94,7 @@ def test_packages_add(spawner: Spawn, mocker: MockerFixture) -> None:
     must call package addition
     """
     spawn_mock = mocker.patch("ahriman.core.spawn.Spawn._spawn_process")
-    spawner.packages_add(["ahriman", "linux"], None, now=False)
+    assert spawner.packages_add(["ahriman", "linux"], None, now=False)
     spawn_mock.assert_called_once_with("package-add", "ahriman", "linux", username=None)


@@ -83,7 +103,7 @@ def test_packages_add_with_build(spawner: Spawn, mocker: MockerFixture) -> None:
     must call package addition with update
     """
     spawn_mock = mocker.patch("ahriman.core.spawn.Spawn._spawn_process")
-    spawner.packages_add(["ahriman", "linux"], None, now=True)
+    assert spawner.packages_add(["ahriman", "linux"], None, now=True)
     spawn_mock.assert_called_once_with("package-add", "ahriman", "linux", username=None, now="")


@@ -92,7 +112,7 @@ def test_packages_add_with_username(spawner: Spawn, mocker: MockerFixture) -> None:
     must call package addition with username
     """
     spawn_mock = mocker.patch("ahriman.core.spawn.Spawn._spawn_process")
-    spawner.packages_add(["ahriman", "linux"], "username", now=False)
+    assert spawner.packages_add(["ahriman", "linux"], "username", now=False)
     spawn_mock.assert_called_once_with("package-add", "ahriman", "linux", username="username")


@@ -101,7 +121,7 @@ def test_packages_rebuild(spawner: Spawn, mocker: MockerFixture) -> None:
     must call package rebuild
     """
     spawn_mock = mocker.patch("ahriman.core.spawn.Spawn._spawn_process")
-    spawner.packages_rebuild("python", "packager")
+    assert spawner.packages_rebuild("python", "packager")
     spawn_mock.assert_called_once_with("repo-rebuild", **{"depends-on": "python", "username": "packager"})


@@ -110,7 +130,7 @@ def test_packages_remove(spawner: Spawn, mocker: MockerFixture) -> None:
     must call package removal
     """
     spawn_mock = mocker.patch("ahriman.core.spawn.Spawn._spawn_process")
-    spawner.packages_remove(["ahriman", "linux"])
+    assert spawner.packages_remove(["ahriman", "linux"])
     spawn_mock.assert_called_once_with("package-remove", "ahriman", "linux")

@@ -119,8 +139,26 @@ def test_packages_update(spawner: Spawn, mocker: MockerFixture) -> None:
     must call repo update
     """
     spawn_mock = mocker.patch("ahriman.core.spawn.Spawn._spawn_process")
-    spawner.packages_update("packager")
-    spawn_mock.assert_called_once_with("repo-update", username="packager")
+
+    assert spawner.packages_update("packager", aur=True, local=True, manual=True)
+    args = {"username": "packager", "aur": "", "local": "", "manual": ""}
+    spawn_mock.assert_called_once_with("repo-update", **args)
+    spawn_mock.reset_mock()
+
+    assert spawner.packages_update("packager", aur=False, local=True, manual=True)
+    args = {"username": "packager", "no-aur": "", "local": "", "manual": ""}
+    spawn_mock.assert_called_once_with("repo-update", **args)
+    spawn_mock.reset_mock()
+
+    assert spawner.packages_update("packager", aur=True, local=False, manual=True)
+    args = {"username": "packager", "aur": "", "no-local": "", "manual": ""}
+    spawn_mock.assert_called_once_with("repo-update", **args)
+    spawn_mock.reset_mock()
+
+    assert spawner.packages_update("packager", aur=True, local=True, manual=False)
+    args = {"username": "packager", "aur": "", "local": "", "no-manual": ""}
+    spawn_mock.assert_called_once_with("repo-update", **args)
+    spawn_mock.reset_mock()


 def test_run(spawner: Spawn, mocker: MockerFixture) -> None:
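The expanded test_packages_update shows how the aur/local/manual switches are expected to map onto repo-update arguments: a True flag keeps its name, a False flag gets a no- prefix, exactly what boolean_action_argument produces. A hedged sketch of that argument construction (the helper name and the return shape are assumptions made for illustration):

def packages_update_arguments(username: str | None, *, aur: bool, local: bool, manual: bool) -> dict[str, str | None]:
    def boolean_action_argument(name: str, value: bool) -> str:
        return name if value else f"no-{name}"

    # empty string values mean "flag without a value" on the spawned command line
    return {
        "username": username,
        boolean_action_argument("aur", aur): "",
        boolean_action_argument("local", local): "",
        boolean_action_argument("manual", manual): "",
    }


assert packages_update_arguments("packager", aur=False, local=True, manual=True) == \
    {"username": "packager", "no-aur": "", "local": "", "manual": ""}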
@@ -129,8 +167,8 @@ def test_run(spawner: Spawn, mocker: MockerFixture) -> None:
     """
     logging_mock = mocker.patch("logging.Logger.info")

-    spawner.queue.put(("1", False))
-    spawner.queue.put(("2", True))
+    spawner.queue.put(("1", False, 1))
+    spawner.queue.put(("2", True, 1))
     spawner.queue.put(None)  # terminate

     spawner.run()
@@ -144,8 +182,8 @@ def test_run_pop(spawner: Spawn) -> None:
     first = spawner.active["1"] = MagicMock()
     second = spawner.active["2"] = MagicMock()

-    spawner.queue.put(("1", False))
-    spawner.queue.put(("2", True))
+    spawner.queue.put(("1", False, 1))
+    spawner.queue.put(("2", True, 1))
     spawner.queue.put(None)  # terminate

     spawner.run()

@@ -273,6 +273,15 @@ def test_package_like(package_ahriman: Package) -> None:
     assert package_like(package_ahriman.packages[package_ahriman.base].filepath)


+def test_package_like_hidden(package_ahriman: Package) -> None:
+    """
+    package_like must return false for hidden files
+    """
+    package_file = package_ahriman.packages[package_ahriman.base].filepath
+    hidden_file = package_file.parent / f".{package_file.name}"
+    assert not package_like(hidden_file)
+
+
 def test_package_like_sig(package_ahriman: Package) -> None:
     """
     package_like must return false for signature files
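Together with the existing checks, the new hidden-file test describes package_like as a cheap filename filter: real package archives pass, dot-files and detached signatures do not. A minimal sketch consistent with these tests; note that the ".pkg." substring check is an assumption based on Arch package naming and is not taken from the source.

from pathlib import Path


def package_like(filename: Path) -> bool:
    name = filename.name
    # hidden files and *.sig signatures are skipped, anything that looks like a package archive passes
    return not name.startswith(".") and ".pkg." in name and not name.endswith(".sig")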
@@ -5,6 +5,7 @@ from unittest.mock import MagicMock

 from ahriman.core.configuration import Configuration
 from ahriman.core.upload.github import Github
+from ahriman.core.upload.remote_service import RemoteService
 from ahriman.core.upload.rsync import Rsync
 from ahriman.core.upload.s3 import S3


@@ -45,6 +46,22 @@ def github_release() -> dict[str, Any]:
     }


+@pytest.fixture
+def remote_service(configuration: Configuration) -> RemoteService:
+    """
+    fixture for remote service synchronization
+
+    Args:
+        configuration(Configuration): configuration fixture
+
+    Returns:
+        RemoteService: remote service test instance
+    """
+    configuration.set_option("web", "host", "localhost")
+    configuration.set_option("web", "port", "8080")
+    return RemoteService("x86_64", configuration, "remote-service")
+
+
 @pytest.fixture
 def rsync(configuration: Configuration) -> Rsync:
     """
@@ -47,7 +47,7 @@ def test_request(github: Github, mocker: MockerFixture) -> None:
     must call request method
     """
     response_mock = MagicMock()
-    request_mock = mocker.patch("requests.request", return_value=response_mock)
+    request_mock = mocker.patch("requests.Session.request", return_value=response_mock)

     github._request("GET", "url", arg="arg")
     request_mock.assert_called_once_with("GET", "url", auth=github.auth, timeout=github.timeout, arg="arg")
@@ -58,6 +58,6 @@ def test_request_exception(github: Github, mocker: MockerFixture) -> None:
     """
     must call request method and log HTTPError exception
     """
-    mocker.patch("requests.request", side_effect=requests.HTTPError())
+    mocker.patch("requests.Session.request", side_effect=requests.HTTPError())
     with pytest.raises(requests.HTTPError):
         github._request("GET", "url", arg="arg")
tests/ahriman/core/upload/test_remote_service.py (new file, 82 lines)
@@ -0,0 +1,82 @@
+import pytest
+
+from pathlib import Path
+from pytest_mock import MockerFixture
+from unittest.mock import MagicMock, call as MockCall
+
+from ahriman.core.upload.remote_service import RemoteService
+from ahriman.models.package import Package
+
+
+def test_session(remote_service: RemoteService, mocker: MockerFixture) -> None:
+    """
+    must generate ahriman session
+    """
+    upload_mock = mocker.patch("ahriman.core.status.web_client.WebClient._create_session")
+    assert remote_service.session
+    upload_mock.assert_called_once_with(use_unix_socket=False)
+
+
+def test_package_upload(remote_service: RemoteService, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must upload package to remote host
+    """
+    mocker.patch("pathlib.Path.is_file", return_value=False)
+    file_mock = MagicMock()
+    open_mock = mocker.patch("pathlib.Path.open", return_value=file_mock)
+    upload_mock = mocker.patch("ahriman.core.upload.http_upload.HttpUpload._request")
+    filename = package_ahriman.packages[package_ahriman.base].filename
+
+    remote_service.sync(Path("local"), [package_ahriman])
+    open_mock.assert_called_once_with("rb")
+    file_mock.close.assert_called_once()
+    upload_mock.assert_called_once_with("POST", f"{remote_service.client.address}/api/v1/service/upload", files={
+        "package": (filename, pytest.helpers.anyvar(int), "application/octet-stream", {})
+    })
+
+
+def test_package_upload_with_signature(remote_service: RemoteService, package_ahriman: Package,
+                                       mocker: MockerFixture) -> None:
+    """
+    must upload package to remote host with signatures
+    """
+    mocker.patch("pathlib.Path.is_file", return_value=True)
+    file_mock = MagicMock()
+    open_mock = mocker.patch("pathlib.Path.open", return_value=file_mock)
+    upload_mock = mocker.patch("ahriman.core.upload.http_upload.HttpUpload._request")
+    filename = package_ahriman.packages[package_ahriman.base].filename
+
+    remote_service.sync(Path("local"), [package_ahriman])
+    open_mock.assert_has_calls([MockCall("rb"), MockCall("rb")])
+    file_mock.close.assert_has_calls([MockCall(), MockCall()])
+    upload_mock.assert_called_once_with(
+        "POST", f"{remote_service.client.address}/api/v1/service/upload", files={
+            "package": (filename, pytest.helpers.anyvar(int), "application/octet-stream", {}),
+            "signature": (f"{filename}.sig", pytest.helpers.anyvar(int), "application/octet-stream", {})
+        }
+    )
+
+
+def test_package_upload_no_filename(remote_service: RemoteService, package_ahriman: Package,
+                                    mocker: MockerFixture) -> None:
+    """
+    must skip upload if no filename set
+    """
+    open_mock = mocker.patch("pathlib.Path.open")
+    upload_mock = mocker.patch("ahriman.core.status.web_client.WebClient.make_request")
+    package_ahriman.packages[package_ahriman.base].filename = None
+
+    remote_service.sync(Path("local"), [package_ahriman])
+    open_mock.assert_not_called()
+    upload_mock.assert_not_called()
+
+
+def test_sync(remote_service: RemoteService, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must run sync command
+    """
+    upload_mock = mocker.patch("ahriman.core.upload.remote_service.RemoteService.package_upload")
+    local = Path("local")
+
+    remote_service.sync(local, [package_ahriman])
+    upload_mock.assert_called_once_with(local, package_ahriman)
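These tests outline the cross-service upload flow: the archive and, when present, its detached signature are sent as one multipart POST to /api/v1/service/upload, every opened handle is closed, and packages without a filename are skipped. A hedged sketch of that flow is given below; the genuine implementation lives in ahriman.core.upload.remote_service and may differ in error handling, authentication and helper names (make_request is taken here as a plain callable parameter).

from pathlib import Path
from typing import Callable


def package_upload(address: str, path: Path, filename: str | None, make_request: Callable[..., None]) -> None:
    if filename is None:
        return  # nothing to upload for this package

    archive = path / filename
    signature = path / f"{filename}.sig"

    files, handles = {}, []
    try:
        package_fd = archive.open("rb")
        handles.append(package_fd)
        files["package"] = (filename, package_fd, "application/octet-stream", {})

        if signature.is_file():  # signatures are optional
            signature_fd = signature.open("rb")
            handles.append(signature_fd)
            files["signature"] = (signature.name, signature_fd, "application/octet-stream", {})

        make_request("POST", f"{address}/api/v1/service/upload", files=files)
    finally:
        for fd in handles:
            fd.close()  # the tests assert that every opened handle is closed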
@@ -53,3 +53,15 @@ def test_upload_github(configuration: Configuration, mocker: MockerFixture) -> None:
     upload_mock = mocker.patch("ahriman.core.upload.github.Github.sync")
     Upload.load("x86_64", configuration, "github").run(Path("path"), [])
     upload_mock.assert_called_once_with(Path("path"), [])
+
+
+def test_upload_ahriman(configuration: Configuration, mocker: MockerFixture) -> None:
+    """
+    must upload via ahriman
+    """
+    upload_mock = mocker.patch("ahriman.core.upload.remote_service.RemoteService.sync")
+    configuration.set_option("web", "host", "localhost")
+    configuration.set_option("web", "port", "8080")
+
+    Upload.load("x86_64", configuration, "remote-service").run(Path("path"), [])
+    upload_mock.assert_called_once_with(Path("path"), [])
@@ -23,3 +23,8 @@ def test_from_option_valid() -> None:
     assert ReportSettings.from_option("telegram") == ReportSettings.Telegram
     assert ReportSettings.from_option("TElegraM") == ReportSettings.Telegram
+
+    assert ReportSettings.from_option("remote-call") == ReportSettings.RemoteCall
+    assert ReportSettings.from_option("reMOte-cALL") == ReportSettings.RemoteCall
+    assert ReportSettings.from_option("ahriman") == ReportSettings.RemoteCall
+    assert ReportSettings.from_option("AhRiMAN") == ReportSettings.RemoteCall
@@ -20,3 +20,8 @@ def test_from_option_valid() -> None:
     assert UploadSettings.from_option("github") == UploadSettings.Github
     assert UploadSettings.from_option("GitHub") == UploadSettings.Github
+
+    assert UploadSettings.from_option("remote-service") == UploadSettings.RemoteService
+    assert UploadSettings.from_option("Remote-Service") == UploadSettings.RemoteService
+    assert UploadSettings.from_option("ahriman") == UploadSettings.RemoteService
+    assert UploadSettings.from_option("AhRiMAN") == UploadSettings.RemoteService
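Both enum tests exercise a case-insensitive from_option lookup in which the new remote-call and remote-service providers also answer to the ahriman alias. A sketch of such a converter for the upload side follows; the member list and the fallback value are assumptions, only the aliases asserted above come from the tests.

from enum import Enum


class UploadSettings(Enum):
    Disabled = "disabled"
    Rsync = "rsync"
    S3 = "s3"
    Github = "github"
    RemoteService = "remote-service"

    @classmethod
    def from_option(cls, value: str) -> "UploadSettings":
        # options are matched case-insensitively, unknown values fall back to Disabled
        match value.lower():
            case "rsync":
                return cls.Rsync
            case "s3":
                return cls.S3
            case "github":
                return cls.Github
            case "ahriman" | "remote-service" | "remote_service":
                return cls.RemoteService
            case _:
                return cls.Disabled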
tests/ahriman/models/test_waiter.py (new file, 29 lines)
@@ -0,0 +1,29 @@
+import time
+
+from ahriman.models.waiter import Waiter
+
+
+def test_is_timed_out() -> None:
+    """
+    must correctly check if timer runs out
+    """
+    assert Waiter(-1).is_timed_out()
+    assert Waiter(1, start_time=time.monotonic() - 10.0).is_timed_out()
+    assert not Waiter(1, start_time=time.monotonic() + 10.0).is_timed_out()
+
+
+def test_is_timed_out_infinite() -> None:
+    """
+    must treat 0 wait timeout as infinite
+    """
+    assert not Waiter(0).is_timed_out()
+    assert not Waiter(0, start_time=time.monotonic() - 10.0).is_timed_out()
+
+
+def test_wait() -> None:
+    """
+    must wait until file will disappear
+    """
+    results = iter([True, False])
+    waiter = Waiter(1, interval=1)
+    assert waiter.wait(lambda: next(results)) > 0
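The Waiter tests fix the model's observable behaviour: a zero timeout never expires, a negative timeout is expired immediately, and wait polls a predicate at a fixed interval and returns the elapsed time. An approximation that satisfies exactly those assertions is shown below; the real ahriman.models.waiter.Waiter may use different field names and defaults.

import time
from dataclasses import dataclass, field
from typing import Callable


@dataclass
class Waiter:
    wait_timeout: float
    start_time: float = field(default_factory=time.monotonic)
    interval: float = 10

    def is_timed_out(self) -> bool:
        # zero timeout means wait forever, negative timeout is already expired
        since_start = time.monotonic() - self.start_time
        return self.wait_timeout != 0 and since_start > self.wait_timeout

    def wait(self, in_progress: Callable[[], bool]) -> float:
        # poll the predicate until it reports completion or the timeout hits
        while not self.is_timed_out() and in_progress():
            time.sleep(self.interval)
        return time.monotonic() - self.start_time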
tests/ahriman/web/schemas/test_file_schema.py (new file, 1 line)
@@ -0,0 +1 @@
+# schema testing goes in view class tests

tests/ahriman/web/schemas/test_process_id_schema.py (new file, 1 line)
@@ -0,0 +1 @@
+# schema testing goes in view class tests

tests/ahriman/web/schemas/test_process_schema.py (new file, 1 line)
@@ -0,0 +1 @@
+# schema testing goes in view class tests

tests/ahriman/web/schemas/test_update_flags_schema.py (new file, 1 line)
@@ -0,0 +1 @@
+# schema testing goes in view class tests
@@ -21,11 +21,12 @@ async def test_post(client: TestClient, mocker: MockerFixture) -> None:
     """
     must call post request correctly
     """
-    add_mock = mocker.patch("ahriman.core.spawn.Spawn.packages_add")
+    add_mock = mocker.patch("ahriman.core.spawn.Spawn.packages_add", return_value="abc")
     user_mock = AsyncMock()
     user_mock.return_value = "username"
     mocker.patch("ahriman.web.views.base.BaseView.username", side_effect=user_mock)
     request_schema = pytest.helpers.schema_request(AddView.post)
+    response_schema = pytest.helpers.schema_response(AddView.post)

     payload = {"packages": ["ahriman"]}
     assert not request_schema.validate(payload)
@@ -33,6 +34,10 @@ async def test_post(client: TestClient, mocker: MockerFixture) -> None:
     assert response.ok
     add_mock.assert_called_once_with(["ahriman"], "username", now=True)

+    json = await response.json()
+    assert json["process_id"] == "abc"
+    assert not response_schema.validate(json)
+

 async def test_post_empty(client: TestClient, mocker: MockerFixture) -> None:
     """
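Finally, the updated add-view test expects the POST handler to return the identifier of the spawned background task as {"process_id": ...} and to validate it against a response schema. A very rough sketch of such a handler, assuming an aiohttp-style view and a spawner stored on the application (both assumptions; the real AddView also performs authentication and input validation):

from aiohttp import web


async def post(request: web.Request) -> web.Response:
    payload = await request.json()
    packages = payload["packages"]
    username = "username"  # resolved from the authenticated user in the real view

    # packages_add now returns the identifier of the spawned process
    process_id = request.app["spawn"].packages_add(packages, username, now=True)
    return web.json_response({"process_id": process_id})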
Some files were not shown because too many files have changed in this diff.