Compare commits


10 Commits

SHA1 Message Date
7769a4a6e0 Release 2.18.3 2025-06-20 17:20:19 +03:00
066d1b1dde refactor: rework few tests and build system
This commit includes the following changes:
* Bump github actions
* Update tests github action to check documentation and streamline
  process
* Update test cases to use temporary directories as roots
* Simplify tox.ini
2025-06-20 17:04:57 +03:00
1f22a27360 build: remove defaults from pylint config 2025-06-18 16:20:14 +03:00
75682bc7be feat: add support of openmetrics (#144)
* feat: add openmetrics support & endpoint

* add support of named resources

* update docstrings

* generate docs

* add another test for http api
2025-06-18 14:42:09 +03:00
e5d824b03f build: add regress weekly pipeline
This commit also adds manual dispatch for tests and setup
2025-06-16 21:15:52 +03:00
8d0d597473 Release 2.18.2 2025-06-16 19:03:05 +03:00
995b396360 bug: fix invalid logs rotation 2025-06-16 16:36:34 +03:00
7f813cf0c3 Release 2.18.1 2025-06-16 15:33:24 +03:00
d4eb55ef95 bug: correctly close sqlite3 connection
After the last updates, tests produce warnings that the connection to the
database is leaked, which appears to be correct. This commit changes the
behaviour to close the connection explicitly via contextlib
2025-06-16 15:24:57 +03:00
09350e88ab style: fix few typos 2025-06-14 23:34:53 +03:00
59 changed files with 1891 additions and 2065 deletions

View File

@ -1,6 +0,0 @@
skips:
- B101
- B104
- B105
- B106
- B404

View File

@ -21,18 +21,18 @@ jobs:
packages: write
steps:
- uses: docker/setup-qemu-action@v2
- uses: docker/setup-qemu-action@v3
- uses: docker/setup-buildx-action@v2
- uses: docker/setup-buildx-action@v3
- name: Login to docker hub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to github container registry
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@ -40,7 +40,7 @@ jobs:
- name: Extract docker metadata
id: meta
uses: docker/metadata-action@v3
uses: docker/metadata-action@v5
with:
images: |
arcan1s/ahriman
@ -50,7 +50,7 @@ jobs:
type=edge
- name: Build an image and push
uses: docker/build-push-action@v4
uses: docker/build-push-action@v6
with:
file: docker/Dockerfile
push: true

View File

@ -1,6 +1,9 @@
name: Regress
on: workflow_dispatch
on:
schedule:
- cron: 1 0 * * 0
workflow_dispatch:
permissions:
contents: read
@ -34,8 +37,6 @@ jobs:
- repo:/var/lib/ahriman
steps:
- uses: actions/checkout@v3
- run: pacman -Sy
- name: Init repository

View File

@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Extract version
id: version
@ -27,8 +27,7 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
filter: 'Release \d+\.\d+\.\d+'
- name: Install dependencies
uses: ConorMacBride/install-package@v1.1.0
- uses: ConorMacBride/install-package@v1.1.0
with:
apt: tox
@ -38,7 +37,7 @@ jobs:
VERSION: ${{ steps.version.outputs.VERSION }}
- name: Publish release
uses: softprops/action-gh-release@v1
uses: softprops/action-gh-release@v2
with:
body: |
${{ steps.changelog.outputs.compareurl }}

View File

@ -7,6 +7,7 @@ on:
pull_request:
branches:
- master
workflow_dispatch:
permissions:
contents: read
@ -23,7 +24,7 @@ jobs:
- ${{ github.workspace }}:/build
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup the minimal service in arch linux container
run: .github/workflows/setup.sh minimal
@ -39,7 +40,7 @@ jobs:
options: --privileged -w /build
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup the service in arch linux container
run: .github/workflows/setup.sh

View File

@ -1,10 +0,0 @@
#!/bin/bash
# Install dependencies and run test in container
set -ex
# install dependencies
pacman --noconfirm -Syyu base-devel python-tox
# run test and check targets
tox

View File

@ -9,6 +9,7 @@ on:
- master
schedule:
- cron: 1 0 * * *
workflow_dispatch:
permissions:
contents: read
@ -25,7 +26,16 @@ jobs:
- ${{ github.workspace }}:/build
steps:
- uses: actions/checkout@v3
- run: pacman --noconfirm -Syu base-devel git python-tox
- name: Run check and tests in arch linux container
run: .github/workflows/tests.sh
- run: git config --global --add safe.directory *
- uses: actions/checkout@v4
- name: Run check and tests
run: tox
- name: Generate documentation and check if there are untracked changes
run: |
tox -e docs
[ -z "$(git status --porcelain docs/*.rst)" ]

.pylint.toml (new file, 45 lines)
View File

@ -0,0 +1,45 @@
[tool.pylint.main]
init-hook = "sys.path.append('pylint_plugins')"
load-plugins = [
"pylint.extensions.docparams",
"pylint.extensions.bad_builtin",
"definition_order",
"import_order",
]
[tool.pylint.classes]
bad-functions = [
"print",
]
[tool.pylint.design]
max-parents = 15
[tool.pylint."messages control"]
disable = [
"raw-checker-failed",
"bad-inline-option",
"locally-disabled",
"file-ignored",
"suppressed-message",
"useless-suppression",
"deprecated-pragma",
"use-symbolic-message-instead",
"use-implicit-booleaness-not-comparison-to-string",
"use-implicit-booleaness-not-comparison-to-zero",
"missing-module-docstring",
"line-too-long",
"no-name-in-module",
"import-outside-toplevel",
"invalid-name",
"raise-missing-from",
"wrong-import-order",
"too-few-public-methods",
"too-many-instance-attributes",
"broad-exception-caught",
"fixme",
"too-many-arguments",
"duplicate-code",
"cyclic-import",
"too-many-positional-arguments",
]

.pylintrc (deleted, 651 lines)
View File

@ -1,651 +0,0 @@
[MAIN]
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
# in a server-like mode.
clear-cache-post-run=no
# Load and enable all available extensions. Use --list-extensions to see a list
# all available extensions.
#enable-all-extensions=
# In error mode, messages with a category besides ERROR or FATAL are
# suppressed, and no reports are done by default. Error mode is compatible with
# disabling specific errors.
#errors-only=
# Always return a 0 (non-error) status code, even if lint errors are found.
# This is primarily useful in continuous integration scripts.
#exit-zero=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
extension-pkg-whitelist=
# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
fail-on=
# Specify a score threshold under which the program will exit with error.
fail-under=10
# Interpret the stdin as a python script, whose filename needs to be passed as
# the module_or_package argument.
#from-stdin=
# Files or directories to be skipped. They should be base names, not paths.
ignore=CVS
# Add files or directories matching the regular expressions patterns to the
# ignore-list. The regex matches against paths and can be in Posix or Windows
# format. Because '\\' represents the directory delimiter on Windows systems,
# it can't be used as an escape character.
ignore-paths=
# Files or directories matching the regular expression patterns are skipped.
# The regex matches against base names, not paths. The default value ignores
# Emacs file locks
ignore-patterns=^\.#
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
init-hook='import sys; sys.path.append("pylint_plugins")'
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use, and will cap the count on Windows to
# avoid hangs.
jobs=1
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=pylint.extensions.docparams,
pylint.extensions.bad_builtin,
definition_order,
import_order,
# Pickle collected data for later comparisons.
persistent=yes
# Minimum Python version to use for version dependent checks. Will default to
# the version used to run pylint.
py-version=3.11
# Discover python modules and packages in the file system subtree.
recursive=no
# Add paths to the list of the source roots. Supports globbing patterns. The
# source root is an absolute path or a path relative to the current working
# directory used to determine a package namespace for modules located under the
# source root.
source-roots=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# In verbose mode, extra non-checker-related info will be displayed.
#verbose=
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style. If left empty, argument names will be checked with the set
# naming style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style. If left empty, attribute names will be checked with the set naming
# style.
#attr-rgx=
bad-functions=print,
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style. If left empty, class attribute names will be checked
# with the set naming style.
#class-attribute-rgx=
# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style. If left empty, class constant names will be checked with
# the set naming style.
#class-const-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style. If left empty, class names will be checked with the set naming style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style. If left empty, constant names will be checked with the set naming
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style. If left empty, function names will be checked with the set
# naming style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
k,
ex,
Run,
_
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style. If left empty, inline iteration names will be checked
# with the set naming style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style. If left empty, method names will be checked with the set naming style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style. If left empty, module names will be checked with the set naming style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Regular expression matching correct type alias names. If left empty, type
# alias names will be checked with the set naming style.
#typealias-rgx=
# Regular expression matching correct type variable names. If left empty, type
# variable names will be checked with the set naming style.
#typevar-rgx=
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style. If left empty, variable names will be checked with the set
# naming style.
#variable-rgx=
[CLASSES]
# Warn about protected attribute access inside special methods
check-protected-access-in-special-methods=no
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
asyncSetUp,
__post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[DESIGN]
# List of regular expressions of class ancestor names to ignore when counting
# public methods (see R0903)
exclude-too-few-public-methods=
# List of qualified class names to ignore when counting class parents (see
# R0901)
ignored-parents=
# Maximum number of arguments for function / method.
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=15
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[EXCEPTIONS]
# Exceptions that will emit a warning when caught.
overgeneral-exceptions=builtins.BaseException,builtins.Exception
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=1000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow explicit reexports by alias from a package __init__.
allow-reexport-from-package=no
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=
# Output a graph (.gv or any supported image format) of external dependencies
# to the given file (report RP0402 must not be disabled).
ext-import-graph=
# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be
# disabled).
import-graph=
# Output a graph (.gv or any supported image format) of internal dependencies
# to the given file (report RP0402 must not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
# UNDEFINED.
confidence=HIGH,
CONTROL_FLOW,
INFERENCE,
INFERENCE_FAILURE,
UNDEFINED
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=raw-checker-failed,
bad-inline-option,
locally-disabled,
file-ignored,
suppressed-message,
useless-suppression,
deprecated-pragma,
use-symbolic-message-instead,
missing-module-docstring,
line-too-long,
no-name-in-module,
import-outside-toplevel,
invalid-name,
raise-missing-from,
wrong-import-order,
too-few-public-methods,
too-many-instance-attributes,
broad-except,
fixme,
too-many-arguments,
duplicate-code,
cyclic-import,
too-many-positional-arguments,
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[METHOD_ARGS]
# List of qualified names (i.e., library.method) which require a timeout
# parameter e.g. 'requests.api.get,requests.api.post'
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
# Regular expression of note tags to take in consideration.
notes-rgx=
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
# 'convention', and 'info' which contain the number of messages in each
# category, as well as 'statement' which is the total number of statements
# analyzed. This score is used by the global evaluation report (RP0004).
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
#output-format=
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[SIMILARITIES]
# Comments are removed from the similarity computation
ignore-comments=yes
# Docstrings are removed from the similarity computation
ignore-docstrings=yes
# Imports are removed from the similarity computation
ignore-imports=yes
# Signatures are removed from the similarity computation
ignore-signatures=yes
# Minimum lines number of a similarity.
min-similarity-lines=4
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. No available dictionaries : You need to install
# both the python package and the system dependency for enchant to work..
spelling-dict=
# List of comma separated words that should be considered directives if they
# appear at the beginning of a comment and should not be checked.
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of symbolic message names to ignore for Mixin members.
ignored-checks-for-mixins=no-member,
not-async-context-manager,
not-context-manager,
attribute-defined-outside-init
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# Regex pattern to define which classes are considered mixins.
mixin-class-rgx=.*[Mm]ixin
# List of decorators that change the signature of a decorated function.
signature-mutators=
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of names allowed to shadow builtins
allowed-redefined-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io

View File

@ -40,6 +40,7 @@ RUN pacman -S --noconfirm --asdeps \
pacman -S --noconfirm --asdeps \
git \
python-aiohttp \
python-aiohttp-openmetrics \
python-boto3 \
python-cerberus \
python-cryptography \
@ -112,6 +113,7 @@ RUN pacman -S --noconfirm ahriman
RUN pacman -S --noconfirm --asdeps \
python-aioauth-client \
python-aiohttp-apispec-git \
python-aiohttp-openmetrics \
python-aiohttp-security \
python-aiohttp-session \
python-boto3 \

File diff suppressed because it is too large

View File

@ -20,6 +20,14 @@ ahriman.web.middlewares.exception\_handler module
:no-undoc-members:
:show-inheritance:
ahriman.web.middlewares.metrics\_handler module
-----------------------------------------------
.. automodule:: ahriman.web.middlewares.metrics_handler
:members:
:no-undoc-members:
:show-inheritance:
Module contents
---------------

View File

@ -4,6 +4,14 @@ ahriman.web.schemas package
Submodules
----------
ahriman.web.schemas.any\_schema module
--------------------------------------
.. automodule:: ahriman.web.schemas.any_schema
:members:
:no-undoc-members:
:show-inheritance:
ahriman.web.schemas.aur\_package\_schema module
-----------------------------------------------

View File

@ -12,6 +12,14 @@ ahriman.web.views.v1.status.info module
:no-undoc-members:
:show-inheritance:
ahriman.web.views.v1.status.metrics module
------------------------------------------
.. automodule:: ahriman.web.views.v1.status.metrics
:members:
:no-undoc-members:
:show-inheritance:
ahriman.web.views.v1.status.repositories module
-----------------------------------------------

View File

@ -413,10 +413,11 @@ Web application
Web application requires the following python packages to be installed:
* Core part requires ``aiohttp`` (application itself), ``aiohttp_jinja2`` and ``Jinja2`` (HTML generation from templates).
* Additional web features also require ``aiohttp-apispec`` (autogenerated documentation), ``aiohttp_cors`` (CORS support, required by documentation).
* Additional web features also require ``aiohttp-apispec`` (autogenerated documentation, optional), ``aiohttp_cors`` (CORS support, required by documentation).
* In addition, authorization feature requires ``aiohttp_security``, ``aiohttp_session`` and ``cryptography``.
* In addition to base authorization dependencies, OAuth2 also requires ``aioauth-client`` library.
* In addition if you would like to disable authorization for local access (recommended way in order to run the application itself with reporting support), the ``requests-unixsocket2`` library is required.
* Application metrics will be automatically enabled after installing ``aiohttp-openmetrics`` package.
Middlewares
^^^^^^^^^^^

View File

@ -1,36 +1,36 @@
# This file was autogenerated by uv via the following command:
# uv pip compile --group ../pyproject.toml:docs --extra s3 --extra validator --extra web --output-file ../docs/requirements.txt ../pyproject.toml
# uv pip compile --group pyproject.toml:docs --extra s3 --extra validator --extra web --output-file docs/requirements.txt pyproject.toml
aiohappyeyeballs==2.6.1
# via aiohttp
aiohttp==3.11.18
# via
# ahriman (../pyproject.toml)
# ahriman (pyproject.toml)
# aiohttp-cors
# aiohttp-jinja2
aiohttp-cors==0.8.1
# via ahriman (../pyproject.toml)
# via ahriman (pyproject.toml)
aiohttp-jinja2==1.6
# via ahriman (../pyproject.toml)
# via ahriman (pyproject.toml)
aiosignal==1.3.2
# via aiohttp
alabaster==1.0.0
# via sphinx
argparse-manpage==4.6
# via ahriman (../pyproject.toml:docs)
# via ahriman (pyproject.toml:docs)
attrs==25.3.0
# via aiohttp
babel==2.17.0
# via sphinx
bcrypt==4.3.0
# via ahriman (../pyproject.toml)
# via ahriman (pyproject.toml)
boto3==1.38.11
# via ahriman (../pyproject.toml)
# via ahriman (pyproject.toml)
botocore==1.38.11
# via
# boto3
# s3transfer
cerberus==1.3.7
# via ahriman (../pyproject.toml)
# via ahriman (pyproject.toml)
certifi==2025.4.26
# via requests
charset-normalizer==3.4.2
@ -51,7 +51,7 @@ idna==3.10
imagesize==1.4.1
# via sphinx
inflection==0.5.1
# via ahriman (../pyproject.toml)
# via ahriman (pyproject.toml)
jinja2==3.1.6
# via
# aiohttp-jinja2
@ -73,37 +73,37 @@ propcache==0.3.1
# aiohttp
# yarl
pydeps==3.0.1
# via ahriman (../pyproject.toml:docs)
# via ahriman (pyproject.toml:docs)
pyelftools==0.32
# via ahriman (../pyproject.toml)
# via ahriman (pyproject.toml)
pygments==2.19.1
# via sphinx
python-dateutil==2.9.0.post0
# via botocore
requests==2.32.3
# via
# ahriman (../pyproject.toml)
# ahriman (pyproject.toml)
# sphinx
roman-numerals-py==3.1.0
# via sphinx
s3transfer==0.12.0
# via boto3
shtab==1.7.2
# via ahriman (../pyproject.toml:docs)
# via ahriman (pyproject.toml:docs)
six==1.17.0
# via python-dateutil
snowballstemmer==3.0.0.1
# via sphinx
sphinx==8.2.3
# via
# ahriman (../pyproject.toml:docs)
# ahriman (pyproject.toml:docs)
# sphinx-argparse
# sphinx-rtd-theme
# sphinxcontrib-jquery
sphinx-argparse==0.5.2
# via ahriman (../pyproject.toml:docs)
# via ahriman (pyproject.toml:docs)
sphinx-rtd-theme==3.0.2
# via ahriman (../pyproject.toml:docs)
# via ahriman (pyproject.toml:docs)
sphinxcontrib-applehelp==2.0.0
# via sphinx
sphinxcontrib-devhelp==2.0.0

View File

@ -2,7 +2,7 @@
pkgbase='ahriman'
pkgname=('ahriman' 'ahriman-core' 'ahriman-triggers' 'ahriman-web')
pkgver=2.18.0
pkgver=2.18.3
pkgrel=1
pkgdesc="ArcH linux ReposItory MANager"
arch=('any')
@ -75,6 +75,7 @@ package_ahriman-web() {
depends=("$pkgbase-core=$pkgver" 'python-aiohttp-cors' 'python-aiohttp-jinja2')
optdepends=('python-aioauth-client: OAuth2 authorization support'
'python-aiohttp-apispec>=3.0.0: autogenerated API documentation'
'python-aiohttp-openmetrics: HTTP metrics support'
'python-aiohttp-security: authorization support'
'python-aiohttp-session: authorization support'
'python-cryptography: authorization support')

View File

@ -1,4 +1,4 @@
.TH AHRIMAN "1" "2025\-06\-13" "ahriman" "Generated Python Manual"
.TH AHRIMAN "1" "2025\-06\-16" "ahriman" "Generated Python Manual"
.SH NAME
ahriman
.SH SYNOPSIS

View File

@ -69,6 +69,10 @@ web_auth = [
"aiohttp_security",
"cryptography",
]
web_metrics = [
"ahriman[web]",
"aiohttp-openmetrics",
]
web_oauth2 = [
"ahriman[web_auth]",
"aioauth-client",

View File

@ -17,4 +17,4 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__version__ = "2.18.0"
__version__ = "2.18.3"

View File

@ -35,7 +35,7 @@ class Remote(SyncHttpClient):
>>> package = AUR.info("ahriman")
>>> search_result = Official.multisearch("pacman", "manager", pacman=pacman)
Differnece between :func:`search()` and :func:`multisearch()` is that :func:`search()` passes all arguments to
Difference between :func:`search()` and :func:`multisearch()` is that :func:`search()` passes all arguments to
underlying wrapper directly, whereas :func:`multisearch()` splits search one by one and finds intersection
between search results.
"""

View File

@ -210,6 +210,17 @@ class Configuration(configparser.RawConfigParser):
raise InitializeError("Configuration path and/or repository id are not set")
return self.path, self.repository_id
def copy_from(self, configuration: Self) -> None:
"""
copy values from another instance overriding existing
Args:
configuration(Self): configuration instance to merge from
"""
for section in configuration.sections():
for key, value in configuration.items(section):
self.set_option(section, key, value)
def dump(self) -> dict[str, dict[str, str]]:
"""
dump configuration to dictionary
@ -220,6 +231,7 @@ class Configuration(configparser.RawConfigParser):
return {
section: dict(self.items(section))
for section in self.sections()
if self[section]
}
# pylint and mypy are too stupid to find these methods

View File

@ -153,10 +153,13 @@ class LogsOperations(Operations):
"""
delete from logs
where (package_base, repository, process_id) in (
select package_base, repository, process_id from logs
where repository = :repository
group by package_base, repository, process_id
order by min(created) desc limit -1 offset :offset
select package_base, repository, process_id from (
select package_base, repository, process_id, row_number() over (partition by package_base order by max(created) desc) as rn
from logs
where repository = :repository
group by package_base, repository, process_id
)
where rn > :offset
)
""",
{
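The rewritten rotation query keeps the newest :offset process identifiers per package instead of applying one global limit across the whole repository. Below is a self-contained sketch of the same window-function logic against a deliberately simplified, hypothetical table layout (the real logs table carries more columns); it requires SQLite 3.25+ for window functions:

```python
# Standalone illustration of the per-package rotation; the table layout is a
# simplified assumption, not the project's actual schema.
import sqlite3

with sqlite3.connect(":memory:") as connection:
    connection.execute(
        "create table logs (package_base text, repository text, process_id text, created real, message text)")
    connection.executemany(
        "insert into logs values (?, ?, ?, ?, ?)",
        [
            ("ahriman", "repo", "p1", 42.0, "message 1"),
            ("ahriman", "repo", "p1", 43.0, "message 2"),
            ("ahriman", "repo", "p2", 44.0, "message 3"),
            ("ahriman", "repo", "p2", 45.0, "message 4"),
            ("schedule", "repo", "p1", 40.0, "message 5"),
        ])

    # keep only the newest process per package (offset = 1), as the new query does
    connection.execute("""
        delete from logs
        where (package_base, repository, process_id) in (
            select package_base, repository, process_id from (
                select package_base, repository, process_id,
                       row_number() over (partition by package_base order by max(created) desc) as rn
                from logs
                where repository = :repository
                group by package_base, repository, process_id
            )
            where rn > :offset
        )
        """, {"repository": "repo", "offset": 1})

    for row in connection.execute("select package_base, message from logs order by created"):
        print(row)  # ('schedule', 'message 5'), ('ahriman', 'message 3'), ('ahriman', 'message 4')
```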

View File

@ -17,6 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import contextlib
import sqlite3
from collections.abc import Callable
@ -87,10 +88,12 @@ class Operations(LazyLogging):
Returns:
T: result of the ``query`` call
"""
with sqlite3.connect(self.path, detect_types=sqlite3.PARSE_DECLTYPES) as connection:
with contextlib.closing(sqlite3.connect(self.path, detect_types=sqlite3.PARSE_DECLTYPES)) as connection:
connection.set_trace_callback(self.logger.debug)
connection.row_factory = self.factory
result = query(connection)
if commit:
connection.commit()
return result
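This is the change referenced by commit d4eb55ef95: the sqlite3 connection used as a context manager only scopes a transaction and never closes the handle, so it is now wrapped in contextlib.closing. A minimal sketch of the pattern, independent of the project code:

```python
# contextlib.closing guarantees connection.close(); the bare sqlite3 context
# manager only commits or rolls back a transaction and leaves the handle open.
import contextlib
import sqlite3

def fetch_version(path: str = ":memory:") -> str:
    with contextlib.closing(sqlite3.connect(path)) as connection:
        with connection:  # transaction scope: commit on success, rollback on error
            return connection.execute("select sqlite_version()").fetchone()[0]

print(fetch_version())
```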

View File

@ -40,7 +40,7 @@ class JinjaTemplate:
* homepage - link to homepage, string, optional
* last_update - report generation time, pretty printed datetime, required
* link_path - prefix fo packages to download, string, required
* link_path - prefix of packages to download, string, required
* has_package_signed - ``True`` in case if package sign enabled, ``False`` otherwise, required
* has_repo_signed - ``True`` in case if repository database sign enabled, ``False`` otherwise, required
* packages - sorted list of packages properties, required
@ -64,7 +64,7 @@ class JinjaTemplate:
Attributes:
default_pgp_key(str | None): default PGP key
homepage(str | None): homepage link if any (for footer)
link_path(str): prefix fo packages to download
link_path(str): prefix of packages to download
name(str): repository name
rss_url(str | None): link to the RSS feed
sign_targets(set[SignSettings]): targets to sign enabled in configuration

View File

@ -71,7 +71,7 @@ class EventLogger:
>>> with self.in_event(package_base, EventType.PackageUpdated):
>>> do_something()
Additional parameter ``failure`` can be set in order to emit an event on exception occured. If none set
Additional parameter ``failure`` can be set in order to emit an event on exception occurred. If none set
(default), then no event will be recorded on exception
"""
with MetricsTimer() as timer:

View File

@ -69,7 +69,7 @@ class Package(LazyLogging):
:attr:`ahriman.models.package_source.PackageSource.Archive`,
:attr:`ahriman.models.package_source.PackageSource.AUR`,
:attr:`ahriman.models.package_source.PackageSource.Local` and
:attr:`ahriman.models.package_source.PackageSource.Repository` repsectively:
:attr:`ahriman.models.package_source.PackageSource.Repository` respectively:
>>> ahriman_package = Package.from_aur("ahriman")
>>> pacman_package = Package.from_official("pacman", pacman)

View File

@ -0,0 +1,69 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
try:
import aiohttp_openmetrics
except ImportError:
aiohttp_openmetrics = None # type: ignore[assignment]
from aiohttp.typedefs import Middleware
from aiohttp.web import HTTPNotFound, Request, Response, StreamResponse, middleware
from ahriman.web.middlewares import HandlerType
__all__ = [
"metrics",
"metrics_handler",
]
async def metrics(request: Request) -> Response:
"""
handler for returning metrics
Args:
request(Request): request object
Returns:
Response: response object
Raises:
HTTPNotFound: endpoint is disabled
"""
if aiohttp_openmetrics is None:
raise HTTPNotFound
return await aiohttp_openmetrics.metrics(request)
def metrics_handler() -> Middleware:
"""
middleware for metrics support
Returns:
Middleware: middleware function to handle server metrics
"""
if aiohttp_openmetrics is not None:
return aiohttp_openmetrics.metrics_middleware
@middleware
async def handle(request: Request, handler: HandlerType) -> StreamResponse:
return await handler(request)
return handle

View File

@ -17,6 +17,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import re
from aiohttp.web import Application, View
from collections.abc import Generator
@ -45,6 +47,23 @@ def _dynamic_routes(configuration: Configuration) -> Generator[tuple[str, type[V
yield route, view
def _identifier(route: str) -> str:
"""
extract valid route identifier (aka name) for the route. This method replaces curly brackets by single colon
and replaces other special symbols (including slashes) by underscore
Args:
route(str): source route
Returns:
str: route with special symbols being replaced
"""
# replace special symbols
alphanum = re.sub(r"[^A-Za-z\d\-{}]", "_", route)
# finally replace curly brackets
return alphanum.replace("{", ":").replace("}", "")
def setup_routes(application: Application, configuration: Configuration) -> None:
"""
setup all defined routes
@ -53,7 +72,8 @@ def setup_routes(application: Application, configuration: Configuration) -> None
application(Application): web application instance
configuration(Configuration): configuration instance
"""
application.router.add_static("/static", configuration.getpath("web", "static_path"), follow_symlinks=True)
application.router.add_static("/static", configuration.getpath("web", "static_path"),
name="_static", follow_symlinks=True)
for route, view in _dynamic_routes(configuration):
application.router.add_view(route, view)
application.router.add_view(route, view, name=_identifier(route))
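The new _identifier helper gives every dynamic route a stable aiohttp name so it can be looked up through the router, mirroring the "_static" name added for the static route. A standalone copy of the transformation for illustration, applied to routes that appear elsewhere in this change set:

```python
# Characters outside [A-Za-z0-9-{}] (including slashes) become underscores,
# then curly braces collapse into a single leading colon for the parameter.
import re

def identifier(route: str) -> str:
    alphanum = re.sub(r"[^A-Za-z\d\-{}]", "_", route)
    return alphanum.replace("{", ":").replace("}", "")

print(identifier("/api/v1/packages/{package}"))  # _api_v1_packages_:package
print(identifier("/api/v1/metrics"))             # _api_v1_metrics
```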

View File

@ -17,6 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.web.schemas.any_schema import AnySchema
from ahriman.web.schemas.aur_package_schema import AURPackageSchema
from ahriman.web.schemas.auth_schema import AuthSchema
from ahriman.web.schemas.build_options_schema import BuildOptionsSchema

View File

@ -0,0 +1,26 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman.web.apispec import Schema
class AnySchema(Schema):
"""
response dummy schema
"""

View File

@ -39,7 +39,7 @@ class RemoteSchema(Schema):
"example": ".",
})
source = fields.Enum(PackageSource, by_value=True, required=True, metadata={
"description": "Pacakge source",
"description": "Package source",
})
web_url = fields.String(metadata={
"description": "Package repository page",

View File

@ -167,6 +167,9 @@ class BaseView(View, CorsViewMixin):
"""
HEAD method implementation based on the result of GET method
Returns:
StreamResponse: generated response for the request
Raises:
HTTPMethodNotAllowed: in case if there is no GET method implemented
"""

View File

@ -106,7 +106,7 @@ class PackageView(StatusViewGuard, BaseView):
@apidocs(
tags=["Packages"],
summary="Update package",
description="Update package status and set its descriptior optionally",
description="Update package status and set its descriptor optionally",
permission=POST_PERMISSION,
error_400_enabled=True,
error_404_description="Repository is unknown",

View File

@ -46,7 +46,7 @@ class PackagesView(StatusViewGuard, BaseView):
ROUTES = ["/api/v1/packages"]
@apidocs(
tags=["packages"],
tags=["Packages"],
summary="Get packages list",
description="Retrieve packages and their descriptors",
permission=GET_PERMISSION,

View File

@ -0,0 +1,56 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from aiohttp.web import Response
from typing import ClassVar
from ahriman.models.user_access import UserAccess
from ahriman.web.apispec.decorators import apidocs
from ahriman.web.middlewares.metrics_handler import metrics
from ahriman.web.schemas import AnySchema
from ahriman.web.views.base import BaseView
class MetricsView(BaseView):
"""
open metrics endpoints
Attributes:
GET_PERMISSION(UserAccess): (class attribute) get permissions of self
"""
GET_PERMISSION: ClassVar[UserAccess] = UserAccess.Unauthorized
ROUTES = ["/api/v1/metrics"]
@apidocs(
tags=["Status"],
summary="OpenMetrics endpoint",
description="Get service metrics in OpenMetrics format",
permission=GET_PERMISSION,
error_404_description="Endpoint is disabled",
schema=AnySchema,
)
async def get(self) -> Response:
"""
get service HTTP metrics
Returns:
Response: 200 with service metrics as generated by the library
"""
return await metrics(self.request)
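Once aiohttp-openmetrics is installed, this view exposes service metrics in the OpenMetrics text format; otherwise the handler responds with 404. A hypothetical client-side scrape, assuming the web service listens on 127.0.0.1:8080 (address and port are assumptions, not taken from the project configuration):

```python
# Hypothetical scrape of the new endpoint; host and port are assumptions and
# depend on how the web service is configured locally.
import asyncio
import aiohttp

async def scrape(url: str = "http://127.0.0.1:8080/api/v1/metrics") -> str:
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            response.raise_for_status()  # 404 means aiohttp-openmetrics is not installed
            return await response.text()

print(asyncio.run(scrape()))
```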

View File

@ -97,6 +97,7 @@ class LoginView(BaseView):
login user to service. The authentication session will be passed in ``Set-Cookie`` header.
Raises:
HTTPBadRequest: if bad data is supplied
HTTPFound: on success response
HTTPUnauthorized: if case of authorization error
"""

View File

@ -37,6 +37,7 @@ from ahriman.web.apispec.info import setup_apispec
from ahriman.web.cors import setup_cors
from ahriman.web.keys import AuthKey, ConfigurationKey, SpawnKey, WatcherKey, WorkersKey
from ahriman.web.middlewares.exception_handler import exception_handler
from ahriman.web.middlewares.metrics_handler import metrics_handler
from ahriman.web.routes import setup_routes
@ -146,6 +147,7 @@ def setup_server(configuration: Configuration, spawner: Spawn, repositories: lis
application.middlewares.append(normalize_path_middleware(append_slash=False, remove_slash=True))
application.middlewares.append(exception_handler(application.logger))
application.middlewares.append(metrics_handler())
application.logger.info("setup routes")
setup_routes(application, configuration)

View File

@ -144,6 +144,7 @@ def test_repositories_extract(args: argparse.Namespace, configuration: Configura
args.architecture = "arch"
args.configuration = configuration.path
args.repository = "repo"
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories")
@ -159,6 +160,7 @@ def test_repositories_extract_repository(args: argparse.Namespace, configuration
"""
args.architecture = "arch"
args.configuration = configuration.path
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories",
return_value={"repo"})
@ -175,6 +177,7 @@ def test_repositories_extract_repository_legacy(args: argparse.Namespace, config
"""
args.architecture = "arch"
args.configuration = configuration.path
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories",
return_value=set())
@ -191,6 +194,7 @@ def test_repositories_extract_architecture(args: argparse.Namespace, configurati
"""
args.configuration = configuration.path
args.repository = "repo"
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures",
return_value={"arch"})
known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories")
@ -207,6 +211,7 @@ def test_repositories_extract_empty(args: argparse.Namespace, configuration: Con
"""
args.command = "config"
args.configuration = configuration.path
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures", return_value=set())
mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories", return_value=set())
@ -221,6 +226,7 @@ def test_repositories_extract_systemd(args: argparse.Namespace, configuration: C
"""
args.configuration = configuration.path
args.repository_id = "i686/some/repo/name"
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories")
@ -236,6 +242,7 @@ def test_repositories_extract_systemd_with_dash(args: argparse.Namespace, config
"""
args.configuration = configuration.path
args.repository_id = "i686-some-repo-name"
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories")
@ -251,6 +258,7 @@ def test_repositories_extract_systemd_legacy(args: argparse.Namespace, configura
"""
args.configuration = configuration.path
args.repository_id = "i686"
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories",
return_value=set())

View File

@ -6,6 +6,7 @@ from pytest_mock import MockerFixture
from ahriman.application.application import Application
from ahriman.application.handlers.copy import Copy
from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite
from ahriman.core.repository import Repository
from ahriman.models.build_status import BuildStatusEnum
from ahriman.models.package import Package
@ -30,11 +31,12 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
def test_run(args: argparse.Namespace, configuration: Configuration, repository: Repository,
package_ahriman: Package, mocker: MockerFixture) -> None:
database: SQLite, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must run command
"""
args = _default_args(args)
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
mocker.patch("ahriman.core.repository.Repository.packages", return_value=[package_ahriman])
application_mock = mocker.patch("ahriman.application.handlers.copy.Copy.copy_package")
@ -51,12 +53,13 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
def test_run_remove(args: argparse.Namespace, configuration: Configuration, repository: Repository,
package_ahriman: Package, mocker: MockerFixture) -> None:
database: SQLite, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must run command and remove packages afterward
"""
args = _default_args(args)
args.remove = True
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
mocker.patch("ahriman.core.repository.Repository.packages", return_value=[package_ahriman])
mocker.patch("ahriman.application.handlers.copy.Copy.copy_package")
@ -69,12 +72,14 @@ def test_run_remove(args: argparse.Namespace, configuration: Configuration, repo
def test_run_empty_exception(args: argparse.Namespace, configuration: Configuration, repository: Repository,
mocker: MockerFixture) -> None:
database: SQLite, mocker: MockerFixture) -> None:
"""
must raise ExitCode exception on empty result
"""
args = _default_args(args)
args.exit_code = True
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
mocker.patch("ahriman.core.repository.Repository.packages", return_value=[])
mocker.patch("ahriman.application.application.Application.update")
check_mock = mocker.patch("ahriman.application.handlers.handler.Handler.check_status")

View File

@ -9,6 +9,7 @@ from urllib.parse import quote_plus as url_encode
from ahriman.application.handlers.setup import Setup
from ahriman.core.configuration import Configuration
from ahriman.core.database import SQLite
from ahriman.core.exceptions import MissingArchitectureError
from ahriman.core.repository import Repository
from ahriman.models.repository_id import RepositoryId
@ -44,11 +45,12 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
def test_run(args: argparse.Namespace, configuration: Configuration, repository: Repository,
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
database: SQLite, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must run command
"""
args = _default_args(args)
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
ahriman_configuration_mock = mocker.patch("ahriman.application.handlers.setup.Setup.configuration_create_ahriman")
devtools_configuration_mock = mocker.patch("ahriman.application.handlers.setup.Setup.configuration_create_devtools")
@ -88,12 +90,13 @@ def test_run_no_architecture_or_repository(configuration: Configuration) -> None
def test_run_with_server(args: argparse.Namespace, configuration: Configuration, repository: Repository,
mocker: MockerFixture) -> None:
database: SQLite, mocker: MockerFixture) -> None:
"""
must run command with server specified
"""
args = _default_args(args)
args.server = "server"
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
mocker.patch("ahriman.application.handlers.setup.Setup.configuration_create_ahriman")
mocker.patch("ahriman.application.handlers.setup.Setup.configuration_create_makepkg")

View File

@ -51,7 +51,8 @@ def test_run(args: argparse.Namespace, configuration: Configuration, database: S
update_mock.assert_called_once_with(user)
def test_run_empty_salt(args: argparse.Namespace, configuration: Configuration, mocker: MockerFixture) -> None:
def test_run_empty_salt(args: argparse.Namespace, configuration: Configuration, database: SQLite,
mocker: MockerFixture) -> None:
"""
must process users with empty password salt
"""
@ -59,6 +60,7 @@ def test_run_empty_salt(args: argparse.Namespace, configuration: Configuration,
args = _default_args(args)
user = User(username=args.username, password=args.password, access=args.role,
packager_id=args.packager, key=args.key)
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
mocker.patch("ahriman.models.user.User.hash_password", return_value=user)
create_user_mock = mocker.patch("ahriman.application.handlers.users.Users.user_create", return_value=user)
update_mock = mocker.patch("ahriman.core.database.SQLite.user_update")

View File

@ -1575,6 +1575,7 @@ def test_run(args: argparse.Namespace, configuration: Configuration, mocker: Moc
args.command = ""
args.handler = Handler
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
mocker.patch("argparse.ArgumentParser.parse_args", return_value=args)
assert ahriman.run() == 1

View File

@ -249,19 +249,25 @@ def auth(configuration: Configuration) -> Auth:
@pytest.fixture
def configuration(repository_id: RepositoryId, resource_path_root: Path) -> Configuration:
def configuration(repository_id: RepositoryId, tmp_path: Path, resource_path_root: Path) -> Configuration:
"""
configuration fixture
Args:
repository_id(RepositoryId): repository identifier fixture
tmp_path(Path): temporary path used by the fixture as root
resource_path_root(Path): resource path root directory
Returns:
Configuration: configuration test instance
"""
path = resource_path_root / "core" / "ahriman.ini"
return Configuration.from_path(path, repository_id)
instance = Configuration.from_path(path, repository_id)
instance.set_option("repository", "root", str(tmp_path))
instance.set_option("settings", "database", str(tmp_path / "ahriman.db"))
return instance
@pytest.fixture
@ -275,9 +281,7 @@ def database(configuration: Configuration) -> SQLite:
Returns:
SQLite: database test instance
"""
database = SQLite.load(configuration)
yield database
database.path.unlink()
return SQLite.load(configuration)
@pytest.fixture

View File

@ -53,7 +53,7 @@ def test_remote_git_url(remote: Remote) -> None:
must raise NotImplemented for missing remote git url
"""
with pytest.raises(NotImplementedError):
remote.remote_git_url("package", "repositorys")
remote.remote_git_url("package", "repositories")
def test_remote_web_url(remote: Remote) -> None:

View File

@ -10,7 +10,7 @@ from ahriman.core.exceptions import PacmanError
def test_copy(mocker: MockerFixture) -> None:
"""
must copy loca database file
must copy local database file
"""
copy_mock = mocker.patch("shutil.copy")
PacmanDatabase.copy(Path("remote"), Path("local"))

View File

@ -102,6 +102,15 @@ def test_check_loaded_architecture(configuration: Configuration) -> None:
configuration.check_loaded()
def test_copy_from(configuration: Configuration) -> None:
"""
must copy values from another instance
"""
instance = Configuration()
instance.copy_from(configuration)
assert instance.dump() == configuration.dump()
def test_dump(configuration: Configuration) -> None:
"""
dump must not be empty
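The copy_from assertion above implies a straightforward section-by-section copy. A minimal sketch, assuming Configuration exposes the dump() and set_option() helpers used elsewhere in these tests (the real method may copy options at a lower level):
def copy_from(self, other):
    # replicate every option of the other instance so that dump() output matches
    for section, values in other.dump().items():
        for key, value in values.items():
            self.set_option(section, key, value)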

View File

@ -62,8 +62,8 @@ def test_validate_is_ip_address(validator: Validator, mocker: MockerFixture) ->
validator._validate_is_ip_address([], "field", "localhost")
validator._validate_is_ip_address([], "field", "127.0.0.1")
validator._validate_is_ip_address([], "field", "::")
validator._validate_is_ip_address([], "field", "0.0.0.0")
validator._validate_is_ip_address([], "field", "::") # nosec
validator._validate_is_ip_address([], "field", "0.0.0.0") # nosec
validator._validate_is_ip_address([], "field", "random string")

View File

@ -93,6 +93,27 @@ def test_logs_insert_get_multi(database: SQLite, package_ahriman: Package) -> No
]
def test_logs_rotate_remove_older(database: SQLite, package_ahriman: Package,
package_python_schedule: Package) -> None:
"""
must correctly remove old records
"""
database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "1", "p1"), 42.0, "message 1"))
database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "1", "p1"), 43.0, "message 2"))
database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "2", "p2"), 44.0, "message 3"))
database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "2", "p2"), 45.0, "message 4"))
database.logs_insert(LogRecord(LogRecordId(package_python_schedule.base, "3", "p1"), 40.0, "message 5"))
database.logs_rotate(1)
assert database.logs_get(package_ahriman.base) == [
LogRecord(LogRecordId(package_ahriman.base, "2", "p2"), 44.0, "message 3"),
LogRecord(LogRecordId(package_ahriman.base, "2", "p2"), 45.0, "message 4"),
]
assert database.logs_get(package_python_schedule.base) == [
LogRecord(LogRecordId(package_python_schedule.base, "3", "p1"), 40.0, "message 5"),
]
def test_logs_rotate_remove_all(database: SQLite, package_ahriman: Package) -> None:
"""
must remove all records when rotating with keep_last_records set to 0
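Taken together, the two rotation tests imply that logs_rotate(keep_last_records) keeps only the most recent (version, process) groups per package base, and that a value of 0 drops everything. A rough sketch of that behaviour in plain Python; the attribute names log_record_id, package_base, version and process_id are assumptions derived from how the records are constructed above, and the actual ahriman implementation works against the database directly:
def rotate(records, keep_last_records):
    # records are assumed to be ordered oldest to newest, as inserted in the tests
    if keep_last_records == 0:
        return []  # rotation with 0 removes all records
    by_package = {}
    for record in records:
        by_package.setdefault(record.log_record_id.package_base, []).append(record)
    result = []
    for package_records in by_package.values():
        # collect distinct (version, process) groups in insertion order
        groups = []
        for record in package_records:
            key = (record.log_record_id.version, record.log_record_id.process_id)
            if key not in groups:
                groups.append(key)
        keep = set(groups[-keep_last_records:])  # keep only the newest groups
        result += [record for record in package_records
                   if (record.log_record_id.version, record.log_record_id.process_id) in keep]
    return result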

View File

@ -1,3 +1,4 @@
import pytest
import sqlite3
from pytest_mock import MockerFixture
@ -24,15 +25,29 @@ def test_factory(database: SQLite) -> None:
def test_with_connection(database: SQLite, mocker: MockerFixture) -> None:
"""
must run query inside connection
must run query inside connection and close it at the end
"""
connection_mock = MagicMock()
connect_mock = mocker.patch("sqlite3.connect", return_value=connection_mock)
database.with_connection(lambda conn: conn.execute("select 1"))
connect_mock.assert_called_once_with(database.path, detect_types=sqlite3.PARSE_DECLTYPES)
connection_mock.__enter__().set_trace_callback.assert_called_once_with(database.logger.debug)
connection_mock.__enter__().commit.assert_not_called()
connection_mock.set_trace_callback.assert_called_once_with(database.logger.debug)
connection_mock.commit.assert_not_called()
connection_mock.close.assert_called_once_with()
def test_with_connection_close(database: SQLite, mocker: MockerFixture) -> None:
"""
must close connection on errors
"""
connection_mock = MagicMock()
connection_mock.commit.side_effect = Exception
mocker.patch("sqlite3.connect", return_value=connection_mock)
with pytest.raises(Exception):
database.with_connection(lambda conn: conn.execute("select 1"), commit=True)
connection_mock.close.assert_called_once_with()
def test_with_connection_with_commit(database: SQLite, mocker: MockerFixture) -> None:
@ -44,4 +59,4 @@ def test_with_connection_with_commit(database: SQLite, mocker: MockerFixture) ->
mocker.patch("sqlite3.connect", return_value=connection_mock)
database.with_connection(lambda conn: conn.execute("select 1"), commit=True)
connection_mock.__enter__().commit.assert_called_once_with()
connection_mock.commit.assert_called_once_with()
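These assertions describe a connection that is traced, optionally committed, and always closed, rather than one used as a context manager. A minimal sketch of an implementation that would satisfy them, assuming the attribute names the tests rely on (path, logger) and using contextlib.closing to guarantee the close call; the real method signature may differ:
import sqlite3
from contextlib import closing

def with_connection(self, query, commit=False):
    # closing() guarantees connection.close() even if query or commit raises
    with closing(sqlite3.connect(self.path, detect_types=sqlite3.PARSE_DECLTYPES)) as connection:
        connection.set_trace_callback(self.logger.debug)  # trace statements to the logger
        result = query(connection)
        if commit:
            connection.commit()  # commit only when explicitly requested
    return result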

View File

@ -285,7 +285,7 @@ def test_set_unknown(client: Client, package_ahriman: Package, mocker: MockerFix
def test_set_unknown_skip(client: Client, package_ahriman: Package, mocker: MockerFixture) -> None:
"""
must skip unknown status update in case if pacakge is already known
must skip unknown status update in case if package is already known
"""
mocker.patch("ahriman.core.status.Client.package_get", return_value=[(package_ahriman, None)])
update_mock = mocker.patch("ahriman.core.status.Client.package_update")

View File

@ -73,7 +73,7 @@ def test_configuration_sections(configuration: Configuration) -> None:
def test_on_result(trigger: Trigger) -> None:
"""
must pass execution nto run method
must pass execution to run method
"""
trigger.on_result(Result(), [])

View File

@ -3,7 +3,7 @@ from ahriman.models.log_record_id import LogRecordId
def test_init() -> None:
"""
must correctly assign proces identifier if not set
must correctly assign process identifier if not set
"""
assert LogRecordId("1", "2").process_id == LogRecordId.DEFAULT_PROCESS_ID
assert LogRecordId("1", "2", "3").process_id == "3"

View File

@ -0,0 +1,59 @@
import importlib
import pytest
import sys
from aiohttp.web import HTTPNotFound
from pytest_mock import MockerFixture
from unittest.mock import AsyncMock
import ahriman.web.middlewares.metrics_handler as metrics_handler
async def test_metrics(mocker: MockerFixture) -> None:
"""
must return metrics methods if library is available
"""
metrics_mock = AsyncMock()
mocker.patch.object(metrics_handler, "aiohttp_openmetrics", metrics_mock)
await metrics_handler.metrics(42)
metrics_mock.metrics.assert_called_once_with(42)
async def test_metrics_dummy(mocker: MockerFixture) -> None:
"""
must raise HTTPNotFound if no module found
"""
mocker.patch.object(metrics_handler, "aiohttp_openmetrics", None)
with pytest.raises(HTTPNotFound):
await metrics_handler.metrics(None)
async def test_metrics_handler() -> None:
"""
must return metrics handler if library is available
"""
assert metrics_handler.metrics_handler() == metrics_handler.aiohttp_openmetrics.metrics_middleware
async def test_metrics_handler_dummy(mocker: MockerFixture) -> None:
"""
must return dummy handler if no module found
"""
mocker.patch.object(metrics_handler, "aiohttp_openmetrics", None)
handler = metrics_handler.metrics_handler()
async def handle(result: int) -> int:
return result
assert await handler(42, handle) == 42
def test_import_openmetrics_missing(mocker: MockerFixture) -> None:
"""
must correctly process missing module
"""
mocker.patch.dict(sys.modules, {"aiohttp_openmetrics": None})
importlib.reload(metrics_handler)
assert metrics_handler.aiohttp_openmetrics is None
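These tests exercise the usual optional-dependency pattern: the module imports aiohttp_openmetrics when present and degrades gracefully otherwise. A sketch of a middleware module consistent with the calls asserted above (an illustration, not the actual ahriman source):
from aiohttp.web import HTTPNotFound

try:
    import aiohttp_openmetrics
except ImportError:
    aiohttp_openmetrics = None  # library is optional; metrics endpoint becomes unavailable

async def metrics(request):
    if aiohttp_openmetrics is None:
        raise HTTPNotFound  # matches test_metrics_dummy
    return await aiohttp_openmetrics.metrics(request)

def metrics_handler():
    if aiohttp_openmetrics is not None:
        return aiohttp_openmetrics.metrics_middleware
    async def dummy_handler(request, handler):
        return await handler(request)  # pass-through middleware when the library is missing
    return dummy_handler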

View File

@ -0,0 +1 @@
# schema testing goes in view class tests

View File

@ -3,7 +3,7 @@ from pathlib import Path
from ahriman.core.configuration import Configuration
from ahriman.core.utils import walk
from ahriman.web.routes import _dynamic_routes, setup_routes
from ahriman.web.routes import _dynamic_routes, _identifier, setup_routes
def test_dynamic_routes(resource_path_root: Path, configuration: Configuration) -> None:
@ -22,9 +22,19 @@ def test_dynamic_routes(resource_path_root: Path, configuration: Configuration)
assert len(set(routes.values())) == len(expected_views)
def test_identifier() -> None:
"""
must correctly extract route identifiers
"""
assert _identifier("/") == "_"
assert _identifier("/api/v1/status") == "_api_v1_status"
assert _identifier("/api/v1/packages/{package}") == "_api_v1_packages_:package"
def test_setup_routes(application: Application, configuration: Configuration) -> None:
"""
must generate non-empty list of routes
"""
application.router._named_resources = {}
setup_routes(application, configuration)
assert application.router.routes()
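The expected identifiers can be produced with a simple character substitution; an implementation along these lines would satisfy the assertions in test_identifier (a guess at the shape of _identifier, not the actual source):
def _identifier(route):
    # "/" becomes "_", "{name}" placeholders become ":name"
    return route.replace("/", "_").replace("{", ":").replace("}", "")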

View File

@ -0,0 +1,50 @@
import pytest
from aiohttp.test_utils import TestClient
from aiohttp.web import Response
from pytest_mock import MockerFixture
import ahriman.web.middlewares.metrics_handler as metrics_handler
from ahriman.models.user_access import UserAccess
from ahriman.web.views.v1.status.metrics import MetricsView
async def test_get_permission() -> None:
"""
must return correct permission for the request
"""
for method in ("GET",):
request = pytest.helpers.request("", "", method)
assert await MetricsView.get_permission(request) == UserAccess.Unauthorized
def test_routes() -> None:
"""
must return correct routes
"""
assert MetricsView.ROUTES == ["/api/v1/metrics"]
async def test_get(client: TestClient, mocker: MockerFixture) -> None:
"""
must return service metrics
"""
metrics_mock = mocker.patch("ahriman.web.views.v1.status.metrics.metrics", return_value=Response())
response = await client.get("/api/v1/metrics")
assert response.ok
# there is no response validation here, because it is free text, so we check the call instead
metrics_mock.assert_called_once_with(pytest.helpers.anyvar(int))
async def test_get_not_found(client: TestClient, mocker: MockerFixture) -> None:
"""
must return 404 error if no module found
"""
mocker.patch.object(metrics_handler, "aiohttp_openmetrics", None)
response_schema = pytest.helpers.schema_response(MetricsView.get, code=404)
response = await client.get("/api/v1/metrics")
assert response.status == 404
assert not response_schema.validate(await response.json())
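A view satisfying these three tests is little more than a thin wrapper around the metrics() helper from the middleware module. Roughly, with the base class and the GET_PERMISSION attribute being assumptions about how ahriman views declare access:
class MetricsView(BaseView):
    GET_PERMISSION = UserAccess.Unauthorized  # endpoint is open, see test_get_permission
    ROUTES = ["/api/v1/metrics"]

    async def get(self):
        # delegates to the middleware helper, which raises HTTPNotFound
        # when aiohttp_openmetrics is not installed
        return await metrics(self.request)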

View File

@ -1,7 +1,6 @@
[settings]
include = .
logging = logging.ini
database = ../../../ahriman-test.db
[alpm]
database = /var/lib/pacman
@ -31,7 +30,6 @@ triggers_known = ahriman.core.distributed.WorkerLoaderTrigger ahriman.core.distr
[repository]
name = aur
root = ../../../
[sign]
target =

tox.ini
View File

@ -3,11 +3,17 @@ envlist = check, tests
isolated_build = true
labels =
release = version, docs, publish
dependencies = -e .[journald,pacman,reports,s3,shell,stats,unixsocket,validator,web,web_api-docs,web_auth,web_oauth2]
dependencies = -e .[journald,pacman,reports,s3,shell,stats,unixsocket,validator,web,web_api-docs,web_auth,web_oauth2,web_metrics]
project_name = ahriman
[mypy]
flags = --implicit-reexport --strict --allow-untyped-decorators --allow-subclassing-any
[flags]
autopep8 = --max-line-length 120 -aa --in-place
bandit = --configfile .bandit.yml
manpage = --author "ahriman team" --author-email "" --description "ArcH linux ReposItory MANager" --manual-title "ArcH linux ReposItory MANager" --project-name ahriman --url https://github.com/arcan1s/ahriman
mypy = --implicit-reexport --strict --allow-untyped-decorators --allow-subclassing-any
pydeps = --no-config --cluster
pylint = --rcfile .pylint.toml
shtab = --prefix ahriman --prog ahriman ahriman.application.ahriman._parser
[pytest]
addopts = --cov=ahriman --cov-report=term-missing:skip-covered --no-cov-on-fail --cov-fail-under=100 --spec
@ -33,19 +39,17 @@ setenv =
CFLAGS="-Wno-unterminated-string-initialization"
MYPYPATH=src
commands =
autopep8 --exit-code --max-line-length 120 -aa -i -j 0 -r "src/{[tox]project_name}" "tests/{[tox]project_name}"
pylint --rcfile=.pylintrc "src/{[tox]project_name}"
bandit -c .bandit.yml -r "src/{[tox]project_name}"
bandit -c .bandit-test.yml -r "tests/{[tox]project_name}"
mypy {[mypy]flags} -p "{[tox]project_name}" --install-types --non-interactive
autopep8 {[flags]autopep8} --exit-code --jobs 0 --recursive "src/{[tox]project_name}" "tests/{[tox]project_name}"
pylint {[flags]pylint} "src/{[tox]project_name}"
bandit {[flags]bandit} --recursive "src/{[tox]project_name}"
bandit {[flags]bandit} --skip B101,B105,B106 --recursive "tests/{[tox]project_name}"
mypy {[flags]mypy} --install-types --non-interactive --package "{[tox]project_name}"
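# illustration only (not part of the change): with the [flags] section above, ini
# substitution expands these commands to, for example,
#   bandit --configfile .bandit.yml --recursive "src/ahriman"
#   bandit --configfile .bandit.yml --skip B101,B105,B106 --recursive "tests/ahriman"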
[testenv:docs]
description = Generate source files for documentation
allowlist_externals =
bash
find
mv
changedir = src
dependency_groups =
docs
depends =
@ -55,18 +59,18 @@ deps =
uv
pip_pre = true
setenv =
PYTHONPATH=src
SPHINX_APIDOC_OPTIONS=members,no-undoc-members,show-inheritance
commands =
bash -c 'shtab --shell bash --prefix ahriman --prog ahriman ahriman.application.ahriman._parser > ../package/share/bash-completion/completions/_ahriman'
bash -c 'shtab --shell zsh --prefix ahriman --prog ahriman ahriman.application.ahriman._parser > ../package/share/zsh/site-functions/_ahriman'
argparse-manpage --module ahriman.application.ahriman --function _parser --author "ahriman team" --project-name ahriman --author-email "" --url https://github.com/arcan1s/ahriman --output ../package/share/man/man1/ahriman.1
pydeps ahriman --no-output --show-dot --dot-output architecture.dot --no-config --cluster
mv architecture.dot ../docs/_static/architecture.dot
bash -c 'shtab {[flags]shtab} --shell bash > package/share/bash-completion/completions/_ahriman'
bash -c 'shtab {[flags]shtab} --shell zsh > package/share/zsh/site-functions/_ahriman'
argparse-manpage {[flags]manpage} --module ahriman.application.ahriman --function _parser --output ../package/share/man/man1/ahriman.1
pydeps {[flags]pydeps} --no-output --show-dot --dot-output {tox_root}{/}docs/_static/architecture.dot src/ahriman
# remove autogenerated modules rst files
find ../docs -type f -name "{[tox]project_name}*.rst" -delete
sphinx-apidoc -o ../docs .
find docs -type f -name "{[tox]project_name}*.rst" -delete
sphinx-apidoc --output-dir docs src
# compile list of dependencies for rtd.io
uv pip compile --group ../pyproject.toml:docs --extra s3 --extra validator --extra web --output-file ../docs/requirements.txt --quiet ../pyproject.toml
uv pip compile --group pyproject.toml:docs --extra s3 --extra validator --extra web --output-file docs/requirements.txt --quiet pyproject.toml
[testenv:html]
description = Generate html documentation
@ -77,7 +81,7 @@ deps =
pip_pre = true
recreate = true
commands =
sphinx-build -b html -a -j auto -W docs {envtmpdir}{/}html
sphinx-build --builder html --write-all --jobs auto --fail-on-warning docs {envtmpdir}{/}html
[testenv:publish]
description = Create and publish release to GitHub