mirror of
https://github.com/arcan1s/ahriman.git
synced 2025-07-22 10:19:57 +00:00
Compare commits
27 Commits
9a23f5c79d
...
2.19.0
Author | SHA1 | Date | |
---|---|---|---|
2b1b17a1a3 | |||
9e6705056a | |||
b3a3a81f70 | |||
3e5dbbd6cd | |||
f41e44895d | |||
765bbf486f | |||
a3c54afb82 | |||
7f223ecc0a | |||
7769a4a6e0 | |||
066d1b1dde | |||
1f22a27360 | |||
75682bc7be | |||
e5d824b03f | |||
8d0d597473 | |||
995b396360 | |||
7f813cf0c3 | |||
d4eb55ef95 | |||
09350e88ab | |||
2feaa14f46 | |||
9653fc4f4a | |||
bcd46c66e8 | |||
6ea56faede | |||
9e346530f2 | |||
d283dccc1e | |||
8a4e900ab9 | |||
fa6cf8ce36 | |||
a706fbb751 |
@ -1,6 +0,0 @@
|
||||
skips:
|
||||
- B101
|
||||
- B104
|
||||
- B105
|
||||
- B106
|
||||
- B404
|
16
.github/workflows/docker.yml
vendored
16
.github/workflows/docker.yml
vendored
@ -8,6 +8,10 @@ on:
|
||||
- '*'
|
||||
- '!*rc*'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
docker-image:
|
||||
|
||||
@ -17,18 +21,18 @@ jobs:
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- uses: docker/setup-qemu-action@v2
|
||||
- uses: docker/setup-qemu-action@v3
|
||||
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to docker hub
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to github container registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@ -36,7 +40,7 @@ jobs:
|
||||
|
||||
- name: Extract docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v3
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
arcan1s/ahriman
|
||||
@ -46,7 +50,7 @@ jobs:
|
||||
type=edge
|
||||
|
||||
- name: Build an image and push
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
file: docker/Dockerfile
|
||||
push: true
|
||||
|
10
.github/workflows/regress.yml
vendored
10
.github/workflows/regress.yml
vendored
@ -1,6 +1,12 @@
|
||||
name: Regress
|
||||
|
||||
on: workflow_dispatch
|
||||
on:
|
||||
schedule:
|
||||
- cron: 1 0 * * 0
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
run-regress-tests:
|
||||
@ -31,8 +37,6 @@ jobs:
|
||||
- repo:/var/lib/ahriman
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- run: pacman -Sy
|
||||
|
||||
- name: Init repository
|
||||
|
20
.github/workflows/release.yml
vendored
20
.github/workflows/release.yml
vendored
@ -5,13 +5,24 @@ on:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
make-release:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
container:
|
||||
image: archlinux:base
|
||||
options: -w /build
|
||||
volumes:
|
||||
- ${{ github.workspace }}:/build
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- run: pacman --noconfirm -Syu base-devel git python-tox
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Extract version
|
||||
id: version
|
||||
@ -24,18 +35,13 @@ jobs:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
filter: 'Release \d+\.\d+\.\d+'
|
||||
|
||||
- name: Install dependencies
|
||||
uses: ConorMacBride/install-package@v1.1.0
|
||||
with:
|
||||
apt: tox
|
||||
|
||||
- name: Create archive
|
||||
run: tox -e archive
|
||||
env:
|
||||
VERSION: ${{ steps.version.outputs.VERSION }}
|
||||
|
||||
- name: Publish release
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
body: |
|
||||
${{ steps.changelog.outputs.compareurl }}
|
||||
|
8
.github/workflows/setup.yml
vendored
8
.github/workflows/setup.yml
vendored
@ -7,6 +7,10 @@ on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
run-setup-minimal:
|
||||
@ -20,7 +24,7 @@ jobs:
|
||||
- ${{ github.workspace }}:/build
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup the minimal service in arch linux container
|
||||
run: .github/workflows/setup.sh minimal
|
||||
@ -36,7 +40,7 @@ jobs:
|
||||
options: --privileged -w /build
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup the service in arch linux container
|
||||
run: .github/workflows/setup.sh
|
||||
|
10
.github/workflows/tests.sh
vendored
10
.github/workflows/tests.sh
vendored
@ -1,10 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Install dependencies and run test in container
|
||||
|
||||
set -ex
|
||||
|
||||
# install dependencies
|
||||
pacman --noconfirm -Syyu base-devel python-tox
|
||||
|
||||
# run test and check targets
|
||||
tox
|
19
.github/workflows/tests.yml
vendored
19
.github/workflows/tests.yml
vendored
@ -9,6 +9,10 @@ on:
|
||||
- master
|
||||
schedule:
|
||||
- cron: 1 0 * * *
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
run-tests:
|
||||
@ -22,7 +26,16 @@ jobs:
|
||||
- ${{ github.workspace }}:/build
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- run: pacman --noconfirm -Syu base-devel git python-tox
|
||||
|
||||
- name: Run check and tests in arch linux container
|
||||
run: .github/workflows/tests.sh
|
||||
- run: git config --global --add safe.directory *
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Run check and tests
|
||||
run: tox
|
||||
|
||||
- name: Generate documentation and check if there are untracked changes
|
||||
run: |
|
||||
tox -e docs
|
||||
[ -z "$(git status --porcelain docs/*.rst)" ]
|
||||
|
45
.pylint.toml
Normal file
45
.pylint.toml
Normal file
@ -0,0 +1,45 @@
|
||||
[tool.pylint.main]
|
||||
init-hook = "sys.path.append('tools')"
|
||||
load-plugins = [
|
||||
"pylint.extensions.docparams",
|
||||
"pylint.extensions.bad_builtin",
|
||||
"pylint_plugins.definition_order",
|
||||
"pylint_plugins.import_order",
|
||||
]
|
||||
|
||||
[tool.pylint.classes]
|
||||
bad-functions = [
|
||||
"print",
|
||||
]
|
||||
|
||||
[tool.pylint.design]
|
||||
max-parents = 15
|
||||
|
||||
[tool.pylint."messages control"]
|
||||
disable = [
|
||||
"raw-checker-failed",
|
||||
"bad-inline-option",
|
||||
"locally-disabled",
|
||||
"file-ignored",
|
||||
"suppressed-message",
|
||||
"useless-suppression",
|
||||
"deprecated-pragma",
|
||||
"use-symbolic-message-instead",
|
||||
"use-implicit-booleaness-not-comparison-to-string",
|
||||
"use-implicit-booleaness-not-comparison-to-zero",
|
||||
"missing-module-docstring",
|
||||
"line-too-long",
|
||||
"no-name-in-module",
|
||||
"import-outside-toplevel",
|
||||
"invalid-name",
|
||||
"raise-missing-from",
|
||||
"wrong-import-order",
|
||||
"too-few-public-methods",
|
||||
"too-many-instance-attributes",
|
||||
"broad-exception-caught",
|
||||
"fixme",
|
||||
"too-many-arguments",
|
||||
"duplicate-code",
|
||||
"cyclic-import",
|
||||
"too-many-positional-arguments",
|
||||
]
|
651
.pylintrc
651
.pylintrc
@ -1,651 +0,0 @@
|
||||
[MAIN]
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
|
||||
# in a server-like mode.
|
||||
clear-cache-post-run=no
|
||||
|
||||
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||
# all available extensions.
|
||||
#enable-all-extensions=
|
||||
|
||||
# In error mode, messages with a category besides ERROR or FATAL are
|
||||
# suppressed, and no reports are done by default. Error mode is compatible with
|
||||
# disabling specific errors.
|
||||
#errors-only=
|
||||
|
||||
# Always return a 0 (non-error) status code, even if lint errors are found.
|
||||
# This is primarily useful in continuous integration scripts.
|
||||
#exit-zero=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-allow-list=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
|
||||
# for backward compatibility.)
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Return non-zero exit code if any of these messages/categories are detected,
|
||||
# even if score is above --fail-under value. Syntax same as enable. Messages
|
||||
# specified are enabled, while categories only check already-enabled messages.
|
||||
fail-on=
|
||||
|
||||
# Specify a score threshold under which the program will exit with error.
|
||||
fail-under=10
|
||||
|
||||
# Interpret the stdin as a python script, whose filename needs to be passed as
|
||||
# the module_or_package argument.
|
||||
#from-stdin=
|
||||
|
||||
# Files or directories to be skipped. They should be base names, not paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regular expressions patterns to the
|
||||
# ignore-list. The regex matches against paths and can be in Posix or Windows
|
||||
# format. Because '\\' represents the directory delimiter on Windows systems,
|
||||
# it can't be used as an escape character.
|
||||
ignore-paths=
|
||||
|
||||
# Files or directories matching the regular expression patterns are skipped.
|
||||
# The regex matches against base names, not paths. The default value ignores
|
||||
# Emacs file locks
|
||||
ignore-patterns=^\.#
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
init-hook='import sys; sys.path.append("pylint_plugins")'
|
||||
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use, and will cap the count on Windows to
|
||||
# avoid hangs.
|
||||
jobs=1
|
||||
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
# object. This can help the performance when dealing with large functions or
|
||||
# complex, nested conditions.
|
||||
limit-inference-results=100
|
||||
|
||||
# List of plugins (as comma separated values of python module names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=pylint.extensions.docparams,
|
||||
pylint.extensions.bad_builtin,
|
||||
definition_order,
|
||||
import_order,
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Minimum Python version to use for version dependent checks. Will default to
|
||||
# the version used to run pylint.
|
||||
py-version=3.11
|
||||
|
||||
# Discover python modules and packages in the file system subtree.
|
||||
recursive=no
|
||||
|
||||
# Add paths to the list of the source roots. Supports globbing patterns. The
|
||||
# source root is an absolute path or a path relative to the current working
|
||||
# directory used to determine a package namespace for modules located under the
|
||||
# source root.
|
||||
source-roots=
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
# In verbose mode, extra non-checker-related info will be displayed.
|
||||
#verbose=
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming style matching correct argument names.
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
bad-functions=print,
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma.
|
||||
bad-names=foo,
|
||||
bar,
|
||||
baz,
|
||||
toto,
|
||||
tutu,
|
||||
tata
|
||||
|
||||
# Bad variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be refused
|
||||
bad-names-rgxs=
|
||||
|
||||
# Naming style matching correct class attribute names.
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming style matching correct function names.
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=i,
|
||||
j,
|
||||
k,
|
||||
ex,
|
||||
Run,
|
||||
_
|
||||
|
||||
# Good variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be accepted
|
||||
good-names-rgxs=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name.
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming style matching correct inline iteration names.
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type alias names. If left empty, type
|
||||
# alias names will be checked with the set naming style.
|
||||
#typealias-rgx=
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
asyncSetUp,
|
||||
__post_init__
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# List of regular expressions of class ancestor names to ignore when counting
|
||||
# public methods (see R0903)
|
||||
exclude-too-few-public-methods=
|
||||
|
||||
# List of qualified class names to ignore when counting class parents (see
|
||||
# R0901)
|
||||
ignored-parents=
|
||||
|
||||
# Maximum number of arguments for function / method.
|
||||
max-args=5
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
|
||||
# Maximum number of boolean expressions in an if statement (see R0916).
|
||||
max-bool-expr=5
|
||||
|
||||
# Maximum number of branch for function / method body.
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of locals for function / method body.
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=15
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
# Maximum number of return / yield for function / method body.
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of statements in function / method body.
|
||||
max-statements=50
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when caught.
|
||||
overgeneral-exceptions=builtins.BaseException,builtins.Exception
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1000
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow explicit reexports by alias from a package __init__.
|
||||
allow-reexport-from-package=no
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of external dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||
# external) dependencies to the given file (report RP0402 must not be
|
||||
# disabled).
|
||||
import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||
# UNDEFINED.
|
||||
confidence=HIGH,
|
||||
CONTROL_FLOW,
|
||||
INFERENCE,
|
||||
INFERENCE_FAILURE,
|
||||
UNDEFINED
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then re-enable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
missing-module-docstring,
|
||||
line-too-long,
|
||||
no-name-in-module,
|
||||
import-outside-toplevel,
|
||||
invalid-name,
|
||||
raise-missing-from,
|
||||
wrong-import-order,
|
||||
too-few-public-methods,
|
||||
too-many-instance-attributes,
|
||||
broad-except,
|
||||
fixme,
|
||||
too-many-arguments,
|
||||
duplicate-code,
|
||||
cyclic-import,
|
||||
too-many-positional-arguments,
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[METHOD_ARGS]
|
||||
|
||||
# List of qualified names (i.e., library.method) which require a timeout
|
||||
# parameter e.g. 'requests.api.get,requests.api.post'
|
||||
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
notes-rgx=
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit,argparse.parse_error
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
|
||||
# 'convention', and 'info' which contain the number of messages in each
|
||||
# category, as well as 'statement' which is the total number of statements
|
||||
# analyzed. This score is used by the global evaluation report (RP0004).
|
||||
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
#output-format=
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Comments are removed from the similarity computation
|
||||
ignore-comments=yes
|
||||
|
||||
# Docstrings are removed from the similarity computation
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Imports are removed from the similarity computation
|
||||
ignore-imports=yes
|
||||
|
||||
# Signatures are removed from the similarity computation
|
||||
ignore-signatures=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. No available dictionaries : You need to install
|
||||
# both the python package and the system dependency for enchant to work..
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should be considered directives if they
|
||||
# appear at the beginning of a comment and should not be checked.
|
||||
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of symbolic message names to ignore for Mixin members.
|
||||
ignored-checks-for-mixins=no-member,
|
||||
not-async-context-manager,
|
||||
not-context-manager,
|
||||
attribute-defined-outside-init
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# Regex pattern to define which classes are considered mixins.
|
||||
mixin-class-rgx=.*[Mm]ixin
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of names allowed to shadow builtins
|
||||
allowed-redefined-builtins=
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
5
.pytest.ini
Normal file
5
.pytest.ini
Normal file
@ -0,0 +1,5 @@
|
||||
[pytest]
|
||||
addopts = --cov=ahriman --cov-report=term-missing:skip-covered --no-cov-on-fail --cov-fail-under=100 --spec
|
||||
asyncio_default_fixture_loop_scope = function
|
||||
asyncio_mode = auto
|
||||
spec_test_format = {result} {docstring_summary}
|
@ -9,13 +9,7 @@ build:
|
||||
|
||||
python:
|
||||
install:
|
||||
- method: pip
|
||||
path: .
|
||||
extra_requirements:
|
||||
- docs
|
||||
- s3
|
||||
- validator
|
||||
- web
|
||||
- requirements: docs/requirements.txt
|
||||
|
||||
formats:
|
||||
- pdf
|
||||
|
@ -40,6 +40,7 @@ RUN pacman -S --noconfirm --asdeps \
|
||||
pacman -S --noconfirm --asdeps \
|
||||
git \
|
||||
python-aiohttp \
|
||||
python-aiohttp-openmetrics \
|
||||
python-boto3 \
|
||||
python-cerberus \
|
||||
python-cryptography \
|
||||
@ -112,6 +113,7 @@ RUN pacman -S --noconfirm ahriman
|
||||
RUN pacman -S --noconfirm --asdeps \
|
||||
python-aioauth-client \
|
||||
python-aiohttp-apispec-git \
|
||||
python-aiohttp-openmetrics \
|
||||
python-aiohttp-security \
|
||||
python-aiohttp-session \
|
||||
python-boto3 \
|
||||
|
3661
docs/_static/architecture.dot
vendored
3661
docs/_static/architecture.dot
vendored
File diff suppressed because it is too large
Load Diff
@ -20,6 +20,14 @@ ahriman.web.middlewares.exception\_handler module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.middlewares.metrics\_handler module
|
||||
-----------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.middlewares.metrics_handler
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
@ -4,6 +4,14 @@ ahriman.web.schemas package
|
||||
Submodules
|
||||
----------
|
||||
|
||||
ahriman.web.schemas.any\_schema module
|
||||
--------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.schemas.any_schema
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.aur\_package\_schema module
|
||||
-----------------------------------------------
|
||||
|
||||
|
@ -12,6 +12,14 @@ ahriman.web.views.v1.status.info module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.views.v1.status.metrics module
|
||||
------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.views.v1.status.metrics
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.views.v1.status.repositories module
|
||||
-----------------------------------------------
|
||||
|
||||
|
@ -413,10 +413,11 @@ Web application
|
||||
Web application requires the following python packages to be installed:
|
||||
|
||||
* Core part requires ``aiohttp`` (application itself), ``aiohttp_jinja2`` and ``Jinja2`` (HTML generation from templates).
|
||||
* Additional web features also require ``aiohttp-apispec`` (autogenerated documentation), ``aiohttp_cors`` (CORS support, required by documentation).
|
||||
* Additional web features also require ``aiohttp-apispec`` (autogenerated documentation, optional), ``aiohttp_cors`` (CORS support, required by documentation).
|
||||
* In addition, authorization feature requires ``aiohttp_security``, ``aiohttp_session`` and ``cryptography``.
|
||||
* In addition to base authorization dependencies, OAuth2 also requires ``aioauth-client`` library.
|
||||
* In addition if you would like to disable authorization for local access (recommended way in order to run the application itself with reporting support), the ``requests-unixsocket2`` library is required.
|
||||
* Application metrics will be automatically enabled after installing ``aiohttp-openmetrics`` package.
|
||||
|
||||
Middlewares
|
||||
^^^^^^^^^^^
|
||||
|
12
docs/conf.py
12
docs/conf.py
@ -15,9 +15,8 @@ import sys
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from ahriman import __version__
|
||||
|
||||
|
||||
# support package imports
|
||||
basedir = Path(__file__).resolve().parent.parent / "src"
|
||||
sys.path.insert(0, str(basedir))
|
||||
|
||||
@ -29,6 +28,7 @@ copyright = f"2021-{datetime.date.today().year}, ahriman team"
|
||||
author = "ahriman team"
|
||||
|
||||
# The full version, including alpha/beta/rc tags
|
||||
from ahriman import __version__
|
||||
release = __version__
|
||||
|
||||
|
||||
@ -91,7 +91,13 @@ autoclass_content = "both"
|
||||
|
||||
autodoc_member_order = "groupwise"
|
||||
|
||||
autodoc_mock_imports = ["cryptography", "pyalpm"]
|
||||
autodoc_mock_imports = [
|
||||
"aioauth_client",
|
||||
"aiohttp_security",
|
||||
"aiohttp_session",
|
||||
"cryptography",
|
||||
"pyalpm",
|
||||
]
|
||||
|
||||
autodoc_default_options = {
|
||||
"no-undoc-members": True,
|
||||
|
@ -81,6 +81,7 @@ Base configuration settings.
|
||||
* ``apply_migrations`` - perform database migrations on the application start, boolean, optional, default ``yes``. Useful if you are using git version. Note, however, that this option must be changed only if you know what to do and going to handle migrations manually.
|
||||
* ``database`` - path to the application SQLite database, string, required.
|
||||
* ``include`` - path to directory with configuration files overrides, string, optional. Files will be read in alphabetical order.
|
||||
* ``keep_last_logs`` - amount of build logs to be kept for each package, integer, optional ,default ``0``. Logs will be cleared at the end of each process.
|
||||
* ``logging`` - path to logging configuration, string, required. Check ``logging.ini`` for reference.
|
||||
|
||||
``alpm:*`` groups
|
||||
@ -217,7 +218,7 @@ Mirrorlist generator plugin
|
||||
``remote-pull`` group
|
||||
---------------------
|
||||
|
||||
Remote git source synchronization settings. Unlike ``Upload`` triggers those triggers are used for PKGBUILD synchronization - fetch from remote repository PKGBUILDs before updating process.
|
||||
Remote git source synchronization settings. Unlike ``upload`` triggers those triggers are used for PKGBUILD synchronization - fetch from remote repository PKGBUILDs before updating process.
|
||||
|
||||
It supports authorization; to do so you'd need to prefix the URL with authorization part, e.g. ``https://key:token@github.com/arcan1s/ahriman.git``. It is highly recommended to use application tokens instead of your user authorization details. Alternatively, you can use any other option supported by git, e.g.:
|
||||
|
||||
|
@ -56,6 +56,13 @@ Though originally I've created ahriman by trying to improve the project, it stil
|
||||
|
||||
It is automation tools for ``repoctl`` mentioned above. Except for using shell it looks pretty cool and also offers some additional features like patches, remote synchronization (isn't it?) and reporting.
|
||||
|
||||
`AURCache <https://github.com/Lukas-Heiligenbrunner/AURCache>`__
|
||||
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
That's really cool project if you are looking for simple service to build AUR packages. It provides very informative dashboard and easy to configure and use. However, it doesn't provide direct way to control build process (e.g. it is neither trivial to build packages for architectures which are not supported by default nor to change build flags).
|
||||
|
||||
Also this application relies on docker setup (e.g. builders are only available as special docker containers). In addition, it uses ``paru`` to build packages instead of ``devtools``.
|
||||
|
||||
How to check service logs
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
|
128
docs/requirements.txt
Normal file
128
docs/requirements.txt
Normal file
@ -0,0 +1,128 @@
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --group pyproject.toml:docs --extra s3 --extra validator --extra web --output-file docs/requirements.txt pyproject.toml
|
||||
aiohappyeyeballs==2.6.1
|
||||
# via aiohttp
|
||||
aiohttp==3.11.18
|
||||
# via
|
||||
# ahriman (pyproject.toml)
|
||||
# aiohttp-cors
|
||||
# aiohttp-jinja2
|
||||
aiohttp-cors==0.8.1
|
||||
# via ahriman (pyproject.toml)
|
||||
aiohttp-jinja2==1.6
|
||||
# via ahriman (pyproject.toml)
|
||||
aiosignal==1.3.2
|
||||
# via aiohttp
|
||||
alabaster==1.0.0
|
||||
# via sphinx
|
||||
argparse-manpage==4.6
|
||||
# via ahriman (pyproject.toml:docs)
|
||||
attrs==25.3.0
|
||||
# via aiohttp
|
||||
babel==2.17.0
|
||||
# via sphinx
|
||||
bcrypt==4.3.0
|
||||
# via ahriman (pyproject.toml)
|
||||
boto3==1.38.11
|
||||
# via ahriman (pyproject.toml)
|
||||
botocore==1.38.11
|
||||
# via
|
||||
# boto3
|
||||
# s3transfer
|
||||
cerberus==1.3.7
|
||||
# via ahriman (pyproject.toml)
|
||||
certifi==2025.4.26
|
||||
# via requests
|
||||
charset-normalizer==3.4.2
|
||||
# via requests
|
||||
docutils==0.21.2
|
||||
# via
|
||||
# sphinx
|
||||
# sphinx-argparse
|
||||
# sphinx-rtd-theme
|
||||
frozenlist==1.6.0
|
||||
# via
|
||||
# aiohttp
|
||||
# aiosignal
|
||||
idna==3.10
|
||||
# via
|
||||
# requests
|
||||
# yarl
|
||||
imagesize==1.4.1
|
||||
# via sphinx
|
||||
inflection==0.5.1
|
||||
# via ahriman (pyproject.toml)
|
||||
jinja2==3.1.6
|
||||
# via
|
||||
# aiohttp-jinja2
|
||||
# sphinx
|
||||
jmespath==1.0.1
|
||||
# via
|
||||
# boto3
|
||||
# botocore
|
||||
markupsafe==3.0.2
|
||||
# via jinja2
|
||||
multidict==6.4.3
|
||||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
packaging==25.0
|
||||
# via sphinx
|
||||
propcache==0.3.1
|
||||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
pydeps==3.0.1
|
||||
# via ahriman (pyproject.toml:docs)
|
||||
pyelftools==0.32
|
||||
# via ahriman (pyproject.toml)
|
||||
pygments==2.19.1
|
||||
# via sphinx
|
||||
python-dateutil==2.9.0.post0
|
||||
# via botocore
|
||||
requests==2.32.3
|
||||
# via
|
||||
# ahriman (pyproject.toml)
|
||||
# sphinx
|
||||
roman-numerals-py==3.1.0
|
||||
# via sphinx
|
||||
s3transfer==0.12.0
|
||||
# via boto3
|
||||
shtab==1.7.2
|
||||
# via ahriman (pyproject.toml:docs)
|
||||
six==1.17.0
|
||||
# via python-dateutil
|
||||
snowballstemmer==3.0.0.1
|
||||
# via sphinx
|
||||
sphinx==8.2.3
|
||||
# via
|
||||
# ahriman (pyproject.toml:docs)
|
||||
# sphinx-argparse
|
||||
# sphinx-rtd-theme
|
||||
# sphinxcontrib-jquery
|
||||
sphinx-argparse==0.5.2
|
||||
# via ahriman (pyproject.toml:docs)
|
||||
sphinx-rtd-theme==3.0.2
|
||||
# via ahriman (pyproject.toml:docs)
|
||||
sphinxcontrib-applehelp==2.0.0
|
||||
# via sphinx
|
||||
sphinxcontrib-devhelp==2.0.0
|
||||
# via sphinx
|
||||
sphinxcontrib-htmlhelp==2.1.0
|
||||
# via sphinx
|
||||
sphinxcontrib-jquery==4.1
|
||||
# via sphinx-rtd-theme
|
||||
sphinxcontrib-jsmath==1.0.1
|
||||
# via sphinx
|
||||
sphinxcontrib-qthelp==2.0.0
|
||||
# via sphinx
|
||||
sphinxcontrib-serializinghtml==2.0.0
|
||||
# via sphinx
|
||||
stdlib-list==0.11.1
|
||||
# via pydeps
|
||||
urllib3==2.4.0
|
||||
# via
|
||||
# botocore
|
||||
# requests
|
||||
yarl==1.20.0
|
||||
# via aiohttp
|
@ -12,19 +12,22 @@ Initial setup
|
||||
|
||||
sudo ahriman -a x86_64 -r aur service-setup ...
|
||||
|
||||
``service-setup`` literally does the following steps:
|
||||
.. admonition:: Details
|
||||
:collapsible: closed
|
||||
|
||||
#.
|
||||
Create ``/var/lib/ahriman/.makepkg.conf`` with ``makepkg.conf`` overrides if required (at least you might want to set ``PACKAGER``):
|
||||
``service-setup`` literally does the following steps:
|
||||
|
||||
.. code-block:: shell
|
||||
#.
|
||||
Create ``/var/lib/ahriman/.makepkg.conf`` with ``makepkg.conf`` overrides if required (at least you might want to set ``PACKAGER``):
|
||||
|
||||
echo 'PACKAGER="ahriman bot <ahriman@example.com>"' | sudo -u ahriman tee -a /var/lib/ahriman/.makepkg.conf
|
||||
.. code-block:: shell
|
||||
|
||||
#.
|
||||
Configure build tools (it is required for correct dependency management system):
|
||||
echo 'PACKAGER="ahriman bot <ahriman@example.com>"' | sudo -u ahriman tee -a /var/lib/ahriman/.makepkg.conf
|
||||
|
||||
#.
|
||||
#.
|
||||
Configure build tools (it is required for correct dependency management system):
|
||||
|
||||
#.
|
||||
Create build command (you can choose any name for command, basically it should be ``{name}-{arch}-build``):
|
||||
|
||||
.. code-block:: shell
|
||||
@ -67,7 +70,7 @@ Initial setup
|
||||
echo 'ahriman ALL=(ALL) NOPASSWD:SETENV: CARCHBUILD_CMD' | tee -a /etc/sudoers.d/ahriman
|
||||
chmod 400 /etc/sudoers.d/ahriman
|
||||
|
||||
This command supports several arguments, kindly refer to its help message.
|
||||
This command supports several arguments, kindly refer to its help message.
|
||||
|
||||
#.
|
||||
Start and enable ``ahriman@.timer`` via ``systemctl``:
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
pkgbase='ahriman'
|
||||
pkgname=('ahriman' 'ahriman-core' 'ahriman-triggers' 'ahriman-web')
|
||||
pkgver=2.17.1
|
||||
pkgver=2.19.0
|
||||
pkgrel=1
|
||||
pkgdesc="ArcH linux ReposItory MANager"
|
||||
arch=('any')
|
||||
@ -75,6 +75,7 @@ package_ahriman-web() {
|
||||
depends=("$pkgbase-core=$pkgver" 'python-aiohttp-cors' 'python-aiohttp-jinja2')
|
||||
optdepends=('python-aioauth-client: OAuth2 authorization support'
|
||||
'python-aiohttp-apispec>=3.0.0: autogenerated API documentation'
|
||||
'python-aiohttp-openmetrics: HTTP metrics support'
|
||||
'python-aiohttp-security: authorization support'
|
||||
'python-aiohttp-session: authorization support'
|
||||
'python-cryptography: authorization support')
|
||||
|
@ -55,6 +55,11 @@
|
||||
<i class="bi bi-play"></i> update
|
||||
</button>
|
||||
</li>
|
||||
<li>
|
||||
<button id="update-repositories-button" class="btn dropdown-item" onclick="refreshDatabases()">
|
||||
<i class="bi bi-arrow-down-circle"></i> update pacman databases
|
||||
</button>
|
||||
</li>
|
||||
<li>
|
||||
<button id="package-rebuild-button" class="btn dropdown-item" data-bs-toggle="modal" data-bs-target="#package-rebuild-modal">
|
||||
<i class="bi bi-arrow-clockwise"></i> rebuild
|
||||
|
@ -24,6 +24,13 @@
|
||||
<datalist id="package-add-known-packages-dlist"></datalist>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label class="col-3 col-form-label"></label>
|
||||
<div class="col-9">
|
||||
<input id="package-add-refresh-input" type="checkbox" class="form-check-input" value="" checked>
|
||||
<label for="package-add-refresh-input" class="form-check-label">update pacman databases</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<div class="col-12">
|
||||
<button id="package-add-variable-button" type="button" class="form-control btn btn-light rounded" onclick="packageAddVariableInputCreate()"><i class="bi bi-plus"></i> add environment variable </button>
|
||||
@ -50,6 +57,8 @@
|
||||
|
||||
const packageAddVariablesDiv = document.getElementById("package-add-variables-div");
|
||||
|
||||
const packageAddRefreshInput = document.getElementById("package-add-refresh-input");
|
||||
|
||||
function packageAddVariableInputCreate() {
|
||||
const variableInput = document.createElement("div");
|
||||
variableInput.classList.add("input-group");
|
||||
@ -99,16 +108,18 @@
|
||||
return {patches: patches};
|
||||
}
|
||||
|
||||
function packagesAdd(packages, patches, repository) {
|
||||
function packagesAdd(packages, patches, repository, data) {
|
||||
packages = packages ?? packageAddInput.value;
|
||||
patches = patches ?? patchesParse();
|
||||
repository = repository ?? getRepositorySelector(packageAddRepositoryInput);
|
||||
data = data ?? {refresh: packageAddRefreshInput.checked};
|
||||
|
||||
if (packages) {
|
||||
bootstrap.Modal.getOrCreateInstance(packageAddModal).hide();
|
||||
const onSuccess = update => `Packages ${update} have been added`;
|
||||
const onFailure = error => `Package addition failed: ${error}`;
|
||||
doPackageAction("/api/v1/service/add", [packages], repository, onSuccess, onFailure, patches);
|
||||
const parameters = Object.assign({}, data, patches);
|
||||
doPackageAction("/api/v1/service/add", [packages], repository, onSuccess, onFailure, parameters);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -60,10 +60,13 @@
|
||||
<div class="tab-content" id="nav-tabContent">
|
||||
<div id="package-info-logs" class="tab-pane fade show active" role="tabpanel" aria-labelledby="package-info-logs-button" tabindex="0">
|
||||
<div class="row">
|
||||
<div class="col-2">
|
||||
<nav id="package-info-logs-versions" class="nav flex-column"></nav>
|
||||
<div class="col-1 dropend">
|
||||
<button id="package-info-logs-dropdown" class="btn dropdown-toggle" type="button" data-bs-toggle="dropdown" aria-expanded="false">
|
||||
<i class="bi bi-list"></i>
|
||||
</button>
|
||||
<nav id="package-info-logs-versions" class="dropdown-menu" aria-labelledby="package-info-logs-dropdown"></nav>
|
||||
</div>
|
||||
<div class="col-10">
|
||||
<div class="col-11">
|
||||
<pre class="language-console"><code id="package-info-logs-input" class="pre-scrollable language-console"></code><button id="package-info-logs-copy-button" type="button" class="btn language-console" onclick="copyLogs()"><i class="bi bi-clipboard"></i> copy</button></pre>
|
||||
</div>
|
||||
</div>
|
||||
@ -92,6 +95,9 @@
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
{% if not auth.enabled or auth.username is not none %}
|
||||
<input id="package-info-refresh-input" type="checkbox" class="form-check-input" value="" checked>
|
||||
<label for="package-info-refresh-input" class="form-check-label">update pacman databases</label>
|
||||
|
||||
<button id="package-info-update-button" type="submit" class="btn btn-success" onclick="packageInfoUpdate()" data-bs-dismiss="modal"><i class="bi bi-play"></i><span class="d-none d-sm-inline"> update</span></button>
|
||||
<button id="package-info-remove-button" type="submit" class="btn btn-danger" onclick="packageInfoRemove()" data-bs-dismiss="modal"><i class="bi bi-trash"></i><span class="d-none d-sm-inline"> remove</span></button>
|
||||
{% endif %}
|
||||
@ -132,6 +138,8 @@
|
||||
const packageInfoVariablesBlock = document.getElementById("package-info-variables-block");
|
||||
const packageInfoVariablesDiv = document.getElementById("package-info-variables-div");
|
||||
|
||||
const packageInfoRefreshInput = document.getElementById("package-info-refresh-input");
|
||||
|
||||
function clearChart() {
|
||||
packageInfoEventsUpdateChartCanvas.hidden = true;
|
||||
if (packageInfoEventsUpdateChart) {
|
||||
@ -309,9 +317,9 @@
|
||||
)
|
||||
.map(version => {
|
||||
const link = document.createElement("a");
|
||||
link.classList.add("nav-link");
|
||||
link.classList.add("dropdown-item");
|
||||
|
||||
link.textContent = version.version;
|
||||
link.textContent = new Date(1000 * version.created).toISOStringShort();
|
||||
link.href = "#";
|
||||
link.onclick = _ => {
|
||||
const logs = data
|
||||
@ -401,7 +409,7 @@
|
||||
|
||||
function packageInfoUpdate() {
|
||||
const packageBase = packageInfoModal.package;
|
||||
packagesAdd(packageBase, [], repository);
|
||||
packagesAdd(packageBase, [], repository, {refresh: packageInfoRefreshInput.checked});
|
||||
}
|
||||
|
||||
function showPackageInfo(packageBase) {
|
||||
|
@ -73,6 +73,19 @@
|
||||
doPackageAction(url, currentSelection, repository, onSuccess, onFailure);
|
||||
}
|
||||
|
||||
function refreshDatabases() {
|
||||
const onSuccess = _ => "Pacman database update has been requested";
|
||||
const onFailure = error => `Could not update pacman databases: ${error}`;
|
||||
const parameters = {
|
||||
refresh: true,
|
||||
aur: false,
|
||||
local: false,
|
||||
manual: false,
|
||||
};
|
||||
|
||||
doPackageAction("/api/v1/service/update", [], repository, onSuccess, onFailure, parameters);
|
||||
}
|
||||
|
||||
function reload() {
|
||||
table.bootstrapTable("showLoading");
|
||||
|
||||
|
@ -27,10 +27,4 @@
|
||||
top: 0;
|
||||
right: 5px;
|
||||
}
|
||||
|
||||
.nav-link.active {
|
||||
pointer-events: none;
|
||||
cursor: default;
|
||||
color: black !important;
|
||||
}
|
||||
</style>
|
||||
|
@ -635,6 +635,7 @@ _set_new_action() {
|
||||
# ${!x} -> ${hello} -> "world"
|
||||
_shtab_ahriman() {
|
||||
local completing_word="${COMP_WORDS[COMP_CWORD]}"
|
||||
local previous_word="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local completed_positional_actions
|
||||
local current_action
|
||||
local current_action_args_start_index
|
||||
@ -691,6 +692,10 @@ _shtab_ahriman() {
|
||||
if [[ $pos_only = 0 && "${completing_word}" == -* ]]; then
|
||||
# optional argument started: use option strings
|
||||
COMPREPLY=( $(compgen -W "${current_option_strings[*]}" -- "${completing_word}") )
|
||||
elif [[ "${previous_word}" == ">" || "${previous_word}" == ">>" ||
|
||||
"${previous_word}" =~ ^[12]">" || "${previous_word}" =~ ^[12]">>" ]]; then
|
||||
# handle redirection operators
|
||||
COMPREPLY=( $(compgen -f -- "${completing_word}") )
|
||||
else
|
||||
# use choices & compgen
|
||||
local IFS=$'\n' # items may contain spaces, so delimit using newline
|
||||
|
@ -1,6 +1,6 @@
|
||||
.TH AHRIMAN "1" "2025\-01\-05" "ahriman" "Generated Python Manual"
|
||||
.TH AHRIMAN "1" "2025\-06\-29" "ahriman 2.19.0" "ArcH linux ReposItory MANager"
|
||||
.SH NAME
|
||||
ahriman
|
||||
ahriman \- ArcH linux ReposItory MANager
|
||||
.SH SYNOPSIS
|
||||
.B ahriman
|
||||
[-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--log-handler {console,syslog,journald}] [-q] [--report | --no-report] [-r REPOSITORY] [--unsafe] [-V] [--wait-timeout WAIT_TIMEOUT] {add,aur-search,check,clean,config,config-validate,copy,daemon,help,help-commands-unsafe,help-updates,help-version,init,key-import,package-add,package-changes,package-changes-remove,package-copy,package-remove,package-status,package-status-remove,package-status-update,package-update,patch-add,patch-list,patch-remove,patch-set-add,rebuild,remove,remove-unknown,repo-backup,repo-check,repo-clean,repo-config,repo-config-validate,repo-create-keyring,repo-create-mirrorlist,repo-daemon,repo-init,repo-rebuild,repo-remove-unknown,repo-report,repo-restore,repo-setup,repo-sign,repo-statistics,repo-status-update,repo-sync,repo-tree,repo-triggers,repo-update,report,run,search,service-clean,service-config,service-config-validate,service-key-import,service-repositories,service-run,service-setup,service-shell,service-tree-migrate,setup,shell,sign,status,status-update,sync,update,user-add,user-list,user-remove,version,web} ...
|
||||
|
@ -25,15 +25,68 @@ dependencies = [
|
||||
|
||||
dynamic = ["version"]
|
||||
|
||||
[project.optional-dependencies]
|
||||
journald = [
|
||||
"systemd-python",
|
||||
]
|
||||
# FIXME technically this dependency is required, but in some cases we do not have access to
|
||||
# the libalpm which is required in order to install the package. Thus in case if we do not
|
||||
# really need to run the application we can move it to "optional" dependencies
|
||||
pacman = [
|
||||
"pyalpm",
|
||||
]
|
||||
reports = [
|
||||
"Jinja2",
|
||||
]
|
||||
s3 = [
|
||||
"boto3",
|
||||
]
|
||||
shell = [
|
||||
"IPython"
|
||||
]
|
||||
stats = [
|
||||
"matplotlib",
|
||||
]
|
||||
unixsocket = [
|
||||
"requests-unixsocket2", # required by unix socket support
|
||||
]
|
||||
validator = [
|
||||
"cerberus",
|
||||
]
|
||||
web = [
|
||||
"aiohttp",
|
||||
"aiohttp_cors",
|
||||
"aiohttp_jinja2",
|
||||
]
|
||||
web-auth = [
|
||||
"ahriman[web]",
|
||||
"aiohttp_session",
|
||||
"aiohttp_security",
|
||||
"cryptography",
|
||||
]
|
||||
web-docs = [
|
||||
"ahriman[web]",
|
||||
"aiohttp-apispec",
|
||||
"setuptools", # required by aiohttp-apispec
|
||||
]
|
||||
web-metrics = [
|
||||
"ahriman[web]",
|
||||
"aiohttp-openmetrics",
|
||||
]
|
||||
web-oauth2 = [
|
||||
"ahriman[web-auth]",
|
||||
"aioauth-client",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
ahriman = "ahriman.application.ahriman:run"
|
||||
|
||||
[project.urls]
|
||||
Documentation = "https://ahriman.readthedocs.io/"
|
||||
Repository = "https://github.com/arcan1s/ahriman"
|
||||
Changelog = "https://github.com/arcan1s/ahriman/releases"
|
||||
|
||||
[project.scripts]
|
||||
ahriman = "ahriman.application.ahriman:run"
|
||||
|
||||
[project.optional-dependencies]
|
||||
[dependency-groups]
|
||||
check = [
|
||||
"autopep8",
|
||||
"bandit",
|
||||
@ -47,24 +100,6 @@ docs = [
|
||||
"shtab",
|
||||
"sphinx-argparse",
|
||||
"sphinx-rtd-theme>=1.1.1", # https://stackoverflow.com/a/74355734
|
||||
]
|
||||
journald = [
|
||||
"systemd-python",
|
||||
]
|
||||
# FIXME technically this dependency is required, but in some cases we do not have access to
|
||||
# the libalpm which is required in order to install the package. Thus in case if we do not
|
||||
# really need to run the application we can move it to "optional" dependencies
|
||||
pacman = [
|
||||
"pyalpm",
|
||||
]
|
||||
s3 = [
|
||||
"boto3",
|
||||
]
|
||||
shell = [
|
||||
"IPython"
|
||||
]
|
||||
stats = [
|
||||
"matplotlib",
|
||||
]
|
||||
tests = [
|
||||
"pytest",
|
||||
@ -75,22 +110,6 @@ tests = [
|
||||
"pytest-resource-path",
|
||||
"pytest-spec",
|
||||
]
|
||||
validator = [
|
||||
"cerberus",
|
||||
]
|
||||
web = [
|
||||
"Jinja2",
|
||||
"aioauth-client",
|
||||
"aiohttp",
|
||||
"aiohttp-apispec",
|
||||
"aiohttp_cors",
|
||||
"aiohttp_jinja2",
|
||||
"aiohttp_session",
|
||||
"aiohttp_security",
|
||||
"cryptography",
|
||||
"requests-unixsocket2", # required by unix socket support
|
||||
"setuptools", # required by aiohttp-apispec
|
||||
]
|
||||
|
||||
[tool.flit.sdist]
|
||||
include = [
|
||||
|
@ -17,4 +17,4 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__version__ = "2.17.1"
__version__ = "2.19.0"
@ -117,7 +117,7 @@ class Application(ApplicationPackages, ApplicationRepository):
|
||||
|
||||
Args:
|
||||
packages(list[Package]): list of source packages of which dependencies have to be processed
|
||||
process_dependencies(bool): if no set, dependencies will not be processed
|
||||
process_dependencies(bool): if set to ``False``, dependencies will not be processed
|
||||
|
||||
Returns:
|
||||
list[Package]: updated packages list. Packager for dependencies will be copied from the original package
|
||||
@ -130,37 +130,47 @@ class Application(ApplicationPackages, ApplicationRepository):
|
||||
>>> packages = application.with_dependencies(packages, process_dependencies=True)
|
||||
>>> application.print_updates(packages, log_fn=print)
|
||||
"""
|
||||
def missing_dependencies(source: Iterable[Package]) -> dict[str, str | None]:
|
||||
# append list of known packages with packages which are in current sources
|
||||
satisfied_packages = known_packages | {
|
||||
single
|
||||
for package in source
|
||||
for single in package.packages_full
|
||||
}
|
||||
|
||||
return {
|
||||
dependency: package.packager
|
||||
for package in source
|
||||
for dependency in package.depends_build
|
||||
if dependency not in satisfied_packages
|
||||
}
|
||||
|
||||
if not process_dependencies or not packages:
|
||||
return packages
|
||||
|
||||
def missing_dependencies(sources: Iterable[Package]) -> dict[str, str | None]:
|
||||
# append list of known packages with packages which are in current sources
|
||||
satisfied_packages = known_packages | {
|
||||
single
|
||||
for source in sources
|
||||
for single in source.packages_full
|
||||
}
|
||||
|
||||
return {
|
||||
dependency: source.packager
|
||||
for source in sources
|
||||
for dependency in source.depends_build
|
||||
if dependency not in satisfied_packages
|
||||
}
|
||||
|
||||
def new_packages(root: Package) -> dict[str, Package]:
|
||||
portion = {root.base: root}
|
||||
while missing := missing_dependencies(portion.values()):
|
||||
for package_name, packager in missing.items():
|
||||
if (source_dir := self.repository.paths.cache_for(package_name)).is_dir():
|
||||
# there is local cache, load package from it
|
||||
leaf = Package.from_build(source_dir, self.repository.architecture, packager)
|
||||
else:
|
||||
leaf = Package.from_aur(package_name, packager, include_provides=True)
|
||||
portion[leaf.base] = leaf
|
||||
|
||||
# register package in the database
|
||||
self.repository.reporter.set_unknown(leaf)
|
||||
|
||||
return portion
|
||||
|
||||
known_packages = self._known_packages()
|
||||
with_dependencies = {package.base: package for package in packages}
|
||||
|
||||
while missing := missing_dependencies(with_dependencies.values()):
|
||||
for package_name, username in missing.items():
|
||||
if (source_dir := self.repository.paths.cache_for(package_name)).is_dir():
|
||||
# there is local cache, load package from it
|
||||
package = Package.from_build(source_dir, self.repository.architecture, username)
|
||||
else:
|
||||
package = Package.from_aur(package_name, username)
|
||||
with_dependencies[package.base] = package
|
||||
|
||||
# register package in the database
|
||||
self.repository.reporter.set_unknown(package)
|
||||
with_dependencies: dict[str, Package] = {}
|
||||
for package in packages:
|
||||
with self.in_package_context(package.base, package.version): # use the same context for the logger
|
||||
try:
|
||||
with_dependencies |= new_packages(package)
|
||||
except Exception:
|
||||
self.logger.exception("could not process dependencies of %s, skip the package", package.base)
|
||||
|
||||
return list(with_dependencies.values())
|
||||
|
@ -53,7 +53,7 @@ class Handler:
Wrapper for all command line actions, though each derived class implements :func:`run()` method, it usually
must not be called directly. The recommended way is to call :func:`execute()` class method, e.g.::

>>> from ahriman.application.handlers import Add
>>> from ahriman.application.handlers.add import Add
>>>
>>> Add.execute(args)
"""
@ -255,3 +255,19 @@ class Pacman(LazyLogging):
result.update(trim_package(provides) for provides in package.provides)

return result

def provided_by(self, package_name: str) -> Generator[Package, None, None]:
"""
search through databases and emit packages which provide the ``package_name``

Args:
package_name(str): package name to search

Yields:
Package: list of packages which were returned by the query
"""
def is_package_provided(package: Package) -> bool:
return package_name in package.provides

for database in self.handle.get_syncdbs():
yield from filter(is_package_provided, database.search(package_name))
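For illustration, the new lookup could be used roughly like below; this is a minimal sketch, assuming an already configured Pacman instance, and the virtual package name is made up:

# sketch only: list repository packages that provide a virtual package
for candidate in pacman.provided_by("java-runtime"):
    print(candidate.name)  # every database entry whose provides list contains "java-runtime"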
@ -97,20 +97,17 @@ class AUR(Remote):
|
||||
|
||||
Returns:
|
||||
list[AURPackage]: response parsed to package list
|
||||
|
||||
Raises:
|
||||
PackageInfoError: if multiple arguments are passed
|
||||
"""
|
||||
query: list[tuple[str, str]] = [
|
||||
("type", request_type),
|
||||
("v", self.DEFAULT_RPC_VERSION),
|
||||
]
|
||||
if len(args) != 1:
|
||||
raise PackageInfoError("AUR API requires exactly one argument to search")
|
||||
|
||||
arg_query = "arg[]" if len(args) > 1 else "arg"
|
||||
for arg in args:
|
||||
query.append((arg_query, arg))
|
||||
url = f"{self.DEFAULT_RPC_URL}/v{self.DEFAULT_RPC_VERSION}/{request_type}/{args[0]}"
|
||||
query = list(kwargs.items())
|
||||
|
||||
for key, value in kwargs.items():
|
||||
query.append((key, value))
|
||||
|
||||
response = self.make_request("GET", self.DEFAULT_RPC_URL, params=query)
|
||||
response = self.make_request("GET", url, params=query)
|
||||
return self.parse_response(response.json())
|
||||
|
||||
def package_info(self, package_name: str, *, pacman: Pacman | None) -> AURPackage:
|
||||
@ -133,15 +130,36 @@ class AUR(Remote):
|
||||
except StopIteration:
|
||||
raise UnknownPackageError(package_name) from None
|
||||
|
||||
def package_search(self, *keywords: str, pacman: Pacman | None) -> list[AURPackage]:
|
||||
def package_provided_by(self, package_name: str, *, pacman: Pacman | None) -> list[AURPackage]:
|
||||
"""
|
||||
get package list which provide the specified package name
|
||||
|
||||
Args:
|
||||
package_name(str): package name to search
|
||||
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||
|
||||
Returns:
|
||||
list[AURPackage]: list of packages which match the criteria
|
||||
"""
|
||||
return [
|
||||
package
|
||||
# search api provides reduced models
|
||||
for stub in self.package_search(package_name, pacman=pacman, search_by="provides")
|
||||
# verify that the found package actually provides it
|
||||
if package_name in (package := self.package_info(stub.package_base, pacman=pacman)).provides
|
||||
]
|
||||
|
||||
def package_search(self, *keywords: str, pacman: Pacman | None, search_by: str | None) -> list[AURPackage]:
|
||||
"""
|
||||
search package in AUR web
|
||||
|
||||
Args:
|
||||
*keywords(str): keywords to search
|
||||
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||
search_by(str | None): search by keywords
|
||||
|
||||
Returns:
|
||||
list[AURPackage]: list of packages which match the criteria
|
||||
"""
|
||||
return self.aur_request("search", *keywords, by="name-desc")
|
||||
search_by = search_by or "name-desc"
|
||||
return self.aur_request("search", *keywords, by=search_by)
|
||||
|
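Taken together, the rewritten request path and the new ``search_by`` parameter allow a provides lookup to be spelled out as below. This is a rough sketch, not an excerpt from the change: the package name is made up and the import path is assumed.

from ahriman.core.alpm.remote import AUR

aur = AUR()
stubs = aur.package_search("java-environment", pacman=None, search_by="provides")  # reduced models from search API
full = (aur.package_info(stub.package_base, pacman=None) for stub in stubs)         # full info per package base
matches = [package for package in full if "java-environment" in package.provides]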
@ -127,15 +127,17 @@ class Official(Remote):
|
||||
except StopIteration:
|
||||
raise UnknownPackageError(package_name) from None
|
||||
|
||||
def package_search(self, *keywords: str, pacman: Pacman | None) -> list[AURPackage]:
|
||||
def package_search(self, *keywords: str, pacman: Pacman | None, search_by: str | None) -> list[AURPackage]:
|
||||
"""
|
||||
search package in AUR web
|
||||
|
||||
Args:
|
||||
*keywords(str): keywords to search
|
||||
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||
search_by(str | None): search by keywords
|
||||
|
||||
Returns:
|
||||
list[AURPackage]: list of packages which match the criteria
|
||||
"""
|
||||
return self.arch_request(*keywords, by="q")
|
||||
search_by = search_by or "q"
|
||||
return self.arch_request(*keywords, by=search_by)
|
||||
|
@ -59,3 +59,22 @@ class OfficialSyncdb(Official):
|
||||
return next(AURPackage.from_pacman(package) for package in pacman.package(package_name))
|
||||
except StopIteration:
|
||||
raise UnknownPackageError(package_name) from None
|
||||
|
||||
def package_provided_by(self, package_name: str, *, pacman: Pacman | None) -> list[AURPackage]:
|
||||
"""
|
||||
get package list which provide the specified package name
|
||||
|
||||
Args:
|
||||
package_name(str): package name to search
|
||||
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||
|
||||
Returns:
|
||||
list[AURPackage]: list of packages which match the criteria
|
||||
"""
|
||||
if pacman is None:
|
||||
return []
|
||||
|
||||
return [
|
||||
AURPackage.from_pacman(package)
|
||||
for package in pacman.provided_by(package_name)
|
||||
]
|
||||
|
@ -18,6 +18,7 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from ahriman.core.alpm.pacman import Pacman
|
||||
from ahriman.core.exceptions import UnknownPackageError
|
||||
from ahriman.core.http import SyncHttpClient
|
||||
from ahriman.models.aur_package import AURPackage
|
||||
|
||||
@ -35,28 +36,42 @@ class Remote(SyncHttpClient):
|
||||
>>> package = AUR.info("ahriman")
|
||||
>>> search_result = Official.multisearch("pacman", "manager", pacman=pacman)
|
||||
|
||||
Differnece between :func:`search()` and :func:`multisearch()` is that :func:`search()` passes all arguments to
|
||||
Difference between :func:`search()` and :func:`multisearch()` is that :func:`search()` passes all arguments to
|
||||
underlying wrapper directly, whereas :func:`multisearch()` splits search one by one and finds intersection
|
||||
between search results.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def info(cls, package_name: str, *, pacman: Pacman | None = None) -> AURPackage:
|
||||
def info(cls, package_name: str, *, pacman: Pacman | None = None, include_provides: bool = False) -> AURPackage:
|
||||
"""
|
||||
get package info by its name
|
||||
get package info by its name. If ``include_provides`` is set to ``True``, then, in addition, this method
|
||||
will perform search by :attr:`ahriman.models.aur_package.AURPackage.provides` and return first package found.
|
||||
Note, however, that in this case some implementations might not provide this method and the search result might
not be stable
|
||||
|
||||
Args:
|
||||
package_name(str): package name to search
|
||||
pacman(Pacman | None, optional): alpm wrapper instance, required for official repositories search
|
||||
(Default value = None)
|
||||
include_provides(bool, optional): search by provides if no exact match found (Default value = False)
|
||||
|
||||
Returns:
|
||||
AURPackage: package which match the package name
|
||||
|
||||
Raises:
|
||||
UnknownPackageError: if requested package not found
|
||||
"""
|
||||
return cls().package_info(package_name, pacman=pacman)
|
||||
instance = cls()
|
||||
try:
|
||||
return instance.package_info(package_name, pacman=pacman)
|
||||
except UnknownPackageError:
|
||||
if include_provides and (provided_by := instance.package_provided_by(package_name, pacman=pacman)):
|
||||
return next(iter(provided_by))
|
||||
raise
|
||||
|
||||
@classmethod
|
||||
def multisearch(cls, *keywords: str, pacman: Pacman | None = None) -> list[AURPackage]:
|
||||
def multisearch(cls, *keywords: str, pacman: Pacman | None = None,
|
||||
search_by: str | None = None) -> list[AURPackage]:
|
||||
"""
|
||||
search in remote repository by using API with multiple words. This method is required in order to handle
|
||||
https://bugs.archlinux.org/task/49133. In addition, short words will be dropped
|
||||
@ -65,6 +80,7 @@ class Remote(SyncHttpClient):
|
||||
*keywords(str): search terms, e.g. "ahriman", "is", "cool"
|
||||
pacman(Pacman | None, optional): alpm wrapper instance, required for official repositories search
|
||||
(Default value = None)
|
||||
search_by(str | None, optional): search by keywords (Default value = None)
|
||||
|
||||
Returns:
|
||||
list[AURPackage]: list of packages each of them matches all search terms
|
||||
@ -72,7 +88,7 @@ class Remote(SyncHttpClient):
|
||||
instance = cls()
|
||||
packages: dict[str, AURPackage] = {}
|
||||
for term in filter(lambda word: len(word) >= 3, keywords):
|
||||
portion = instance.search(term, pacman=pacman)
|
||||
portion = instance.package_search(term, pacman=pacman, search_by=search_by)
|
||||
packages = {
|
||||
package.name: package # not mistake to group them by name
|
||||
for package in portion
|
||||
@ -114,7 +130,7 @@ class Remote(SyncHttpClient):
|
||||
raise NotImplementedError
|
||||
|
||||
@classmethod
|
||||
def search(cls, *keywords: str, pacman: Pacman | None = None) -> list[AURPackage]:
|
||||
def search(cls, *keywords: str, pacman: Pacman | None = None, search_by: str | None = None) -> list[AURPackage]:
|
||||
"""
|
||||
search package in AUR web
|
||||
|
||||
@ -122,11 +138,12 @@ class Remote(SyncHttpClient):
|
||||
*keywords(str): search terms, e.g. "ahriman", "is", "cool"
|
||||
pacman(Pacman | None, optional): alpm wrapper instance, required for official repositories search
|
||||
(Default value = None)
|
||||
search_by(str | None, optional): search by keywords (Default value = None)
|
||||
|
||||
Returns:
|
||||
list[AURPackage]: list of packages which match the criteria
|
||||
"""
|
||||
return cls().package_search(*keywords, pacman=pacman)
|
||||
return cls().package_search(*keywords, pacman=pacman, search_by=search_by)
|
||||
|
||||
def package_info(self, package_name: str, *, pacman: Pacman | None) -> AURPackage:
|
||||
"""
|
||||
@ -144,13 +161,28 @@ class Remote(SyncHttpClient):
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def package_search(self, *keywords: str, pacman: Pacman | None) -> list[AURPackage]:
|
||||
def package_provided_by(self, package_name: str, *, pacman: Pacman | None) -> list[AURPackage]:
|
||||
"""
|
||||
get package list which provide the specified package name
|
||||
|
||||
Args:
|
||||
package_name(str): package name to search
|
||||
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||
|
||||
Returns:
|
||||
list[AURPackage]: list of packages which match the criteria
|
||||
"""
|
||||
del package_name, pacman
|
||||
return []
|
||||
|
||||
def package_search(self, *keywords: str, pacman: Pacman | None, search_by: str | None) -> list[AURPackage]:
|
||||
"""
|
||||
search package in AUR web
|
||||
|
||||
Args:
|
||||
*keywords(str): keywords to search
|
||||
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||
search_by(str | None): search by keywords
|
||||
|
||||
Returns:
|
||||
list[AURPackage]: list of packages which match the criteria
|
||||
|
@ -210,6 +210,17 @@ class Configuration(configparser.RawConfigParser):
raise InitializeError("Configuration path and/or repository id are not set")
return self.path, self.repository_id

def copy_from(self, configuration: Self) -> None:
"""
copy values from another instance overriding existing

Args:
configuration(Self): configuration instance to merge from
"""
for section in configuration.sections():
for key, value in configuration.items(section):
self.set_option(section, key, value)

def dump(self) -> dict[str, dict[str, str]]:
"""
dump configuration to dictionary
@ -220,6 +231,7 @@ class Configuration(configparser.RawConfigParser):
return {
section: dict(self.items(section))
for section in self.sections()
if self[section]
}

# pylint and mypy are too stupid to find these methods
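A minimal sketch of what ``copy_from`` does, assuming the default constructor is sufficient for throwaway instances; the section and option names are made up:

from ahriman.core.configuration import Configuration

source = Configuration()
source.set_option("build", "workers", "4")

target = Configuration()
target.set_option("build", "workers", "1")
target.copy_from(source)  # existing values are overridden by the merged instance
assert target.get("build", "workers") == "4"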
@ -57,10 +57,6 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"path_exists": True,
|
||||
"path_type": "file",
|
||||
},
|
||||
"suppress_http_log_errors": {
|
||||
"type": "boolean",
|
||||
"coerce": "boolean",
|
||||
}
|
||||
},
|
||||
},
|
||||
"alpm": {
|
||||
@ -347,10 +343,6 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"empty": False,
|
||||
},
|
||||
"port": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
@ -379,11 +371,6 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
},
|
||||
"empty": False,
|
||||
},
|
||||
"timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
"unix_socket": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
@ -392,10 +379,6 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"type": "boolean",
|
||||
"coerce": "boolean",
|
||||
},
|
||||
"username": {
|
||||
"type": "string",
|
||||
"empty": False,
|
||||
},
|
||||
"wait_timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
|
@ -23,7 +23,7 @@ import sys
|
||||
|
||||
from collections.abc import Generator, Mapping, MutableMapping
|
||||
from string import Template
|
||||
from typing import ClassVar
|
||||
from typing import Any, ClassVar
|
||||
|
||||
from ahriman.core.configuration.shell_template import ShellTemplate
|
||||
|
||||
@ -85,7 +85,7 @@ class ShellInterpolator(configparser.Interpolation):
|
||||
"prefix": sys.prefix,
|
||||
}
|
||||
|
||||
def before_get(self, parser: MutableMapping[str, Mapping[str, str]], section: str, option: str, value: str,
|
||||
def before_get(self, parser: MutableMapping[str, Mapping[str, str]], section: Any, option: Any, value: str,
|
||||
defaults: Mapping[str, str]) -> str:
|
||||
"""
|
||||
interpolate option value
|
||||
@ -100,8 +100,8 @@ class ShellInterpolator(configparser.Interpolation):
|
||||
|
||||
Args:
|
||||
parser(MutableMapping[str, Mapping[str, str]]): option parser
|
||||
section(str): section name
|
||||
option(str): option name
|
||||
section(Any): section name
|
||||
option(Any): option name
|
||||
value(str): source (not-converted) value
|
||||
defaults(Mapping[str, str]): default values
|
||||
|
||||
|
@ -153,10 +153,13 @@ class LogsOperations(Operations):
"""
delete from logs
where (package_base, repository, process_id) in (
select package_base, repository, process_id from logs
where repository = :repository
group by package_base, repository, process_id
order by min(created) desc limit -1 offset :offset
select package_base, repository, process_id from (
select package_base, repository, process_id, row_number() over (partition by package_base order by max(created) desc) as rn
from logs
where repository = :repository
group by package_base, repository, process_id
)
where rn > :offset
)
""",
{
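The rewritten statement keeps the newest ``:offset`` processes per package instead of applying a single global limit. A self-contained illustration of the effect (sample rows are made up; requires an SQLite build with window function support, which ships with any recent Python):

import sqlite3

connection = sqlite3.connect(":memory:")
connection.execute("create table logs (package_base text, repository text, process_id text, created real, record text)")
connection.executemany(
    "insert into logs values (?, ?, ?, ?, ?)",
    [
        ("ahriman", "x86_64", "p1", 1.0, "old run"),
        ("ahriman", "x86_64", "p2", 2.0, "new run"),
        ("linux", "x86_64", "p3", 1.0, "old run"),
        ("linux", "x86_64", "p4", 2.0, "new run"),
    ],
)
connection.execute("""
    delete from logs
    where (package_base, repository, process_id) in (
        select package_base, repository, process_id from (
            select package_base, repository, process_id, row_number() over (partition by package_base order by max(created) desc) as rn
            from logs
            where repository = :repository
            group by package_base, repository, process_id
        )
        where rn > :offset
    )
""", {"repository": "x86_64", "offset": 1})
# only the newest process per package survives
print(sorted(row[0] for row in connection.execute("select distinct process_id from logs")))  # ['p2', 'p4']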
@ -17,6 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import contextlib
import sqlite3

from collections.abc import Callable
@ -87,10 +88,12 @@ class Operations(LazyLogging):
Returns:
T: result of the ``query`` call
"""
with sqlite3.connect(self.path, detect_types=sqlite3.PARSE_DECLTYPES) as connection:
with contextlib.closing(sqlite3.connect(self.path, detect_types=sqlite3.PARSE_DECLTYPES)) as connection:
connection.set_trace_callback(self.logger.debug)
connection.row_factory = self.factory

result = query(connection)
if commit:
connection.commit()

return result
@ -95,19 +95,6 @@ class DuplicateRunError(RuntimeError):
|
||||
self, "Another application instance is run. This error can be suppressed by using --force flag.")
|
||||
|
||||
|
||||
class EncodeError(ValueError):
|
||||
"""
|
||||
exception used for bytes encoding errors
|
||||
"""
|
||||
|
||||
def __init__(self, encodings: list[str]) -> None:
|
||||
"""
|
||||
Args:
|
||||
encodings(list[str]): list of encodings tried
|
||||
"""
|
||||
ValueError.__init__(self, f"Could not encode bytes by using {encodings}")
|
||||
|
||||
|
||||
class ExitCode(RuntimeError):
|
||||
"""
|
||||
special exception which has to be thrown to return non-zero status without error message
|
||||
|
@ -18,6 +18,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import requests
import sys

from functools import cached_property
from typing import Any, IO, Literal
@ -70,7 +71,10 @@ class SyncHttpClient(LazyLogging):
request.Session: created session object
"""
session = requests.Session()
session.headers["User-Agent"] = f"ahriman/{__version__}"
python_version = ".".join(map(str, sys.version_info[:3]))  # just major.minor.patch
session.headers["User-Agent"] = f"ahriman/{__version__}" \
f"{requests.utils.default_user_agent()}" \
f"python/{python_version}"

return session
@ -40,7 +40,7 @@ class JinjaTemplate:
|
||||
|
||||
* homepage - link to homepage, string, optional
|
||||
* last_update - report generation time, pretty printed datetime, required
|
||||
* link_path - prefix fo packages to download, string, required
|
||||
* link_path - prefix of packages to download, string, required
|
||||
* has_package_signed - ``True`` in case if package sign enabled, ``False`` otherwise, required
|
||||
* has_repo_signed - ``True`` in case if repository database sign enabled, ``False`` otherwise, required
|
||||
* packages - sorted list of packages properties, required
|
||||
@ -64,7 +64,7 @@ class JinjaTemplate:
|
||||
Attributes:
|
||||
default_pgp_key(str | None): default PGP key
|
||||
homepage(str | None): homepage link if any (for footer)
|
||||
link_path(str): prefix fo packages to download
|
||||
link_path(str): prefix of packages to download
|
||||
name(str): repository name
|
||||
rss_url(str | None): link to the RSS feed
|
||||
sign_targets(set[SignSettings]): targets to sign enabled in configuration
|
||||
|
@ -67,14 +67,6 @@ class ReportTrigger(Trigger):
|
||||
"type": "string",
|
||||
"allowed": ["email"],
|
||||
},
|
||||
"full_template_path": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
"excludes": ["template_full"],
|
||||
"required": True,
|
||||
"path_exists": True,
|
||||
"path_type": "file",
|
||||
},
|
||||
"homepage": {
|
||||
"type": "string",
|
||||
"empty": False,
|
||||
@ -132,26 +124,16 @@ class ReportTrigger(Trigger):
|
||||
},
|
||||
"template": {
|
||||
"type": "string",
|
||||
"excludes": ["template_path"],
|
||||
"dependencies": ["templates"],
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"template_full": {
|
||||
"type": "string",
|
||||
"excludes": ["template_path"],
|
||||
"dependencies": ["templates"],
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"template_path": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
"excludes": ["template"],
|
||||
"required": True,
|
||||
"path_exists": True,
|
||||
"path_type": "file",
|
||||
},
|
||||
"templates": {
|
||||
"type": "list",
|
||||
"coerce": "list",
|
||||
@ -199,19 +181,10 @@ class ReportTrigger(Trigger):
|
||||
},
|
||||
"template": {
|
||||
"type": "string",
|
||||
"excludes": ["template_path"],
|
||||
"dependencies": ["templates"],
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"template_path": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
"excludes": ["template"],
|
||||
"required": True,
|
||||
"path_exists": True,
|
||||
"path_type": "file",
|
||||
},
|
||||
"templates": {
|
||||
"type": "list",
|
||||
"coerce": "list",
|
||||
@ -225,76 +198,6 @@ class ReportTrigger(Trigger):
|
||||
},
|
||||
},
|
||||
},
|
||||
"telegram": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"allowed": ["telegram"],
|
||||
},
|
||||
"api_key": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"chat_id": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"homepage": {
|
||||
"type": "string",
|
||||
"empty": False,
|
||||
"is_url": ["http", "https"],
|
||||
},
|
||||
"link_path": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"empty": False,
|
||||
"is_url": [],
|
||||
},
|
||||
"rss_url": {
|
||||
"type": "string",
|
||||
"empty": False,
|
||||
"is_url": ["http", "https"],
|
||||
},
|
||||
"template": {
|
||||
"type": "string",
|
||||
"excludes": ["template_path"],
|
||||
"dependencies": ["templates"],
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"template_path": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
"excludes": ["template"],
|
||||
"required": True,
|
||||
"path_exists": True,
|
||||
"path_type": "file",
|
||||
},
|
||||
"template_type": {
|
||||
"type": "string",
|
||||
"allowed": ["MarkdownV2", "HTML", "Markdown"],
|
||||
},
|
||||
"templates": {
|
||||
"type": "list",
|
||||
"coerce": "list",
|
||||
"schema": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
"path_exists": True,
|
||||
"path_type": "dir",
|
||||
},
|
||||
"empty": False,
|
||||
},
|
||||
"timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
"remote-call": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
@ -354,19 +257,10 @@ class ReportTrigger(Trigger):
|
||||
},
|
||||
"template": {
|
||||
"type": "string",
|
||||
"excludes": ["template_path"],
|
||||
"dependencies": ["templates"],
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"template_path": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
"excludes": ["template"],
|
||||
"required": True,
|
||||
"path_exists": True,
|
||||
"path_type": "file",
|
||||
},
|
||||
"templates": {
|
||||
"type": "list",
|
||||
"coerce": "list",
|
||||
@ -380,6 +274,67 @@ class ReportTrigger(Trigger):
|
||||
},
|
||||
},
|
||||
},
|
||||
"telegram": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"allowed": ["telegram"],
|
||||
},
|
||||
"api_key": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"chat_id": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"homepage": {
|
||||
"type": "string",
|
||||
"empty": False,
|
||||
"is_url": ["http", "https"],
|
||||
},
|
||||
"link_path": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"empty": False,
|
||||
"is_url": [],
|
||||
},
|
||||
"rss_url": {
|
||||
"type": "string",
|
||||
"empty": False,
|
||||
"is_url": ["http", "https"],
|
||||
},
|
||||
"template": {
|
||||
"type": "string",
|
||||
"dependencies": ["templates"],
|
||||
"required": True,
|
||||
"empty": False,
|
||||
},
|
||||
"template_type": {
|
||||
"type": "string",
|
||||
"allowed": ["MarkdownV2", "HTML", "Markdown"],
|
||||
},
|
||||
"templates": {
|
||||
"type": "list",
|
||||
"coerce": "list",
|
||||
"schema": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
"path_exists": True,
|
||||
"path_type": "dir",
|
||||
},
|
||||
"empty": False,
|
||||
},
|
||||
"timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, repository_id: RepositoryId, configuration: Configuration) -> None:
|
||||
|
@ -71,7 +71,7 @@ class EventLogger:
>>> with self.in_event(package_base, EventType.PackageUpdated):
>>> do_something()

Additional parameter ``failure`` can be set in order to emit an event on exception occured. If none set
Additional parameter ``failure`` can be set in order to emit an event on exception occurred. If none set
(default), then no event will be recorded on exception
"""
with MetricsTimer() as timer:
@ -33,6 +33,7 @@ class Leaf:
|
||||
|
||||
Attributes:
|
||||
dependencies(set[str]): list of package dependencies
|
||||
items(list[str]): list of packages in this leaf including provides
|
||||
package(Package): leaf package properties
|
||||
"""
|
||||
|
||||
@ -42,17 +43,9 @@ class Leaf:
|
||||
package(Package): package properties
|
||||
"""
|
||||
self.package = package
|
||||
# store frequently used properties
|
||||
self.dependencies = package.depends_build
|
||||
|
||||
@property
|
||||
def items(self) -> Iterable[str]:
|
||||
"""
|
||||
extract all packages from the leaf
|
||||
|
||||
Returns:
|
||||
Iterable[str]: packages containing in this leaf
|
||||
"""
|
||||
return self.package.packages.keys()
|
||||
self.items = self.package.packages_full
|
||||
|
||||
def is_dependency(self, packages: Iterable[Leaf]) -> bool:
|
||||
"""
|
||||
|
@ -83,6 +83,20 @@ class UploadTrigger(Trigger):
|
||||
},
|
||||
},
|
||||
},
|
||||
"remote-service": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"allowed": ["ahriman", "remote-service"],
|
||||
},
|
||||
"timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
"rsync": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
@ -107,20 +121,6 @@ class UploadTrigger(Trigger):
|
||||
},
|
||||
},
|
||||
},
|
||||
"remote-service": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"allowed": ["ahriman", "remote-service"],
|
||||
},
|
||||
"timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
"s3": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
|
@ -136,7 +136,8 @@ def check_output(*args: str, exception: Exception | Callable[[int, list[str], st
} | environment

with subprocess.Popen(args, cwd=cwd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
user=user, env=full_environment, text=True, encoding="utf8", bufsize=1) as process:
user=user, env=full_environment, text=True, encoding="utf8", errors="backslashreplace",
bufsize=1) as process:
if input_data is not None:
input_channel = get_io(process, "stdin")
input_channel.write(input_data)
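The added ``errors="backslashreplace"`` means undecodable bytes in a subprocess output no longer abort decoding of the stream; a tiny standalone illustration of that codec behaviour:

# undecodable bytes survive as escape sequences instead of raising UnicodeDecodeError
raw = b"checking for \xff terminal..."
print(raw.decode("utf8", errors="backslashreplace"))  # checking for \xff terminal...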
@ -69,7 +69,7 @@ class Package(LazyLogging):
|
||||
:attr:`ahriman.models.package_source.PackageSource.Archive`,
|
||||
:attr:`ahriman.models.package_source.PackageSource.AUR`,
|
||||
:attr:`ahriman.models.package_source.PackageSource.Local` and
|
||||
:attr:`ahriman.models.package_source.PackageSource.Repository` repsectively:
|
||||
:attr:`ahriman.models.package_source.PackageSource.Repository` respectively:
|
||||
|
||||
>>> ahriman_package = Package.from_aur("ahriman")
|
||||
>>> pacman_package = Package.from_official("pacman", pacman)
|
||||
@ -213,18 +213,19 @@ class Package(LazyLogging):
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_aur(cls, name: str, packager: str | None = None) -> Self:
|
||||
def from_aur(cls, name: str, packager: str | None = None, *, include_provides: bool = False) -> Self:
|
||||
"""
|
||||
construct package properties from AUR page
|
||||
|
||||
Args:
|
||||
name(str): package name (either base or normal name)
|
||||
packager(str | None, optional): packager to be used for this build (Default value = None)
|
||||
include_provides(bool, optional): search by provides if no exact match found (Default value = False)
|
||||
|
||||
Returns:
|
||||
Self: package properties
|
||||
"""
|
||||
package = AUR.info(name)
|
||||
package = AUR.info(name, include_provides=include_provides)
|
||||
|
||||
remote = RemoteSource(
|
||||
source=PackageSource.AUR,
|
||||
@ -310,7 +311,8 @@ class Package(LazyLogging):
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_official(cls, name: str, pacman: Pacman, packager: str | None = None, *, use_syncdb: bool = True) -> Self:
|
||||
def from_official(cls, name: str, pacman: Pacman, packager: str | None = None, *, use_syncdb: bool = True,
|
||||
include_provides: bool = False) -> Self:
|
||||
"""
|
||||
construct package properties from official repository page
|
||||
|
||||
@ -319,11 +321,13 @@ class Package(LazyLogging):
|
||||
pacman(Pacman): alpm wrapper instance
|
||||
packager(str | None, optional): packager to be used for this build (Default value = None)
|
||||
use_syncdb(bool, optional): use pacman databases instead of official repositories RPC (Default value = True)
|
||||
include_provides(bool, optional): search by provides if no exact match found (Default value = False)
|
||||
|
||||
Returns:
|
||||
Self: package properties
|
||||
"""
|
||||
package = OfficialSyncdb.info(name, pacman=pacman) if use_syncdb else Official.info(name)
|
||||
impl = OfficialSyncdb if use_syncdb else Official
|
||||
package = impl.info(name, pacman=pacman, include_provides=include_provides)
|
||||
|
||||
remote = RemoteSource(
|
||||
source=PackageSource.Repository,
|
||||
|
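A hedged caller-side example of the new flag; the package names are placeholders and the ``pacman`` instance is assumed to exist:

from ahriman.models.package import Package

# fall back to a provides lookup when there is no exact match
package = Package.from_aur("jdk-openjdk-bin", include_provides=True)
official = Package.from_official("bash", pacman, include_provides=True)  # syncdb lookup by default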
@ -24,7 +24,6 @@ from pathlib import Path
|
||||
from typing import Any, ClassVar, IO, Self
|
||||
|
||||
from ahriman.core.alpm.pkgbuild_parser import PkgbuildParser, PkgbuildToken
|
||||
from ahriman.core.exceptions import EncodeError
|
||||
from ahriman.models.pkgbuild_patch import PkgbuildPatch
|
||||
|
||||
|
||||
@ -34,13 +33,13 @@ class Pkgbuild(Mapping[str, Any]):
|
||||
model and proxy for PKGBUILD properties
|
||||
|
||||
Attributes:
|
||||
DEFAULT_ENCODINGS(list[str]): (class attribute) list of encoding to be applied on the file content
|
||||
DEFAULT_ENCODINGS(str): (class attribute) default encoding to be applied on the file content
|
||||
fields(dict[str, PkgbuildPatch]): PKGBUILD fields
|
||||
"""
|
||||
|
||||
fields: dict[str, PkgbuildPatch]
|
||||
|
||||
DEFAULT_ENCODINGS: ClassVar[list[str]] = ["utf8", "latin-1"]
|
||||
DEFAULT_ENCODINGS: ClassVar[str] = "utf8"
|
||||
|
||||
@property
|
||||
def variables(self) -> dict[str, str]:
|
||||
@ -58,13 +57,13 @@ class Pkgbuild(Mapping[str, Any]):
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_file(cls, path: Path, encodings: list[str] | None = None) -> Self:
|
||||
def from_file(cls, path: Path, encoding: str | None = None) -> Self:
|
||||
"""
|
||||
parse PKGBUILD from the file
|
||||
|
||||
Args:
|
||||
path(Path): path to the PKGBUILD file
|
||||
encodings(list[str] | None, optional): the encoding of the file (Default value = None)
|
||||
encoding(str | None, optional): the encoding of the file (Default value = None)
|
||||
|
||||
Returns:
|
||||
Self: constructed instance of self
|
||||
@ -77,15 +76,10 @@ class Pkgbuild(Mapping[str, Any]):
|
||||
content = input_file.read()
|
||||
|
||||
# decode bytes content based on either
|
||||
encodings = encodings or cls.DEFAULT_ENCODINGS
|
||||
for encoding in encodings:
|
||||
try:
|
||||
io = StringIO(content.decode(encoding))
|
||||
return cls.from_io(io)
|
||||
except ValueError:
|
||||
pass
|
||||
encoding = encoding or cls.DEFAULT_ENCODINGS
|
||||
io = StringIO(content.decode(encoding, errors="backslashreplace"))
|
||||
|
||||
raise EncodeError(encodings)
|
||||
return cls.from_io(io)
|
||||
|
||||
@classmethod
|
||||
def from_io(cls, stream: IO[str]) -> Self:
|
||||
|
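With the encodings list collapsed to a single default, a caller that previously relied on the latin-1 fallback would pass it explicitly; a small sketch with an assumed import path and file location:

from pathlib import Path

from ahriman.models.pkgbuild import Pkgbuild

pkgbuild = Pkgbuild.from_file(Path("PKGBUILD"))                       # utf8 plus backslashreplace by default
pkgbuild = Pkgbuild.from_file(Path("PKGBUILD"), encoding="latin-1")   # explicit override when needed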
@ -17,7 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import aiohttp_cors  # type: ignore[import-untyped]
import aiohttp_cors

from aiohttp.web import Application

@ -36,7 +36,7 @@ def setup_cors(application: Application) -> aiohttp_cors.CorsConfig:
aiohttp_cors.CorsConfig: generated CORS configuration
"""
cors = aiohttp_cors.setup(application, defaults={
"*": aiohttp_cors.ResourceOptions(
"*": aiohttp_cors.ResourceOptions(  # type: ignore[no-untyped-call]
expose_headers="*",
allow_headers="*",
allow_methods="*",
69
src/ahriman/web/middlewares/metrics_handler.py
Normal file
@ -0,0 +1,69 @@
|
||||
#
|
||||
# Copyright (c) 2021-2025 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
try:
|
||||
import aiohttp_openmetrics
|
||||
except ImportError:
|
||||
aiohttp_openmetrics = None # type: ignore[assignment]
|
||||
|
||||
from aiohttp.typedefs import Middleware
|
||||
from aiohttp.web import HTTPNotFound, Request, Response, StreamResponse, middleware
|
||||
|
||||
from ahriman.web.middlewares import HandlerType
|
||||
|
||||
|
||||
__all__ = [
|
||||
"metrics",
|
||||
"metrics_handler",
|
||||
]
|
||||
|
||||
|
||||
async def metrics(request: Request) -> Response:
|
||||
"""
|
||||
handler for returning metrics
|
||||
|
||||
Args:
|
||||
request(Request): request object
|
||||
|
||||
Returns:
|
||||
Response: response object
|
||||
|
||||
Raises:
|
||||
HTTPNotFound: endpoint is disabled
|
||||
"""
|
||||
if aiohttp_openmetrics is None:
|
||||
raise HTTPNotFound
|
||||
return await aiohttp_openmetrics.metrics(request)
|
||||
|
||||
|
||||
def metrics_handler() -> Middleware:
|
||||
"""
|
||||
middleware for metrics support
|
||||
|
||||
Returns:
|
||||
Middleware: middleware function to handle server metrics
|
||||
"""
|
||||
if aiohttp_openmetrics is not None:
|
||||
return aiohttp_openmetrics.metrics_middleware
|
||||
|
||||
@middleware
|
||||
async def handle(request: Request, handler: HandlerType) -> StreamResponse:
|
||||
return await handler(request)
|
||||
|
||||
return handle
|
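Because the middleware degrades to a pass-through when the optional dependency is missing, it can be appended unconditionally; a minimal wiring sketch outside of the real server setup:

from aiohttp.web import Application

from ahriman.web.middlewares.metrics_handler import metrics_handler

application = Application()
application.middlewares.append(metrics_handler())  # no-op handler if aiohttp-openmetrics is not installed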
@ -17,6 +17,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import re

from aiohttp.web import Application, View
from collections.abc import Generator

@ -45,6 +47,23 @@ def _dynamic_routes(configuration: Configuration) -> Generator[tuple[str, type[V
yield route, view


def _identifier(route: str) -> str:
"""
extract valid route identifier (aka name) for the route. This method replaces curly brackets by single colon
and replaces other special symbols (including slashes) by underscore

Args:
route(str): source route

Returns:
str: route with special symbols being replaced
"""
# replace special symbols
alphanum = re.sub(r"[^A-Za-z\d\-{}]", "_", route)
# finally replace curly brackets
return alphanum.replace("{", ":").replace("}", "")


def setup_routes(application: Application, configuration: Configuration) -> None:
"""
setup all defined routes
@ -53,7 +72,8 @@ def setup_routes(application: Application, configuration: Configuration) -> None
application(Application): web application instance
configuration(Configuration): configuration instance
"""
application.router.add_static("/static", configuration.getpath("web", "static_path"), follow_symlinks=True)
application.router.add_static("/static", configuration.getpath("web", "static_path"),
name="_static", follow_symlinks=True)

for route, view in _dynamic_routes(configuration):
application.router.add_view(route, view)
application.router.add_view(route, view, name=_identifier(route))
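For reference, the naming rule translates routes as below; this is a sketch of the expected behaviour derived from the regular expression above, not an excerpt from the test suite:

from ahriman.web.routes import _identifier

assert _identifier("/api/v1/packages/{package}") == "_api_v1_packages_:package"
assert _identifier("/static") == "_static"  # hence the explicit name of the static resource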
@ -17,6 +17,7 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from ahriman.web.schemas.any_schema import AnySchema
|
||||
from ahriman.web.schemas.aur_package_schema import AURPackageSchema
|
||||
from ahriman.web.schemas.auth_schema import AuthSchema
|
||||
from ahriman.web.schemas.build_options_schema import BuildOptionsSchema
|
||||
|
26
src/ahriman/web/schemas/any_schema.py
Normal file
@ -0,0 +1,26 @@
|
||||
#
|
||||
# Copyright (c) 2021-2025 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from ahriman.web.apispec import Schema
|
||||
|
||||
|
||||
class AnySchema(Schema):
|
||||
"""
|
||||
response dummy schema
|
||||
"""
|
@ -39,7 +39,7 @@ class RemoteSchema(Schema):
"example": ".",
})
source = fields.Enum(PackageSource, by_value=True, required=True, metadata={
"description": "Pacakge source",
"description": "Package source",
})
web_url = fields.String(metadata={
"description": "Package repository page",
@ -18,7 +18,7 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from aiohttp.web import HTTPBadRequest, HTTPNotFound, Request, StreamResponse, View
|
||||
from aiohttp_cors import CorsViewMixin # type: ignore[import-untyped]
|
||||
from aiohttp_cors import CorsViewMixin
|
||||
from collections.abc import Awaitable, Callable
|
||||
from typing import ClassVar, TypeVar
|
||||
|
||||
@ -167,6 +167,9 @@ class BaseView(View, CorsViewMixin):
|
||||
"""
|
||||
HEAD method implementation based on the result of GET method
|
||||
|
||||
Returns:
|
||||
StreamResponse: generated response for the request
|
||||
|
||||
Raises:
|
||||
HTTPMethodNotAllowed: in case if there is no GET method implemented
|
||||
"""
|
||||
|
@ -106,7 +106,7 @@ class PackageView(StatusViewGuard, BaseView):
@apidocs(
tags=["Packages"],
summary="Update package",
description="Update package status and set its descriptior optionally",
description="Update package status and set its descriptor optionally",
permission=POST_PERMISSION,
error_400_enabled=True,
error_404_description="Repository is unknown",
@ -46,7 +46,7 @@ class PackagesView(StatusViewGuard, BaseView):
ROUTES = ["/api/v1/packages"]

@apidocs(
tags=["packages"],
tags=["Packages"],
summary="Get packages list",
description="Retrieve packages and their descriptors",
permission=GET_PERMISSION,
56
src/ahriman/web/views/v1/status/metrics.py
Normal file
@ -0,0 +1,56 @@
|
||||
#
|
||||
# Copyright (c) 2021-2025 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from aiohttp.web import Response
|
||||
from typing import ClassVar
|
||||
|
||||
from ahriman.models.user_access import UserAccess
|
||||
from ahriman.web.apispec.decorators import apidocs
|
||||
from ahriman.web.middlewares.metrics_handler import metrics
|
||||
from ahriman.web.schemas import AnySchema
|
||||
from ahriman.web.views.base import BaseView
|
||||
|
||||
|
||||
class MetricsView(BaseView):
|
||||
"""
|
||||
open metrics endpoints
|
||||
|
||||
Attributes:
|
||||
GET_PERMISSION(UserAccess): (class attribute) get permissions of self
|
||||
"""
|
||||
|
||||
GET_PERMISSION: ClassVar[UserAccess] = UserAccess.Unauthorized
|
||||
ROUTES = ["/api/v1/metrics"]
|
||||
|
||||
@apidocs(
|
||||
tags=["Status"],
|
||||
summary="OpenMetrics endpoint",
|
||||
description="Get service metrics in OpenMetrics format",
|
||||
permission=GET_PERMISSION,
|
||||
error_404_description="Endpoint is disabled",
|
||||
schema=AnySchema,
|
||||
)
|
||||
async def get(self) -> Response:
|
||||
"""
|
||||
get service HTTP metrics
|
||||
|
||||
Returns:
|
||||
Response: 200 with service metrics as generated by the library
|
||||
"""
|
||||
return await metrics(self.request)
|
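A quick way to probe the new endpoint once the web service is up; scheme, host and port here are assumptions, not defaults taken from the change:

import requests

response = requests.get("http://127.0.0.1:8080/api/v1/metrics", timeout=5)
print(response.status_code)  # 200 with the OpenMetrics payload, 404 if aiohttp-openmetrics is not installed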
@ -97,6 +97,7 @@ class LoginView(BaseView):
login user to service. The authentication session will be passed in ``Set-Cookie`` header.

Raises:
HTTPBadRequest: if bad data is supplied
HTTPFound: on success response
HTTPUnauthorized: if case of authorization error
"""
@ -37,6 +37,7 @@ from ahriman.web.apispec.info import setup_apispec
from ahriman.web.cors import setup_cors
from ahriman.web.keys import AuthKey, ConfigurationKey, SpawnKey, WatcherKey, WorkersKey
from ahriman.web.middlewares.exception_handler import exception_handler
from ahriman.web.middlewares.metrics_handler import metrics_handler
from ahriman.web.routes import setup_routes


@ -146,6 +147,7 @@ def setup_server(configuration: Configuration, spawner: Spawn, repositories: lis

application.middlewares.append(normalize_path_middleware(append_slash=False, remove_slash=True))
application.middlewares.append(exception_handler(application.logger))
application.middlewares.append(metrics_handler())

application.logger.info("setup routes")
setup_routes(application, configuration)
@ -1,5 +1,5 @@
#
# Copyright (c) 2021-2024 ahriman team.
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
@ -1,4 +1,6 @@
|
||||
from pathlib import Path
|
||||
from pytest_mock import MockerFixture
|
||||
from typing import Any
|
||||
from unittest.mock import MagicMock, call as MockCall
|
||||
|
||||
from ahriman.application.application import Application
|
||||
@ -73,6 +75,10 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
|
||||
mock.packages_full = [package_base]
|
||||
return mock
|
||||
|
||||
def get_package(name: str | Path, *args: Any, **kwargs: Any) -> Package:
|
||||
name = name if isinstance(name, str) else name.name
|
||||
return packages[name]
|
||||
|
||||
package_python_schedule.packages = {
|
||||
package_python_schedule.base: package_python_schedule.packages[package_python_schedule.base]
|
||||
}
|
||||
@ -87,10 +93,8 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
|
||||
}
|
||||
|
||||
mocker.patch("pathlib.Path.is_dir", autospec=True, side_effect=lambda p: p.name == "python")
|
||||
package_aur_mock = mocker.patch("ahriman.models.package.Package.from_aur",
|
||||
side_effect=lambda *args: packages[args[0]])
|
||||
package_local_mock = mocker.patch("ahriman.models.package.Package.from_build",
|
||||
side_effect=lambda *args: packages[args[0].name])
|
||||
package_aur_mock = mocker.patch("ahriman.models.package.Package.from_aur", side_effect=get_package)
|
||||
package_local_mock = mocker.patch("ahriman.models.package.Package.from_build", side_effect=get_package)
|
||||
packages_mock = mocker.patch("ahriman.application.application.Application._known_packages",
|
||||
return_value={"devtools", "python-build", "python-pytest"})
|
||||
status_client_mock = mocker.patch("ahriman.core.status.Client.set_unknown")
|
||||
@ -98,8 +102,8 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
|
||||
result = application.with_dependencies([package_ahriman], process_dependencies=True)
|
||||
assert {package.base: package for package in result} == packages
|
||||
package_aur_mock.assert_has_calls([
|
||||
MockCall(package_python_schedule.base, package_ahriman.packager),
|
||||
MockCall("python-installer", package_ahriman.packager),
|
||||
MockCall(package_python_schedule.base, package_ahriman.packager, include_provides=True),
|
||||
MockCall("python-installer", package_ahriman.packager, include_provides=True),
|
||||
], any_order=True)
|
||||
package_local_mock.assert_has_calls([
|
||||
MockCall(application.repository.paths.cache_for("python"), "x86_64", package_ahriman.packager),
|
||||
@ -113,6 +117,40 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
|
||||
], any_order=True)
|
||||
|
||||
|
||||
def test_with_dependencies_exception(application: Application, package_ahriman: Package,
|
||||
package_python_schedule: Package, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must skip packages if exception occurs
|
||||
"""
|
||||
def create_package_mock(package_base) -> MagicMock:
|
||||
mock = MagicMock()
|
||||
mock.base = package_base
|
||||
mock.depends_build = []
|
||||
mock.packages_full = [package_base]
|
||||
return mock
|
||||
|
||||
package_python_schedule.packages = {
|
||||
package_python_schedule.base: package_python_schedule.packages[package_python_schedule.base]
|
||||
}
|
||||
package_ahriman.packages[package_ahriman.base].depends = ["devtools", "python", package_python_schedule.base]
|
||||
package_ahriman.packages[package_ahriman.base].make_depends = ["python-build", "python-installer"]
|
||||
|
||||
packages = {
|
||||
package_ahriman.base: package_ahriman,
|
||||
package_python_schedule.base: package_python_schedule,
|
||||
"python": create_package_mock("python"),
|
||||
"python-installer": create_package_mock("python-installer"),
|
||||
}
|
||||
|
||||
mocker.patch("pathlib.Path.is_dir", autospec=True, side_effect=lambda p: p.name == "python")
|
||||
mocker.patch("ahriman.models.package.Package.from_aur", side_effect=lambda *args: packages[args[0]])
|
||||
mocker.patch("ahriman.models.package.Package.from_build", side_effect=Exception)
|
||||
mocker.patch("ahriman.application.application.Application._known_packages",
|
||||
return_value={"devtools", "python-build", "python-pytest"})
|
||||
|
||||
assert not application.with_dependencies([package_ahriman], process_dependencies=True)
|
||||
|
||||
|
||||
def test_with_dependencies_skip(application: Application, package_ahriman: Package, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must skip processing of dependencies
|
||||
|
@ -144,6 +144,7 @@ def test_repositories_extract(args: argparse.Namespace, configuration: Configura
    args.architecture = "arch"
    args.configuration = configuration.path
    args.repository = "repo"
    mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
    known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
    known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories")

@ -159,6 +160,7 @@ def test_repositories_extract_repository(args: argparse.Namespace, configuration
    """
    args.architecture = "arch"
    args.configuration = configuration.path
    mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
    known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
    known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories",
                                           return_value={"repo"})
@ -175,6 +177,7 @@ def test_repositories_extract_repository_legacy(args: argparse.Namespace, config
    """
    args.architecture = "arch"
    args.configuration = configuration.path
    mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
    known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
    known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories",
                                           return_value=set())
@ -191,6 +194,7 @@ def test_repositories_extract_architecture(args: argparse.Namespace, configurati
    """
    args.configuration = configuration.path
    args.repository = "repo"
    mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
    known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures",
                                            return_value={"arch"})
    known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories")
@ -207,6 +211,7 @@ def test_repositories_extract_empty(args: argparse.Namespace, configuration: Con
    """
    args.command = "config"
    args.configuration = configuration.path
    mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
    mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures", return_value=set())
    mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories", return_value=set())

@ -221,6 +226,7 @@ def test_repositories_extract_systemd(args: argparse.Namespace, configuration: C
    """
    args.configuration = configuration.path
    args.repository_id = "i686/some/repo/name"
    mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
    known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
    known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories")

@ -236,6 +242,7 @@ def test_repositories_extract_systemd_with_dash(args: argparse.Namespace, config
    """
    args.configuration = configuration.path
    args.repository_id = "i686-some-repo-name"
    mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
    known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
    known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories")

@ -251,6 +258,7 @@ def test_repositories_extract_systemd_legacy(args: argparse.Namespace, configura
    """
    args.configuration = configuration.path
    args.repository_id = "i686"
    mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
    known_architectures_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_architectures")
    known_repositories_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.known_repositories",
                                           return_value=set())
@ -6,6 +6,7 @@ from pytest_mock import MockerFixture
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers.copy import Copy
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.database import SQLite
|
||||
from ahriman.core.repository import Repository
|
||||
from ahriman.models.build_status import BuildStatusEnum
|
||||
from ahriman.models.package import Package
|
||||
@ -30,11 +31,12 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
|
||||
|
||||
|
||||
def test_run(args: argparse.Namespace, configuration: Configuration, repository: Repository,
|
||||
package_ahriman: Package, mocker: MockerFixture) -> None:
|
||||
database: SQLite, package_ahriman: Package, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must run command
|
||||
"""
|
||||
args = _default_args(args)
|
||||
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
|
||||
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
|
||||
mocker.patch("ahriman.core.repository.Repository.packages", return_value=[package_ahriman])
|
||||
application_mock = mocker.patch("ahriman.application.handlers.copy.Copy.copy_package")
|
||||
@ -51,12 +53,13 @@ def test_run(args: argparse.Namespace, configuration: Configuration, repository:
|
||||
|
||||
|
||||
def test_run_remove(args: argparse.Namespace, configuration: Configuration, repository: Repository,
|
||||
package_ahriman: Package, mocker: MockerFixture) -> None:
|
||||
database: SQLite, package_ahriman: Package, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must run command and remove packages afterward
|
||||
"""
|
||||
args = _default_args(args)
|
||||
args.remove = True
|
||||
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
|
||||
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
|
||||
mocker.patch("ahriman.core.repository.Repository.packages", return_value=[package_ahriman])
|
||||
mocker.patch("ahriman.application.handlers.copy.Copy.copy_package")
|
||||
@ -69,12 +72,14 @@ def test_run_remove(args: argparse.Namespace, configuration: Configuration, repo
|
||||
|
||||
|
||||
def test_run_empty_exception(args: argparse.Namespace, configuration: Configuration, repository: Repository,
|
||||
mocker: MockerFixture) -> None:
|
||||
database: SQLite, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise ExitCode exception on empty result
|
||||
"""
|
||||
args = _default_args(args)
|
||||
args.exit_code = True
|
||||
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
|
||||
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
|
||||
mocker.patch("ahriman.core.repository.Repository.packages", return_value=[])
|
||||
mocker.patch("ahriman.application.application.Application.update")
|
||||
check_mock = mocker.patch("ahriman.application.handlers.handler.Handler.check_status")
|
||||
|
@ -9,6 +9,7 @@ from urllib.parse import quote_plus as url_encode
|
||||
|
||||
from ahriman.application.handlers.setup import Setup
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.database import SQLite
|
||||
from ahriman.core.exceptions import MissingArchitectureError
|
||||
from ahriman.core.repository import Repository
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
@ -44,11 +45,12 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
|
||||
|
||||
|
||||
def test_run(args: argparse.Namespace, configuration: Configuration, repository: Repository,
|
||||
repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
|
||||
database: SQLite, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must run command
|
||||
"""
|
||||
args = _default_args(args)
|
||||
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
|
||||
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
|
||||
ahriman_configuration_mock = mocker.patch("ahriman.application.handlers.setup.Setup.configuration_create_ahriman")
|
||||
devtools_configuration_mock = mocker.patch("ahriman.application.handlers.setup.Setup.configuration_create_devtools")
|
||||
@ -88,12 +90,13 @@ def test_run_no_architecture_or_repository(configuration: Configuration) -> None
|
||||
|
||||
|
||||
def test_run_with_server(args: argparse.Namespace, configuration: Configuration, repository: Repository,
|
||||
mocker: MockerFixture) -> None:
|
||||
database: SQLite, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must run command with server specified
|
||||
"""
|
||||
args = _default_args(args)
|
||||
args.server = "server"
|
||||
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
|
||||
mocker.patch("ahriman.core.repository.Repository.load", return_value=repository)
|
||||
mocker.patch("ahriman.application.handlers.setup.Setup.configuration_create_ahriman")
|
||||
mocker.patch("ahriman.application.handlers.setup.Setup.configuration_create_makepkg")
|
||||
|
@ -51,7 +51,8 @@ def test_run(args: argparse.Namespace, configuration: Configuration, database: S
|
||||
update_mock.assert_called_once_with(user)
|
||||
|
||||
|
||||
def test_run_empty_salt(args: argparse.Namespace, configuration: Configuration, mocker: MockerFixture) -> None:
|
||||
def test_run_empty_salt(args: argparse.Namespace, configuration: Configuration, database: SQLite,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must process users with empty password salt
|
||||
"""
|
||||
@ -59,6 +60,7 @@ def test_run_empty_salt(args: argparse.Namespace, configuration: Configuration,
|
||||
args = _default_args(args)
|
||||
user = User(username=args.username, password=args.password, access=args.role,
|
||||
packager_id=args.packager, key=args.key)
|
||||
mocker.patch("ahriman.core.database.SQLite.load", return_value=database)
|
||||
mocker.patch("ahriman.models.user.User.hash_password", return_value=user)
|
||||
create_user_mock = mocker.patch("ahriman.application.handlers.users.Users.user_create", return_value=user)
|
||||
update_mock = mocker.patch("ahriman.core.database.SQLite.user_update")
|
||||
|
@ -1575,6 +1575,7 @@ def test_run(args: argparse.Namespace, configuration: Configuration, mocker: Moc
|
||||
args.command = ""
|
||||
args.handler = Handler
|
||||
|
||||
mocker.patch("ahriman.core.configuration.Configuration.load", new=lambda self, _: self.copy_from(configuration))
|
||||
mocker.patch("argparse.ArgumentParser.parse_args", return_value=args)
|
||||
|
||||
assert ahriman.run() == 1
|
||||
|
@ -249,19 +249,25 @@ def auth(configuration: Configuration) -> Auth:
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configuration(repository_id: RepositoryId, resource_path_root: Path) -> Configuration:
|
||||
def configuration(repository_id: RepositoryId, tmp_path: Path, resource_path_root: Path) -> Configuration:
|
||||
"""
|
||||
configuration fixture
|
||||
|
||||
Args:
|
||||
repository_id(RepositoryId): repository identifier fixture
|
||||
tmp_path(Path): temporary path used by the fixture as root
|
||||
resource_path_root(Path): resource path root directory
|
||||
|
||||
Returns:
|
||||
Configuration: configuration test instance
|
||||
"""
|
||||
path = resource_path_root / "core" / "ahriman.ini"
|
||||
return Configuration.from_path(path, repository_id)
|
||||
|
||||
instance = Configuration.from_path(path, repository_id)
|
||||
instance.set_option("repository", "root", str(tmp_path))
|
||||
instance.set_option("settings", "database", str(tmp_path / "ahriman.db"))
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@ -275,9 +281,7 @@ def database(configuration: Configuration) -> SQLite:
|
||||
Returns:
|
||||
SQLite: database test instance
|
||||
"""
|
||||
database = SQLite.load(configuration)
|
||||
yield database
|
||||
database.path.unlink()
|
||||
return SQLite.load(configuration)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
@ -4,7 +4,7 @@ import requests
|
||||
|
||||
from pathlib import Path
|
||||
from pytest_mock import MockerFixture
|
||||
from unittest.mock import MagicMock
|
||||
from unittest.mock import MagicMock, call as MockCall
|
||||
|
||||
from ahriman.core.alpm.remote import AUR
|
||||
from ahriman.core.exceptions import PackageInfoError, UnknownPackageError
|
||||
@ -76,24 +76,18 @@ def test_aur_request(aur: AUR, aur_package_ahriman: AURPackage,
|
||||
request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.make_request", return_value=response_mock)
|
||||
|
||||
assert aur.aur_request("info", "ahriman") == [aur_package_ahriman]
|
||||
request_mock.assert_called_once_with(
|
||||
"GET", "https://aur.archlinux.org/rpc",
|
||||
params=[("type", "info"), ("v", "5"), ("arg", "ahriman")])
|
||||
request_mock.assert_called_once_with("GET", "https://aur.archlinux.org/rpc/v5/info/ahriman", params=[])
|
||||
|
||||
|
||||
def test_aur_request_multi_arg(aur: AUR, aur_package_ahriman: AURPackage,
|
||||
mocker: MockerFixture, resource_path_root: Path) -> None:
|
||||
def test_aur_request_multi_arg(aur: AUR) -> None:
|
||||
"""
|
||||
must perform request to AUR with multiple args
|
||||
must raise PackageInfoError if invalid amount of arguments supplied
|
||||
"""
|
||||
response_mock = MagicMock()
|
||||
response_mock.json.return_value = json.loads(_get_response(resource_path_root))
|
||||
request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.make_request", return_value=response_mock)
|
||||
with pytest.raises(PackageInfoError):
|
||||
aur.aur_request("search", "ahriman", "is", "cool")
|
||||
|
||||
assert aur.aur_request("search", "ahriman", "is", "cool") == [aur_package_ahriman]
|
||||
request_mock.assert_called_once_with(
|
||||
"GET", "https://aur.archlinux.org/rpc",
|
||||
params=[("type", "search"), ("v", "5"), ("arg[]", "ahriman"), ("arg[]", "is"), ("arg[]", "cool")])
|
||||
with pytest.raises(PackageInfoError):
|
||||
aur.aur_request("search")
|
||||
|
||||
|
||||
def test_aur_request_with_kwargs(aur: AUR, aur_package_ahriman: AURPackage,
|
||||
@ -106,9 +100,8 @@ def test_aur_request_with_kwargs(aur: AUR, aur_package_ahriman: AURPackage,
|
||||
request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.make_request", return_value=response_mock)
|
||||
|
||||
assert aur.aur_request("search", "ahriman", by="name") == [aur_package_ahriman]
|
||||
request_mock.assert_called_once_with(
|
||||
"GET", "https://aur.archlinux.org/rpc",
|
||||
params=[("type", "search"), ("v", "5"), ("arg", "ahriman"), ("by", "name")])
|
||||
request_mock.assert_called_once_with("GET", "https://aur.archlinux.org/rpc/v5/search/ahriman",
|
||||
params=[("by", "name")])
|
||||
|
||||
|
||||
def test_aur_request_failed(aur: AUR, mocker: MockerFixture) -> None:
|
||||
@ -139,17 +132,46 @@ def test_package_info(aur: AUR, aur_package_ahriman: AURPackage, mocker: MockerF
|
||||
|
||||
def test_package_info_not_found(aur: AUR, aur_package_ahriman: AURPackage, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise UnknownPackage exception in case if no package was found
|
||||
must raise UnknownPackageError in case if no package was found
|
||||
"""
|
||||
mocker.patch("ahriman.core.alpm.remote.AUR.aur_request", return_value=[])
|
||||
with pytest.raises(UnknownPackageError, match=aur_package_ahriman.name):
|
||||
assert aur.package_info(aur_package_ahriman.name, pacman=None)
|
||||
|
||||
|
||||
def test_package_provided_by(aur: AUR, aur_package_ahriman: AURPackage, aur_package_akonadi: AURPackage,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must search for packages which provide required one
|
||||
"""
|
||||
aur_package_ahriman.provides.append(aur_package_ahriman.name)
|
||||
search_mock = mocker.patch("ahriman.core.alpm.remote.AUR.package_search", return_value=[
|
||||
aur_package_ahriman, aur_package_akonadi
|
||||
])
|
||||
info_mock = mocker.patch("ahriman.core.alpm.remote.AUR.package_info", side_effect=[
|
||||
aur_package_ahriman, aur_package_akonadi
|
||||
])
|
||||
|
||||
assert aur.package_provided_by(aur_package_ahriman.name, pacman=None) == [aur_package_ahriman]
|
||||
search_mock.assert_called_once_with(aur_package_ahriman.name, pacman=None, search_by="provides")
|
||||
info_mock.assert_has_calls([
|
||||
MockCall(aur_package_ahriman.name, pacman=None), MockCall(aur_package_akonadi.name, pacman=None)
|
||||
])
|
||||
|
||||
|
||||
def test_package_search(aur: AUR, aur_package_ahriman: AURPackage, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must make request for search
|
||||
"""
|
||||
request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.aur_request", return_value=[aur_package_ahriman])
|
||||
assert aur.package_search(aur_package_ahriman.name, pacman=None) == [aur_package_ahriman]
|
||||
assert aur.package_search(aur_package_ahriman.name, pacman=None, search_by=None) == [aur_package_ahriman]
|
||||
request_mock.assert_called_once_with("search", aur_package_ahriman.name, by="name-desc")
|
||||
|
||||
|
||||
def test_package_search_provides(aur: AUR, aur_package_ahriman: AURPackage, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must make request for search with custom field
|
||||
"""
|
||||
request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.aur_request")
|
||||
aur.package_search(aur_package_ahriman.name, pacman=None, search_by="provides")
|
||||
request_mock.assert_called_once_with("search", aur_package_ahriman.name, by="provides")
|
||||
|
@ -106,7 +106,7 @@ def test_package_info(official: Official, aur_package_akonadi: AURPackage, mocke
|
||||
|
||||
def test_package_info_not_found(official: Official, aur_package_ahriman: AURPackage, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise UnknownPackage exception in case if no package was found
|
||||
must raise UnknownPackageError in case if no package was found
|
||||
"""
|
||||
mocker.patch("ahriman.core.alpm.remote.Official.arch_request", return_value=[])
|
||||
with pytest.raises(UnknownPackageError, match=aur_package_ahriman.name):
|
||||
@ -119,5 +119,16 @@ def test_package_search(official: Official, aur_package_akonadi: AURPackage, moc
|
||||
"""
|
||||
request_mock = mocker.patch("ahriman.core.alpm.remote.Official.arch_request",
|
||||
return_value=[aur_package_akonadi])
|
||||
assert official.package_search(aur_package_akonadi.name, pacman=None) == [aur_package_akonadi]
|
||||
assert official.package_search(aur_package_akonadi.name, pacman=None, search_by=None) == [
|
||||
aur_package_akonadi,
|
||||
]
|
||||
request_mock.assert_called_once_with(aur_package_akonadi.name, by="q")
|
||||
|
||||
|
||||
def test_package_search_name(official: Official, aur_package_akonadi: AURPackage, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must make request for search with custom field
|
||||
"""
|
||||
request_mock = mocker.patch("ahriman.core.alpm.remote.Official.arch_request")
|
||||
official.package_search(aur_package_akonadi.name, pacman=None, search_by="name")
|
||||
request_mock.assert_called_once_with(aur_package_akonadi.name, by="name")
|
||||
|
@ -16,18 +16,14 @@ def test_package_info(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURP
|
||||
mocker.patch("ahriman.models.aur_package.AURPackage.from_pacman", return_value=aur_package_akonadi)
|
||||
get_mock = mocker.patch("ahriman.core.alpm.pacman.Pacman.package", return_value=[aur_package_akonadi])
|
||||
|
||||
package = official_syncdb.package_info(aur_package_akonadi.name, pacman=pacman)
|
||||
assert official_syncdb.package_info(aur_package_akonadi.name, pacman=pacman) == aur_package_akonadi
|
||||
get_mock.assert_called_once_with(aur_package_akonadi.name)
|
||||
assert package == aur_package_akonadi
|
||||
|
||||
|
||||
def test_package_info_no_pacman(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURPackage,
|
||||
mocker: MockerFixture) -> None:
|
||||
def test_package_info_no_pacman(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURPackage) -> None:
|
||||
"""
|
||||
must raise UnknownPackageError if no pacman set
|
||||
"""
|
||||
mocker.patch("ahriman.core.alpm.pacman.Pacman.package", return_value=[aur_package_akonadi])
|
||||
|
||||
with pytest.raises(UnknownPackageError, match=aur_package_akonadi.name):
|
||||
official_syncdb.package_info(aur_package_akonadi.name, pacman=None)
|
||||
|
||||
@ -40,3 +36,22 @@ def test_package_info_not_found(official_syncdb: OfficialSyncdb, aur_package_ako
|
||||
mocker.patch("ahriman.core.alpm.pacman.Pacman.package", return_value=[])
|
||||
with pytest.raises(UnknownPackageError, match=aur_package_akonadi.name):
|
||||
assert official_syncdb.package_info(aur_package_akonadi.name, pacman=pacman)
|
||||
|
||||
|
||||
def test_package_provided_by(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURPackage, pacman: Pacman,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must search by provides in database
|
||||
"""
|
||||
mocker.patch("ahriman.models.aur_package.AURPackage.from_pacman", return_value=aur_package_akonadi)
|
||||
get_mock = mocker.patch("ahriman.core.alpm.pacman.Pacman.provided_by", return_value=[aur_package_akonadi])
|
||||
|
||||
assert official_syncdb.package_provided_by(aur_package_akonadi.name, pacman=pacman) == [aur_package_akonadi]
|
||||
get_mock.assert_called_once_with(aur_package_akonadi.name)
|
||||
|
||||
|
||||
def test_package_provided_by_no_pacman(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURPackage) -> None:
|
||||
"""
|
||||
must return empty list if no pacman set
|
||||
"""
|
||||
assert official_syncdb.package_provided_by(aur_package_akonadi.name, pacman=None) == []
|
||||
|
@ -5,16 +5,53 @@ from unittest.mock import call as MockCall
|
||||
|
||||
from ahriman.core.alpm.pacman import Pacman
|
||||
from ahriman.core.alpm.remote import Remote
|
||||
from ahriman.core.exceptions import UnknownPackageError
|
||||
from ahriman.models.aur_package import AURPackage
|
||||
|
||||
|
||||
def test_info(pacman: Pacman, mocker: MockerFixture) -> None:
|
||||
def test_info(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must call info method
|
||||
"""
|
||||
info_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_info")
|
||||
Remote.info("ahriman", pacman=pacman)
|
||||
info_mock.assert_called_once_with("ahriman", pacman=pacman)
|
||||
info_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_info", return_value=aur_package_ahriman)
|
||||
assert Remote.info(aur_package_ahriman.name, pacman=pacman) == aur_package_ahriman
|
||||
info_mock.assert_called_once_with(aur_package_ahriman.name, pacman=pacman)
|
||||
|
||||
|
||||
def test_info_not_found(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise UnknownPackageError if no package found and search by provides is disabled
|
||||
"""
|
||||
mocker.patch("ahriman.core.alpm.remote.Remote.package_info",
|
||||
side_effect=UnknownPackageError(aur_package_ahriman.name))
|
||||
with pytest.raises(UnknownPackageError):
|
||||
Remote.info(aur_package_ahriman.name, pacman=pacman)
|
||||
|
||||
|
||||
def test_info_include_provides(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must perform search through provides list is set
|
||||
"""
|
||||
mocker.patch("ahriman.core.alpm.remote.Remote.package_info",
|
||||
side_effect=UnknownPackageError(aur_package_ahriman.name))
|
||||
provided_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_provided_by",
|
||||
return_value=[aur_package_ahriman])
|
||||
|
||||
assert Remote.info(aur_package_ahriman.name, pacman=pacman, include_provides=True) == aur_package_ahriman
|
||||
provided_mock.assert_called_once_with(aur_package_ahriman.name, pacman=pacman)
|
||||
|
||||
|
||||
def test_info_include_provides_not_found(aur_package_ahriman: AURPackage, pacman: Pacman,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise UnknownPackageError if no package found and search by provides returns empty list
|
||||
"""
|
||||
mocker.patch("ahriman.core.alpm.remote.Remote.package_info",
|
||||
side_effect=UnknownPackageError(aur_package_ahriman.name))
|
||||
mocker.patch("ahriman.core.alpm.remote.Remote.package_provided_by", return_value=[])
|
||||
|
||||
with pytest.raises(UnknownPackageError):
|
||||
Remote.info("ahriman", pacman=pacman, include_provides=True)
|
||||
|
||||
|
||||
def test_multisearch(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: MockerFixture) -> None:
|
||||
@ -22,10 +59,13 @@ def test_multisearch(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: Mo
|
||||
must search in AUR with multiple words
|
||||
"""
|
||||
terms = ["ahriman", "is", "cool"]
|
||||
search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.search", return_value=[aur_package_ahriman])
|
||||
search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_search", return_value=[aur_package_ahriman])
|
||||
|
||||
assert Remote.multisearch(*terms, pacman=pacman) == [aur_package_ahriman]
|
||||
search_mock.assert_has_calls([MockCall("ahriman", pacman=pacman), MockCall("cool", pacman=pacman)])
|
||||
assert Remote.multisearch(*terms, pacman=pacman, search_by="name") == [aur_package_ahriman]
|
||||
search_mock.assert_has_calls([
|
||||
MockCall("ahriman", pacman=pacman, search_by="name"),
|
||||
MockCall("cool", pacman=pacman, search_by="name"),
|
||||
])
|
||||
|
||||
|
||||
def test_multisearch_empty(pacman: Pacman, mocker: MockerFixture) -> None:
|
||||
@ -33,7 +73,7 @@ def test_multisearch_empty(pacman: Pacman, mocker: MockerFixture) -> None:
|
||||
must return empty list if no long terms supplied
|
||||
"""
|
||||
terms = ["it", "is"]
|
||||
search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.search")
|
||||
search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_search")
|
||||
|
||||
assert Remote.multisearch(*terms, pacman=pacman) == []
|
||||
search_mock.assert_not_called()
|
||||
@ -43,9 +83,9 @@ def test_multisearch_single(aur_package_ahriman: AURPackage, pacman: Pacman, moc
|
||||
"""
|
||||
must search in AUR with one word
|
||||
"""
|
||||
search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.search", return_value=[aur_package_ahriman])
|
||||
search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_search", return_value=[aur_package_ahriman])
|
||||
assert Remote.multisearch("ahriman", pacman=pacman) == [aur_package_ahriman]
|
||||
search_mock.assert_called_once_with("ahriman", pacman=pacman)
|
||||
search_mock.assert_called_once_with("ahriman", pacman=pacman, search_by=None)
|
||||
|
||||
|
||||
def test_remote_git_url(remote: Remote) -> None:
|
||||
@ -53,7 +93,7 @@ def test_remote_git_url(remote: Remote) -> None:
|
||||
must raise NotImplemented for missing remote git url
|
||||
"""
|
||||
with pytest.raises(NotImplementedError):
|
||||
remote.remote_git_url("package", "repositorys")
|
||||
remote.remote_git_url("package", "repositories")
|
||||
|
||||
|
||||
def test_remote_web_url(remote: Remote) -> None:
|
||||
@ -69,8 +109,8 @@ def test_search(pacman: Pacman, mocker: MockerFixture) -> None:
|
||||
must call search method
|
||||
"""
|
||||
search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_search")
|
||||
Remote.search("ahriman", pacman=pacman)
|
||||
search_mock.assert_called_once_with("ahriman", pacman=pacman)
|
||||
Remote.search("ahriman", pacman=pacman, search_by="name")
|
||||
search_mock.assert_called_once_with("ahriman", pacman=pacman, search_by="name")
|
||||
|
||||
|
||||
def test_package_info(remote: Remote, pacman: Pacman) -> None:
|
||||
@ -81,9 +121,16 @@ def test_package_info(remote: Remote, pacman: Pacman) -> None:
|
||||
remote.package_info("package", pacman=pacman)
|
||||
|
||||
|
||||
def test_package_provided_by(remote: Remote, pacman: Pacman) -> None:
|
||||
"""
|
||||
must return empty list for provides method
|
||||
"""
|
||||
assert remote.package_provided_by("package", pacman=pacman) == []
|
||||
|
||||
|
||||
def test_package_search(remote: Remote, pacman: Pacman) -> None:
|
||||
"""
|
||||
must raise NotImplemented for missing package search method
|
||||
"""
|
||||
with pytest.raises(NotImplementedError):
|
||||
remote.package_search("package", pacman=pacman)
|
||||
remote.package_search("package", pacman=pacman, search_by=None)
|
||||
|
@ -282,3 +282,10 @@ def test_packages_with_provides(pacman: Pacman) -> None:
|
||||
"""
|
||||
assert "sh" in pacman.packages()
|
||||
assert "mysql" in pacman.packages() # mariadb
|
||||
|
||||
|
||||
def test_package_provided_by(pacman: Pacman) -> None:
|
||||
"""
|
||||
must search through the provides lists
|
||||
"""
|
||||
assert list(pacman.provided_by("sh"))
|
||||
|
@ -10,7 +10,7 @@ from ahriman.core.exceptions import PacmanError
|
||||
|
||||
def test_copy(mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must copy loca database file
|
||||
must copy local database file
|
||||
"""
|
||||
copy_mock = mocker.patch("shutil.copy")
|
||||
PacmanDatabase.copy(Path("remote"), Path("local"))
|
||||
|
@ -102,6 +102,15 @@ def test_check_loaded_architecture(configuration: Configuration) -> None:
|
||||
configuration.check_loaded()
|
||||
|
||||
|
||||
def test_copy_from(configuration: Configuration) -> None:
|
||||
"""
|
||||
must copy values from another instance
|
||||
"""
|
||||
instance = Configuration()
|
||||
instance.copy_from(configuration)
|
||||
assert instance.dump() == configuration.dump()
|
||||
|
||||
|
||||
def test_dump(configuration: Configuration) -> None:
|
||||
"""
|
||||
dump must not be empty
|
||||
|
@ -62,8 +62,8 @@ def test_validate_is_ip_address(validator: Validator, mocker: MockerFixture) ->
|
||||
validator._validate_is_ip_address([], "field", "localhost")
|
||||
|
||||
validator._validate_is_ip_address([], "field", "127.0.0.1")
|
||||
validator._validate_is_ip_address([], "field", "::")
|
||||
validator._validate_is_ip_address([], "field", "0.0.0.0")
|
||||
validator._validate_is_ip_address([], "field", "::") # nosec
|
||||
validator._validate_is_ip_address([], "field", "0.0.0.0") # nosec
|
||||
|
||||
validator._validate_is_ip_address([], "field", "random string")
|
||||
|
||||
|
@ -93,6 +93,27 @@ def test_logs_insert_get_multi(database: SQLite, package_ahriman: Package) -> No
|
||||
]
|
||||
|
||||
|
||||
def test_logs_rotate_remove_older(database: SQLite, package_ahriman: Package,
|
||||
package_python_schedule: Package) -> None:
|
||||
"""
|
||||
must correctly remove old records
|
||||
"""
|
||||
database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "1", "p1"), 42.0, "message 1"))
|
||||
database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "1", "p1"), 43.0, "message 2"))
|
||||
database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "2", "p2"), 44.0, "message 3"))
|
||||
database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "2", "p2"), 45.0, "message 4"))
|
||||
database.logs_insert(LogRecord(LogRecordId(package_python_schedule.base, "3", "p1"), 40.0, "message 5"))
|
||||
|
||||
database.logs_rotate(1)
|
||||
assert database.logs_get(package_ahriman.base) == [
|
||||
LogRecord(LogRecordId(package_ahriman.base, "2", "p2"), 44.0, "message 3"),
|
||||
LogRecord(LogRecordId(package_ahriman.base, "2", "p2"), 45.0, "message 4"),
|
||||
]
|
||||
assert database.logs_get(package_python_schedule.base) == [
|
||||
LogRecord(LogRecordId(package_python_schedule.base, "3", "p1"), 40.0, "message 5"),
|
||||
]
|
||||
|
||||
|
||||
def test_logs_rotate_remove_all(database: SQLite, package_ahriman: Package) -> None:
|
||||
"""
|
||||
must remove all records when rotating with keep_last_records is 0
|
||||
|
@ -1,3 +1,4 @@
|
||||
import pytest
|
||||
import sqlite3
|
||||
|
||||
from pytest_mock import MockerFixture
|
||||
@ -24,15 +25,29 @@ def test_factory(database: SQLite) -> None:
|
||||
|
||||
def test_with_connection(database: SQLite, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must run query inside connection
|
||||
must run query inside connection and close it at the end
|
||||
"""
|
||||
connection_mock = MagicMock()
|
||||
connect_mock = mocker.patch("sqlite3.connect", return_value=connection_mock)
|
||||
|
||||
database.with_connection(lambda conn: conn.execute("select 1"))
|
||||
connect_mock.assert_called_once_with(database.path, detect_types=sqlite3.PARSE_DECLTYPES)
|
||||
connection_mock.__enter__().set_trace_callback.assert_called_once_with(database.logger.debug)
|
||||
connection_mock.__enter__().commit.assert_not_called()
|
||||
connection_mock.set_trace_callback.assert_called_once_with(database.logger.debug)
|
||||
connection_mock.commit.assert_not_called()
|
||||
connection_mock.close.assert_called_once_with()
|
||||
|
||||
|
||||
def test_with_connection_close(database: SQLite, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must close connection on errors
|
||||
"""
|
||||
connection_mock = MagicMock()
|
||||
connection_mock.commit.side_effect = Exception
|
||||
mocker.patch("sqlite3.connect", return_value=connection_mock)
|
||||
|
||||
with pytest.raises(Exception):
|
||||
database.with_connection(lambda conn: conn.execute("select 1"), commit=True)
|
||||
connection_mock.close.assert_called_once_with()
|
||||
|
||||
|
||||
def test_with_connection_with_commit(database: SQLite, mocker: MockerFixture) -> None:
|
||||
@ -44,4 +59,4 @@ def test_with_connection_with_commit(database: SQLite, mocker: MockerFixture) ->
|
||||
mocker.patch("sqlite3.connect", return_value=connection_mock)
|
||||
|
||||
database.with_connection(lambda conn: conn.execute("select 1"), commit=True)
|
||||
connection_mock.__enter__().commit.assert_called_once_with()
|
||||
connection_mock.commit.assert_called_once_with()
|
||||
|
@ -285,7 +285,7 @@ def test_set_unknown(client: Client, package_ahriman: Package, mocker: MockerFix
|
||||
|
||||
def test_set_unknown_skip(client: Client, package_ahriman: Package, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must skip unknown status update in case if pacakge is already known
|
||||
must skip unknown status update in case if package is already known
|
||||
"""
|
||||
mocker.patch("ahriman.core.status.Client.package_get", return_value=[(package_ahriman, None)])
|
||||
update_mock = mocker.patch("ahriman.core.status.Client.package_update")
|
||||
|
@ -195,6 +195,32 @@ def test_tree_levels_sorted() -> None:
|
||||
assert third == [leaf2.package, leaf4.package]
|
||||
|
||||
|
||||
def test_tree_levels_provides() -> None:
|
||||
"""
|
||||
must build tree according to provides list
|
||||
"""
|
||||
leaf1 = Leaf(
|
||||
Package(
|
||||
base="package1",
|
||||
version="1.0.0",
|
||||
remote=RemoteSource(source=PackageSource.AUR),
|
||||
packages={"package1": PackageDescription(depends=["package3"])},
|
||||
)
|
||||
)
|
||||
leaf2 = Leaf(
|
||||
Package(
|
||||
base="package2",
|
||||
version="1.0.0",
|
||||
remote=RemoteSource(source=PackageSource.AUR),
|
||||
packages={"package2": PackageDescription(provides=["package3"])},
|
||||
)
|
||||
)
|
||||
|
||||
first, second = Tree([leaf1, leaf2]).levels()
|
||||
assert first == [leaf2.package]
|
||||
assert second == [leaf1.package]
|
||||
|
||||
|
||||
def test_tree_partitions() -> None:
|
||||
"""
|
||||
must divide tree into partitions
|
||||
|
@ -150,6 +150,13 @@ def test_check_output_empty_line(mocker: MockerFixture) -> None:
|
||||
logger_mock.assert_has_calls([MockCall(""), MockCall("hello")])
|
||||
|
||||
|
||||
def test_check_output_encoding_error(resource_path_root: Path) -> None:
|
||||
"""
|
||||
must correctly process unicode encoding error in command output
|
||||
"""
|
||||
assert check_output("cat", str(resource_path_root / "models" / "package_pacman-static_pkgbuild"))
|
||||
|
||||
|
||||
def test_check_user(repository_id: RepositoryId, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must check user correctly
|
||||
|
@ -73,7 +73,7 @@ def test_configuration_sections(configuration: Configuration) -> None:
|
||||
|
||||
def test_on_result(trigger: Trigger) -> None:
|
||||
"""
|
||||
must pass execution nto run method
|
||||
must pass execution to run method
|
||||
"""
|
||||
trigger.on_result(Result(), [])
|
||||
|
||||
|
@ -3,7 +3,7 @@ from ahriman.models.log_record_id import LogRecordId
|
||||
|
||||
def test_init() -> None:
|
||||
"""
|
||||
must correctly assign proces identifier if not set
|
||||
must correctly assign process identifier if not set
|
||||
"""
|
||||
assert LogRecordId("1", "2").process_id == LogRecordId.DEFAULT_PROCESS_ID
|
||||
assert LogRecordId("1", "2", "3").process_id == "3"
|
||||
|
@ -167,15 +167,26 @@ def test_from_aur(package_ahriman: Package, aur_package_ahriman: AURPackage, moc
|
||||
"""
|
||||
must construct package from aur
|
||||
"""
|
||||
mocker.patch("ahriman.core.alpm.remote.AUR.info", return_value=aur_package_ahriman)
|
||||
info_mock = mocker.patch("ahriman.core.alpm.remote.AUR.info", return_value=aur_package_ahriman)
|
||||
|
||||
package = Package.from_aur(package_ahriman.base, package_ahriman.packager)
|
||||
info_mock.assert_called_once_with(package_ahriman.base, include_provides=False)
|
||||
assert package_ahriman.base == package.base
|
||||
assert package_ahriman.version == package.version
|
||||
assert package_ahriman.packages.keys() == package.packages.keys()
|
||||
assert package_ahriman.packager == package.packager
|
||||
|
||||
|
||||
def test_from_aur_include_provides(package_ahriman: Package, aur_package_ahriman: AURPackage,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must construct package from aur by using provides list
|
||||
"""
|
||||
info_mock = mocker.patch("ahriman.core.alpm.remote.AUR.info", return_value=aur_package_ahriman)
|
||||
Package.from_aur(package_ahriman.base, package_ahriman.packager, include_provides=True)
|
||||
info_mock.assert_called_once_with(package_ahriman.base, include_provides=True)
|
||||
|
||||
|
||||
def test_from_build(package_ahriman: Package, mocker: MockerFixture, resource_path_root: Path) -> None:
|
||||
"""
|
||||
must construct package from PKGBUILD
|
||||
@ -269,14 +280,25 @@ def test_from_json_view_3(package_tpacpi_bat_git: Package) -> None:
|
||||
assert Package.from_json(package_tpacpi_bat_git.view()) == package_tpacpi_bat_git
|
||||
|
||||
|
||||
def test_from_official_include_provides(package_ahriman: Package, aur_package_ahriman: AURPackage, pacman: Pacman,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must construct package from official repository
|
||||
"""
|
||||
info_mock = mocker.patch("ahriman.core.alpm.remote.Official.info", return_value=aur_package_ahriman)
|
||||
Package.from_official(package_ahriman.base, pacman, package_ahriman.packager, include_provides=True)
|
||||
info_mock.assert_called_once_with(package_ahriman.base, pacman=pacman, include_provides=True)
|
||||
|
||||
|
||||
def test_from_official(package_ahriman: Package, aur_package_ahriman: AURPackage, pacman: Pacman,
|
||||
mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must construct package from official repository
|
||||
"""
|
||||
mocker.patch("ahriman.core.alpm.remote.Official.info", return_value=aur_package_ahriman)
|
||||
info_mock = mocker.patch("ahriman.core.alpm.remote.Official.info", return_value=aur_package_ahriman)
|
||||
|
||||
package = Package.from_official(package_ahriman.base, pacman, package_ahriman.packager)
|
||||
info_mock.assert_called_once_with(package_ahriman.base, pacman=pacman, include_provides=False)
|
||||
assert package_ahriman.base == package.base
|
||||
assert package_ahriman.version == package.version
|
||||
assert package_ahriman.packages.keys() == package.packages.keys()
|
||||
|
@ -3,9 +3,7 @@ import pytest
|
||||
from io import BytesIO, StringIO
|
||||
from pathlib import Path
|
||||
from pytest_mock import MockerFixture
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from ahriman.core.exceptions import EncodeError
|
||||
from ahriman.models.pkgbuild import Pkgbuild
|
||||
from ahriman.models.pkgbuild_patch import PkgbuildPatch
|
||||
|
||||
@ -46,18 +44,6 @@ def test_from_file_latin(pkgbuild_ahriman: Pkgbuild, mocker: MockerFixture) -> N
|
||||
load_mock.assert_called_once_with(pytest.helpers.anyvar(int))
|
||||
|
||||
|
||||
def test_from_file_unknown_encoding(pkgbuild_ahriman: Pkgbuild, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise exception when encoding is unknown
|
||||
"""
|
||||
open_mock = mocker.patch("pathlib.Path.open")
|
||||
io_mock = open_mock.return_value.__enter__.return_value = MagicMock()
|
||||
io_mock.read.return_value.decode.side_effect = EncodeError(pkgbuild_ahriman.DEFAULT_ENCODINGS)
|
||||
|
||||
with pytest.raises(EncodeError):
|
||||
assert Pkgbuild.from_file(Path("local"))
|
||||
|
||||
|
||||
def test_from_io(pkgbuild_ahriman: Pkgbuild, mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must correctly load from io
|
||||
|
@ -1,8 +1,8 @@
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
|
||||
from aiohttp.test_utils import TestClient
|
||||
from aiohttp.web import Application, Resource, UrlMappingMatchInfo
|
||||
from asyncio import BaseEventLoop
|
||||
from collections.abc import Awaitable, Callable
|
||||
from marshmallow import Schema
|
||||
from pytest_mock import MockerFixture
|
||||
@ -164,15 +164,13 @@ def application_with_auth(configuration: Configuration, user: User, spawner: Spa
|
||||
return application
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client(application: Application, event_loop: BaseEventLoop, aiohttp_client: Any,
|
||||
mocker: MockerFixture) -> TestClient:
|
||||
@pytest_asyncio.fixture
|
||||
async def client(application: Application, aiohttp_client: Any, mocker: MockerFixture) -> TestClient:
|
||||
"""
|
||||
web client fixture
|
||||
|
||||
Args:
|
||||
application(Application): application fixture
|
||||
event_loop(BaseEventLoop): context event loop
|
||||
aiohttp_client(Any): aiohttp client fixture
|
||||
mocker(MockerFixture): mocker object
|
||||
|
||||
@ -180,37 +178,35 @@ def client(application: Application, event_loop: BaseEventLoop, aiohttp_client:
|
||||
TestClient: web client test instance
|
||||
"""
|
||||
mocker.patch("pathlib.Path.iterdir", return_value=[])
|
||||
return event_loop.run_until_complete(aiohttp_client(application))
|
||||
return await aiohttp_client(application)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client_with_auth(application_with_auth: Application, event_loop: BaseEventLoop, aiohttp_client: Any,
|
||||
mocker: MockerFixture) -> TestClient:
|
||||
"""
|
||||
web client fixture with full authorization functions
|
||||
|
||||
Args:
|
||||
application_with_auth(Application): application fixture
|
||||
event_loop(BaseEventLoop): context event loop
|
||||
aiohttp_client(Any): aiohttp client fixture
|
||||
mocker(MockerFixture): mocker object
|
||||
|
||||
Returns:
|
||||
TestClient: web client test instance
|
||||
"""
|
||||
mocker.patch("pathlib.Path.iterdir", return_value=[])
|
||||
return event_loop.run_until_complete(aiohttp_client(application_with_auth))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client_with_oauth_auth(application_with_auth: Application, event_loop: BaseEventLoop, aiohttp_client: Any,
|
||||
@pytest_asyncio.fixture
|
||||
async def client_with_auth(application_with_auth: Application, aiohttp_client: Any,
|
||||
mocker: MockerFixture) -> TestClient:
|
||||
"""
|
||||
web client fixture with full authorization functions
|
||||
|
||||
Args:
|
||||
application_with_auth(Application): application fixture
|
||||
event_loop(BaseEventLoop): context event loop
|
||||
aiohttp_client(Any): aiohttp client fixture
|
||||
mocker(MockerFixture): mocker object
|
||||
|
||||
Returns:
|
||||
TestClient: web client test instance
|
||||
"""
|
||||
mocker.patch("pathlib.Path.iterdir", return_value=[])
|
||||
return await aiohttp_client(application_with_auth)
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
async def client_with_oauth_auth(application_with_auth: Application, aiohttp_client: Any,
|
||||
mocker: MockerFixture) -> TestClient:
|
||||
"""
|
||||
web client fixture with full authorization functions
|
||||
|
||||
Args:
|
||||
application_with_auth(Application): application fixture
|
||||
aiohttp_client(Any): aiohttp client fixture
|
||||
mocker(MockerFixture): mocker object
|
||||
|
||||
@ -219,4 +215,4 @@ def client_with_oauth_auth(application_with_auth: Application, event_loop: BaseE
|
||||
"""
|
||||
mocker.patch("pathlib.Path.iterdir", return_value=[])
|
||||
application_with_auth[AuthKey] = MagicMock(spec=OAuth)
|
||||
return event_loop.run_until_complete(aiohttp_client(application_with_auth))
|
||||
return await aiohttp_client(application_with_auth)
|
||||
|
59
tests/ahriman/web/middlewares/test_metrics_handler.py
Normal file
59
tests/ahriman/web/middlewares/test_metrics_handler.py
Normal file
@ -0,0 +1,59 @@
|
||||
import importlib
|
||||
import pytest
|
||||
import sys
|
||||
|
||||
from aiohttp.web import HTTPNotFound
|
||||
from pytest_mock import MockerFixture
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
import ahriman.web.middlewares.metrics_handler as metrics_handler
|
||||
|
||||
|
||||
async def test_metrics(mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must return metrics methods if library is available
|
||||
"""
|
||||
metrics_mock = AsyncMock()
|
||||
mocker.patch.object(metrics_handler, "aiohttp_openmetrics", metrics_mock)
|
||||
|
||||
await metrics_handler.metrics(42)
|
||||
metrics_mock.metrics.assert_called_once_with(42)
|
||||
|
||||
|
||||
async def test_metrics_dummy(mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must raise HTTPNotFound if no module found
|
||||
"""
|
||||
mocker.patch.object(metrics_handler, "aiohttp_openmetrics", None)
|
||||
with pytest.raises(HTTPNotFound):
|
||||
await metrics_handler.metrics(None)
|
||||
|
||||
|
||||
async def test_metrics_handler() -> None:
|
||||
"""
|
||||
must return metrics handler if library is available
|
||||
"""
|
||||
assert metrics_handler.metrics_handler() == metrics_handler.aiohttp_openmetrics.metrics_middleware
|
||||
|
||||
|
||||
async def test_metrics_handler_dummy(mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must return dummy handler if no module found
|
||||
"""
|
||||
mocker.patch.object(metrics_handler, "aiohttp_openmetrics", None)
|
||||
handler = metrics_handler.metrics_handler()
|
||||
|
||||
async def handle(result: int) -> int:
|
||||
return result
|
||||
|
||||
assert await handler(42, handle) == 42
|
||||
|
||||
|
||||
def test_import_openmetrics_missing(mocker: MockerFixture) -> None:
|
||||
"""
|
||||
must correctly process missing module
|
||||
"""
|
||||
mocker.patch.dict(sys.modules, {"aiohttp_openmetrics": None})
|
||||
importlib.reload(metrics_handler)
|
||||
|
||||
assert metrics_handler.aiohttp_openmetrics is None
|
1
tests/ahriman/web/schemas/test_any_schema.py
Normal file
@ -0,0 +1 @@
# schema testing goes in view class tests
@ -3,7 +3,7 @@ from pathlib import Path

from ahriman.core.configuration import Configuration
from ahriman.core.utils import walk
from ahriman.web.routes import _dynamic_routes, setup_routes
from ahriman.web.routes import _dynamic_routes, _identifier, setup_routes


def test_dynamic_routes(resource_path_root: Path, configuration: Configuration) -> None:
@ -22,9 +22,19 @@ def test_dynamic_routes(resource_path_root: Path, configuration: Configuration)
    assert len(set(routes.values())) == len(expected_views)


def test_identifier() -> None:
    """
    must correctly extract route identifiers
    """
    assert _identifier("/") == "_"
    assert _identifier("/api/v1/status") == "_api_v1_status"
    assert _identifier("/api/v1/packages/{package}") == "_api_v1_packages_:package"


def test_setup_routes(application: Application, configuration: Configuration) -> None:
    """
    must generate non-empty list of routes
    """
    application.router._named_resources = {}
    setup_routes(application, configuration)
    assert application.router.routes()
@ -0,0 +1,50 @@
import pytest

from aiohttp.test_utils import TestClient
from aiohttp.web import Response
from pytest_mock import MockerFixture

import ahriman.web.middlewares.metrics_handler as metrics_handler

from ahriman.models.user_access import UserAccess
from ahriman.web.views.v1.status.metrics import MetricsView


async def test_get_permission() -> None:
    """
    must return correct permission for the request
    """
    for method in ("GET",):
        request = pytest.helpers.request("", "", method)
        assert await MetricsView.get_permission(request) == UserAccess.Unauthorized


def test_routes() -> None:
    """
    must return correct routes
    """
    assert MetricsView.ROUTES == ["/api/v1/metrics"]


async def test_get(client: TestClient, mocker: MockerFixture) -> None:
    """
    must return service metrics
    """
    metrics_mock = mocker.patch("ahriman.web.views.v1.status.metrics.metrics", return_value=Response())

    response = await client.get("/api/v1/metrics")
    assert response.ok
    # there is no response validation here, because it is free text, so we check call instead
    metrics_mock.assert_called_once_with(pytest.helpers.anyvar(int))


async def test_get_not_found(client: TestClient, mocker: MockerFixture) -> None:
    """
    must return 404 error if no module found
    """
    mocker.patch.object(metrics_handler, "aiohttp_openmetrics", None)
    response_schema = pytest.helpers.schema_response(MetricsView.get, code=404)

    response = await client.get("/api/v1/metrics")
    assert response.status == 404
    assert not response_schema.validate(await response.json())
@ -1,7 +1,6 @@
[settings]
include = .
logging = logging.ini
database = ../../../ahriman-test.db

[alpm]
database = /var/lib/pacman
@ -31,7 +30,6 @@ triggers_known = ahriman.core.distributed.WorkerLoaderTrigger ahriman.core.distr

[repository]
name = aur
root = ../../../

[sign]
target =
19
tools/__init__.py
Normal file
@ -0,0 +1,19 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
@ -1,5 +1,5 @@
#
# Copyright (c) 2021-2023 ahriman team.
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
@ -1,5 +1,5 @@
#
# Copyright (c) 2021-2023 ahriman team.
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).