Mirror of https://github.com/arcan1s/ahriman.git (synced 2025-07-16 15:29:56 +00:00)

Compare commits: 2.10.2...1a8d3efaf1 (23 commits)
| SHA1 |
| --- |
| 1a8d3efaf1 |
| 6612510d12 |
| 74209acc21 |
| 969352d842 |
| a1db4dc8b8 |
| f6081507c0 |
| 477c473187 |
| 33e68a59e2 |
| 6dfe1b92f2 |
| 5dc6df11c5 |
| d3f6ca24c8 |
| c26a13c562 |
| 9ea3a911f7 |
| ca60317750 |
| 1384efb31d |
| 8c6486c233 |
| a1d0e993a8 |
| 572880eb73 |
| d9eaf17a11 |
| 95e29d16bb |
| 1f2d56e605 |
| 1baf04998d |
| 3a88d00db0 |
.github/workflows/docker-image.yml (vendored, 42 lines)
@@ -1,42 +0,0 @@
name: docker image

on:
  push:
    branches: [ master ]
    tags:
      - '*'
      - '!*rc*'

jobs:
  docker-image:

    runs-on: ubuntu-latest

    steps:
      - name: extract docker metadata
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: |
            arcan1s/ahriman
          tags: |
            type=ref,event=tag
            type=edge

      - name: setup QEMU
        uses: docker/setup-qemu-action@v1

      - name: setup docker buildx
        uses: docker/setup-buildx-action@v1

      - name: login to docker hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: build an image and push
        uses: docker/build-push-action@v2
        with:
          push: true
          tags: ${{ steps.meta.outputs.tags }}
.github/workflows/docker.yml (vendored, new file, 51 lines)
@@ -0,0 +1,51 @@
name: Docker image

on:
  push:
    branches: [ master ]
    tags:
      - '*'
      - '!*rc*'

jobs:
  docker-image:

    runs-on: ubuntu-latest

    permissions:
      packages: write

    steps:
      - uses: docker/setup-qemu-action@v2

      - uses: docker/setup-buildx-action@v2

      - name: Login to docker hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to github container registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract docker metadata
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: |
            arcan1s/ahriman
            ghcr.io/arcan1s/ahriman
          tags: |
            type=semver,pattern={{raw}}
            type=edge

      - name: Build an image and push
        uses: docker/build-push-action@v4
        with:
          push: true
          tags: ${{ steps.meta.outputs.tags }}
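The new workflow publishes the same image to both Docker Hub and the GitHub container registry, tagging releases via `type=semver,pattern={{raw}}` and the master branch as `edge`. A minimal sketch of consuming those tags, assuming a release tag such as 2.10.2 has already been published by this configuration:

```bash
#!/bin/bash
# pull the release tag from Docker Hub and the rolling edge build from ghcr.io
docker pull arcan1s/ahriman:2.10.2
docker pull ghcr.io/arcan1s/ahriman:edge

# list the tags known locally for the pulled image
docker image inspect arcan1s/ahriman:2.10.2 --format '{{join .RepoTags ", "}}'
```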
.github/workflows/release.yml (vendored, 12 lines)
@@ -1,4 +1,4 @@
-name: release
+name: Release

 on:
   push:
@@ -11,25 +11,25 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

-      - name: extract version
+      - name: Extract version
        id: version
        run: echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/}

-      - name: create changelog
+      - name: Create changelog
        id: changelog
        uses: jaywcjlove/changelog-generator@main
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          filter: 'Release \d+\.\d+\.\d+'

-      - name: create archive
+      - name: Create archive
        run: make archive
        env:
          VERSION: ${{ steps.version.outputs.VERSION }}

-      - name: release
+      - name: Publish release
        uses: softprops/action-gh-release@v1
        with:
          body: |
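The renamed Extract version step still derives the version from the pushed tag with plain parameter expansion before handing it to `make archive`. The same expansion can be checked locally; the GITHUB_REF value below is only an example, on a real runner it is set by GitHub:

```bash
#!/bin/bash
# emulate the "Extract version" step outside of CI
GITHUB_REF="refs/tags/2.10.2"        # example value; provided by the runner in the real workflow
VERSION="${GITHUB_REF#refs/tags/}"   # strip the refs/tags/ prefix, leaving "2.10.2"
echo "VERSION=${VERSION}"
```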
.github/workflows/setup.sh (vendored, 6 lines)
@@ -12,7 +12,7 @@ pacman --noconfirm -Syu
# main dependencies
pacman --noconfirm -Sy base-devel devtools git pyalpm python-cerberus python-inflection python-passlib python-requests python-srcinfo python-systemd sudo
# make dependencies
-pacman --noconfirm -Sy python-build python-installer python-wheel
+pacman --noconfirm -Sy python-build python-flit python-installer python-wheel
# optional dependencies
if [[ -z $MINIMAL_INSTALL ]]; then
    # VCS support
@@ -34,8 +34,6 @@ pacman --noconfirm -U ahriman-1.0.0-1-any.pkg.tar.zst
# create machine-id which is required by build tools
systemd-machine-id-setup

# special thing for the container, because /dev/log interface is not available there
sed -i "s/handlers = syslog_handler/handlers = console_handler/g" /etc/ahriman.ini.d/logging.ini
# initial setup command as root
[[ -z $MINIMAL_INSTALL ]] && WEB_ARGS=("--web-port" "8080")
ahriman -a x86_64 service-setup --packager "ahriman bot <ahriman@example.com>" --repository "github" "${WEB_ARGS[@]}"
@@ -48,10 +46,8 @@ if [[ -z $MINIMAL_INSTALL ]]; then
    # run web service (detached)
    sudo -u ahriman -- ahriman -a x86_64 web &
    WEB_PID=$!
    sleep 15s # wait for the web service activation
fi
# add the first package
# the build itself does not really work in the container
sudo -u ahriman -- ahriman package-add --now yay
# check if package was actually installed
test -n "$(find "/var/lib/ahriman/repository/x86_64" -name "yay*pkg*")"
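Two small shell patterns in setup.sh are worth calling out: optional arguments collected in a bash array, and the final assertion that the built package actually reached the repository tree. A hedged, stand-alone sketch of both (paths and the package name come from the script, the error handling is illustrative):

```bash
#!/bin/bash
# optional flags live in an array which stays empty for the minimal install,
# so "${WEB_ARGS[@]}" expands to nothing and the same command line works in both modes
[[ -z $MINIMAL_INSTALL ]] && WEB_ARGS=("--web-port" "8080")
echo ahriman -a x86_64 service-setup "${WEB_ARGS[@]}"

# fail loudly if no built yay package ended up in the repository tree
if [[ -z "$(find "/var/lib/ahriman/repository/x86_64" -name "yay*pkg*")" ]]; then
    echo "package was not built" >&2
    exit 1
fi
```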
@@ -1,4 +1,4 @@
-name: setup
+name: Setup

 on:
   push:
@@ -18,9 +18,9 @@ jobs:
      options: --privileged -w /build

    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

-      - name: setup the minimal service in arch linux container
+      - name: Setup the minimal service in arch linux container
        run: .github/workflows/setup.sh minimal

  run-setup:
@@ -34,7 +34,7 @@ jobs:
      options: --privileged -w /build

    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

-      - name: setup the service in arch linux container
+      - name: Setup the service in arch linux container
        run: .github/workflows/setup.sh
@@ -1,4 +1,4 @@
-name: tests
+name: Tests

 on:
   push:
@@ -18,7 +18,7 @@ jobs:
      options: -w /build

    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3

-      - name: run check and tests in arch linux container
+      - name: Run check and tests in arch linux container
        run: .github/workflows/tests.sh
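Both jobs run their scripts inside an Arch Linux container with the checkout mounted as /build, which is what the `-w /build` (and, for the setup jobs, `--privileged`) options configure. A rough local equivalent; the archlinux image name is an assumption, since the hunks only show the container options:

```bash
#!/bin/bash
# replay the CI test job in a throwaway container started from the repository checkout
docker run --rm --privileged -v "$PWD:/build" -w /build archlinux \
    .github/workflows/tests.sh
```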
.pylintrc (804 lines)
@@ -1,28 +1,78 @@
|
||||
[MASTER]
|
||||
[MAIN]
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
|
||||
# in a server-like mode.
|
||||
clear-cache-post-run=no
|
||||
|
||||
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||
# all available extensions.
|
||||
#enable-all-extensions=
|
||||
|
||||
# In error mode, messages with a category besides ERROR or FATAL are
|
||||
# suppressed, and no reports are done by default. Error mode is compatible with
|
||||
# disabling specific errors.
|
||||
#errors-only=
|
||||
|
||||
# Always return a 0 (non-error) status code, even if lint errors are found.
|
||||
# This is primarily useful in continuous integration scripts.
|
||||
#exit-zero=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-allow-list=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
|
||||
# for backward compatibility.)
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Specify a score threshold to be exceeded before program exits with error.
|
||||
fail-under=10.0
|
||||
# Return non-zero exit code if any of these messages/categories are detected,
|
||||
# even if score is above --fail-under value. Syntax same as enable. Messages
|
||||
# specified are enabled, while categories only check already-enabled messages.
|
||||
fail-on=
|
||||
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
# Specify a score threshold under which the program will exit with error.
|
||||
fail-under=10
|
||||
|
||||
# Interpret the stdin as a python script, whose filename needs to be passed as
|
||||
# the module_or_package argument.
|
||||
#from-stdin=
|
||||
|
||||
# Files or directories to be skipped. They should be base names, not paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regex patterns to the blacklist. The
|
||||
# regex matches against base names, not paths.
|
||||
ignore-patterns=
|
||||
# Add files or directories matching the regular expressions patterns to the
|
||||
# ignore-list. The regex matches against paths and can be in Posix or Windows
|
||||
# format. Because '\\' represents the directory delimiter on Windows systems,
|
||||
# it can't be used as an escape character.
|
||||
ignore-paths=
|
||||
|
||||
# Files or directories matching the regular expression patterns are skipped.
|
||||
# The regex matches against base names, not paths. The default value ignores
|
||||
# Emacs file locks
|
||||
ignore-patterns=^\.#
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use.
|
||||
jobs=0
|
||||
# number of processors available to use, and will cap the count on Windows to
|
||||
# avoid hangs.
|
||||
jobs=1
|
||||
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
# object. This can help the performance when dealing with large functions or
|
||||
@ -36,6 +86,19 @@ load-plugins=
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Minimum Python version to use for version dependent checks. Will default to
|
||||
# the version used to run pylint.
|
||||
py-version=3.11
|
||||
|
||||
# Discover python modules and packages in the file system subtree.
|
||||
recursive=no
|
||||
|
||||
# Add paths to the list of the source roots. Supports globbing patterns. The
|
||||
# source root is an absolute path or a path relative to the current working
|
||||
# directory used to determine a package namespace for modules located under the
|
||||
# source root.
|
||||
source-roots=
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
@ -44,120 +107,8 @@ suggestion-mode=yes
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
|
||||
confidence=
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
missing-module-docstring,
|
||||
line-too-long,
|
||||
no-name-in-module,
|
||||
import-outside-toplevel,
|
||||
invalid-name,
|
||||
raise-missing-from,
|
||||
wrong-import-order,
|
||||
too-few-public-methods,
|
||||
too-many-instance-attributes,
|
||||
broad-except,
|
||||
too-many-ancestors,
|
||||
fixme,
|
||||
too-many-arguments,
|
||||
duplicate-code,
|
||||
cyclic-import,
|
||||
confusing-with-statement,
|
||||
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
|
||||
# which contain the number of messages in each category, as well as 'statement'
|
||||
# which is the total number of statements analyzed. This score is used by the
|
||||
# global evaluation report (RP0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
#msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=text
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=400
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
# In verbose mode, extra non-checker-related info will be displayed.
|
||||
#verbose=
|
||||
|
||||
|
||||
[BASIC]
|
||||
@ -166,13 +117,15 @@ single-line-if-stmt=no
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style.
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
@ -192,20 +145,30 @@ bad-names-rgxs=
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style.
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style.
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
@ -217,7 +180,8 @@ docstring-min-length=-1
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style.
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
@ -239,21 +203,22 @@ include-naming-hint=no
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style.
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style.
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style.
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
@ -269,209 +234,56 @@ no-docstring-rgx=^_
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type alias names. If left empty, type
|
||||
# alias names will be checked with the set naming style.
|
||||
#typealias-rgx=
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style.
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
[CLASSES]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
asyncSetUp,
|
||||
__post_init__
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Ignore imports when computing similarities.
|
||||
ignore-imports=no
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
#notes-rgx=
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. Available dictionaries: none. To make it work,
|
||||
# install the python-enchant package.
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored. Default to name
|
||||
# with leading underscore.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=optparse,tkinter.tix
|
||||
|
||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
# not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report RP0402 must not be disabled).
|
||||
import-graph=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
# not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# List of regular expressions of class ancestor names to ignore when counting
|
||||
# public methods (see R0903)
|
||||
exclude-too-few-public-methods=
|
||||
|
||||
# List of qualified class names to ignore when counting class parents (see
|
||||
# R0901)
|
||||
ignored-parents=
|
||||
|
||||
# Maximum number of arguments for function / method.
|
||||
max-args=5
|
||||
|
||||
@ -503,35 +315,331 @@ max-statements=50
|
||||
min-public-methods=2
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
__post_init__
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,
|
||||
_fields,
|
||||
_replace,
|
||||
_source,
|
||||
_make
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=cls
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "BaseException, Exception".
|
||||
overgeneral-exceptions=builtins.BaseException,
|
||||
builtins.Exception
|
||||
# Exceptions that will emit a warning when caught.
|
||||
overgeneral-exceptions=builtins.BaseException,builtins.Exception
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1000
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow explicit reexports by alias from a package __init__.
|
||||
allow-reexport-from-package=no
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of external dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||
# external) dependencies to the given file (report RP0402 must not be
|
||||
# disabled).
|
||||
import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||
# UNDEFINED.
|
||||
confidence=HIGH,
|
||||
CONTROL_FLOW,
|
||||
INFERENCE,
|
||||
INFERENCE_FAILURE,
|
||||
UNDEFINED
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then re-enable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
missing-module-docstring,
|
||||
line-too-long,
|
||||
no-name-in-module,
|
||||
import-outside-toplevel,
|
||||
invalid-name,
|
||||
raise-missing-from,
|
||||
wrong-import-order,
|
||||
too-few-public-methods,
|
||||
too-many-instance-attributes,
|
||||
broad-except,
|
||||
fixme,
|
||||
too-many-arguments,
|
||||
duplicate-code,
|
||||
cyclic-import,
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[METHOD_ARGS]
|
||||
|
||||
# List of qualified names (i.e., library.method) which require a timeout
|
||||
# parameter e.g. 'requests.api.get,requests.api.post'
|
||||
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
notes-rgx=
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit,argparse.parse_error
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
|
||||
# 'convention', and 'info' which contain the number of messages in each
|
||||
# category, as well as 'statement' which is the total number of statements
|
||||
# analyzed. This score is used by the global evaluation report (RP0004).
|
||||
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
#output-format=
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Comments are removed from the similarity computation
|
||||
ignore-comments=yes
|
||||
|
||||
# Docstrings are removed from the similarity computation
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Imports are removed from the similarity computation
|
||||
ignore-imports=yes
|
||||
|
||||
# Signatures are removed from the similarity computation
|
||||
ignore-signatures=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. No available dictionaries : You need to install
|
||||
# both the python package and the system dependency for enchant to work..
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should be considered directives if they
|
||||
# appear at the beginning of a comment and should not be checked.
|
||||
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of symbolic message names to ignore for Mixin members.
|
||||
ignored-checks-for-mixins=no-member,
|
||||
not-async-context-manager,
|
||||
not-context-manager,
|
||||
attribute-defined-outside-init
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# Regex pattern to define which classes are considered mixins.
|
||||
mixin-class-rgx=.*[Mm]ixin
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of names allowed to shadow builtins
|
||||
allowed-redefined-builtins=
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
|
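The refreshed .pylintrc pins py-version to 3.11, renames the [MASTER] section to [MAIN] and keeps fail-under at 10. A hedged sketch of running pylint against this configuration; the src/ahriman path comes from elsewhere in this diff, while driving pylint directly (rather than through make check or tox) is an assumption:

```bash
#!/bin/bash
# full run with the repository configuration; exits non-zero if the score drops below fail-under
pylint --rcfile .pylintrc src/ahriman

# narrow run limited to a single checker, as described in the disable/enable comments above
pylint --rcfile .pylintrc --disable=all --enable=similarities src/ahriman
```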
@@ -21,4 +21,3 @@ python:
      - docs
      - s3
      - web
-  system_packages: true
@@ -46,7 +46,7 @@ Again, the most checks can be performed by `make check` command, though some add
            int: result

        Raises:
-            RuntimeException: a local function error occurs
+            RuntimeError: a local function error occurs

        Examples:
            Very informative example how to use this function, e.g.::
@@ -85,7 +85,7 @@ Again, the most checks can be performed by `make check` command, though some add

        Args:
            *args(Any): positional arguments
-            **kwargs(Any): keyword arguments
+            **kwargs(Any): keyword arguments
        """
        self.instance_attribute = ""
    ```
@@ -130,6 +130,12 @@ Again, the most checks can be performed by `make check` command, though some add
* Configuration interactions must go through `ahriman.core.configuration.Configuration` class instance.
* In case if class load requires some actions, it is recommended to create class method which can be used for class instantiating.
* The code must follow the exception safety, unless it is explicitly asked by end user. It means that most exceptions must be handled and printed to log, no other actions must be done (e.g. raising another exception).
+* Exceptions without parameters should be raised without parentheses, e.g.:
+
+  ```python
+  raise RuntimeError
+  ```
+
* For the external command `ahriman.core.util.check_output` function must be used.
* Every temporary file/directory must be removed at the end of processing, no matter what. The `tempfile` module provides good ways to do it.
* Import order must be the following:
@@ -158,7 +164,7 @@ Again, the most checks can be performed by `make check` command, though some add
* One file should define only one class, exception is class satellites in case if file length remains less than 400 lines.
* It is possible to create file which contains some functions (e.g. `ahriman.core.util`), but in this case you would need to define `__all__` attribute.
* The file size mentioned above must be applicable in general. In case of big classes consider splitting them into traits. Note, however, that `pylint` includes comments and docstrings into counter, thus you need to check file size by other tools.
-* No global variable is allowed outside of `ahriman.version` module. `ahriman.core.context` is also special case.
+* No global variable is allowed outside of `ahriman` module. `ahriman.core.context` is also special case.
* Single quotes are not allowed. The reason behind this restriction is the fact that docstrings must be written by using double quotes only, and we would like to make style consistent.
* If your class writes anything to log, the `ahriman.core.log.LazyLogging` trait must be used.
* Web API methods must be documented by using `aiohttp_apispec` library. Schema testing mostly should be implemented in related view class tests. Recommended example for documentation (excluding comments):
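As the hunk headers note, most of these conventions are verified automatically. A minimal sketch of the local verification loop, assuming only the commands already referenced in this diff (`make check` from the hunk headers, `tox -e docs-html` from the Makefile):

```bash
#!/bin/bash
# run the project checks before sending a change
make check
# rebuild the documentation the same way the Makefile's html target does
tox -e docs-html
```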
@@ -11,6 +11,7 @@ ENV AHRIMAN_PACKAGER="ahriman bot <ahriman@example.com>"
ENV AHRIMAN_PACMAN_MIRROR=""
ENV AHRIMAN_PORT=""
ENV AHRIMAN_REPOSITORY="aur-clone"
+ENV AHRIMAN_REPOSITORY_SERVER=""
ENV AHRIMAN_REPOSITORY_ROOT="/var/lib/ahriman/ahriman"
ENV AHRIMAN_UNIX_SOCKET=""
ENV AHRIMAN_USER="ahriman"
@@ -29,7 +30,7 @@ COPY "docker/install-aur-package.sh" "/usr/local/bin/install-aur-package"
## install package dependencies
## darcs is not installed by reasons, because it requires a lot haskell packages which dramatically increase image size
RUN pacman -Sy --noconfirm --asdeps devtools git pyalpm python-cerberus python-inflection python-passlib python-requests python-srcinfo && \
-    pacman -Sy --noconfirm --asdeps python-build python-installer python-wheel && \
+    pacman -Sy --noconfirm --asdeps python-build python-flit python-installer python-wheel && \
    pacman -Sy --noconfirm --asdeps breezy mercurial python-aiohttp python-aiohttp-cors python-boto3 python-cryptography python-jinja python-requests-unixsocket python-systemd rsync subversion && \
    runuser -u build -- install-aur-package python-aioauth-client python-aiohttp-apispec-git python-aiohttp-jinja2 \
        python-aiohttp-debugtoolbar python-aiohttp-session python-aiohttp-security
@@ -39,7 +40,7 @@ RUN pacman -Sy --noconfirm --asdeps devtools git pyalpm python-cerberus python-i
COPY --chown=build . "/home/build/ahriman"
## create package archive and install it
RUN cd "/home/build/ahriman" && \
-    make VERSION=$(python -c "from src.ahriman.version import __version__; print(__version__)") archlinux && \
+    make VERSION=$(python -c "from src.ahriman import __version__; print(__version__)") archlinux && \
    cp ./*-src.tar.xz "package/archlinux" && \
    cd "package/archlinux" && \
    runuser -u build -- makepkg --noconfirm --install --skipchecksums && \
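The Dockerfile gains an AHRIMAN_REPOSITORY_SERVER variable which the entrypoint (shown later in this diff) forwards as --server to service-setup. A hedged sketch of building and starting the image locally; the tag, the server URL and the trailing web command are illustrative assumptions:

```bash
#!/bin/bash
# build the image from the repository root and start the web service with a custom repository server
docker build -t ahriman-local .
docker run --rm \
    -e AHRIMAN_PACKAGER="ahriman bot <ahriman@example.com>" \
    -e AHRIMAN_REPOSITORY_SERVER="https://repo.example.com/x86_64" \
    -e AHRIMAN_PORT="8080" \
    -p 8080:8080 \
    ahriman-local web
```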
Makefile (6 lines)
@@ -3,7 +3,7 @@

PROJECT := ahriman

-FILES := AUTHORS CONTRIBUTING.md COPYING Makefile README.md SECURITY.md docs package src setup.py tox.ini web.png
+FILES := AUTHORS CONTRIBUTING.md COPYING Makefile README.md SECURITY.md docs package pyproject.toml src tox.ini web.png
TARGET_FILES := $(addprefix $(PROJECT)/, $(FILES))
IGNORE_FILES := package/archlinux src/.mypy_cache

@@ -38,7 +38,7 @@ html: specification
	tox -e docs-html

push: specification archlinux
-	git add package/archlinux/PKGBUILD src/ahriman/version.py docs/ahriman-architecture.svg docs/ahriman.1 docs/completions/
+	git add package/archlinux/PKGBUILD src/ahriman/__init__.py docs/ahriman-architecture.svg package/share/man/man1/ahriman.1 package/share/bash-completion/completions/_ahriman package/share/zsh/site-functions/_ahriman
	git commit -m "Release $(VERSION)"
	git tag "$(VERSION)"
	git push
@@ -56,4 +56,4 @@ version:
ifndef VERSION
	$(error VERSION is required, but not set)
endif
-	sed -i 's/^__version__ = .*/__version__ = "$(VERSION)"/' src/ahriman/version.py
+	sed -i 's/^__version__ = .*/__version__ = "$(VERSION)"/' src/ahriman/__init__.py
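The version constant now lives in src/ahriman/__init__.py, and `make push` stages the PKGBUILD, man page and shell completions along with it. A sketch of the release flow implied by the version and push targets; the exact order and the placeholder version are assumptions based only on the hunks above:

```bash
#!/bin/bash
# bump the version constant, then build, commit, tag and push the release artifacts
make VERSION=x.y.z version
make VERSION=x.y.z push
```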
@@ -1,8 +1,8 @@
# ArcH linux ReposItory MANager

-[](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml)
-[](https://github.com/arcan1s/ahriman/actions/workflows/run-setup.yml)
-[](https://hub.docker.com/r/arcan1s/ahriman)
+[](https://github.com/arcan1s/ahriman/actions/workflows/run-tests.yml)
+[](https://github.com/arcan1s/ahriman/actions/workflows/run-setup.yml)
+[](https://hub.docker.com/r/arcan1s/ahriman)
[](https://www.codefactor.io/repository/github/arcan1s/ahriman)
[](https://ahriman.readthedocs.io/?badge=latest)

@@ -16,6 +16,7 @@ Wrapper for managing custom repository inspired by [repo-scripts](https://github
* VCS packages support.
* Official repository support.
* Ability to patch AUR packages and even create package from local PKGBUILDs.
* Various rebuild options with ability to automatically bump package version.
* Sign support with gpg (repository, package), multiple packagers support.
* Triggers for repository updates, e.g. synchronization to remote services (rsync, s3 and github) and report generation (email, html, telegram).
* Repository status interface with optional authorization and control options:
@@ -17,6 +17,7 @@ host = $AHRIMAN_HOST
EOF

AHRIMAN_DEFAULT_ARGS=("--architecture" "$AHRIMAN_ARCHITECTURE")
AHRIMAN_DEFAULT_ARGS+=("--repository" "$AHRIMAN_REPOSITORY")
if [ -n "$AHRIMAN_OUTPUT" ]; then
    AHRIMAN_DEFAULT_ARGS+=("--log-handler" "$AHRIMAN_OUTPUT")
fi
@@ -33,13 +34,15 @@ chown "$AHRIMAN_USER":"$AHRIMAN_USER" "$AHRIMAN_GNUPG_HOME"
# run built-in setup command
AHRIMAN_SETUP_ARGS=("--build-as-user" "$AHRIMAN_USER")
AHRIMAN_SETUP_ARGS+=("--packager" "$AHRIMAN_PACKAGER")
AHRIMAN_SETUP_ARGS+=("--repository" "$AHRIMAN_REPOSITORY")
if [ -z "$AHRIMAN_MULTILIB" ]; then
    AHRIMAN_SETUP_ARGS+=("--no-multilib")
fi
if [ -n "$AHRIMAN_PACMAN_MIRROR" ]; then
    AHRIMAN_SETUP_ARGS+=("--mirror" "$AHRIMAN_PACMAN_MIRROR")
fi
if [ -n "$AHRIMAN_REPOSITORY_SERVER" ]; then
    AHRIMAN_SETUP_ARGS+=("--server" "$AHRIMAN_REPOSITORY_SERVER")
fi
if [ -n "$AHRIMAN_PORT" ]; then
    AHRIMAN_SETUP_ARGS+=("--web-port" "$AHRIMAN_PORT")
fi
@@ -59,7 +62,7 @@ systemd-machine-id-setup &> /dev/null
if [ -n "$AHRIMAN_FORCE_ROOT" ]; then
    AHRIMAN_EXECUTABLE=("ahriman")
elif ahriman help-commands-unsafe -- "$@" &> /dev/null; then
-    AHRIMAN_EXECUTABLE=("sudo" "-u" "$AHRIMAN_USER" "--" "ahriman")
+    AHRIMAN_EXECUTABLE=("sudo" "-E" "-u" "$AHRIMAN_USER" "--" "ahriman")
else
    AHRIMAN_EXECUTABLE=("ahriman")
fi
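The only behavioural change in the last hunk is the added -E flag, which keeps the exported AHRIMAN_* variables visible when the entrypoint drops privileges to the ahriman user. A small hedged illustration of the difference; the AHRIMAN_OUTPUT value is an example, not an image default:

```bash
#!/bin/bash
export AHRIMAN_OUTPUT="console"            # example value
# with -E the variable survives the switch to the unprivileged user...
sudo -E -u ahriman -- env | grep '^AHRIMAN_OUTPUT'
# ...without it, a default sudo policy (env_reset) starts with a scrubbed environment
sudo -u ahriman -- env | grep '^AHRIMAN_OUTPUT' || echo "not set"
```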
(File diff suppressed because it is too large. Image size: 829 KiB before, 839 KiB after.)
@ -20,6 +20,14 @@ ahriman.core.configuration.schema module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.configuration.shell\_interpolator module
|
||||
-----------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.configuration.shell_interpolator
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.configuration.validator module
|
||||
-------------------------------------------
|
||||
|
||||
|
@ -76,6 +76,22 @@ ahriman.core.database.migrations.m008\_packagers module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.database.migrations.m009\_local\_source module
|
||||
-----------------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.database.migrations.m009_local_source
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.database.migrations.m010\_version\_based\_logs\_removal module
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.database.migrations.m010_version_based_logs_removal
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
docs/ahriman.core.http.rst (new file, 21 lines)
@@ -0,0 +1,21 @@
ahriman.core.http package
=========================

Submodules
----------

ahriman.core.http.sync\_http\_client module
-------------------------------------------

.. automodule:: ahriman.core.http.sync_http_client
   :members:
   :no-undoc-members:
   :show-inheritance:

Module contents
---------------

.. automodule:: ahriman.core.http
   :members:
   :no-undoc-members:
   :show-inheritance:
@ -36,6 +36,14 @@ ahriman.core.report.jinja\_template module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.report.remote\_call module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.report.remote_call
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.report.report module
|
||||
---------------------------------
|
||||
|
||||
|
@ -14,6 +14,7 @@ Subpackages
|
||||
ahriman.core.database
|
||||
ahriman.core.formatters
|
||||
ahriman.core.gitremote
|
||||
ahriman.core.http
|
||||
ahriman.core.log
|
||||
ahriman.core.report
|
||||
ahriman.core.repository
|
||||
|
@ -20,6 +20,14 @@ ahriman.core.upload.http\_upload module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.upload.remote\_service module
|
||||
------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.upload.remote_service
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.upload.rsync module
|
||||
--------------------------------
|
||||
|
||||
|
@ -220,6 +220,14 @@ ahriman.models.user\_access module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.models.waiter module
|
||||
----------------------------
|
||||
|
||||
.. automodule:: ahriman.models.waiter
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
@ -12,17 +12,6 @@ Subpackages
|
||||
ahriman.models
|
||||
ahriman.web
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
ahriman.version module
|
||||
----------------------
|
||||
|
||||
.. automodule:: ahriman.version
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
@ -36,6 +36,14 @@ ahriman.web.schemas.error\_schema module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.file\_schema module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.schemas.file_schema
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.internal\_status\_schema module
|
||||
---------------------------------------------------
|
||||
|
||||
@ -132,6 +140,22 @@ ahriman.web.schemas.pgp\_key\_schema module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.process\_id\_schema module
|
||||
----------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.schemas.process_id_schema
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.process\_schema module
|
||||
------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.schemas.process_schema
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.remote\_schema module
|
||||
-----------------------------------------
|
||||
|
||||
@ -156,6 +180,14 @@ ahriman.web.schemas.status\_schema module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.update\_flags\_schema module
|
||||
------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.schemas.update_flags_schema
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
@ -20,6 +20,14 @@ ahriman.web.views.service.pgp module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.views.service.process module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.views.service.process
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.views.service.rebuild module
|
||||
----------------------------------------
|
||||
|
||||
@ -60,6 +68,14 @@ ahriman.web.views.service.update module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.views.service.upload module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.views.service.upload
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
@ -37,6 +37,7 @@ This package contains everything required for the most of application actions an
|
||||
* ``ahriman.core.database`` is everything including data and schema migrations for database.
|
||||
* ``ahriman.core.formatters`` package provides ``Printer`` sub-classes for printing data (e.g. package properties) to stdout which are used by some handlers.
|
||||
* ``ahriman.core.gitremote`` is a package with remote PKGBUILD triggers. Should not be called directly.
|
||||
* ``ahriman.core.http`` package provides HTTP clients which can be later used by other classes.
|
||||
* ``ahriman.core.log`` is a log utils package. It includes logger loader class, custom HTTP based logger and access logger for HTTP services with additional filters.
|
||||
* ``ahriman.core.report`` is a package with reporting triggers. Should not be called directly.
|
||||
* ``ahriman.core.repository`` contains several traits and base repository (``ahriman.core.repository.Repository`` class) implementation.
|
||||
@ -168,6 +169,7 @@ This feature is divided into two stages: check AUR for updates and run rebuild fo
|
||||
#. For each level of tree it does:
|
||||
|
||||
#. Download package data from AUR.
|
||||
#. Bump ``pkgrel`` if there is duplicate version in the local repository (see explanation below).
|
||||
#. Build every package in clean chroot.
|
||||
#. Sign packages if required.
|
||||
#. Add packages to database and sign database if required.
|
||||
@ -175,6 +177,20 @@ This feature is divided into two stages: check AUR for updates and run rebuild fo
|
||||
|
||||
After any step any package data is being removed.
|
||||
|
||||
pkgrel bump rules
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
The application is able to automatically bump the package release (``pkgrel``) during the build process if the repository already contains the same version. The version will be incremented as follows (a worked example is given right after the list):
|
||||
|
||||
#. Get version of the remote package.
|
||||
#. Get version of the local package if any.
|
||||
#. If local version is not set, proceed with remote one.
|
||||
#. If local version is set and epoch or package version (``pkgver``) are different, proceed with remote version.
|
||||
#. If local version is set and remote version is newer than local one, proceed with remote.
|
||||
#. Extract ``pkgrel`` value.
|
||||
#. If it has ``major.minor`` notation (e.g. ``1.1``), then increment last part by 1, e.g. ``1.1 -> 1.2``, ``1.0.1 -> 1.0.2``.
|
||||
#. If ``pkgrel`` is a number (e.g. ``1``), then append 1 to the end of the string, e.g. ``1 -> 1.1``.
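
For example (using a hypothetical package), following the rules above: if the local repository already contains version ``1.0.0-1`` and AUR still provides ``1.0.0-1``, the release is a plain number and becomes ``1.0.0-1.1``; rebuilding the same version once more would turn ``1.1`` into ``1.2``. If AUR had published ``1.0.1-1`` instead, the remote version would be used as is, because the package version differs.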
|
||||
|
||||
Core functions reference
|
||||
------------------------
|
||||
|
||||
@ -216,7 +232,7 @@ The package provides several authorization methods: disabled, based on configura
|
||||
|
||||
Disabled (default) authorization provider just allows everything for everyone and does not have any specific configuration (it uses some default configuration parameters though). It also provides generic interface for derived classes.
|
||||
|
||||
Mapping (aka configuration) provider uses hashed passwords with salt from the database in order to authenticate users. This provider also enables user permission checking (read/write) (authorization). Thus, it defines the following methods:
|
||||
Mapping (aka configuration) provider uses hashed passwords with optional salt from the database in order to authenticate users. This provider also enables user permission checking (read/write) (authorization). Thus, it defines the following methods:
|
||||
|
||||
* ``check_credentials`` - user password validation (authentication).
|
||||
* ``verify_access`` - user permission validation (authorization).
|
||||
|
@ -15,7 +15,7 @@ import sys
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from ahriman.version import __version__
|
||||
from ahriman import __version__
|
||||
|
||||
|
||||
basedir = Path(__file__).resolve().parent.parent / "src"
|
||||
@ -42,6 +42,7 @@ release = __version__
|
||||
extensions = [
|
||||
"sphinx.ext.autodoc",
|
||||
"sphinx.ext.napoleon",
|
||||
"sphinx_rtd_theme",
|
||||
"sphinxarg.ext",
|
||||
]
|
||||
|
||||
@ -66,7 +67,7 @@ exclude_patterns = []
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
html_theme = "default" if on_rtd else "alabaster"
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
|
@ -1,7 +1,12 @@
|
||||
Configuration
|
||||
=============
|
||||
|
||||
Some groups can be specified for each architecture separately. E.g. if there are ``build`` and ``build:x86_64`` groups it will use an option from ``build:x86_64`` for the ``x86_64`` architecture and ``build`` for any other (architecture specific group has higher priority). In case if both groups are presented, architecture specific options will be merged into global ones overriding them.
|
||||
Some groups can be specified for each architecture and/or repository separately. E.g. if there are ``build`` and ``build:x86_64`` groups, the option from ``build:x86_64`` will be used for the ``x86_64`` architecture and ``build`` for any other (the architecture specific group has higher priority). In case both groups are present, the architecture specific options will be merged into the global ones, overriding them. The order used for option resolution is the following (see the example after the list):
|
||||
|
||||
1. Repository and architecture specific, e.g. ``build:aur-clone:x86_64``.
|
||||
2. Repository specific, e.g. ``build:aur-clone``.
|
||||
3. Architecture specific, e.g. ``build:x86_64``.
|
||||
4. Default section, e.g. ``build``.
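
As a minimal sketch of the resolution order described above (the option and its values are illustrative only):

.. code-block:: ini

    [build]
    makepkg_flags = --nocolor

    [build:x86_64]
    makepkg_flags = --nocolor --ignorearch

With this configuration every architecture except ``x86_64`` reads ``--nocolor`` from the default ``build`` section, whereas builds for ``x86_64`` pick the value from the more specific ``build:x86_64`` group.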
|
||||
|
||||
There are two variable types which have been added to default ones, they are paths and lists. List values will be read in the same way as shell does:
|
||||
|
||||
@ -12,6 +17,15 @@ There are two variable types which have been added to default ones, they are pat
|
||||
|
||||
Path values, except for casting to ``pathlib.Path`` type, will be also expanded to absolute paths relative to the configuration path. E.g. if path is set to ``ahriman.ini.d/logging.ini`` and root configuration path is ``/etc/ahriman.ini``, the value will be expanded to ``/etc/ahriman.ini.d/logging.ini``. In order to disable path expand, use the full path, e.g. ``/etc/ahriman.ini.d/logging.ini``.
|
||||
|
||||
Configuration allows string interpolation from environment variables, e.g.:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[section]
|
||||
key = $SECRET
|
||||
|
||||
will try to read the value from the ``SECRET`` environment variable. In case the required environment variable wasn't found, the original value will be kept (i.e. ``$SECRET`` in the example). A literal dollar sign can be escaped as ``$$``.
|
||||
|
||||
There is also additional subcommand which will allow to validate configuration and print found errors. In order to do so, run ``service-config-validate`` subcommand, e.g.:
|
||||
|
||||
.. code-block:: shell
|
||||
@ -54,7 +68,7 @@ Base authorization settings. ``OAuth`` provider requires ``aioauth-client`` libr
|
||||
* ``max_age`` - parameter which controls both cookie expiration and token expiration inside the service, integer, optional, default is 7 days.
|
||||
* ``oauth_provider`` - OAuth2 provider class name as is in ``aioauth-client`` (e.g. ``GoogleClient``, ``GithubClient`` etc), string, required in case if ``oauth`` is used.
|
||||
* ``oauth_scopes`` - scopes list for OAuth2 provider, which will allow retrieving user email (which is used for checking user permissions), e.g. ``https://www.googleapis.com/auth/userinfo.email`` for ``GoogleClient`` or ``user:email`` for ``GithubClient``, space separated list of strings, required in case if ``oauth`` is used.
|
||||
* ``salt`` - password hash salt, string, required in case if authorization enabled (automatically generated by ``user-add`` subcommand).
|
||||
* ``salt`` - additional password hash salt, string, optional.
|
||||
|
||||
Authorized users are stored inside the internal database; if any of the external providers are used, the password field for non-service users must be empty.
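
As a minimal sketch, the mapping provider with the optional salt described above can be enabled as follows (the salt value is just an example):

.. code-block:: ini

    [auth]
    target = configuration
    salt = somerandomstring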
|
||||
|
||||
@ -77,7 +91,6 @@ Build related configuration. Group name can refer to architecture, e.g. ``build:
|
||||
|
||||
Base repository settings.
|
||||
|
||||
* ``name`` - repository name, string, required.
|
||||
* ``root`` - root path for application, string, required.
|
||||
|
||||
``sign:*`` groups
|
||||
@ -97,15 +110,19 @@ Web server settings. If any of ``host``/``port`` is not set, web integration wil
|
||||
* ``debug`` - enable debug toolbar, boolean, optional, default ``no``.
|
||||
* ``debug_check_host`` - check hosts to access debug toolbar, boolean, optional, default ``no``.
|
||||
* ``debug_allowed_hosts`` - allowed hosts to get access to debug toolbar, space separated list of string, optional.
|
||||
* ``enable_archive_upload`` - allow to upload packages via HTTP (i.e. call of ``/api/v1/service/upload`` uri), boolean, optional, default ``no``.
|
||||
* ``host`` - host to bind, string, optional.
|
||||
* ``index_url`` - full url of the repository index page, string, optional.
|
||||
* ``max_body_size`` - max body size in bytes to be validated for archive upload, integer, optional. If not set, validation will be disabled.
|
||||
* ``password`` - password to authorize in web service in order to update service status, string, required in case if authorization enabled.
|
||||
* ``port`` - port to bind, int, optional.
|
||||
* ``static_path`` - path to directory with static files, string, required.
|
||||
* ``templates`` - path to templates directory, string, required.
|
||||
* ``timeout`` - HTTP request timeout in seconds, int, optional, default is ``30``.
|
||||
* ``unix_socket`` - path to the listening unix socket, string, optional. If set, server will create the socket on the specified address which can (and will) be used by application. Note, that unlike usual host/port configuration, unix socket allows to perform requests without authorization.
|
||||
* ``unix_socket_unsafe`` - set unsafe (o+w) permissions to unix socket, boolean, optional, default ``yes``. This option is enabled by default, because it is supposed that unix socket is created in safe environment (only web service is supposed to be used in unsafe), but it can be disabled by configuration.
|
||||
* ``username`` - username to authorize in web service in order to update service status, string, required in case if authorization enabled.
|
||||
* ``wait_timeout`` - wait timeout in seconds, maximum amount of time to be waited before lock will be free, int, optional.
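
A minimal sketch which enables the web integration mentioned above (host and port values are illustrative):

.. code-block:: ini

    [web]
    host = 127.0.0.1
    port = 8080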
|
||||
|
||||
``keyring`` group
|
||||
--------------------
|
||||
@ -117,6 +134,7 @@ Keyring package generator plugin.
|
||||
Keyring generator plugin
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
* ``type`` - type of the generator, string, optional, must be set to ``keyring-generator`` if exists.
|
||||
* ``description`` - keyring package description, string, optional, default is ``repo PGP keyring``, where ``repo`` is the repository name.
|
||||
* ``homepage`` - url to homepage location if any, string, optional.
|
||||
* ``license`` - list of licenses which are applied to this package, space separated list of strings, optional, default is ``Unlicense``.
|
||||
@ -135,6 +153,7 @@ Mirrorlist package generator plugin.
|
||||
Mirrorlist generator plugin
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
* ``type`` - type of the generator, string, optional, must be set to ``mirrorlist-generator`` if exists.
|
||||
* ``description`` - mirrorlist package description, string, optional, default is ``repo mirror list for use by pacman``, where ``repo`` is the repository name.
|
||||
* ``homepage`` - url to homepage location if any, string, optional.
|
||||
* ``license`` - list of licenses which are applied to this package, space separated list of strings, optional, default is ``Unlicense``.
|
||||
@ -179,7 +198,8 @@ Available options are:
|
||||
Remote push trigger
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
* ``commit_author`` - git commit author, string, optional. In case if not set, git will generate the author for you. Note, however, that in this case it will disclose your hostname.
|
||||
* ``commit_email`` - git commit email, string, optional, default is ``ahriman@localhost``.
|
||||
* ``commit_user`` - git commit user, string, optional, default is ``ahriman``.
|
||||
* ``push_url`` - url of the remote repository to which PKGBUILDs should be pushed after build process, string, required.
|
||||
* ``push_branch`` - branch of the remote repository to which PKGBUILDs should be pushed after build process, string, optional, default is ``master``.
|
||||
|
||||
@ -234,6 +254,17 @@ Section name must be either ``html`` (plus optional architecture name, e.g. ``ht
|
||||
* ``path`` - path to html report file, string, required.
|
||||
* ``template_path`` - path to Jinja2 template, string, required.
|
||||
|
||||
``remote-call`` type
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Section name must be either ``remote-call`` (plus optional architecture name, e.g. ``remote-call:x86_64``) or random name with ``type`` set.
|
||||
|
||||
* ``type`` - type of the report, string, optional, must be set to ``remote-call`` if exists.
|
||||
* ``aur`` - check for AUR packages updates, boolean, optional, default ``no``.
|
||||
* ``local`` - check for local packages updates, boolean, optional, default ``no``.
|
||||
* ``manual`` - update manually built packages, boolean, optional, default ``no``.
|
||||
* ``wait_timeout`` - maximum amount of time in seconds to be waited before remote process will be terminated, int, optional, default ``-1``.
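
A minimal sketch which enables this report type on a worker node (the same configuration is used in the distributed builds example later in this document):

.. code-block:: ini

    [report]
    target = remote-call

    [remote-call]
    manual = yes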
|
||||
|
||||
``telegram`` type
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
@ -264,20 +295,29 @@ Type will be read from several sources:
|
||||
``github`` type
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
This feature requires Github key creation (see below). Section name must be either ``github`` (plus optional architecture name, e.g. ``github:x86_64``) or random name with ``type`` set.
|
||||
This feature requires GitHub key creation (see below). Section name must be either ``github`` (plus optional architecture name, e.g. ``github:x86_64``) or random name with ``type`` set.
|
||||
|
||||
* ``type`` - type of the upload, string, optional, must be set to ``github`` if exists.
|
||||
* ``owner`` - Github repository owner, string, required.
|
||||
* ``password`` - created Github API key. In order to create it do the following:
|
||||
* ``owner`` - GitHub repository owner, string, required.
|
||||
* ``password`` - created GitHub API key. In order to create it do the following:
|
||||
|
||||
#. Go to `settings page <https://github.com/settings/profile>`_.
|
||||
#. Switch to `developers settings <https://github.com/settings/apps>`_.
|
||||
#. Switch to `personal access tokens <https://github.com/settings/tokens>`_.
|
||||
#. Generate new token. Required scope is ``public_repo`` (or ``repo`` for private repository support).
|
||||
|
||||
* ``repository`` - Github repository name, string, required. Repository must be created before any action and must have active branch (e.g. with readme).
|
||||
* ``repository`` - GitHub repository name, string, required. Repository must be created before any action and must have active branch (e.g. with readme).
|
||||
* ``timeout`` - HTTP request timeout in seconds, int, optional, default is ``30``.
|
||||
* ``use_full_release_name`` - if set to ``yes``, the release will contain both repository name and architecture, and only architecture otherwise, boolean, optional, default ``no``.
|
||||
* ``username`` - GitHub authorization user, string, required. Basically the same as ``owner``.
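
A minimal sketch of this upload type; all values below are placeholders which have to be replaced with your own ones:

.. code-block:: ini

    [upload]
    target = github

    [github]
    owner = github-user
    username = github-user
    repository = repository
    password = ...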
|
||||
|
||||
``remote-service`` type
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Section name must be either ``remote-service`` (plus optional architecture name, e.g. ``remote-service:x86_64``) or random name with ``type`` set.
|
||||
|
||||
* ``type`` - type of the report, string, optional, must be set to ``remote-service`` if exists.
|
||||
* ``timeout`` - HTTP request timeout in seconds, int, optional, default is ``30``.
|
||||
* ``username`` - Github authorization user, string, required. Basically the same as ``owner``.
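
A minimal sketch of this upload type as used by worker nodes (see also the distributed builds example later in this document); here the ``remote-service`` section is left empty, so the default values listed above are used:

.. code-block:: ini

    [upload]
    target = remote-service

    [remote-service]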
|
||||
|
||||
``rsync`` type
|
||||
^^^^^^^^^^^^^^
|
||||
@ -293,7 +333,7 @@ Requires ``rsync`` package to be installed. Do not forget to configure ssh for u
|
||||
|
||||
Requires ``boto3`` library to be installed. Section name must be either ``s3`` (plus optional architecture name, e.g. ``s3:x86_64``) or random name with ``type`` set.
|
||||
|
||||
* ``type`` - type of the upload, string, optional, must be set to ``github`` if exists.
|
||||
* ``type`` - type of the upload, string, optional, must be set to ``s3`` if exists.
|
||||
* ``access_key`` - AWS access key ID, string, required.
|
||||
* ``bucket`` - bucket name (e.g. ``bucket``), string, required.
|
||||
* ``chunk_size`` - chunk size for calculating entity tags, int, optional, default 8 * 1024 * 1024.
|
||||
|
235
docs/faq.rst
235
docs/faq.rst
@ -17,7 +17,7 @@ TL;DR
|
||||
.. code-block:: shell
|
||||
|
||||
yay -S ahriman
|
||||
ahriman -a x86_64 service-setup --packager "ahriman bot <ahriman@example.com>" --repository "repository"
|
||||
ahriman -a x86_64 -r aur-clone service-setup --packager "ahriman bot <ahriman@example.com>"
|
||||
systemctl enable --now ahriman@x86_64.timer
|
||||
|
||||
Long answer
|
||||
@ -32,7 +32,7 @@ There is special command which can be used in order to validate current configur
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
ahriman -a x86_64 service-config-validate --exit-code
|
||||
ahriman -a x86_64 -r aur-clone service-config-validate --exit-code
|
||||
|
||||
This command will print found errors, based on `cerberus <https://docs.python-cerberus.org/>`_, e.g.:
|
||||
|
||||
@ -305,7 +305,7 @@ TL;DR
|
||||
|
||||
sudo -u ahriman ahriman repo-rebuild --depends-on python
|
||||
|
||||
You can even rebuild the whole repository (which is particular useful in case if you would like to change packager) if you do not supply ``--depends-on`` option.
|
||||
You can even rebuild the whole repository (which is particularly useful in case if you would like to change packager) if you do not supply ``--depends-on`` option. This action will automatically increment ``pkgrel`` value; in case you don't want to, the ``--no-increment`` option has to be supplied.
|
||||
|
||||
However, note that you do not need to rebuild repository in case if you just changed signing option, just use ``repo-sign`` command instead.
|
||||
|
||||
@ -317,7 +317,7 @@ Add the following lines to your ``pacman.conf``:
|
||||
.. code-block:: ini
|
||||
|
||||
[repository]
|
||||
Server = file:///var/lib/ahriman/repository/x86_64
|
||||
Server = file:///var/lib/ahriman/repository/$repo/$arch
|
||||
|
||||
(You might need to add ``SigLevel`` option according to the pacman documentation.)
|
||||
|
||||
@ -396,6 +396,7 @@ The following environment variables are supported:
|
||||
* ``AHRIMAN_PACMAN_MIRROR`` - override pacman mirror server if set.
|
||||
* ``AHRIMAN_PORT`` - HTTP server port if any, default is empty.
|
||||
* ``AHRIMAN_REPOSITORY`` - repository name, default is ``aur-clone``.
|
||||
* ``AHRIMAN_REPOSITORY_SERVER`` - optional override for the repository url. Useful if you would like to download packages from remote instead of local filesystem.
|
||||
* ``AHRIMAN_REPOSITORY_ROOT`` - repository root. Because of filesystem rights it is required to override default repository root. By default, it uses ``ahriman`` directory inside ahriman's home, which can be passed as mount volume.
|
||||
* ``AHRIMAN_UNIX_SOCKET`` - full path to unix socket which is used by web server, default is empty. Note that more likely you would like to put it inside ``AHRIMAN_REPOSITORY_ROOT`` directory (e.g. ``/var/lib/ahriman/ahriman/ahriman-web.sock``) or to ``/tmp``.
|
||||
* ``AHRIMAN_USER`` - ahriman user, usually must not be overwritten, default is ``ahriman``.
|
||||
@ -553,8 +554,8 @@ There are several choices:
|
||||
.. code-block::
|
||||
|
||||
server {
|
||||
location /x86_64 {
|
||||
root /var/lib/ahriman/repository/x86_64;
|
||||
location /aur-clone/x86_64 {
|
||||
root /var/lib/ahriman/repository/aur-clone/x86_64;
|
||||
autoindex on;
|
||||
}
|
||||
}
|
||||
@ -570,7 +571,7 @@ There are several choices:
|
||||
[rsync]
|
||||
remote = 192.168.0.1:/srv/repo
|
||||
|
||||
After that just add ``/srv/repo`` to the ``pacman.conf`` as usual. You can also upload to S3 (e.g. ``Server = https://s3.eu-central-1.amazonaws.com/repository/x86_64``) or to Github (e.g. ``Server = https://github.com/ahriman/repository/releases/download/x86_64``).
|
||||
After that just add ``/srv/repo`` to the ``pacman.conf`` as usual. You can also upload to S3 (e.g. ``Server = https://s3.eu-central-1.amazonaws.com/repository/aur-clone/x86_64``) or to Github (e.g. ``Server = https://github.com/ahriman/repository/releases/download/aur-clone-x86_64``).
|
||||
|
||||
How to sync to S3
|
||||
^^^^^^^^^^^^^^^^^
|
||||
@ -675,7 +676,7 @@ How to report by email
|
||||
|
||||
[email]
|
||||
host = smtp.example.com
|
||||
link_path = http://example.com/x86_64
|
||||
link_path = http://example.com/aur-clone/x86_64
|
||||
password = ...
|
||||
port = 465
|
||||
receivers = me@example.com
|
||||
@ -701,8 +702,8 @@ How to generate index page for S3
|
||||
target = html
|
||||
|
||||
[html]
|
||||
path = /var/lib/ahriman/repository/x86_64/index.html
|
||||
link_path = http://example.com/x86_64
|
||||
path = /var/lib/ahriman/repository/aur-clone/x86_64/index.html
|
||||
link_path = http://example.com/aur-clone/x86_64
|
||||
|
||||
After these steps ``index.html`` file will be automatically synced to S3
|
||||
|
||||
@ -722,8 +723,7 @@ How to post build report to telegram
|
||||
#.
|
||||
Optionally (if you want to post message in chat):
|
||||
|
||||
|
||||
#. Create telegram channel.
|
||||
#. Create telegram channel.
|
||||
#. Invite your bot into the channel.
|
||||
#. Make your channel public
|
||||
|
||||
@ -741,7 +741,7 @@ How to post build report to telegram
|
||||
[telegram]
|
||||
api_key = aaAAbbBBccCC
|
||||
chat_id = @ahriman
|
||||
link_path = http://example.com/x86_64
|
||||
link_path = http://example.com/aur-clone/x86_64
|
||||
|
||||
``api_key`` is the one sent by `@BotFather <https://t.me/botfather>`_, ``chat_id`` is the value retrieved from previous step.
|
||||
|
||||
@ -753,6 +753,203 @@ If you did everything fine you should receive the message with the next update.
|
||||
|
||||
(replace ``${CHAT_ID}`` and ``${API_KEY}`` with the values from configuration).
|
||||
|
||||
Distributed builds
|
||||
------------------
|
||||
|
||||
The service allows running builds on multiple machines and collecting packages on the main node. There are multiple ways to achieve it; this section describes the officially supported methods.
|
||||
|
||||
Remote synchronization and remote server call
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
This setup requires at least two instances of the service:
|
||||
|
||||
#. Web service (with opt-in authorization enabled), later will be referenced as ``master`` node.
|
||||
#. Application instances responsible for build, later will be referenced as ``worker`` nodes.
|
||||
|
||||
In this example the following settings are assumed:
|
||||
|
||||
* Repository architecture is ``x86_64``.
|
||||
* Master node address is ``master.example.com``.
|
||||
|
||||
Master node configuration
|
||||
"""""""""""""""""""""""""
|
||||
|
||||
The only requirement for the master node is that its API must be available for worker nodes to call (e.g. the port must be exposed to the internet, or to the local network in case of VPN, etc) and file upload must be enabled:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[web]
|
||||
enable_archive_upload = yes
|
||||
|
||||
In addition, the following settings are recommended for the master node:
|
||||
|
||||
*
|
||||
As it has been mentioned above, it is recommended to enable authentication (see `How to enable basic authorization`_) and create system user which will be used later. Later this user (if any) will be referenced as ``worker-user``.
|
||||
|
||||
*
|
||||
In order to be able to spawn multiple processes at the same time, wait timeout must be configured:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[web]
|
||||
wait_timeout = 0
|
||||
|
||||
Worker nodes configuration
|
||||
""""""""""""""""""""""""""
|
||||
|
||||
#.
|
||||
First of all, in this setup you need to split your repository into chunks manually, e.g. if you have repository on master node with packages ``A``, ``B`` and ``C``, you need to split them between all available workers, as example:
|
||||
|
||||
* Worker #1: ``A``.
|
||||
* Worker #2: ``B`` and ``C``.
|
||||
|
||||
#.
|
||||
Each worker must be configured to upload files to master node:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[upload]
|
||||
target = remote-service
|
||||
|
||||
[remote-service]
|
||||
|
||||
#.
|
||||
Worker must be configured to access web on master node:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[web]
|
||||
address = master.example.com
|
||||
username = worker-user
|
||||
password = very-secure-password
|
||||
|
||||
As it has been mentioned above, ``web.address`` must be available for workers. In case if unix socket is used, it can be passed as ``web.unix_socket`` variable as usual. Optional ``web.username``/``web.password`` can be supplied in case if authentication was enabled on master node.
|
||||
|
||||
#.
|
||||
Each worker must call master node on success:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[report]
|
||||
target = remote-call
|
||||
|
||||
[remote-call]
|
||||
manual = yes
|
||||
|
||||
After successful synchronization (see above), the built packages will be put into the directory from which they will be read during the manual update, thus the ``remote-call.manual`` flag is required.
|
||||
|
||||
#.
|
||||
Change order of trigger runs. This step is required, because by default the report trigger is called before the upload trigger and we would like to achieve the opposite:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[build]
|
||||
triggers = ahriman.core.gitremote.RemotePullTrigger ahriman.core.upload.UploadTrigger ahriman.core.report.ReportTrigger ahriman.core.gitremote.RemotePushTrigger
|
||||
|
||||
In addition, the following settings are recommended for workers:
|
||||
|
||||
*
|
||||
You might want to wait until the report trigger is completed; in this case the following option must be set:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[remote-call]
|
||||
wait_timeout = 0
|
||||
|
||||
Dependency management
|
||||
"""""""""""""""""""""
|
||||
|
||||
By default worker nodes don't know anything about the master node's packages, thus they will try to build each dependency on their own. However, using the ``AHRIMAN_REPOSITORY_SERVER`` docker variable (or the ``--server`` flag for the setup command), it is possible to specify the address of the master node for the devtools configuration.
|
||||
|
||||
Repository and packages signing
|
||||
"""""""""""""""""""""""""""""""
|
||||
|
||||
You can sign packages on worker nodes and then signatures will be synced to master node. In order to do so, you need to configure worker node as following, e.g.:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[sign]
|
||||
target = package
|
||||
key = 8BE91E5A773FB48AC05CC1EDBED105AED6246B39
|
||||
|
||||
Note, however, that in this case, signatures will not be validated on master node and just will be copied to repository tree.
|
||||
|
||||
If you would like to sign only database files (aka repository sign), it has to be configured on master node only as usual, e.g.:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[sign]
|
||||
target = repository
|
||||
key = 8BE91E5A773FB48AC05CC1EDBED105AED6246B39
|
||||
|
||||
Double node minimal docker example
|
||||
""""""""""""""""""""""""""""""""""
|
||||
|
||||
Master node config (``master.ini``) as:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[auth]
|
||||
target = mapping
|
||||
|
||||
[web]
|
||||
enable_archive_upload = yes
|
||||
wait_timeout = 0
|
||||
|
||||
|
||||
Command to run master node:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
docker run --privileged -p 8080:8080 -e AHRIMAN_PORT=8080 -v master.ini:/etc/ahriman.ini.d/overrides.ini arcan1s/ahriman:latest web
|
||||
|
||||
The user ``worker-user`` has been created additionally. Worker node config (``worker.ini``) as:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[web]
|
||||
address = http://172.17.0.1:8080
|
||||
username = worker-user
|
||||
password = very-secure-password
|
||||
|
||||
[upload]
|
||||
target = remote-service
|
||||
|
||||
[remote-service]
|
||||
|
||||
[report]
|
||||
target = remote-call
|
||||
|
||||
[remote-call]
|
||||
manual = yes
|
||||
wait_timeout = 0
|
||||
|
||||
[build]
|
||||
triggers = ahriman.core.gitremote.RemotePullTrigger ahriman.core.upload.UploadTrigger ahriman.core.report.ReportTrigger ahriman.core.gitremote.RemotePushTrigger
|
||||
|
||||
The address above (``http://172.17.0.1:8080``) is something available for worker container.
|
||||
|
||||
Command to run worker node:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
docker run --privileged -v worker.ini:/etc/ahriman.ini.d/overrides.ini -it arcan1s/ahriman:latest package-add ahriman --now
|
||||
|
||||
The command above will successfully build ``ahriman`` package, upload it on master node and, finally, will update master node repository.
|
||||
|
||||
Addition of new package and repository update
|
||||
"""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
Just run the command on the worker as usual; the built packages will be automatically uploaded to the master node. Note that the automatic update process must be disabled on the master node.
|
||||
|
||||
Package removal
|
||||
"""""""""""""""
|
||||
|
||||
This action must be done in two steps:
|
||||
|
||||
#. Remove package on worker.
|
||||
#. Remove package on master node.
|
||||
|
||||
Maintenance packages
|
||||
--------------------
|
||||
|
||||
@ -767,7 +964,7 @@ The application provides special plugin which generates keyring package. This pl
|
||||
.. code-block:: ini
|
||||
|
||||
[keyring]
|
||||
target = keyring_generator
|
||||
target = keyring-generator
|
||||
|
||||
By default it will use ``sign.key`` as trusted key and all other keys as packagers ones. For all available options refer to :doc:`configuration <configuration>`.
|
||||
|
||||
@ -802,12 +999,12 @@ The application provides special plugin which generates mirrorlist package also.
|
||||
.. code-block:: ini
|
||||
|
||||
[mirrorlist]
|
||||
target = mirrorlist_generator
|
||||
target = mirrorlist-generator
|
||||
|
||||
[mirrorlist_generator]
|
||||
[mirrorlist-generator]
|
||||
servers = https://repo.example.com/$arch
|
||||
|
||||
The ``mirrorlist_generator.servers`` must contain list of available mirrors, the ``$arch`` and ``$repo`` variables are supported. For more options kindly refer to :doc:`configuration <configuration>`.
|
||||
The ``mirrorlist-generator.servers`` must contain list of available mirrors, the ``$arch`` and ``$repo`` variables are supported. For more options kindly refer to :doc:`configuration <configuration>`.
|
||||
|
||||
#.
|
||||
Create package source files:
|
||||
@ -870,6 +1067,8 @@ How to enable basic authorization
|
||||
target = configuration
|
||||
salt = somerandomstring
|
||||
|
||||
The ``salt`` parameter is optional, but recommended.
|
||||
|
||||
#.
|
||||
In order to provide access for reporting from application instances you can (recommended way) use unix sockets by configuring the following (note, that it requires ``python-requests-unixsocket`` package to be installed):
|
||||
|
||||
@ -934,7 +1133,7 @@ How to enable OAuth authorization
|
||||
Configure ``oauth_provider`` and ``oauth_scopes`` in case if you would like to use different from Google provider. Scope must grant access to user email. ``web.address`` is required to make callback URL available from internet.
|
||||
|
||||
#.
|
||||
If you are not going to use unix socket, you also need to create service user (remember to set ``auth.salt`` option before):
|
||||
If you are not going to use unix socket, you also need to create service user (remember to set ``auth.salt`` option before if required):
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
|
@ -12,6 +12,7 @@ Features
|
||||
* VCS packages support.
|
||||
* Official repository support.
|
||||
* Ability to patch AUR packages and even create package from local PKGBUILDs.
|
||||
* Various rebuild options with ability to automatically bump package version.
|
||||
* Sign support with gpg (repository, package), multiple packagers support.
|
||||
* Triggers for repository updates, e.g. synchronization to remote services (rsync, s3 and github) and report generation (email, html, telegram).
|
||||
* Repository status interface with optional authorization and control options.
|
||||
|
@ -10,7 +10,7 @@ Initial setup
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
sudo ahriman -a x86_64 service-setup ...
|
||||
sudo ahriman -a x86_64 -r aur-clone service-setup ...
|
||||
|
||||
``service-setup`` literally does the following steps:
|
||||
|
||||
@ -29,26 +29,26 @@ Initial setup
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
ln -s /usr/bin/archbuild /usr/local/bin/ahriman-x86_64-build
|
||||
ln -s /usr/bin/archbuild /usr/local/bin/aur-clone-x86_64-build
|
||||
|
||||
#.
|
||||
Create configuration file (same as previous ``{name}.conf``):
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
cp /usr/share/devtools/pacman.conf.d/{extra,ahriman}.conf
|
||||
cp /usr/share/devtools/pacman.conf.d/{extra,aur-clone}.conf
|
||||
|
||||
#.
|
||||
Change configuration file, add your own repository, add multilib repository etc:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
echo '[multilib]' | tee -a /usr/share/devtools/pacman-ahriman.conf
|
||||
echo 'Include = /etc/pacman.d/mirrorlist' | tee -a /usr/share/devtools/pacman.conf.d/ahriman.conf
|
||||
echo '[multilib]' | tee -a /usr/share/devtools/pacman.conf.d/aur-clone-x86_64.conf
|
||||
echo 'Include = /etc/pacman.d/mirrorlist' | tee -a /usr/share/devtools/pacman.conf.d/aur-clone-x86_64.conf
|
||||
|
||||
echo '[aur-clone]' | tee -a /usr/share/devtools/pacman-ahriman.conf
|
||||
echo 'SigLevel = Optional TrustAll' | tee -a /usr/share/devtools/pacman.conf.d/ahriman.conf
|
||||
echo 'Server = file:///var/lib/ahriman/repository/$arch' | tee -a /usr/share/devtools/pacman.conf.d/ahriman.conf
|
||||
echo '[aur-clone]' | tee -a /usr/share/devtools/pacman.conf.d/aur-clone-x86_64.conf
|
||||
echo 'SigLevel = Optional TrustAll' | tee -a /usr/share/devtools/pacman.conf.d/aur-clone-x86_64.conf
|
||||
echo 'Server = file:///var/lib/ahriman/repository/$repo/$arch' | tee -a /usr/share/devtools/pacman.conf.d/aur-clone-x86_64.conf
|
||||
|
||||
#.
|
||||
Set ``build_command`` option to point to your command:
|
||||
@ -56,14 +56,14 @@ Initial setup
|
||||
.. code-block:: shell
|
||||
|
||||
echo '[build]' | tee -a /etc/ahriman.ini.d/build.ini
|
||||
echo 'build_command = ahriman-x86_64-build' | tee -a /etc/ahriman.ini.d/build.ini
|
||||
echo 'build_command = aur-clone-x86_64-build' | tee -a /etc/ahriman.ini.d/build.ini
|
||||
|
||||
#.
|
||||
Configure ``/etc/sudoers.d/ahriman`` to allow running command without a password:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
echo 'Cmnd_Alias CARCHBUILD_CMD = /usr/local/bin/ahriman-x86_64-build *' | tee -a /etc/sudoers.d/ahriman
|
||||
echo 'Cmnd_Alias CARCHBUILD_CMD = /usr/local/bin/aur-clone-x86_64-build *' | tee -a /etc/sudoers.d/ahriman
|
||||
echo 'ahriman ALL=(ALL) NOPASSWD:SETENV: CARCHBUILD_CMD' | tee -a /etc/sudoers.d/ahriman
|
||||
chmod 400 /etc/sudoers.d/ahriman
|
||||
|
||||
@ -88,6 +88,6 @@ Initial setup
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
sudo -u ahriman ahriman -a x86_64 package-add ahriman --now --refresh
|
||||
sudo -u ahriman ahriman package-add ahriman --now --refresh
|
||||
|
||||
The ``--refresh`` flag is required in order to handle local database update.
|
||||
|
@ -1,14 +1,14 @@
|
||||
# Maintainer: Evgeniy Alekseev
|
||||
|
||||
pkgname='ahriman'
|
||||
pkgver=2.10.2
|
||||
pkgver=2.11.0
|
||||
pkgrel=1
|
||||
pkgdesc="ArcH linux ReposItory MANager"
|
||||
arch=('any')
|
||||
url="https://github.com/arcan1s/ahriman"
|
||||
license=('GPL3')
|
||||
depends=('devtools>=1:1.0.0' 'git' 'pyalpm' 'python-cerberus' 'python-inflection' 'python-passlib' 'python-requests' 'python-srcinfo')
|
||||
makedepends=('python-build' 'python-installer' 'python-wheel')
|
||||
makedepends=('python-build' 'python-flit' 'python-installer' 'python-wheel')
|
||||
optdepends=('breezy: -bzr packages support'
|
||||
'darcs: -darcs packages support'
|
||||
'mercurial: -hg packages support'
|
||||
@ -45,8 +45,10 @@ package() {
|
||||
|
||||
python -m installer --destdir="$pkgdir" "dist/$pkgname-$pkgver-py3-none-any.whl"
|
||||
|
||||
# python-installer actually thinks that you cannot just copy files to root
|
||||
# thus we need to copy them manually
|
||||
# thanks to PEP517, which we all wanted, you need to install data files manually nowadays
|
||||
pushd package && find . -type f -exec install -Dm644 "{}" "$pkgdir/usr/{}" \; && popd
|
||||
|
||||
# keep usr/share configs as reference and copy them to /etc
|
||||
install -Dm644 "$pkgdir/usr/share/$pkgname/settings/ahriman.ini" "$pkgdir/etc/ahriman.ini"
|
||||
install -Dm644 "$pkgdir/usr/share/$pkgname/settings/ahriman.ini.d/logging.ini" "$pkgdir/etc/ahriman.ini.d/logging.ini"
|
||||
|
||||
|
@ -1,3 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
exec python -B -m ahriman.application.ahriman "$@"
|
@ -20,7 +20,6 @@ allow_read_only = yes
|
||||
|
||||
[build]
|
||||
archbuild_flags =
|
||||
build_command = extra-x86_64-build
|
||||
ignore_packages =
|
||||
makechrootpkg_flags =
|
||||
makepkg_flags = --nocolor --ignorearch
|
||||
@ -29,7 +28,6 @@ triggers_known = ahriman.core.gitremote.RemotePullTrigger ahriman.core.gitremote
|
||||
vcs_allowed_age = 604800
|
||||
|
||||
[repository]
|
||||
name = aur-clone
|
||||
root = /var/lib/ahriman
|
||||
|
||||
[sign]
|
||||
|
@ -112,7 +112,7 @@
|
||||
|
||||
const payload = response.map(description => {
|
||||
const package_base = description.package.base;
|
||||
const web_url = description.package.remote?.web_url;
|
||||
const web_url = description.package.remote.web_url;
|
||||
return {
|
||||
id: package_base,
|
||||
base: web_url ? `<a href="${safe(web_url)}" title="${safe(package_base)}">${safe(package_base)}</a>` : safe(package_base),
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
_shtab_ahriman_subparsers=('aur-search' 'search' 'help' 'help-commands-unsafe' 'help-updates' 'help-version' 'version' 'package-add' 'add' 'package-update' 'package-remove' 'remove' 'package-status' 'status' 'package-status-remove' 'package-status-update' 'status-update' 'patch-add' 'patch-list' 'patch-remove' 'patch-set-add' 'repo-backup' 'repo-check' 'check' 'repo-create-keyring' 'repo-create-mirrorlist' 'repo-daemon' 'daemon' 'repo-rebuild' 'rebuild' 'repo-remove-unknown' 'remove-unknown' 'repo-report' 'report' 'repo-restore' 'repo-sign' 'sign' 'repo-status-update' 'repo-sync' 'sync' 'repo-tree' 'repo-triggers' 'repo-update' 'update' 'service-clean' 'clean' 'repo-clean' 'service-config' 'config' 'repo-config' 'service-config-validate' 'config-validate' 'repo-config-validate' 'service-key-import' 'key-import' 'service-setup' 'init' 'repo-init' 'repo-setup' 'setup' 'service-shell' 'shell' 'user-add' 'user-list' 'user-remove' 'web')
|
||||
|
||||
_shtab_ahriman_option_strings=('-h' '--help' '-a' '--architecture' '-c' '--configuration' '--force' '-l' '--lock' '--log-handler' '--report' '--no-report' '-q' '--quiet' '--unsafe' '-V' '--version')
|
||||
_shtab_ahriman_option_strings=('-h' '--help' '-a' '--architecture' '-c' '--configuration' '--force' '-l' '--lock' '--log-handler' '--report' '--no-report' '-q' '--quiet' '--unsafe' '--wait-timeout' '-V' '--version')
|
||||
_shtab_ahriman_aur_search_option_strings=('-h' '--help' '-e' '--exit-code' '--info' '--no-info' '--sort-by')
|
||||
_shtab_ahriman_search_option_strings=('-h' '--help' '-e' '--exit-code' '--info' '--no-info' '--sort-by')
|
||||
_shtab_ahriman_help_option_strings=('-h' '--help')
|
||||
@ -10,9 +10,9 @@ _shtab_ahriman_help_commands_unsafe_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_help_updates_option_strings=('-h' '--help' '-e' '--exit-code')
|
||||
_shtab_ahriman_help_version_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_version_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_package_add_option_strings=('-h' '--help' '--dependencies' '--no-dependencies' '-e' '--exit-code' '-n' '--now' '-y' '--refresh' '-s' '--source' '-u' '--username')
|
||||
_shtab_ahriman_add_option_strings=('-h' '--help' '--dependencies' '--no-dependencies' '-e' '--exit-code' '-n' '--now' '-y' '--refresh' '-s' '--source' '-u' '--username')
|
||||
_shtab_ahriman_package_update_option_strings=('-h' '--help' '--dependencies' '--no-dependencies' '-e' '--exit-code' '-n' '--now' '-y' '--refresh' '-s' '--source' '-u' '--username')
|
||||
_shtab_ahriman_package_add_option_strings=('-h' '--help' '--dependencies' '--no-dependencies' '-e' '--exit-code' '--increment' '--no-increment' '-n' '--now' '-y' '--refresh' '-s' '--source' '-u' '--username')
|
||||
_shtab_ahriman_add_option_strings=('-h' '--help' '--dependencies' '--no-dependencies' '-e' '--exit-code' '--increment' '--no-increment' '-n' '--now' '-y' '--refresh' '-s' '--source' '-u' '--username')
|
||||
_shtab_ahriman_package_update_option_strings=('-h' '--help' '--dependencies' '--no-dependencies' '-e' '--exit-code' '--increment' '--no-increment' '-n' '--now' '-y' '--refresh' '-s' '--source' '-u' '--username')
|
||||
_shtab_ahriman_package_remove_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_remove_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_package_status_option_strings=('-h' '--help' '--ahriman' '-e' '--exit-code' '--info' '--no-info' '-s' '--status')
|
||||
@ -31,8 +31,8 @@ _shtab_ahriman_repo_create_keyring_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_create_mirrorlist_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_daemon_option_strings=('-h' '--help' '-i' '--interval' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--local' '--no-local' '--manual' '--no-manual' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_repo_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '-e' '--exit-code' '-s' '--status' '-u' '--username')
|
||||
_shtab_ahriman_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '-e' '--exit-code' '-s' '--status' '-u' '--username')
|
||||
_shtab_ahriman_repo_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '--increment' '--no-increment' '-e' '--exit-code' '-s' '--status' '-u' '--username')
|
||||
_shtab_ahriman_rebuild_option_strings=('-h' '--help' '--depends-on' '--dry-run' '--from-database' '--increment' '--no-increment' '-e' '--exit-code' '-s' '--status' '-u' '--username')
|
||||
_shtab_ahriman_repo_remove_unknown_option_strings=('-h' '--help' '--dry-run')
|
||||
_shtab_ahriman_remove_unknown_option_strings=('-h' '--help' '--dry-run')
|
||||
_shtab_ahriman_repo_report_option_strings=('-h' '--help')
|
||||
@ -43,10 +43,10 @@ _shtab_ahriman_sign_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_status_update_option_strings=('-h' '--help' '-s' '--status')
|
||||
_shtab_ahriman_repo_sync_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_sync_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_tree_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_tree_option_strings=('-h' '--help' '-p' '--partitions')
|
||||
_shtab_ahriman_repo_triggers_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_repo_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_service_clean_option_strings=('-h' '--help' '--cache' '--no-cache' '--chroot' '--no-chroot' '--manual' '--no-manual' '--packages' '--no-packages' '--pacman' '--no-pacman')
|
||||
_shtab_ahriman_clean_option_strings=('-h' '--help' '--cache' '--no-cache' '--chroot' '--no-chroot' '--manual' '--no-manual' '--packages' '--no-packages' '--pacman' '--no-pacman')
|
||||
_shtab_ahriman_repo_clean_option_strings=('-h' '--help' '--cache' '--no-cache' '--chroot' '--no-chroot' '--manual' '--no-manual' '--packages' '--no-packages' '--pacman' '--no-pacman')
|
||||
@ -58,11 +58,11 @@ _shtab_ahriman_config_validate_option_strings=('-h' '--help' '-e' '--exit-code')
|
||||
_shtab_ahriman_repo_config_validate_option_strings=('-h' '--help' '-e' '--exit-code')
|
||||
_shtab_ahriman_service_key_import_option_strings=('-h' '--help' '--key-server')
|
||||
_shtab_ahriman_key_import_option_strings=('-h' '--help' '--key-server')
|
||||
_shtab_ahriman_service_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_repo_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_repo_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_service_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_repo_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_repo_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_service_shell_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_shell_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_user_add_option_strings=('-h' '--help' '--key' '--packager' '-p' '--password' '-r' '--role')
|
||||
@ -151,6 +151,8 @@ _shtab_ahriman_package_add___dependencies_nargs=0
|
||||
_shtab_ahriman_package_add___no_dependencies_nargs=0
|
||||
_shtab_ahriman_package_add__e_nargs=0
|
||||
_shtab_ahriman_package_add___exit_code_nargs=0
|
||||
_shtab_ahriman_package_add___increment_nargs=0
|
||||
_shtab_ahriman_package_add___no_increment_nargs=0
|
||||
_shtab_ahriman_package_add__n_nargs=0
|
||||
_shtab_ahriman_package_add___now_nargs=0
|
||||
_shtab_ahriman_package_add__y_nargs=0
|
||||
@ -162,6 +164,8 @@ _shtab_ahriman_add___dependencies_nargs=0
|
||||
_shtab_ahriman_add___no_dependencies_nargs=0
|
||||
_shtab_ahriman_add__e_nargs=0
|
||||
_shtab_ahriman_add___exit_code_nargs=0
|
||||
_shtab_ahriman_add___increment_nargs=0
|
||||
_shtab_ahriman_add___no_increment_nargs=0
|
||||
_shtab_ahriman_add__n_nargs=0
|
||||
_shtab_ahriman_add___now_nargs=0
|
||||
_shtab_ahriman_add__y_nargs=0
|
||||
@ -173,6 +177,8 @@ _shtab_ahriman_package_update___dependencies_nargs=0
|
||||
_shtab_ahriman_package_update___no_dependencies_nargs=0
|
||||
_shtab_ahriman_package_update__e_nargs=0
|
||||
_shtab_ahriman_package_update___exit_code_nargs=0
|
||||
_shtab_ahriman_package_update___increment_nargs=0
|
||||
_shtab_ahriman_package_update___no_increment_nargs=0
|
||||
_shtab_ahriman_package_update__n_nargs=0
|
||||
_shtab_ahriman_package_update___now_nargs=0
|
||||
_shtab_ahriman_package_update__y_nargs=0
|
||||
@ -274,12 +280,16 @@ _shtab_ahriman_repo_rebuild__h_nargs=0
|
||||
_shtab_ahriman_repo_rebuild___help_nargs=0
|
||||
_shtab_ahriman_repo_rebuild___dry_run_nargs=0
|
||||
_shtab_ahriman_repo_rebuild___from_database_nargs=0
|
||||
_shtab_ahriman_repo_rebuild___increment_nargs=0
|
||||
_shtab_ahriman_repo_rebuild___no_increment_nargs=0
|
||||
_shtab_ahriman_repo_rebuild__e_nargs=0
|
||||
_shtab_ahriman_repo_rebuild___exit_code_nargs=0
|
||||
_shtab_ahriman_rebuild__h_nargs=0
|
||||
_shtab_ahriman_rebuild___help_nargs=0
|
||||
_shtab_ahriman_rebuild___dry_run_nargs=0
|
||||
_shtab_ahriman_rebuild___from_database_nargs=0
|
||||
_shtab_ahriman_rebuild___increment_nargs=0
|
||||
_shtab_ahriman_rebuild___no_increment_nargs=0
|
||||
_shtab_ahriman_rebuild__e_nargs=0
|
||||
_shtab_ahriman_rebuild___exit_code_nargs=0
|
||||
_shtab_ahriman_repo_remove_unknown__h_nargs=0
|
||||
@ -321,6 +331,8 @@ _shtab_ahriman_repo_update___no_dependencies_nargs=0
|
||||
_shtab_ahriman_repo_update___dry_run_nargs=0
|
||||
_shtab_ahriman_repo_update__e_nargs=0
|
||||
_shtab_ahriman_repo_update___exit_code_nargs=0
|
||||
_shtab_ahriman_repo_update___increment_nargs=0
|
||||
_shtab_ahriman_repo_update___no_increment_nargs=0
|
||||
_shtab_ahriman_repo_update___local_nargs=0
|
||||
_shtab_ahriman_repo_update___no_local_nargs=0
|
||||
_shtab_ahriman_repo_update___manual_nargs=0
|
||||
@ -339,6 +351,8 @@ _shtab_ahriman_update___no_dependencies_nargs=0
|
||||
_shtab_ahriman_update___dry_run_nargs=0
|
||||
_shtab_ahriman_update__e_nargs=0
|
||||
_shtab_ahriman_update___exit_code_nargs=0
|
||||
_shtab_ahriman_update___increment_nargs=0
|
||||
_shtab_ahriman_update___no_increment_nargs=0
|
||||
_shtab_ahriman_update___local_nargs=0
|
||||
_shtab_ahriman_update___no_local_nargs=0
|
||||
_shtab_ahriman_update___manual_nargs=0
|
@ -1,9 +1,9 @@
|
||||
.TH AHRIMAN "1" "2023\-08\-07" "ahriman" "Generated Python Manual"
|
||||
.TH AHRIMAN "1" "2023\-08\-26" "ahriman" "Generated Python Manual"
|
||||
.SH NAME
|
||||
ahriman
|
||||
.SH SYNOPSIS
|
||||
.B ahriman
|
||||
[-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--log-handler {console,syslog,journald}] [--report | --no-report] [-q] [--unsafe] [-V] {aur-search,search,help,help-commands-unsafe,help-updates,help-version,version,package-add,add,package-update,package-remove,remove,package-status,status,package-status-remove,package-status-update,status-update,patch-add,patch-list,patch-remove,patch-set-add,repo-backup,repo-check,check,repo-create-keyring,repo-create-mirrorlist,repo-daemon,daemon,repo-rebuild,rebuild,repo-remove-unknown,remove-unknown,repo-report,report,repo-restore,repo-sign,sign,repo-status-update,repo-sync,sync,repo-tree,repo-triggers,repo-update,update,service-clean,clean,repo-clean,service-config,config,repo-config,service-config-validate,config-validate,repo-config-validate,service-key-import,key-import,service-setup,init,repo-init,repo-setup,setup,service-shell,shell,user-add,user-list,user-remove,web} ...
|
||||
[-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--log-handler {console,syslog,journald}] [--report | --no-report] [-q] [--unsafe] [--wait-timeout WAIT_TIMEOUT] [-V] {aur-search,search,help,help-commands-unsafe,help-updates,help-version,version,package-add,add,package-update,package-remove,remove,package-status,status,package-status-remove,package-status-update,status-update,patch-add,patch-list,patch-remove,patch-set-add,repo-backup,repo-check,check,repo-create-keyring,repo-create-mirrorlist,repo-daemon,daemon,repo-rebuild,rebuild,repo-remove-unknown,remove-unknown,repo-report,report,repo-restore,repo-sign,sign,repo-status-update,repo-sync,sync,repo-tree,repo-triggers,repo-update,update,service-clean,clean,repo-clean,service-config,config,repo-config,service-config-validate,config-validate,repo-config-validate,service-key-import,key-import,service-setup,init,repo-init,repo-setup,setup,service-shell,shell,user-add,user-list,user-remove,web} ...
|
||||
.SH DESCRIPTION
|
||||
ArcH linux ReposItory MANager
|
||||
|
||||
@ -40,6 +40,11 @@ force disable any logging
|
||||
\fB\-\-unsafe\fR
|
||||
allow to run ahriman as non\-ahriman user. Some actions might be unavailable
|
||||
|
||||
.TP
|
||||
\fB\-\-wait\-timeout\fR \fI\,WAIT_TIMEOUT\/\fR
|
||||
wait for lock to be free. Negative value will lead to immediate application run even if there is lock file. In case of
|
||||
zero value, the application will wait infinitely
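The three cases described above can be read as the following illustrative sketch; this is a simplified model of the lock waiting behaviour rather than the actual Lock implementation, and lock_exists is an assumed callable:

import time

def wait_for_lock(lock_exists, wait_timeout: int, interval: float = 1.0) -> bool:
    # negative timeout: do not wait at all, run immediately even if the lock file exists
    if wait_timeout < 0:
        return True
    started = time.monotonic()
    while lock_exists():
        # positive timeout: give up once the timeout has expired
        if wait_timeout > 0 and time.monotonic() - started > wait_timeout:
            return False
        # zero timeout: keep polling until the lock is released
        time.sleep(interval)
    return True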
|
||||
|
||||
.TP
|
||||
\fB\-V\fR, \fB\-\-version\fR
|
||||
show program's version number and exit
|
||||
@ -224,7 +229,7 @@ usage: ahriman help\-version [\-h]
|
||||
print application and its dependencies versions
|
||||
|
||||
.SH COMMAND \fI\,'ahriman package\-add'\/\fR
|
||||
usage: ahriman package\-add [\-h] [\-\-dependencies | \-\-no\-dependencies] [\-e] [\-n] [\-y]
|
||||
usage: ahriman package\-add [\-h] [\-\-dependencies | \-\-no\-dependencies] [\-e] [\-\-increment | \-\-no\-increment] [\-n] [\-y]
|
||||
[\-s {auto,archive,aur,directory,local,remote,repository}] [\-u USERNAME]
|
||||
package [package ...]
|
||||
|
||||
@ -243,6 +248,10 @@ process missing package dependencies
|
||||
\fB\-e\fR, \fB\-\-exit\-code\fR
|
||||
return non\-zero exit status if result is empty
|
||||
|
||||
.TP
|
||||
\fB\-\-increment\fR, \fB\-\-no\-increment\fR
|
||||
increment package release (pkgrel) version on duplicate
|
||||
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-now\fR
|
||||
run update function after
|
||||
@ -459,8 +468,8 @@ fetch actual version of VCS packages
|
||||
download fresh package databases from the mirror before actions, \-yy to force refresh even if up to date
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-rebuild'\/\fR
|
||||
usage: ahriman repo\-rebuild [\-h] [\-\-depends\-on DEPENDS_ON] [\-\-dry\-run] [\-\-from\-database] [\-e]
|
||||
[\-s {unknown,pending,building,failed,success}] [\-u USERNAME]
|
||||
usage: ahriman repo\-rebuild [\-h] [\-\-depends\-on DEPENDS_ON] [\-\-dry\-run] [\-\-from\-database] [\-\-increment | \-\-no\-increment]
|
||||
[\-e] [\-s {unknown,pending,building,failed,success}] [\-u USERNAME]
|
||||
|
||||
force rebuild whole repository
|
||||
|
||||
@ -479,6 +488,10 @@ read packages from database instead of filesystem. This feature in particular is
|
||||
restore repository from another repository instance. Note, however, that in order to restore packages you need to have
|
||||
original ahriman instance run with web service and have run repo\-update at least once.
|
||||
|
||||
.TP
|
||||
\fB\-\-increment\fR, \fB\-\-no\-increment\fR
|
||||
increment package release (pkgrel) on duplicate
|
||||
|
||||
.TP
|
||||
\fB\-e\fR, \fB\-\-exit\-code\fR
|
||||
return non\-zero exit status if result is empty
|
||||
@ -545,10 +558,15 @@ usage: ahriman repo\-sync [\-h]
|
||||
sync repository files to remote server according to current settings
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-tree'\/\fR
|
||||
usage: ahriman repo\-tree [\-h]
|
||||
usage: ahriman repo\-tree [\-h] [\-p PARTITIONS]
|
||||
|
||||
dump repository tree based on packages dependencies
|
||||
|
||||
.SH OPTIONS \fI\,'ahriman repo\-tree'\/\fR
|
||||
.TP
|
||||
\fB\-p\fR \fI\,PARTITIONS\/\fR, \fB\-\-partitions\fR \fI\,PARTITIONS\/\fR
|
||||
also divide packages by independent partitions
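In other words, packages which depend on each other stay in the same partition, and independent groups are spread over at most PARTITIONS buckets. The sketch below only illustrates that idea with assumed inputs; ahriman's actual logic lives in its tree handling and may differ:

def partition(depends: dict[str, set[str]], count: int) -> list[set[str]]:
    # union-find over the dependency graph: dependent packages end up in the same group
    parent = {base: base for base in depends}

    def find(base: str) -> str:
        while parent[base] != base:
            parent[base] = parent[parent[base]]
            base = parent[base]
        return base

    for base, requires in depends.items():
        for dependency in requires & depends.keys():
            parent[find(base)] = find(dependency)

    groups: dict[str, set[str]] = {}
    for base in depends:
        groups.setdefault(find(base), set()).add(base)

    # distribute the independent groups over the requested number of partitions
    partitions: list[set[str]] = [set() for _ in range(count)]
    for index, group in enumerate(groups.values()):
        partitions[index % count] |= group
    return [part for part in partitions if part]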
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-triggers'\/\fR
|
||||
usage: ahriman repo\-triggers [\-h] [trigger ...]
|
||||
|
||||
@ -560,7 +578,8 @@ instead of running all triggers as set by configuration, just process specified
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-update'\/\fR
|
||||
usage: ahriman repo\-update [\-h] [\-\-aur | \-\-no\-aur] [\-\-dependencies | \-\-no\-dependencies] [\-\-dry\-run] [\-e]
|
||||
[\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual] [\-u USERNAME] [\-\-vcs | \-\-no\-vcs] [\-y]
|
||||
[\-\-increment | \-\-no\-increment] [\-\-local | \-\-no\-local] [\-\-manual | \-\-no\-manual] [\-u USERNAME]
|
||||
[\-\-vcs | \-\-no\-vcs] [\-y]
|
||||
[package ...]
|
||||
|
||||
check for packages updates and run build process if requested
|
||||
@ -586,6 +605,10 @@ just perform check for updates, same as check command
|
||||
\fB\-e\fR, \fB\-\-exit\-code\fR
|
||||
return non\-zero exit status if result is empty
|
||||
|
||||
.TP
|
||||
\fB\-\-increment\fR, \fB\-\-no\-increment\fR
|
||||
increment package release (pkgrel) on duplicate
|
||||
|
||||
.TP
|
||||
\fB\-\-local\fR, \fB\-\-no\-local\fR
|
||||
enable or disable checking of local packages for updates
|
||||
@ -671,7 +694,7 @@ key server for key import
|
||||
usage: ahriman service\-setup [\-h] [\-\-build\-as\-user BUILD_AS_USER] [\-\-build\-command BUILD_COMMAND]
|
||||
[\-\-from\-configuration FROM_CONFIGURATION] [\-\-generate\-salt | \-\-no\-generate\-salt]
|
||||
[\-\-makeflags\-jobs | \-\-no\-makeflags\-jobs] [\-\-mirror MIRROR] [\-\-multilib | \-\-no\-multilib]
|
||||
\-\-packager PACKAGER \-\-repository REPOSITORY [\-\-sign\-key SIGN_KEY]
|
||||
\-\-packager PACKAGER \-\-repository REPOSITORY [\-\-server SERVER] [\-\-sign\-key SIGN_KEY]
|
||||
[\-\-sign\-target {disabled,packages,repository}] [\-\-web\-port WEB_PORT]
|
||||
[\-\-web\-unix\-socket WEB_UNIX_SOCKET]
|
||||
|
||||
@ -714,6 +737,10 @@ packager name and email
|
||||
\fB\-\-repository\fR \fI\,REPOSITORY\/\fR
|
||||
repository name
|
||||
|
||||
.TP
|
||||
\fB\-\-server\fR \fI\,SERVER\/\fR
|
||||
server to be used for devtools. If none set, local files will be used
|
||||
|
||||
.TP
|
||||
\fB\-\-sign\-key\fR \fI\,SIGN_KEY\/\fR
|
||||
sign key id
|
@ -85,6 +85,7 @@ _shtab_ahriman_options=(
|
||||
{--report,--no-report}"[force enable or disable reporting to web service (default\: True)]:report:"
|
||||
{-q,--quiet}"[force disable any logging (default\: False)]"
|
||||
"--unsafe[allow to run ahriman as non-ahriman user. Some actions might be unavailable (default\: False)]"
|
||||
"--wait-timeout[wait for lock to be free. Negative value will lead to immediate application run even if there is lock file. In case of zero value, the application will wait infinitely (default\: -1)]:wait_timeout:"
|
||||
"(- : *)"{-V,--version}"[show program\'s version number and exit]"
|
||||
)
|
||||
|
||||
@ -92,6 +93,7 @@ _shtab_ahriman_add_options=(
|
||||
"(- : *)"{-h,--help}"[show this help message and exit]"
|
||||
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: True)]:dependencies:"
|
||||
{-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
|
||||
{--increment,--no-increment}"[increment package release (pkgrel) version on duplicate (default\: True)]:increment:"
|
||||
{-n,--now}"[run update function after (default\: False)]"
|
||||
"*"{-y,--refresh}"[download fresh package databases from the mirror before actions, -yy to force refresh even if up to date (default\: False)]"
|
||||
{-s,--source}"[explicitly specify the package source for this command (default\: PackageSource.Auto)]:source:(auto archive aur directory local remote repository)"
|
||||
@ -175,6 +177,7 @@ _shtab_ahriman_init_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
@ -191,6 +194,7 @@ _shtab_ahriman_package_add_options=(
|
||||
"(- : *)"{-h,--help}"[show this help message and exit]"
|
||||
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: True)]:dependencies:"
|
||||
{-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
|
||||
{--increment,--no-increment}"[increment package release (pkgrel) version on duplicate (default\: True)]:increment:"
|
||||
{-n,--now}"[run update function after (default\: False)]"
|
||||
"*"{-y,--refresh}"[download fresh package databases from the mirror before actions, -yy to force refresh even if up to date (default\: False)]"
|
||||
{-s,--source}"[explicitly specify the package source for this command (default\: PackageSource.Auto)]:source:(auto archive aur directory local remote repository)"
|
||||
@ -227,6 +231,7 @@ _shtab_ahriman_package_update_options=(
|
||||
"(- : *)"{-h,--help}"[show this help message and exit]"
|
||||
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: True)]:dependencies:"
|
||||
{-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
|
||||
{--increment,--no-increment}"[increment package release (pkgrel) version on duplicate (default\: True)]:increment:"
|
||||
{-n,--now}"[run update function after (default\: False)]"
|
||||
"*"{-y,--refresh}"[download fresh package databases from the mirror before actions, -yy to force refresh even if up to date (default\: False)]"
|
||||
{-s,--source}"[explicitly specify the package source for this command (default\: PackageSource.Auto)]:source:(auto archive aur directory local remote repository)"
|
||||
@ -265,6 +270,7 @@ _shtab_ahriman_rebuild_options=(
|
||||
"*--depends-on[only rebuild packages that depend on specified packages (default\: None)]:depends_on:"
|
||||
"--dry-run[just perform check for packages without rebuild process itself (default\: False)]"
|
||||
"--from-database[read packages from database instead of filesystem. This feature in particular is required in case if you would like to restore repository from another repository instance. Note, however, that in order to restore packages you need to have original ahriman instance run with web service and have run repo-update at least once. (default\: False)]"
|
||||
{--increment,--no-increment}"[increment package release (pkgrel) on duplicate (default\: True)]:increment:"
|
||||
{-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
|
||||
{-s,--status}"[filter packages by status. Requires --from-database to be set (default\: None)]:status:(unknown pending building failed success)"
|
||||
{-u,--username}"[build as user (default\: None)]:username:"
|
||||
@ -342,6 +348,7 @@ _shtab_ahriman_repo_init_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
@ -353,6 +360,7 @@ _shtab_ahriman_repo_rebuild_options=(
|
||||
"*--depends-on[only rebuild packages that depend on specified packages (default\: None)]:depends_on:"
|
||||
"--dry-run[just perform check for packages without rebuild process itself (default\: False)]"
|
||||
"--from-database[read packages from database instead of filesystem. This feature in particular is required in case if you would like to restore repository from another repository instance. Note, however, that in order to restore packages you need to have original ahriman instance run with web service and have run repo-update at least once. (default\: False)]"
|
||||
{--increment,--no-increment}"[increment package release (pkgrel) on duplicate (default\: True)]:increment:"
|
||||
{-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
|
||||
{-s,--status}"[filter packages by status. Requires --from-database to be set (default\: None)]:status:(unknown pending building failed success)"
|
||||
{-u,--username}"[build as user (default\: None)]:username:"
|
||||
@ -384,6 +392,7 @@ _shtab_ahriman_repo_setup_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
@ -406,6 +415,7 @@ _shtab_ahriman_repo_sync_options=(
|
||||
|
||||
_shtab_ahriman_repo_tree_options=(
"(- : *)"{-h,--help}"[show this help message and exit]"
{-p,--partitions}"[also divide packages by independent partitions (default\: 1)]:partitions:"
)
|
||||
|
||||
_shtab_ahriman_repo_triggers_options=(
|
||||
@ -419,6 +429,7 @@ _shtab_ahriman_repo_update_options=(
|
||||
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: True)]:dependencies:"
|
||||
"--dry-run[just perform check for updates, same as check command (default\: False)]"
|
||||
{-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
|
||||
{--increment,--no-increment}"[increment package release (pkgrel) on duplicate (default\: True)]:increment:"
|
||||
{--local,--no-local}"[enable or disable checking of local packages for updates (default\: True)]:local:"
|
||||
{--manual,--no-manual}"[include or exclude manual updates (default\: True)]:manual:"
|
||||
{-u,--username}"[build as user (default\: None)]:username:"
|
||||
@ -475,6 +486,7 @@ _shtab_ahriman_service_setup_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
@ -497,6 +509,7 @@ _shtab_ahriman_setup_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
@ -538,6 +551,7 @@ _shtab_ahriman_update_options=(
|
||||
{--dependencies,--no-dependencies}"[process missing package dependencies (default\: True)]:dependencies:"
|
||||
"--dry-run[just perform check for updates, same as check command (default\: False)]"
|
||||
{-e,--exit-code}"[return non-zero exit status if result is empty (default\: False)]"
|
||||
{--increment,--no-increment}"[increment package release (pkgrel) on duplicate (default\: True)]:increment:"
|
||||
{--local,--no-local}"[enable or disable checking of local packages for updates (default\: True)]:local:"
|
||||
{--manual,--no-manual}"[include or exclude manual updates (default\: True)]:manual:"
|
||||
{-u,--username}"[build as user (default\: None)]:username:"
|
84
pyproject.toml
Normal file
@ -0,0 +1,84 @@
|
||||
[build-system]
|
||||
requires = ["flit_core"]
|
||||
build-backend = "flit_core.buildapi"
|
||||
|
||||
[project]
|
||||
name = "ahriman"
|
||||
|
||||
description = "ArcH linux ReposItory MANager"
|
||||
readme = "README.md"
|
||||
|
||||
requires-python = ">=3.11"
|
||||
|
||||
license = {file = "COPYING"}
|
||||
authors = [
|
||||
{name = "ahriman team"},
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
"cerberus",
|
||||
"inflection",
|
||||
"passlib",
|
||||
"requests",
|
||||
"srcinfo",
|
||||
]
|
||||
|
||||
dynamic = ["version"]
|
||||
|
||||
[project.urls]
|
||||
Documentation = "https://ahriman.readthedocs.io/"
|
||||
Repository = "https://github.com/arcan1s/ahriman"
|
||||
Changelog = "https://github.com/arcan1s/ahriman/releases"
|
||||
|
||||
[project.scripts]
|
||||
ahriman = "ahriman.application.ahriman:run"
|
||||
|
||||
[project.optional-dependencies]
|
||||
check = [
|
||||
"autopep8",
|
||||
"bandit",
|
||||
"mypy",
|
||||
"pylint",
|
||||
]
|
||||
docs = [
|
||||
"Sphinx",
|
||||
"argparse-manpage",
|
||||
"pydeps",
|
||||
"shtab",
|
||||
"sphinx-argparse",
|
||||
"sphinx-rtd-theme>=1.1.1", # https://stackoverflow.com/a/74355734
|
||||
]
|
||||
journald = [
|
||||
"systemd-python",
|
||||
]
|
||||
# FIXME technically this dependency is required, but in some cases we do not have access to
|
||||
# the libalpm which is required in order to install the package. Thus in case if we do not
|
||||
# really need to run the application we can move it to "optional" dependencies
|
||||
pacman = [
|
||||
"pyalpm",
|
||||
]
|
||||
s3 = [
|
||||
"boto3",
|
||||
]
|
||||
tests = [
|
||||
"pytest",
|
||||
"pytest-aiohttp",
|
||||
"pytest-cov",
|
||||
"pytest-helpers-namespace",
|
||||
"pytest-mock",
|
||||
"pytest-resource-path",
|
||||
"pytest-spec",
|
||||
]
|
||||
web = [
|
||||
"Jinja2",
|
||||
"aioauth-client",
|
||||
"aiohttp",
|
||||
"aiohttp-apispec",
|
||||
"aiohttp_cors",
|
||||
"aiohttp_jinja2",
|
||||
"aiohttp_debugtoolbar",
|
||||
"aiohttp_session",
|
||||
"aiohttp_security",
|
||||
"cryptography",
|
||||
"requests-unixsocket", # required by unix socket support
|
||||
]
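Since the packaging metadata is now declared here, the same information is available from the installed distribution at runtime; a minimal sketch, assuming the package is installed in the current environment:

from importlib import metadata

print(metadata.version("ahriman"))                             # for example "2.11.0"
print(metadata.metadata("ahriman").get_all("Provides-Extra"))  # ["check", "docs", "journald", ...]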
|
158
setup.py
@ -1,158 +0,0 @@
|
||||
from pathlib import Path
|
||||
from setuptools import find_packages, setup
|
||||
from typing import Any
|
||||
|
||||
|
||||
metadata_path = Path(__file__).resolve().parent / "src/ahriman/version.py"
|
||||
metadata: dict[str, Any] = {}
|
||||
with metadata_path.open() as metadata_file:
|
||||
exec(metadata_file.read(), metadata) # pylint: disable=exec-used
|
||||
|
||||
|
||||
setup(
|
||||
name="ahriman",
|
||||
|
||||
version=metadata["__version__"],
|
||||
zip_safe=False,
|
||||
|
||||
description="ArcH linux ReposItory MANager",
|
||||
|
||||
author="ahriman team",
|
||||
author_email="",
|
||||
url="https://github.com/arcan1s/ahriman",
|
||||
|
||||
license="GPL3",
|
||||
|
||||
packages=find_packages("src"),
|
||||
package_dir={"": "src"},
|
||||
package_data={"": ["py.typed"]},
|
||||
|
||||
dependency_links=[
|
||||
],
|
||||
install_requires=[
|
||||
"cerberus",
|
||||
"inflection",
|
||||
"passlib",
|
||||
"requests",
|
||||
"srcinfo",
|
||||
],
|
||||
setup_requires=[
|
||||
],
|
||||
tests_require=[
|
||||
"pytest",
|
||||
"pytest-aiohttp",
|
||||
"pytest-cov",
|
||||
"pytest-helpers-namespace",
|
||||
"pytest-mock",
|
||||
"pytest-spec",
|
||||
"pytest-resource-path",
|
||||
],
|
||||
|
||||
include_package_data=True,
|
||||
scripts=[
|
||||
"package/bin/ahriman",
|
||||
],
|
||||
data_files=[
|
||||
# configuration
|
||||
("share/ahriman/settings", [
|
||||
"package/share/ahriman/settings/ahriman.ini",
|
||||
]),
|
||||
("share/ahriman/settings/ahriman.ini.d", [
|
||||
"package/share/ahriman/settings/ahriman.ini.d/logging.ini",
|
||||
]),
|
||||
# systemd files
|
||||
("lib/systemd/system", [
|
||||
"package/lib/systemd/system/ahriman@.service",
|
||||
"package/lib/systemd/system/ahriman@.timer",
|
||||
"package/lib/systemd/system/ahriman-web@.service",
|
||||
]),
|
||||
# templates
|
||||
("share/ahriman/templates", [
|
||||
"package/share/ahriman/templates/api.jinja2",
|
||||
"package/share/ahriman/templates/build-status.jinja2",
|
||||
"package/share/ahriman/templates/email-index.jinja2",
|
||||
"package/share/ahriman/templates/error.jinja2",
|
||||
"package/share/ahriman/templates/repo-index.jinja2",
|
||||
"package/share/ahriman/templates/shell",
|
||||
"package/share/ahriman/templates/telegram-index.jinja2",
|
||||
]),
|
||||
("share/ahriman/templates/build-status", [
|
||||
"package/share/ahriman/templates/build-status/alerts.jinja2",
|
||||
"package/share/ahriman/templates/build-status/key-import-modal.jinja2",
|
||||
"package/share/ahriman/templates/build-status/login-modal.jinja2",
|
||||
"package/share/ahriman/templates/build-status/package-add-modal.jinja2",
|
||||
"package/share/ahriman/templates/build-status/package-info-modal.jinja2",
|
||||
"package/share/ahriman/templates/build-status/package-rebuild-modal.jinja2",
|
||||
"package/share/ahriman/templates/build-status/table.jinja2",
|
||||
]),
|
||||
("share/ahriman/templates/static", [
|
||||
"package/share/ahriman/templates/static/favicon.ico",
|
||||
]),
|
||||
("share/ahriman/templates/utils", [
|
||||
"package/share/ahriman/templates/utils/bootstrap-scripts.jinja2",
|
||||
"package/share/ahriman/templates/utils/style.jinja2",
|
||||
]),
|
||||
# man pages
|
||||
("share/man/man1", [
|
||||
"docs/ahriman.1",
|
||||
]),
|
||||
# shell completions
|
||||
("share/bash-completion/completions", [
|
||||
"docs/completions/bash/_ahriman",
|
||||
]),
|
||||
("share/zsh/site-functions", [
|
||||
"docs/completions/zsh/_ahriman",
|
||||
]),
|
||||
],
|
||||
|
||||
extras_require={
|
||||
"check": [
|
||||
"autopep8",
|
||||
"bandit",
|
||||
"mypy",
|
||||
"pylint",
|
||||
],
|
||||
"docs": [
|
||||
"Sphinx",
|
||||
"argparse-manpage",
|
||||
"pydeps",
|
||||
"shtab",
|
||||
"sphinx-argparse",
|
||||
"sphinx-rtd-theme>=1.1.1", # https://stackoverflow.com/a/74355734
|
||||
],
|
||||
"journald": [
|
||||
"systemd-python",
|
||||
],
|
||||
# FIXME technically this dependency is required, but in some cases we do not have access to
|
||||
# the libalpm which is required in order to install the package. Thus in case if we do not
|
||||
# really need to run the application we can move it to "optional" dependencies
|
||||
"pacman": [
|
||||
"pyalpm",
|
||||
],
|
||||
"s3": [
|
||||
"boto3",
|
||||
],
|
||||
"tests": [
|
||||
"pytest",
|
||||
"pytest-aiohttp",
|
||||
"pytest-cov",
|
||||
"pytest-helpers-namespace",
|
||||
"pytest-mock",
|
||||
"pytest-resource-path",
|
||||
"pytest-spec",
|
||||
],
|
||||
"web": [
|
||||
"Jinja2",
|
||||
"aioauth-client",
|
||||
"aiohttp",
|
||||
"aiohttp-apispec",
|
||||
"aiohttp_cors",
|
||||
"aiohttp_jinja2",
|
||||
"aiohttp_debugtoolbar",
|
||||
"aiohttp_session",
|
||||
"aiohttp_security",
|
||||
"cryptography",
|
||||
"requests-unixsocket", # required by unix socket support
|
||||
],
|
||||
},
|
||||
)
|
@ -17,3 +17,4 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__version__ = "2.11.0"
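The file name is not visible in this excerpt, but given the import change below (from ahriman import __version__), the constant is apparently exposed at the package root; trivial usage:

from ahriman import __version__

print(__version__)  # "2.11.0"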
@ -19,13 +19,12 @@
|
||||
#
|
||||
# pylint: disable=too-many-lines
|
||||
import argparse
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from pathlib import Path
|
||||
from typing import TypeVar
|
||||
|
||||
from ahriman import version
|
||||
from ahriman import __version__
|
||||
from ahriman.application import handlers
|
||||
from ahriman.core.util import enum_values, extract_user
|
||||
from ahriman.models.action import Action
|
||||
@ -82,10 +81,16 @@ def _parser() -> argparse.ArgumentParser:
|
||||
type=LogHandler, choices=enum_values(LogHandler))
|
||||
parser.add_argument("--report", help="force enable or disable reporting to web service",
|
||||
action=argparse.BooleanOptionalAction, default=True)
|
||||
parser.add_argument("-r", "--repository", help="target repository. For several subcommands it can be used "
|
||||
"multiple times", action="append")
|
||||
parser.add_argument("-q", "--quiet", help="force disable any logging", action="store_true")
|
||||
parser.add_argument("--unsafe", help="allow to run ahriman as non-ahriman user. Some actions might be unavailable",
|
||||
action="store_true")
|
||||
parser.add_argument("-V", "--version", action="version", version=version.__version__)
|
||||
parser.add_argument("--wait-timeout", help="wait for lock to be free. Negative value will lead to "
|
||||
"immediate application run even if there is lock file. "
|
||||
"In case of zero value, the application will wait infinitely",
|
||||
type=int, default=-1)
|
||||
parser.add_argument("-V", "--version", action="version", version=__version__)
|
||||
|
||||
subparsers = parser.add_subparsers(title="command", help="command to run", dest="command", required=True)
|
||||
|
||||
@ -256,6 +261,8 @@ def _set_package_add_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
parser.add_argument("--dependencies", help="process missing package dependencies",
|
||||
action=argparse.BooleanOptionalAction, default=True)
|
||||
parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
|
||||
parser.add_argument("--increment", help="increment package release (pkgrel) version on duplicate",
|
||||
action=argparse.BooleanOptionalAction, default=True)
|
||||
parser.add_argument("-n", "--now", help="run update function after", action="store_true")
|
||||
parser.add_argument("-y", "--refresh", help="download fresh package databases from the mirror before actions, "
|
||||
"-yy to force refresh even if up to date",
|
||||
@ -577,6 +584,8 @@ def _set_repo_rebuild_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
"instance. Note, however, that in order to restore packages you need to have original "
|
||||
"ahriman instance run with web service and have run repo-update at least once.",
|
||||
action="store_true")
|
||||
parser.add_argument("--increment", help="increment package release (pkgrel) on duplicate",
|
||||
action=argparse.BooleanOptionalAction, default=True)
|
||||
parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
|
||||
parser.add_argument("-s", "--status", help="filter packages by status. Requires --from-database to be set",
|
||||
type=BuildStatusEnum, choices=enum_values(BuildStatusEnum))
|
||||
@ -708,6 +717,8 @@ def _set_repo_tree_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
parser = root.add_parser("repo-tree", help="dump repository tree",
|
||||
description="dump repository tree based on packages dependencies",
|
||||
formatter_class=_formatter)
|
||||
parser.add_argument("-p", "--partitions", help="also divide packages by independent partitions",
|
||||
type=int, default=1)
|
||||
parser.set_defaults(handler=handlers.Structure, lock=None, report=False, quiet=True, unsafe=True)
|
||||
return parser
|
||||
|
||||
@ -751,6 +762,8 @@ def _set_repo_update_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
action=argparse.BooleanOptionalAction, default=True)
|
||||
parser.add_argument("--dry-run", help="just perform check for updates, same as check command", action="store_true")
|
||||
parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
|
||||
parser.add_argument("--increment", help="increment package release (pkgrel) on duplicate",
|
||||
action=argparse.BooleanOptionalAction, default=True)
|
||||
parser.add_argument("--local", help="enable or disable checking of local packages for updates",
|
||||
action=argparse.BooleanOptionalAction, default=True)
|
||||
parser.add_argument("--manual", help="include or exclude manual updates",
|
||||
@ -872,7 +885,6 @@ def _set_service_setup_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
epilog="Create _minimal_ configuration for the service according to provided options.",
|
||||
formatter_class=_formatter)
|
||||
parser.add_argument("--build-as-user", help="force makepkg user to the specific one")
|
||||
parser.add_argument("--build-command", help="build command prefix", default="ahriman")
|
||||
parser.add_argument("--from-configuration", help="path to default devtools pacman configuration",
|
||||
type=Path, default=Path("/usr") / "share" / "devtools" / "pacman.conf.d" / "extra.conf")
|
||||
parser.add_argument("--generate-salt", help="generate salt for user passwords",
|
||||
@ -883,7 +895,7 @@ def _set_service_setup_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
parser.add_argument("--multilib", help="add or do not multilib repository",
|
||||
action=argparse.BooleanOptionalAction, default=True)
|
||||
parser.add_argument("--packager", help="packager name and email", required=True)
|
||||
parser.add_argument("--repository", help="repository name", required=True)
|
||||
parser.add_argument("--server", help="server to be used for devtools. If none set, local files will be used")
|
||||
parser.add_argument("--sign-key", help="sign key id")
|
||||
parser.add_argument("--sign-target", help="sign options", action="append",
|
||||
type=SignSettings.from_option, choices=enum_values(SignSettings))
|
||||
@ -931,7 +943,7 @@ def _set_user_add_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
"`Name Surname <mail@example.com>`")
|
||||
parser.add_argument("-p", "--password", help="user password. Blank password will be treated as empty password, "
|
||||
"which is in particular must be used for OAuth2 authorization type.")
|
||||
parser.add_argument("-r", "--role", help="user access level",
|
||||
parser.add_argument("-R", "--role", help="user access level",
|
||||
type=UserAccess, choices=enum_values(UserAccess), default=UserAccess.Read)
|
||||
parser.set_defaults(handler=handlers.Users, action=Action.Update, architecture=[""], lock=None, report=False,
|
||||
quiet=True)
|
||||
@ -953,7 +965,7 @@ def _set_user_list_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
formatter_class=_formatter)
|
||||
parser.add_argument("username", help="filter users by username", nargs="?")
|
||||
parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
|
||||
parser.add_argument("-r", "--role", help="filter users by role", type=UserAccess, choices=enum_values(UserAccess))
|
||||
parser.add_argument("-R", "--role", help="filter users by role", type=UserAccess, choices=enum_values(UserAccess))
|
||||
parser.set_defaults(handler=handlers.Users, action=Action.List, architecture=[""], lock=None, report=False,
|
||||
quiet=True, unsafe=True)
|
||||
return parser
|
||||
@ -994,18 +1006,15 @@ def _set_web_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
return parser
|
||||
|
||||
|
||||
def run() -> None:
|
||||
def run() -> int:
|
||||
"""
|
||||
run application instance
|
||||
|
||||
Returns:
|
||||
int: application status code
|
||||
"""
|
||||
if __name__ == "__main__":
|
||||
args_parser = _parser()
|
||||
args = args_parser.parse_args()
|
||||
args_parser = _parser()
|
||||
args = args_parser.parse_args()
|
||||
|
||||
handler: handlers.Handler = args.handler
|
||||
status = handler.execute(args)
|
||||
|
||||
sys.exit(status)
|
||||
|
||||
|
||||
run()
|
||||
handler: handlers.Handler = args.handler
|
||||
return handler.execute(args)
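run() now returns the status instead of calling sys.exit() itself, so the console script entry point declared in pyproject.toml (ahriman = "ahriman.application.ahriman:run") turns the returned integer into the process exit code. A manual invocation would look roughly like this:

import sys

from ahriman.application.ahriman import run

if __name__ == "__main__":
    sys.exit(run())  # entry point wrappers perform essentially the same sys.exit(run()) call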
|
||||
|
@ -37,9 +37,10 @@ class Application(ApplicationPackages, ApplicationRepository):
|
||||
|
||||
>>> from ahriman.core.configuration import Configuration
|
||||
>>> from ahriman.models.package_source import PackageSource
|
||||
>>> from ahriman.models.repository_id import RepositoryId
|
||||
>>>
|
||||
>>> configuration = Configuration()
|
||||
>>> application = Application("x86_64", configuration, report=True)
|
||||
>>> application = Application(RepositoryId("x86_64", "x86_64"), configuration, report=True)
|
||||
>>> # add packages to build queue
|
||||
>>> application.add(["ahriman"], PackageSource.AUR)
|
||||
>>>
|
||||
@ -142,8 +143,13 @@ class Application(ApplicationPackages, ApplicationRepository):
|
||||
|
||||
while missing := missing_dependencies(with_dependencies.values()):
|
||||
for package_name, username in missing.items():
|
||||
package = Package.from_aur(package_name, self.repository.pacman, username)
|
||||
if (source_dir := self.repository.paths.cache_for(package_name)).is_dir():
|
||||
# there is local cache, load package from it
|
||||
package = Package.from_build(source_dir, self.repository.architecture, username)
|
||||
else:
|
||||
package = Package.from_aur(package_name, self.repository.pacman, username)
|
||||
with_dependencies[package.base] = package
|
||||
|
||||
# register package in local database
|
||||
self.database.remote_update(package)
|
||||
self.repository.reporter.set_unknown(package)
|
||||
|
@ -73,6 +73,9 @@ class ApplicationPackages(ApplicationProperties):
|
||||
|
||||
Args:
|
||||
source(str): path to local directory
|
||||
|
||||
Raises:
|
||||
UnknownPackageError: if specified package is unknown or doesn't exist
|
||||
"""
|
||||
local_dir = Path(source)
|
||||
if not local_dir.is_dir():
|
||||
@ -95,7 +98,7 @@ class ApplicationPackages(ApplicationProperties):
|
||||
if (source_dir := Path(source)).is_dir():
|
||||
package = Package.from_build(source_dir, self.architecture, username)
|
||||
cache_dir = self.repository.paths.cache_for(package.base)
|
||||
shutil.copytree(source_dir, cache_dir) # copy package to store in caches
|
||||
shutil.copytree(source_dir, cache_dir, dirs_exist_ok=True) # copy package to store in caches
|
||||
Sources.init(cache_dir) # we need to run init command in directory where we do have permissions
|
||||
elif (source_dir := self.repository.paths.cache_for(source)).is_dir():
|
||||
package = Package.from_build(source_dir, self.architecture, username)
|
||||
@ -110,8 +113,10 @@ class ApplicationPackages(ApplicationProperties):
|
||||
|
||||
Args:
|
||||
source(str): remote URL of the package archive
|
||||
|
||||
Raises:
|
||||
UnknownPackageError: if specified package is unknown or doesn't exist
|
||||
"""
|
||||
dst = self.repository.paths.packages / Path(source).name # URL is path, is not it?
|
||||
# timeout=None to suppress pylint warns. Also suppress bandit warnings
|
||||
try:
|
||||
response = requests.get(source, stream=True, timeout=None) # nosec
|
||||
@ -119,6 +124,7 @@ class ApplicationPackages(ApplicationProperties):
|
||||
except Exception:
|
||||
raise UnknownPackageError(source)
|
||||
|
||||
dst = self.repository.paths.packages / Path(source).name # URL is path, is not it?
|
||||
with dst.open("wb") as local_file:
|
||||
for chunk in response.iter_content(chunk_size=1024):
|
||||
local_file.write(chunk)
|
||||
@ -145,7 +151,7 @@ class ApplicationPackages(ApplicationProperties):
|
||||
username(str | None, optional): optional override of username for build process (Default value = None)
|
||||
"""
|
||||
for name in names:
|
||||
resolved_source = source.resolve(name)
|
||||
resolved_source = source.resolve(name, self.repository.paths)
|
||||
fn = getattr(self, f"_add_{resolved_source.value}")
|
||||
fn(name, username)
|
||||
|
||||
|
@ -22,6 +22,7 @@ from ahriman.core.database import SQLite
|
||||
from ahriman.core.log import LazyLogging
|
||||
from ahriman.core.repository import Repository
|
||||
from ahriman.models.pacman_synchronization import PacmanSynchronization
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class ApplicationProperties(LazyLogging):
|
||||
@ -29,26 +30,36 @@ class ApplicationProperties(LazyLogging):
|
||||
application base properties class
|
||||
|
||||
Attributes:
|
||||
architecture(str): repository architecture
|
||||
configuration(Configuration): configuration instance
|
||||
database(SQLite): database instance
|
||||
repository(Repository): repository instance
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
"""
|
||||
|
||||
def __init__(self, architecture: str, configuration: Configuration, *, report: bool,
|
||||
def __init__(self, repository_id: RepositoryId, configuration: Configuration, *, report: bool,
|
||||
refresh_pacman_database: PacmanSynchronization = PacmanSynchronization.Disabled) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
refresh_pacman_database(PacmanSynchronization, optional): pacman database synchronization level
|
||||
(Default value = PacmanSynchronization.Disabled)
|
||||
"""
|
||||
self.configuration = configuration
|
||||
self.architecture = architecture
|
||||
self.repository_id = repository_id
|
||||
self.database = SQLite.load(configuration)
|
||||
self.repository = Repository.load(architecture, configuration, self.database, report=report,
|
||||
self.repository = Repository.load(repository_id, configuration, self.database, report=report,
|
||||
refresh_pacman_database=refresh_pacman_database)
|
||||
|
||||
@property
|
||||
def architecture(self) -> str:
|
||||
"""
|
||||
repository architecture for backward compatibility
|
||||
|
||||
Returns:
|
||||
str: repository architecture
|
||||
"""
|
||||
return self.repository_id.architecture
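The RepositoryId model itself is not shown in this excerpt; judging from its usage here (constructed as RepositoryId(architecture, repository name), hashable, sortable, and exposing an architecture attribute), it behaves roughly like the hypothetical sketch below, which may differ from the real ahriman.models.repository_id:

from dataclasses import dataclass

@dataclass(frozen=True, order=True)
class RepositoryId:
    architecture: str
    name: str

repository_id = RepositoryId("x86_64", "aur-clone")
print(repository_id.architecture)  # "x86_64", as returned by the backward compatible property above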
|
||||
|
@ -123,7 +123,8 @@ class ApplicationRepository(ApplicationProperties):
|
||||
result.extend(unknown_aur(package)) # local package not found
|
||||
return result
|
||||
|
||||
def update(self, updates: Iterable[Package], packagers: Packagers | None = None) -> Result:
|
||||
def update(self, updates: Iterable[Package], packagers: Packagers | None = None, *,
|
||||
bump_pkgrel: bool = False) -> Result:
|
||||
"""
|
||||
run package updates
|
||||
|
||||
@ -131,6 +132,7 @@ class ApplicationRepository(ApplicationProperties):
|
||||
updates(Iterable[Package]): list of packages to update
|
||||
packagers(Packagers | None, optional): optional override of username for build process
|
||||
(Default value = None)
|
||||
bump_pkgrel(bool, optional): bump pkgrel in case of local version conflict (Default value = False)
|
||||
|
||||
Returns:
|
||||
Result: update result
|
||||
@ -150,7 +152,7 @@ class ApplicationRepository(ApplicationProperties):
|
||||
tree = Tree.resolve(updates)
|
||||
for num, level in enumerate(tree):
|
||||
self.logger.info("processing level #%i %s", num, [package.base for package in level])
|
||||
build_result = self.repository.process_build(level, packagers)
|
||||
build_result = self.repository.process_build(level, packagers, bump_pkgrel=bump_pkgrel)
|
||||
packages = self.repository.packages_built()
|
||||
process_update(packages, build_result)
|
||||
|
||||
|
@ -23,6 +23,7 @@ from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.packagers import Packagers
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Add(Handler):
|
||||
@ -31,17 +32,18 @@ class Add(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report, refresh_pacman_database=args.refresh)
|
||||
application = Application(repository_id, configuration, report=report, refresh_pacman_database=args.refresh)
|
||||
application.on_start()
|
||||
application.add(args.package, args.source, args.username)
|
||||
if not args.now:
|
||||
@ -52,5 +54,5 @@ class Add(Handler):
|
||||
packagers = Packagers(args.username, {package.base: package.packager for package in packages})
|
||||
|
||||
application.print_updates(packages, log_fn=application.logger.info)
|
||||
result = application.update(packages, packagers)
|
||||
result = application.update(packages, packagers, bump_pkgrel=args.increment)
|
||||
Add.check_if_empty(args.exit_code, result.is_empty)
|
||||
|
@ -26,6 +26,7 @@ from tarfile import TarFile
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.database import SQLite
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Backup(Handler):
|
||||
@ -36,13 +37,14 @@ class Backup(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False # it should be called only as "no-architecture"
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
|
@ -22,6 +22,7 @@ import argparse
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Clean(Handler):
|
||||
@ -30,17 +31,18 @@ class Clean(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
application.on_start()
|
||||
application.clean(cache=args.cache, chroot=args.chroot, manual=args.manual, packages=args.packages,
|
||||
pacman=args.pacman)
|
||||
|
@ -22,6 +22,7 @@ import threading
|
||||
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Daemon(Handler):
|
||||
@ -30,19 +31,20 @@ class Daemon(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
from ahriman.application.handlers import Update
|
||||
Update.run(args, architecture, configuration, report=report)
|
||||
timer = threading.Timer(args.interval, Daemon.run, args=[args, architecture, configuration],
|
||||
Update.run(args, repository_id, configuration, report=report)
|
||||
timer = threading.Timer(args.interval, Daemon.run, args=[args, repository_id, configuration],
|
||||
kwargs={"report": report})
|
||||
timer.start()
|
||||
timer.join()
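The handler runs one update cycle and then re-schedules itself through threading.Timer, giving a simple periodic loop; the same pattern with simplified, illustrative names:

import threading

def periodic(interval: float, task) -> None:
    task()                                                              # perform the work first
    timer = threading.Timer(interval, periodic, args=[interval, task])
    timer.start()                                                       # schedule the next iteration
    timer.join()                                                        # block so the process keeps running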
|
||||
|
@ -22,6 +22,7 @@ import argparse
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.formatters import ConfigurationPathsPrinter, ConfigurationPrinter, StringPrinter
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Dump(Handler):
|
||||
@ -32,13 +33,14 @@ class Dump(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
|
@ -25,7 +25,8 @@ from multiprocessing import Pool
|
||||
from ahriman.application.lock import Lock
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.exceptions import ExitCode, MissingArchitectureError, MultipleArchitecturesError
|
||||
from ahriman.core.log import Log
|
||||
from ahriman.core.log.log_loader import LogLoader
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
from ahriman.models.repository_paths import RepositoryPaths
|
||||
|
||||
|
||||
@ -50,54 +51,26 @@ class Handler:
|
||||
ALLOW_MULTI_ARCHITECTURE_RUN = True
|
||||
|
||||
@classmethod
|
||||
def architectures_extract(cls, args: argparse.Namespace) -> list[str]:
|
||||
"""
|
||||
get known architectures
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
|
||||
Returns:
|
||||
list[str]: list of architectures for which tree is created
|
||||
|
||||
Raises:
|
||||
MissingArchitecture: if no architecture set and automatic detection is not allowed or failed
|
||||
"""
|
||||
if not cls.ALLOW_AUTO_ARCHITECTURE_RUN and args.architecture is None:
|
||||
# for some parsers (e.g. config) we need to run with specific architecture
|
||||
# for those cases architecture must be set explicitly
|
||||
raise MissingArchitectureError(args.command)
|
||||
if args.architecture: # architecture is specified explicitly
|
||||
return sorted(set(args.architecture))
|
||||
|
||||
configuration = Configuration()
|
||||
configuration.load(args.configuration)
|
||||
# wtf???
|
||||
root = configuration.getpath("repository", "root") # pylint: disable=assignment-from-no-return
|
||||
architectures = RepositoryPaths.known_architectures(root)
|
||||
|
||||
if not architectures: # well we did not find anything
|
||||
raise MissingArchitectureError(args.command)
|
||||
return sorted(architectures)
|
||||
|
||||
@classmethod
|
||||
def call(cls, args: argparse.Namespace, architecture: str) -> bool:
|
||||
def call(cls, args: argparse.Namespace, repository_id: RepositoryId) -> bool:
|
||||
"""
|
||||
additional function to wrap all calls for multiprocessing library
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
|
||||
Returns:
|
||||
bool: True on success, False otherwise
|
||||
"""
|
||||
try:
|
||||
configuration = Configuration.from_path(args.configuration, architecture)
|
||||
log_handler = Log.handler(args.log_handler)
|
||||
Log.load(configuration, log_handler, quiet=args.quiet, report=args.report)
|
||||
with Lock(args, architecture, configuration):
|
||||
cls.run(args, architecture, configuration, report=args.report)
|
||||
configuration = Configuration.from_path(args.configuration, repository_id)
|
||||
|
||||
log_handler = LogLoader.handler(args.log_handler)
|
||||
LogLoader.load(configuration, log_handler, quiet=args.quiet, report=args.report)
|
||||
|
||||
with Lock(args, repository_id, configuration):
|
||||
cls.run(args, repository_id, configuration, report=args.report)
|
||||
|
||||
return True
|
||||
except ExitCode:
|
||||
return False
|
||||
@ -118,31 +91,72 @@ class Handler:
|
||||
int: 0 on success, 1 otherwise
|
||||
|
||||
Raises:
|
||||
MultipleArchitectures: if more than one architecture supplied and no multi architecture supported
|
||||
MultipleArchitecturesError: if more than one architecture supplied and no multi architecture supported
|
||||
"""
|
||||
architectures = cls.architectures_extract(args)
|
||||
repositories = cls.repositories_extract(args)
|
||||
|
||||
# actually we do not have to spawn another process if it is single-process application, do we?
|
||||
if len(architectures) > 1:
|
||||
if len(repositories) > 1:
|
||||
if not cls.ALLOW_MULTI_ARCHITECTURE_RUN:
|
||||
raise MultipleArchitecturesError(args.command)
|
||||
|
||||
with Pool(len(architectures)) as pool:
|
||||
result = pool.starmap(
|
||||
cls.call, [(args, architecture) for architecture in architectures])
|
||||
with Pool(len(repositories)) as pool:
|
||||
result = pool.starmap(cls.call, [(args, repository_id) for repository_id in repositories])
|
||||
else:
|
||||
result = [cls.call(args, architectures.pop())]
|
||||
result = [cls.call(args, repositories.pop())]
|
||||
|
||||
return 0 if all(result) else 1
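Putting the pieces together: a handler that raises ExitCode (for example via check_if_empty when --exit-code is set and the result is empty) makes call() return False, and the expression above then yields a non-zero status for run(); an illustrative trace:

from ahriman.core.exceptions import ExitCode

def call() -> bool:
    try:
        raise ExitCode          # e.g. --exit-code was given and nothing was built
    except ExitCode:
        return False

results = [call()]
print(0 if all(results) else 1)  # 1, which run() hands back as the process exit code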
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def repositories_extract(cls, args: argparse.Namespace) -> list[RepositoryId]:
|
||||
"""
|
||||
get known architectures
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
|
||||
Returns:
|
||||
list[RepositoryId]: list of repository names and architectures for which tree is created
|
||||
|
||||
Raises:
|
||||
MissingArchitectureError: if no architecture set and automatic detection is not allowed or failed
|
||||
"""
|
||||
if not cls.ALLOW_AUTO_ARCHITECTURE_RUN and args.architecture is None:
|
||||
# for some parsers (e.g. config) we need to run with specific architecture
|
||||
# for those cases architecture must be set explicitly
|
||||
raise MissingArchitectureError(args.command)
|
||||
|
||||
configuration = Configuration()
|
||||
configuration.load(args.configuration)
|
||||
name = configuration.get("repository", "name", fallback="") # will only be used for legacy mode
|
||||
|
||||
if args.architecture: # architecture is specified explicitly
|
||||
repositories = args.repository or [name] # fallback for legacy mode
|
||||
return sorted(
|
||||
set(
|
||||
RepositoryId(architecture, repository)
|
||||
for architecture in args.architecture
|
||||
for repository in repositories
|
||||
)
|
||||
)
|
||||
|
||||
# wtf???
|
||||
root = configuration.getpath("repository", "root") # pylint: disable=assignment-from-no-return
|
||||
architectures = RepositoryPaths.known_architectures(root, name)
|
||||
|
||||
if not architectures: # well we did not find anything
|
||||
raise MissingArchitectureError(args.command)
|
||||
return sorted(architectures)
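In the explicit case every supplied architecture is expanded against every supplied repository name (or against the configuration fallback in legacy mode); for illustration, with hypothetical argument values:

from ahriman.models.repository_id import RepositoryId

architectures = ["x86_64", "i686"]  # hypothetical --architecture values
repositories = ["aur-clone"]        # hypothetical --repository values

identifiers = sorted(
    RepositoryId(architecture, repository)
    for architecture in architectures
    for repository in repositories
)
# one Handler.call invocation is spawned per identifier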
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
|
||||
@ -164,4 +178,4 @@ class Handler:
|
||||
ExitCode: if result is empty and check is enabled
|
||||
"""
|
||||
if enabled and predicate:
|
||||
raise ExitCode()
|
||||
raise ExitCode
|
||||
|
@ -21,6 +21,7 @@ import argparse
|
||||
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Help(Handler):
|
||||
@ -31,13 +32,14 @@ class Help(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False # it should be called only as "no-architecture"
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
|
@ -22,6 +22,7 @@ import argparse
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class KeyImport(Handler):
|
||||
@ -32,15 +33,16 @@ class KeyImport(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False # it should be called only as "no-architecture"
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
application.repository.sign.key_import(args.key_server, args.key)
|
||||
|
@ -30,6 +30,7 @@ from ahriman.core.formatters import PatchPrinter
|
||||
from ahriman.models.action import Action
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.pkgbuild_patch import PkgbuildPatch
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Patch(Handler):
|
||||
@ -38,29 +39,31 @@ class Patch(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
application.on_start()
|
||||
|
||||
if args.action == Action.Update and args.variable is not None:
|
||||
patch = Patch.patch_create_from_function(args.variable, args.patch)
|
||||
Patch.patch_set_create(application, args.package, patch)
|
||||
elif args.action == Action.Update and args.variable is None:
|
||||
package_base, patch = Patch.patch_create_from_diff(args.package, architecture, args.track)
|
||||
Patch.patch_set_create(application, package_base, patch)
|
||||
elif args.action == Action.List:
|
||||
Patch.patch_set_list(application, args.package, args.variable, args.exit_code)
|
||||
elif args.action == Action.Remove:
|
||||
Patch.patch_set_remove(application, args.package, args.variable)
|
||||
match args.action:
    case Action.Update if args.variable is not None:
        patch = Patch.patch_create_from_function(args.variable, args.patch)
        Patch.patch_set_create(application, args.package, patch)
    case Action.Update:
        package_base, patch = Patch.patch_create_from_diff(args.package, repository_id.architecture, args.track)
        Patch.patch_set_create(application, package_base, patch)
    case Action.List:
        Patch.patch_set_list(application, args.package, args.variable, args.exit_code)
    case Action.Remove:
        Patch.patch_set_remove(application, args.package, args.variable)
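The handler above now dispatches through structural pattern matching with a guard on the first case. A minimal self-contained sketch of that dispatch shape, with an illustrative enum rather than ahriman's own `Action` model:

```python
from enum import Enum, auto


class Action(Enum):
    # hypothetical stand-in for the project's Action model
    Update = auto()
    List = auto()
    Remove = auto()


def dispatch(action: Action, variable: str | None) -> str:
    # the guarded case is taken only when the guard holds;
    # the bare "case Action.Update" catches the remaining updates
    match action:
        case Action.Update if variable is not None:
            return f"update patch variable {variable}"
        case Action.Update:
            return "create patch from diff"
        case Action.List:
            return "list patches"
        case Action.Remove:
            return "remove patches"
    return "unknown action"


print(dispatch(Action.Update, None))      # create patch from diff
print(dispatch(Action.Update, "pkgver"))  # update patch variable pkgver
```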
|
||||
@staticmethod
|
||||
def patch_create_from_diff(sources_dir: Path, architecture: str, track: list[str]) -> tuple[str, PkgbuildPatch]:
|
||||
|
@ -24,6 +24,7 @@ from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.build_status import BuildStatusEnum
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Rebuild(Handler):
|
||||
@ -32,17 +33,18 @@ class Rebuild(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
application.on_start()
|
||||
|
||||
packages = Rebuild.extract_packages(application, args.status, from_database=args.from_database)
|
||||
@ -53,7 +55,7 @@ class Rebuild(Handler):
|
||||
application.print_updates(updates, log_fn=print)
|
||||
return
|
||||
|
||||
result = application.update(updates, args.username)
|
||||
result = application.update(updates, args.username, bump_pkgrel=args.increment)
|
||||
Rebuild.check_if_empty(args.exit_code, result.is_empty)
|
||||
|
||||
@staticmethod
|
||||
|
@ -22,6 +22,7 @@ import argparse
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Remove(Handler):
|
||||
@ -30,16 +31,17 @@ class Remove(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
application.on_start()
|
||||
application.remove(args.package)
|
||||
|
@ -23,6 +23,7 @@ from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.formatters import StringPrinter
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class RemoveUnknown(Handler):
|
||||
@ -31,17 +32,18 @@ class RemoveUnknown(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
application.on_start()
|
||||
unknown_packages = application.unknown()
|
||||
|
||||
|
@ -23,6 +23,7 @@ from tarfile import TarFile
|
||||
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Restore(Handler):
|
||||
@ -33,13 +34,14 @@ class Restore(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False # it should be called only as "no-architecture"
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
|
@ -29,6 +29,7 @@ from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.exceptions import OptionError
|
||||
from ahriman.core.formatters import AurPrinter
|
||||
from ahriman.models.aur_package import AURPackage
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Search(Handler):
|
||||
@ -47,17 +48,18 @@ class Search(Handler):
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
|
||||
official_packages_list = Official.multisearch(*args.search, pacman=application.repository.pacman)
|
||||
aur_packages_list = AUR.multisearch(*args.search, pacman=application.repository.pacman)
|
||||
@ -81,7 +83,7 @@ class Search(Handler):
|
||||
list[AURPackage]: sorted list for packages
|
||||
|
||||
Raises:
|
||||
InvalidOption: if search fields is not in list of allowed ones
|
||||
OptionError: if search fields is not in list of allowed ones
|
||||
"""
|
||||
if sort_by not in Search.SORT_FIELDS:
|
||||
raise OptionError(sort_by)
|
||||
|
@ -19,12 +19,13 @@
|
||||
#
|
||||
import argparse
|
||||
|
||||
from ahriman import version
|
||||
from ahriman import __version__
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.formatters import UpdatePrinter
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class ServiceUpdates(Handler):
|
||||
@ -35,21 +36,22 @@ class ServiceUpdates(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False # it should be called only as "no-architecture"
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
|
||||
remote = Package.from_aur("ahriman", application.repository.pacman, None)
|
||||
release = remote.version.rsplit("-", 1)[-1] # we don't store pkgrel locally, so we just append it
|
||||
local_version = f"{version.__version__}-{release}"
|
||||
_, release = remote.version.rsplit("-", 1) # we don't store pkgrel locally, so we just append it
|
||||
local_version = f"{__version__}-{release}"
|
||||
|
||||
# technically we would like to compare versions, but it is fine to raise an exception in case if locally
|
||||
# installed package is newer than in AUR
|
||||
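The version comparison above stitches the pkgrel from the AUR record onto the locally known module version, since pkgrel is not stored locally. A short sketch of that string handling with made-up version values:

```python
__version__ = "2.11.0"       # module version, carries no pkgrel
remote_version = "2.11.0-1"  # full AUR version including pkgrel

_, release = remote_version.rsplit("-", 1)
local_version = f"{__version__}-{release}"
print(local_version)  # 2.11.0-1
```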
|
@ -25,6 +25,7 @@ from pwd import getpwuid
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
from ahriman.models.repository_paths import RepositoryPaths
|
||||
from ahriman.models.user import User
|
||||
|
||||
@ -46,80 +47,80 @@ class Setup(Handler):
|
||||
SUDOERS_DIR_PATH = Path("/etc") / "sudoers.d"
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
Setup.configuration_create_ahriman(args, architecture, args.repository, configuration)
|
||||
Setup.configuration_create_ahriman(args, repository_id, configuration)
|
||||
configuration.reload()
|
||||
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
|
||||
Setup.configuration_create_makepkg(args.packager, args.makeflags_jobs, application.repository.paths)
|
||||
Setup.executable_create(application.repository.paths, args.build_command, architecture)
|
||||
Setup.configuration_create_devtools(args.build_command, architecture, args.from_configuration, args.mirror,
|
||||
args.multilib, args.repository, application.repository.paths)
|
||||
Setup.configuration_create_sudo(application.repository.paths, args.build_command, architecture)
|
||||
Setup.executable_create(application.repository.paths, repository_id)
|
||||
repository_server = f"file://{application.repository.paths.repository}" if args.server is None else args.server
|
||||
Setup.configuration_create_devtools(
|
||||
repository_id, args.from_configuration, args.mirror, args.multilib, repository_server)
|
||||
Setup.configuration_create_sudo(application.repository.paths, repository_id)
|
||||
|
||||
application.repository.repo.init()
|
||||
# lazy database sync
|
||||
application.repository.pacman.handle # pylint: disable=pointless-statement
|
||||
|
||||
@staticmethod
|
||||
def build_command(root: Path, prefix: str, architecture: str) -> Path:
|
||||
def build_command(root: Path, repository_id: RepositoryId) -> Path:
|
||||
"""
|
||||
generate build command name
|
||||
|
||||
Args:
|
||||
root(Path): root directory for the build command (must be root of the repository)
|
||||
prefix(str): command prefix in {prefix}-{architecture}-build
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
|
||||
Returns:
|
||||
Path: valid devtools command name
|
||||
"""
|
||||
return root / f"{prefix}-{architecture}-build"
|
||||
return root / f"{repository_id.name}-{repository_id.architecture}-build"
|
||||
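With the identifier object, the devtools command path is now derived from the repository name instead of a free-form prefix. A tiny sketch of the resulting path, using a simplified stand-in for ahriman's `RepositoryId` (field order assumed from the usage shown in this diff):

```python
from dataclasses import dataclass
from pathlib import Path


@dataclass(frozen=True)
class RepositoryId:  # simplified stand-in, not the real model
    architecture: str
    name: str


def build_command(root: Path, repository_id: RepositoryId) -> Path:
    # mirrors the formula shown above: {name}-{architecture}-build under root
    return root / f"{repository_id.name}-{repository_id.architecture}-build"


print(build_command(Path("/var/lib/ahriman"), RepositoryId("x86_64", "aur-clone")))
# /var/lib/ahriman/aur-clone-x86_64-build
```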
|
||||
@staticmethod
|
||||
def configuration_create_ahriman(args: argparse.Namespace, architecture: str, repository: str,
|
||||
def configuration_create_ahriman(args: argparse.Namespace, repository_id: RepositoryId,
|
||||
root: Configuration) -> None:
|
||||
"""
|
||||
create service specific configuration
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository(str): repository name
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
root(Configuration): root configuration instance
|
||||
"""
|
||||
configuration = Configuration()
|
||||
|
||||
section = Configuration.section_name("build", architecture)
|
||||
build_command = Setup.build_command(root.repository_paths.root, args.build_command, architecture)
|
||||
section = Configuration.section_name("build", repository_id.name, repository_id.architecture)
|
||||
build_command = Setup.build_command(root.repository_paths.root, repository_id)
|
||||
configuration.set_option(section, "build_command", str(build_command))
|
||||
configuration.set_option("repository", "name", repository)
|
||||
configuration.set_option("repository", "name", repository_id.name) # backward compatibility for docker
|
||||
if args.build_as_user is not None:
|
||||
configuration.set_option(section, "makechrootpkg_flags", f"-U {args.build_as_user}")
|
||||
|
||||
section = Configuration.section_name("alpm", architecture)
|
||||
section = Configuration.section_name("alpm", repository_id.name, repository_id.architecture)
|
||||
if args.mirror is not None:
|
||||
configuration.set_option(section, "mirror", args.mirror)
|
||||
if not args.multilib:
|
||||
repositories = filter(lambda r: r != "multilib", root.getlist("alpm", "repositories"))
|
||||
configuration.set_option(section, "repositories", " ".join(repositories))
|
||||
|
||||
section = Configuration.section_name("sign", architecture)
|
||||
section = Configuration.section_name("sign", repository_id.name, repository_id.architecture)
|
||||
if args.sign_key is not None:
|
||||
configuration.set_option(section, "target", " ".join([target.name.lower() for target in args.sign_target]))
|
||||
configuration.set_option(section, "key", args.sign_key)
|
||||
|
||||
section = Configuration.section_name("web", architecture)
|
||||
section = Configuration.section_name("web", repository_id.name, repository_id.architecture)
|
||||
if args.web_port is not None:
|
||||
configuration.set_option(section, "port", str(args.web_port))
|
||||
if args.web_unix_socket is not None:
|
||||
@ -133,8 +134,8 @@ class Setup(Handler):
|
||||
configuration.write(ahriman_configuration)
|
||||
|
||||
@staticmethod
|
||||
def configuration_create_devtools(prefix: str, architecture: str, source: Path, mirror: str | None,
|
||||
multilib: bool, repository: str, paths: RepositoryPaths) -> None:
|
||||
def configuration_create_devtools(repository_id: RepositoryId, source: Path, mirror: str | None,
|
||||
multilib: bool, repository_server: str) -> None:
|
||||
"""
|
||||
create configuration for devtools based on ``source`` configuration
|
||||
|
||||
@ -142,13 +143,11 @@ class Setup(Handler):
|
||||
devtools does not allow to specify the pacman configuration, thus we still have to use configuration in /usr
|
||||
|
||||
Args:
|
||||
prefix(str): command prefix in {prefix}-{architecture}-build
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
source(Path): path to source configuration file
|
||||
mirror(str | None): link to package server mirror
|
||||
multilib(bool): add or do not multilib repository to the configuration
|
||||
repository(str): repository name
|
||||
paths(RepositoryPaths): repository paths instance
|
||||
repository_server(str): url of the repository
|
||||
"""
|
||||
# allow_no_value=True is required because pacman uses boolean configuration in which just keys present
|
||||
# (e.g. NoProgressBar) which will lead to exception
|
||||
@ -162,7 +161,7 @@ class Setup(Handler):
|
||||
configuration.read(source)
|
||||
|
||||
# set our architecture now
|
||||
configuration.set_option("options", "Architecture", architecture)
|
||||
configuration.set_option("options", "Architecture", repository_id.architecture)
|
||||
|
||||
# add multilib
|
||||
if multilib:
|
||||
@ -177,10 +176,10 @@ class Setup(Handler):
|
||||
configuration.set_option(section, "Server", mirror)
|
||||
|
||||
# add repository itself
|
||||
configuration.set_option(repository, "SigLevel", "Never") # we don't care
|
||||
configuration.set_option(repository, "Server", f"file://{paths.repository}")
|
||||
configuration.set_option(repository_id.name, "SigLevel", "Never") # we don't care
|
||||
configuration.set_option(repository_id.name, "Server", repository_server)
|
||||
|
||||
target = source.parent / f"{prefix}-{architecture}.conf"
|
||||
target = source.parent / f"{repository_id.name}-{repository_id.architecture}.conf"
|
||||
with target.open("w") as devtools_configuration:
|
||||
configuration.write(devtools_configuration)
|
||||
|
||||
@ -204,31 +203,29 @@ class Setup(Handler):
|
||||
(home_dir / ".makepkg.conf").write_text(content, encoding="utf8")
|
||||
|
||||
@staticmethod
|
||||
def configuration_create_sudo(paths: RepositoryPaths, prefix: str, architecture: str) -> None:
|
||||
def configuration_create_sudo(paths: RepositoryPaths, repository_id: RepositoryId) -> None:
|
||||
"""
|
||||
create configuration to run build command with sudo without password
|
||||
|
||||
Args:
|
||||
paths(RepositoryPaths): repository paths instance
|
||||
prefix(str): command prefix in {prefix}-{architecture}-build
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
"""
|
||||
command = Setup.build_command(paths.root, prefix, architecture)
|
||||
sudoers_file = Setup.build_command(Setup.SUDOERS_DIR_PATH, prefix, architecture)
|
||||
command = Setup.build_command(paths.root, repository_id)
|
||||
sudoers_file = Setup.build_command(Setup.SUDOERS_DIR_PATH, repository_id)
|
||||
sudoers_file.write_text(f"ahriman ALL=(ALL) NOPASSWD:SETENV: {command} *\n", encoding="utf8")
|
||||
sudoers_file.chmod(0o400) # security!
|
||||
|
||||
@staticmethod
|
||||
def executable_create(paths: RepositoryPaths, prefix: str, architecture: str) -> None:
|
||||
def executable_create(paths: RepositoryPaths, repository_id: RepositoryId) -> None:
|
||||
"""
|
||||
create executable for the service
|
||||
|
||||
Args:
|
||||
paths(RepositoryPaths): repository paths instance
|
||||
prefix(str): command prefix in {prefix}-{architecture}-build
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
"""
|
||||
command = Setup.build_command(paths.root, prefix, architecture)
|
||||
command = Setup.build_command(paths.root, repository_id)
|
||||
command.unlink(missing_ok=True)
|
||||
command.symlink_to(Setup.ARCHBUILD_COMMAND_PATH)
|
||||
paths.chown(command) # we would like to keep owner inside ahriman's home
|
||||
|
@ -26,6 +26,7 @@ from pathlib import Path
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.formatters import StringPrinter
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Shell(Handler):
|
||||
@ -36,13 +37,14 @@ class Shell(Handler):
|
||||
ALLOW_MULTI_ARCHITECTURE_RUN = False
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
@ -50,7 +52,13 @@ class Shell(Handler):
|
||||
# licensed by https://creativecommons.org/licenses/by-sa/3.0
|
||||
path = Path(sys.prefix) / "share" / "ahriman" / "templates" / "shell"
|
||||
StringPrinter(path.read_text(encoding="utf8")).print(verbose=False)
|
||||
local_variables = {"architecture": architecture, "configuration": configuration}
|
||||
|
||||
local_variables = {
|
||||
"architecture": repository_id.architecture,
|
||||
"configuration": configuration,
|
||||
"repository_id": repository_id,
|
||||
}
|
||||
|
||||
if args.code is None:
|
||||
code.interact(local=local_variables)
|
||||
else:
|
||||
|
@ -22,6 +22,7 @@ import argparse
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Sign(Handler):
|
||||
@ -30,14 +31,15 @@ class Sign(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
Application(architecture, configuration, report=report).sign(args.package)
|
||||
Application(repository_id, configuration, report=report).sign(args.package)
|
||||
|
@ -27,6 +27,7 @@ from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.formatters import PackagePrinter, StatusPrinter
|
||||
from ahriman.models.build_status import BuildStatus
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Status(Handler):
|
||||
@ -37,27 +38,28 @@ class Status(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
# we are using reporter here
|
||||
client = Application(architecture, configuration, report=True).repository.reporter
|
||||
client = Application(repository_id, configuration, report=True).repository.reporter
|
||||
if args.ahriman:
|
||||
service_status = client.get_internal()
|
||||
service_status = client.status_get()
|
||||
StatusPrinter(service_status.status).print(verbose=args.info)
|
||||
if args.package:
|
||||
packages: list[tuple[Package, BuildStatus]] = sum(
|
||||
(client.get(base) for base in args.package),
|
||||
(client.package_get(base) for base in args.package),
|
||||
start=[])
|
||||
else:
|
||||
packages = client.get(None)
|
||||
packages = client.package_get(None)
|
||||
|
||||
Status.check_if_empty(args.exit_code, not packages)
|
||||
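The flattening above relies on `sum(..., start=[])`, which concatenates the per-package lists the same way `+` would. A small sketch with dummy data:

```python
# each lookup yields a (possibly empty) list of per-package entries
per_package = [["pkg1: success"], [], ["pkg2: failed", "pkg2-debug: failed"]]

flattened = sum((chunk for chunk in per_package), start=[])
print(flattened)  # ['pkg1: success', 'pkg2: failed', 'pkg2-debug: failed']
```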
|
||||
|
@ -23,6 +23,7 @@ from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.action import Action
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class StatusUpdate(Handler):
|
||||
@ -33,26 +34,28 @@ class StatusUpdate(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
# we are using reporter here
|
||||
client = Application(architecture, configuration, report=True).repository.reporter
|
||||
client = Application(repository_id, configuration, report=True).repository.reporter
|
||||
|
||||
if args.action == Action.Update and args.package:
|
||||
# update packages statuses
|
||||
for package in args.package:
|
||||
client.update(package, args.status)
|
||||
elif args.action == Action.Update:
|
||||
# update service status
|
||||
client.update_self(args.status)
|
||||
elif args.action == Action.Remove:
|
||||
for package in args.package:
|
||||
client.remove(package)
|
||||
match args.action:
    case Action.Update if args.package:
        # update packages statuses
        for package in args.package:
            client.package_update(package, args.status)
    case Action.Update:
        # update service status
        client.status_update(args.status)
    case Action.Remove:
        for package in args.package:
            client.package_remove(package)
|
@ -22,8 +22,9 @@ import argparse
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.formatters import TreePrinter
|
||||
from ahriman.core.formatters import StringPrinter, TreePrinter
|
||||
from ahriman.core.tree import Tree
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Structure(Handler):
|
||||
@ -34,19 +35,26 @@ class Structure(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
packages = application.repository.packages()
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
partitions = Tree.partition(application.repository.packages(), count=args.partitions)
|
||||
|
||||
tree = Tree.resolve(packages)
|
||||
for num, level in enumerate(tree):
|
||||
TreePrinter(num, level).print(verbose=True, separator=" ")
|
||||
for partition_id, partition in enumerate(partitions):
|
||||
StringPrinter(f"partition #{partition_id}").print(verbose=False)
|
||||
|
||||
tree = Tree.resolve(partition)
|
||||
for num, level in enumerate(tree):
|
||||
TreePrinter(num, level).print(verbose=True, separator=" ")
|
||||
|
||||
# empty line
|
||||
StringPrinter("").print(verbose=False)
|
||||
|
@ -22,6 +22,7 @@ import argparse
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
from ahriman.models.result import Result
|
||||
|
||||
|
||||
@ -31,19 +32,20 @@ class Triggers(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application = Application(repository_id, configuration, report=report)
|
||||
if args.trigger:
|
||||
loader = application.repository.triggers
|
||||
loader.triggers = [loader.load_trigger(trigger, architecture, configuration) for trigger in args.trigger]
|
||||
loader.triggers = [loader.load_trigger(trigger, repository_id, configuration) for trigger in args.trigger]
|
||||
application.on_start()
|
||||
application.on_result(Result())
|
||||
|
@ -22,6 +22,7 @@ import argparse
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.formatters import StringPrinter
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class UnsafeCommands(Handler):
|
||||
@ -32,13 +33,14 @@ class UnsafeCommands(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False # it should be called only as "no-architecture"
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
|
@ -25,6 +25,7 @@ from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.packagers import Packagers
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Update(Handler):
|
||||
@ -33,17 +34,18 @@ class Update(Handler):
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report, refresh_pacman_database=args.refresh)
|
||||
application = Application(repository_id, configuration, report=report, refresh_pacman_database=args.refresh)
|
||||
application.on_start()
|
||||
packages = application.updates(args.package, aur=args.aur, local=args.local, manual=args.manual, vcs=args.vcs)
|
||||
Update.check_if_empty(args.exit_code, not packages)
|
||||
@ -54,7 +56,7 @@ class Update(Handler):
|
||||
packagers = Packagers(args.username, {package.base: package.packager for package in packages})
|
||||
|
||||
application.print_updates(packages, log_fn=application.logger.info)
|
||||
result = application.update(packages, packagers)
|
||||
result = application.update(packages, packagers, bump_pkgrel=args.increment)
|
||||
Update.check_if_empty(args.exit_code, result.is_empty)
|
||||
|
||||
@staticmethod
|
||||
|
@ -26,6 +26,7 @@ from ahriman.core.database import SQLite
|
||||
from ahriman.core.exceptions import PasswordError
|
||||
from ahriman.core.formatters import UserPrinter
|
||||
from ahriman.models.action import Action
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
from ahriman.models.user import User
|
||||
|
||||
|
||||
@ -37,30 +38,32 @@ class Users(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False # it should be called only as "no-architecture"
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
database = SQLite.load(configuration)
|
||||
|
||||
if args.action == Action.Update:
|
||||
user = Users.user_create(args)
|
||||
# if password is left blank we are not going to require salt to be set
|
||||
salt = configuration.get("auth", "salt") if user.password else ""
|
||||
database.user_update(user.hash_password(salt))
|
||||
elif args.action == Action.List:
|
||||
users = database.user_list(args.username, args.role)
|
||||
Users.check_if_empty(args.exit_code, not users)
|
||||
for user in users:
|
||||
UserPrinter(user).print(verbose=True)
|
||||
elif args.action == Action.Remove:
|
||||
database.user_remove(args.username)
|
||||
match args.action:
    case Action.Update:
        user = Users.user_create(args)
        # if password is left blank we are not going to require salt to be set
        salt = configuration.get("auth", "salt", fallback="") if user.password else ""
        database.user_update(user.hash_password(salt))
    case Action.List:
        users = database.user_list(args.username, args.role)
        Users.check_if_empty(args.exit_code, not users)
        for user in users:
            UserPrinter(user).print(verbose=True)
    case Action.Remove:
        database.user_remove(args.username)
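The salt lookup now tolerates a missing option through the `fallback` keyword. A sketch of the same behavior with the standard library `configparser`, on the assumption that ahriman's `Configuration` keeps those semantics:

```python
from configparser import ConfigParser

configuration = ConfigParser()
configuration.read_string("[auth]\n")  # section exists, but no salt option

# without fallback this lookup would raise NoOptionError
print(repr(configuration.get("auth", "salt", fallback="")))  # ''
```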
|
||||
@staticmethod
|
||||
def user_create(args: argparse.Namespace) -> User:
|
||||
@ -72,6 +75,9 @@ class Users(Handler):
|
||||
|
||||
Returns:
|
||||
User: built user descriptor
|
||||
|
||||
Raises:
|
||||
PasswordError: password input is invalid
|
||||
"""
|
||||
def read_password() -> str:
|
||||
first_password = getpass.getpass()
|
||||
|
@ -29,6 +29,7 @@ from ahriman.core.configuration.validator import Validator
|
||||
from ahriman.core.exceptions import ExtensionError
|
||||
from ahriman.core.formatters import ValidationPrinter
|
||||
from ahriman.core.triggers import TriggerLoader
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Validate(Handler):
|
||||
@ -39,17 +40,18 @@ class Validate(Handler):
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
schema = Validate.schema(architecture, configuration)
|
||||
schema = Validate.schema(repository_id, configuration)
|
||||
validator = Validator(configuration=configuration, schema=schema)
|
||||
|
||||
if validator.validate(configuration.dump()):
|
||||
@ -61,12 +63,12 @@ class Validate(Handler):
|
||||
Validate.check_if_empty(args.exit_code, True)
|
||||
|
||||
@staticmethod
|
||||
def schema(architecture: str, configuration: Configuration) -> ConfigurationSchema:
|
||||
def schema(repository_id: RepositoryId, configuration: Configuration) -> ConfigurationSchema:
|
||||
"""
|
||||
get schema with triggers
|
||||
|
||||
Args:
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
|
||||
Returns:
|
||||
@ -85,12 +87,12 @@ class Validate(Handler):
|
||||
continue
|
||||
|
||||
# default settings if any
|
||||
for schema_name, schema in trigger_class.configuration_schema(architecture, None).items():
|
||||
for schema_name, schema in trigger_class.configuration_schema(repository_id, None).items():
|
||||
erased = Validate.schema_erase_required(copy.deepcopy(schema))
|
||||
root[schema_name] = Validate.schema_merge(root.get(schema_name, {}), erased)
|
||||
|
||||
# settings according to enabled triggers
|
||||
for schema_name, schema in trigger_class.configuration_schema(architecture, configuration).items():
|
||||
for schema_name, schema in trigger_class.configuration_schema(repository_id, configuration).items():
|
||||
root[schema_name] = Validate.schema_merge(root.get(schema_name, {}), copy.deepcopy(schema))
|
||||
|
||||
return root
|
||||
|
@ -24,10 +24,11 @@ import sys
|
||||
from collections.abc import Generator
|
||||
from importlib import metadata
|
||||
|
||||
from ahriman import version
|
||||
from ahriman import __version__
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.formatters import VersionPrinter
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Versions(Handler):
|
||||
@ -42,17 +43,18 @@ class Versions(Handler):
|
||||
PEP423_PACKAGE_NAME = re.compile(r"^[A-Za-z0-9._-]+")
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
VersionPrinter(f"Module version {version.__version__}",
|
||||
VersionPrinter(f"Module version {__version__}",
|
||||
{"Python": sys.version}).print(verbose=False, separator=" ")
|
||||
packages = Versions.package_dependencies("ahriman")
|
||||
VersionPrinter("Installed packages", dict(packages)).print(verbose=False, separator=" ")
|
||||
|
@ -24,6 +24,7 @@ from collections.abc import Generator
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.spawn import Spawn
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
|
||||
|
||||
class Web(Handler):
|
||||
@ -33,27 +34,27 @@ class Web(Handler):
|
||||
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False
|
||||
ALLOW_MULTI_ARCHITECTURE_RUN = False # required to be able to spawn external processes
|
||||
COMMAND_ARGS_WHITELIST = ["force", "log_handler", ""]
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
def run(cls, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
report: bool) -> None:
|
||||
"""
|
||||
callback for command line
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
# we are using local import for optional dependencies
|
||||
from ahriman.web.web import run_server, setup_service
|
||||
|
||||
spawner_args = Web.extract_arguments(args, architecture, configuration)
|
||||
spawner = Spawn(args.parser(), architecture, list(spawner_args))
|
||||
spawner_args = Web.extract_arguments(args, repository_id, configuration)
|
||||
spawner = Spawn(args.parser(), repository_id, list(spawner_args))
|
||||
spawner.start()
|
||||
|
||||
application = setup_service(architecture, configuration, spawner)
|
||||
application = setup_service(repository_id, configuration, spawner)
|
||||
run_server(application)
|
||||
|
||||
# terminate spawn process at the last
|
||||
@ -61,21 +62,22 @@ class Web(Handler):
|
||||
spawner.join()
|
||||
|
||||
@staticmethod
|
||||
def extract_arguments(args: argparse.Namespace, architecture: str,
|
||||
def extract_arguments(args: argparse.Namespace, repository_id: RepositoryId,
|
||||
configuration: Configuration) -> Generator[str, None, None]:
|
||||
"""
|
||||
extract list of arguments used for current command, except for command specific ones
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
|
||||
Returns:
|
||||
Generator[str, None, None]: command line arguments which were used for this specific command
|
||||
"""
|
||||
# read architecture from the same argument list
|
||||
yield from ["--architecture", architecture]
|
||||
yield from ["--architecture", repository_id.architecture]
|
||||
yield from ["--repository", repository_id.name]
|
||||
# read configuration path from current settings
|
||||
if (configuration_path := configuration.path) is not None:
|
||||
yield from ["--configuration", str(configuration_path)]
|
||||
@ -89,3 +91,7 @@ class Web(Handler):
|
||||
yield "--quiet"
|
||||
if args.unsafe:
|
||||
yield "--unsafe"
|
||||
|
||||
# arguments from configuration
|
||||
if (wait_timeout := configuration.getint("web", "wait_timeout", fallback=None)) is not None:
|
||||
yield from ["--wait-timeout", str(wait_timeout)]
|
||||
|
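Because `extract_arguments` is a generator of individual tokens, the web handler can chain the yielded flags straight into the spawned command line. A minimal sketch of the composition idea with a reduced, hypothetical flag set:

```python
from collections.abc import Generator


def extract_arguments(architecture: str, repository: str,
                      wait_timeout: int | None) -> Generator[str, None, None]:
    yield from ["--architecture", architecture]
    yield from ["--repository", repository]
    if wait_timeout is not None:
        yield from ["--wait-timeout", str(wait_timeout)]


print(list(extract_arguments("x86_64", "aur-clone", 300)))
# ['--architecture', 'x86_64', '--repository', 'aur-clone', '--wait-timeout', '300']
```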
@ -19,16 +19,19 @@
|
||||
#
|
||||
import argparse
|
||||
|
||||
from pathlib import Path
|
||||
from types import TracebackType
|
||||
from typing import Literal, Self
|
||||
|
||||
from ahriman import version
|
||||
from ahriman import __version__
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.exceptions import DuplicateRunError
|
||||
from ahriman.core.log import LazyLogging
|
||||
from ahriman.core.status.client import Client
|
||||
from ahriman.core.util import check_user
|
||||
from ahriman.models.build_status import BuildStatusEnum
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
from ahriman.models.waiter import Waiter
|
||||
|
||||
|
||||
class Lock(LazyLogging):
|
||||
@ -41,33 +44,39 @@ class Lock(LazyLogging):
|
||||
reporter(Client): build status reporter instance
|
||||
paths(RepositoryPaths): repository paths instance
|
||||
unsafe(bool): skip user check
|
||||
wait_timeout(int): wait in seconds until lock will free
|
||||
|
||||
Examples:
|
||||
Instance of this class except for controlling file-based lock is also required for basic applications checks.
|
||||
The common flow is to create instance in ``with`` block and handle exceptions after all::
|
||||
|
||||
>>> from ahriman.core.configuration import Configuration
|
||||
>>> from ahriman.models.repository_id import RepositoryId
|
||||
>>>
|
||||
>>> configuration = Configuration()
|
||||
>>> try:
|
||||
>>> with Lock(args, "x86_64", configuration):
|
||||
>>> with Lock(args, RepositoryId("x86_64", "aur-clone"), configuration):
|
||||
>>> perform_actions()
|
||||
>>> except Exception as exception:
|
||||
>>> handle_exceptions(exception)
|
||||
"""
|
||||
|
||||
def __init__(self, args: argparse.Namespace, architecture: str, configuration: Configuration) -> None:
|
||||
def __init__(self, args: argparse.Namespace, repository_id: RepositoryId, configuration: Configuration) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
args(argparse.Namespace): command line args
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
"""
|
||||
self.path = args.lock.with_stem(f"{args.lock.stem}_{architecture}") if args.lock is not None else None
|
||||
self.force = args.force
|
||||
self.unsafe = args.unsafe
|
||||
lock_suffix = f"{repository_id.name}_{repository_id.architecture}" if repository_id.name is not None else repository_id.architecture
|
||||
self.path: Path | None = \
|
||||
args.lock.with_stem(f"{args.lock.stem}_{lock_suffix}") if args.lock is not None else None
|
||||
|
||||
self.force: bool = args.force
|
||||
self.unsafe: bool = args.unsafe
|
||||
self.wait_timeout: int = args.wait_timeout
|
||||
|
||||
self.paths = configuration.repository_paths
|
||||
self.reporter = Client.load(configuration, report=args.report)
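The lock file name now carries both the repository name and the architecture via `Path.with_stem`. A sketch of the path arithmetic with an illustrative lock path (the real default comes from the argument parser, not shown here):

```python
from pathlib import Path

lock = Path("/tmp/ahriman.pid")  # illustrative only
name, architecture = "aur-clone", "x86_64"

lock_suffix = f"{name}_{architecture}" if name is not None else architecture
print(lock.with_stem(f"{lock.stem}_{lock_suffix}"))
# /tmp/ahriman_aur-clone_x86_64.pid
```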
@ -76,10 +85,10 @@ class Lock(LazyLogging):
|
||||
"""
|
||||
check web server version
|
||||
"""
|
||||
status = self.reporter.get_internal()
|
||||
if status.version is not None and status.version != version.__version__:
|
||||
status = self.reporter.status_get()
|
||||
if status.version is not None and status.version != __version__:
|
||||
self.logger.warning("status watcher version mismatch, our %s, their %s",
|
||||
version.__version__, status.version)
|
||||
__version__, status.version)
|
||||
|
||||
def check_user(self) -> None:
|
||||
"""
|
||||
@ -101,14 +110,27 @@ class Lock(LazyLogging):
|
||||
create lock file
|
||||
|
||||
Raises:
|
||||
DuplicateRun: if lock exists and no force flag supplied
|
||||
DuplicateRunError: if lock exists and no force flag supplied
|
||||
"""
|
||||
if self.path is None:
|
||||
return
|
||||
try:
|
||||
self.path.touch(exist_ok=self.force)
|
||||
except FileExistsError:
|
||||
raise DuplicateRunError()
|
||||
raise DuplicateRunError from None
|
||||
|
||||
def watch(self) -> None:
|
||||
"""
|
||||
watch until lock disappear
|
||||
"""
|
||||
# there are reasons why we are not using inotify here. First of all, if we would use it, it would bring to
|
||||
# race conditions because multiple processes will be notified in the same time. Secondly, it is good library,
|
||||
# but platform-specific, and we only need to check if file exists
|
||||
if self.path is None:
|
||||
return
|
||||
|
||||
waiter = Waiter(self.wait_timeout)
|
||||
waiter.wait(self.path.is_file)
|
||||
|
||||
def __enter__(self) -> Self:
|
||||
"""
|
||||
@ -117,16 +139,18 @@ class Lock(LazyLogging):
|
||||
1. Check user UID
|
||||
2. Check if there is lock file
|
||||
3. Check web status watcher status
|
||||
4. Create lock file and directory tree
|
||||
5. Report to status page if enabled
|
||||
4. Wait for lock file to be free
|
||||
5. Create lock file and directory tree
|
||||
6. Report to status page if enabled
|
||||
|
||||
Returns:
|
||||
Self: always instance of self
|
||||
"""
|
||||
self.check_user()
|
||||
self.check_version()
|
||||
self.watch()
|
||||
self.create()
|
||||
self.reporter.update_self(BuildStatusEnum.Building)
|
||||
self.reporter.status_update(BuildStatusEnum.Building)
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type: type[Exception] | None, exc_val: Exception | None,
|
||||
@ -144,5 +168,5 @@ class Lock(LazyLogging):
|
||||
"""
|
||||
self.clear()
|
||||
status = BuildStatusEnum.Success if exc_val is None else BuildStatusEnum.Failed
|
||||
self.reporter.update_self(status)
|
||||
self.reporter.status_update(status)
|
||||
return False
|
||||
|
@ -23,11 +23,13 @@ from collections.abc import Callable, Generator
|
||||
from functools import cached_property
|
||||
from pathlib import Path
|
||||
from pyalpm import DB, Handle, Package, SIG_PACKAGE, error as PyalpmError # type: ignore[import]
|
||||
from string import Template
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.log import LazyLogging
|
||||
from ahriman.core.util import trim_package
|
||||
from ahriman.models.pacman_synchronization import PacmanSynchronization
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
from ahriman.models.repository_paths import RepositoryPaths
|
||||
|
||||
|
||||
@ -36,26 +38,36 @@ class Pacman(LazyLogging):
|
||||
alpm wrapper
|
||||
"""
|
||||
|
||||
def __init__(self, architecture: str, configuration: Configuration, *,
|
||||
def __init__(self, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
refresh_database: PacmanSynchronization) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
refresh_database(PacmanSynchronization): synchronize local cache to remote
|
||||
"""
|
||||
self.__create_handle_fn: Callable[[], Handle] = lambda: self.__create_handle(
|
||||
architecture, configuration, refresh_database=refresh_database)
|
||||
repository_id, configuration, refresh_database=refresh_database)
|
||||
|
||||
def __create_handle(self, architecture: str, configuration: Configuration, *,
|
||||
@cached_property
|
||||
def handle(self) -> Handle:
|
||||
"""
|
||||
pyalpm handle
|
||||
|
||||
Returns:
|
||||
Handle: generated pyalpm handle instance
|
||||
"""
|
||||
return self.__create_handle_fn()
|
||||
|
||||
def __create_handle(self, repository_id: RepositoryId, configuration: Configuration, *,
|
||||
refresh_database: PacmanSynchronization) -> Handle:
|
||||
"""
|
||||
create lazy handle function
|
||||
|
||||
Args:
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
configuration(Configuration): configuration instance
|
||||
refresh_database(PacmanSynchronization): synchronize local cache to remote
|
||||
|
||||
@ -71,7 +83,7 @@ class Pacman(LazyLogging):
|
||||
|
||||
handle = Handle(str(root), str(database_path))
|
||||
for repository in configuration.getlist("alpm", "repositories"):
|
||||
database = self.database_init(handle, repository, mirror, architecture)
|
||||
database = self.database_init(handle, repository, mirror, repository_id.architecture)
|
||||
self.database_copy(handle, database, pacman_root, paths, use_ahriman_cache=use_ahriman_cache)
|
||||
|
||||
if use_ahriman_cache and refresh_database:
|
||||
@ -79,16 +91,6 @@ class Pacman(LazyLogging):
|
||||
|
||||
return handle
|
||||
|
||||
@cached_property
|
||||
def handle(self) -> Handle:
|
||||
"""
|
||||
pyalpm handle
|
||||
|
||||
Returns:
|
||||
Handle: generated pyalpm handle instance
|
||||
"""
|
||||
return self.__create_handle_fn()
|
||||
|
||||
def database_copy(self, handle: Handle, database: DB, pacman_root: Path, paths: RepositoryPaths, *,
|
||||
use_ahriman_cache: bool) -> None:
|
||||
"""
|
||||
@ -116,7 +118,7 @@ class Pacman(LazyLogging):
|
||||
src = repository_database(pacman_root)
|
||||
if not src.is_file():
|
||||
self.logger.warning("repository %s is set to be used, however, no working copy was found", database.name)
|
||||
return # database for some reasons deos not exist
|
||||
return # database for some reason deos not exist
|
||||
self.logger.info("copy pacman database from operating system root to ahriman's home")
|
||||
shutil.copy(src, dst)
|
||||
paths.chown(dst)
|
||||
@ -136,8 +138,14 @@ class Pacman(LazyLogging):
|
||||
"""
|
||||
self.logger.info("loading pacman database %s", repository)
|
||||
database: DB = handle.register_syncdb(repository, SIG_PACKAGE)
|
||||
|
||||
# replace variables in mirror address
|
||||
database.servers = [mirror.replace("$repo", repository).replace("$arch", architecture)]
|
||||
variables = {
|
||||
"arch": architecture,
|
||||
"repo": repository,
|
||||
}
|
||||
database.servers = [Template(mirror).safe_substitute(variables)]
|
||||
|
||||
return database
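Mirror rewriting now goes through `string.Template`, which expands `$repo` and `$arch` and, thanks to `safe_substitute`, leaves any other `$`-token alone instead of raising. A quick sketch with an illustrative mirror URL:

```python
from string import Template

mirror = "https://geo.mirror.pkgbuild.com/$repo/os/$arch"
variables = {"arch": "x86_64", "repo": "core"}

print(Template(mirror).safe_substitute(variables))
# https://geo.mirror.pkgbuild.com/core/os/x86_64

# unknown placeholders survive instead of raising a KeyError
print(Template("$repo/os/$arch/$unknown").safe_substitute(variables))
# core/os/x86_64/$unknown
```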
|
||||
def database_sync(self, handle: Handle, *, force: bool) -> None:
|
||||
|
@@ -17,14 +17,11 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import requests

from typing import Any

from ahriman.core.alpm.pacman import Pacman
from ahriman.core.alpm.remote import Remote
from ahriman.core.exceptions import PackageInfoError, UnknownPackageError
from ahriman.core.util import exception_response_text
from ahriman.models.aur_package import AURPackage


@@ -36,13 +33,11 @@ class AUR(Remote):
DEFAULT_AUR_URL(str): (class attribute) default AUR url
DEFAULT_RPC_URL(str): (class attribute) default AUR RPC url
DEFAULT_RPC_VERSION(str): (class attribute) default AUR RPC version
DEFAULT_TIMEOUT(int): (class attribute) HTTP request timeout in seconds
"""

DEFAULT_AUR_URL = "https://aur.archlinux.org"
DEFAULT_RPC_URL = f"{DEFAULT_AUR_URL}/rpc"
DEFAULT_RPC_VERSION = "5"
DEFAULT_TIMEOUT = 30

@classmethod
def remote_git_url(cls, package_base: str, repository: str) -> str:
@@ -83,7 +78,7 @@ class AUR(Remote):
list[AURPackage]: list of parsed packages

Raises:
InvalidPackageInfo: for error API response
PackageInfoError: for error API response
"""
response_type = response["type"]
if response_type == "error":
@@ -91,7 +86,7 @@ class AUR(Remote):
raise PackageInfoError(error_details)
return [AURPackage.from_json(package) for package in response["results"]]

def make_request(self, request_type: str, *args: str, **kwargs: str) -> list[AURPackage]:
def aur_request(self, request_type: str, *args: str, **kwargs: str) -> list[AURPackage]:
"""
perform request to AUR RPC

@@ -103,34 +98,20 @@ class AUR(Remote):
Returns:
list[AURPackage]: response parsed to package list
"""
query: dict[str, Any] = {
"type": request_type,
"v": self.DEFAULT_RPC_VERSION
}
query: list[tuple[str, str]] = [
("type", request_type),
("v", self.DEFAULT_RPC_VERSION),
]

arg_query = "arg[]" if len(args) > 1 else "arg"
query[arg_query] = list(args)
for arg in args:
query.append((arg_query, arg))

for key, value in kwargs.items():
query[key] = value
query.append((key, value))

try:
response = requests.get(
self.DEFAULT_RPC_URL,
params=query,
headers={"User-Agent": self.DEFAULT_USER_AGENT},
timeout=self.DEFAULT_TIMEOUT)
response.raise_for_status()
return self.parse_response(response.json())
except requests.HTTPError as e:
self.logger.exception(
"could not perform request by using type %s: %s",
request_type,
exception_response_text(e))
raise
except Exception:
self.logger.exception("could not perform request by using type %s", request_type)
raise
response = self.make_request("GET", self.DEFAULT_RPC_URL, params=query)
return self.parse_response(response.json())

def package_info(self, package_name: str, *, pacman: Pacman) -> AURPackage:
"""
@@ -142,12 +123,15 @@ class AUR(Remote):

Returns:
AURPackage: package which match the package name

Raises:
UnknownPackageError: package doesn't exist
"""
packages = self.make_request("info", package_name)
packages = self.aur_request("info", package_name)
try:
return next(package for package in packages if package.name == package_name)
except StopIteration:
raise UnknownPackageError(package_name)
raise UnknownPackageError(package_name) from None

def package_search(self, *keywords: str, pacman: Pacman) -> list[AURPackage]:
"""
@@ -160,4 +144,4 @@ class AUR(Remote):
Returns:
list[AURPackage]: list of packages which match the criteria
"""
return self.make_request("search", *keywords, by="name-desc")
return self.aur_request("search", *keywords, by="name-desc")
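The switch from a dict to a list of tuples in aur_request exists because the AUR RPC expects the arg[] key to repeat, which a dict cannot express. A minimal standalone sketch of how such a query serializes (requests accepts the same list-of-tuples form for params); the package names are hypothetical:

    from urllib.parse import urlencode

    # rebuild of the query from aur_request() above, with hypothetical arguments;
    # a dict cannot hold the repeated "arg[]" key, hence the list of tuples
    query = [("type", "info"), ("v", "5"), ("arg[]", "ahriman"), ("arg[]", "python-aur")]

    print(urlencode(query))
    # type=info&v=5&arg%5B%5D=ahriman&arg%5B%5D=python-aur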
@@ -17,14 +17,11 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import requests

from typing import Any

from ahriman.core.alpm.pacman import Pacman
from ahriman.core.alpm.remote import Remote
from ahriman.core.exceptions import PackageInfoError, UnknownPackageError
from ahriman.core.util import exception_response_text
from ahriman.models.aur_package import AURPackage


@@ -37,14 +34,12 @@ class Official(Remote):
DEFAULT_ARCHLINUX_GIT_URL(str): (class attribute) default url for git packages
DEFAULT_SEARCH_REPOSITORIES(list[str]): (class attribute) default list of repositories to search
DEFAULT_RPC_URL(str): (class attribute) default archlinux repositories RPC url
DEFAULT_TIMEOUT(int): (class attribute) HTTP request timeout in seconds
"""

DEFAULT_ARCHLINUX_GIT_URL = "https://gitlab.archlinux.org"
DEFAULT_ARCHLINUX_URL = "https://archlinux.org"
DEFAULT_SEARCH_REPOSITORIES = ["Core", "Extra", "Multilib"]
DEFAULT_RPC_URL = "https://archlinux.org/packages/search/json"
DEFAULT_TIMEOUT = 30

@classmethod
def remote_git_url(cls, package_base: str, repository: str) -> str:
@@ -85,13 +80,13 @@ class Official(Remote):
list[AURPackage]: list of parsed packages

Raises:
InvalidPackageInfo: for error API response
PackageInfoError: for error API response
"""
if not response["valid"]:
raise PackageInfoError("API validation error")
return [AURPackage.from_repo(package) for package in response["results"]]

def make_request(self, *args: str, by: str) -> list[AURPackage]:
def arch_request(self, *args: str, by: str) -> list[AURPackage]:
"""
perform request to official repositories RPC

@@ -102,20 +97,15 @@ class Official(Remote):
Returns:
list[AURPackage]: response parsed to package list
"""
try:
response = requests.get(
self.DEFAULT_RPC_URL,
params={by: args, "repo": self.DEFAULT_SEARCH_REPOSITORIES},
headers={"User-Agent": self.DEFAULT_USER_AGENT},
timeout=self.DEFAULT_TIMEOUT)
response.raise_for_status()
return self.parse_response(response.json())
except requests.HTTPError as e:
self.logger.exception("could not perform request: %s", exception_response_text(e))
raise
except Exception:
self.logger.exception("could not perform request")
raise
query: list[tuple[str, str]] = [
("repo", repository)
for repository in self.DEFAULT_SEARCH_REPOSITORIES
]
for arg in args:
query.append((by, arg))

response = self.make_request("GET", self.DEFAULT_RPC_URL, params=query)
return self.parse_response(response.json())

def package_info(self, package_name: str, *, pacman: Pacman) -> AURPackage:
"""
@@ -127,12 +117,15 @@ class Official(Remote):

Returns:
AURPackage: package which match the package name

Raises:
UnknownPackageError: package doesn't exist
"""
packages = self.make_request(package_name, by="name")
packages = self.arch_request(package_name, by="name")
try:
return next(package for package in packages if package.name == package_name)
except StopIteration:
raise UnknownPackageError(package_name)
raise UnknownPackageError(package_name) from None

def package_search(self, *keywords: str, pacman: Pacman) -> list[AURPackage]:
"""
@@ -145,4 +138,4 @@ class Official(Remote):
Returns:
list[AURPackage]: list of packages which match the criteria
"""
return self.make_request(*keywords, by="q")
return self.arch_request(*keywords, by="q")
@@ -48,8 +48,11 @@ class OfficialSyncdb(Official):

Returns:
AURPackage: package which match the package name

Raises:
UnknownPackageError: package doesn't exist
"""
try:
return next(AURPackage.from_pacman(package) for package in pacman.package_get(package_name))
except StopIteration:
raise UnknownPackageError(package_name)
raise UnknownPackageError(package_name) from None
@@ -17,19 +17,15 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ahriman import version
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.log import LazyLogging
from ahriman.core.http import SyncHttpClient
from ahriman.models.aur_package import AURPackage


class Remote(LazyLogging):
class Remote(SyncHttpClient):
"""
base class for remote package search

Attributes:
DEFAULT_USER_AGENT(str): (class attribute) default user agent

Examples:
These classes are designed to be used without instancing. In order to achieve it several class methods are
provided: ``info``, ``multisearch`` and ``search``. Thus, the basic flow is the following::
@@ -43,8 +39,6 @@ class Remote(LazyLogging):
directly, whereas ``multisearch`` splits search one by one and finds intersection between search results.
"""

DEFAULT_USER_AGENT = f"ahriman/{version.__version__}"

@classmethod
def info(cls, package_name: str, *, pacman: Pacman) -> AURPackage:
"""
@@ -71,7 +71,7 @@ class Repo(LazyLogging):
"""
Repo._check_output(
"repo-add", *self.sign_args, "-R", str(self.repo_path), str(path),
exception=BuildError(path.name),
exception=BuildError.from_process(path.name),
cwd=self.paths.repository,
logger=self.logger,
user=self.uid)
@@ -98,7 +98,7 @@ class Repo(LazyLogging):
# remove package from registry
Repo._check_output(
"repo-remove", *self.sign_args, str(self.repo_path), package,
exception=BuildError(package),
exception=BuildError.from_process(package),
cwd=self.paths.repository,
logger=self.logger,
user=self.uid)
@@ -74,14 +74,15 @@ class Auth(LazyLogging):
Returns:
Auth: authorization module according to current settings
"""
provider = AuthSettings.from_option(configuration.get("auth", "target", fallback="disabled"))
if provider == AuthSettings.Configuration:
from ahriman.core.auth.mapping import Mapping
return Mapping(configuration, database)
if provider == AuthSettings.OAuth:
from ahriman.core.auth.oauth import OAuth
return OAuth(configuration, database)
return Auth(configuration)
match AuthSettings.from_option(configuration.get("auth", "target", fallback="disabled")):
case AuthSettings.Configuration:
from ahriman.core.auth.mapping import Mapping
return Mapping(configuration, database)
case AuthSettings.OAuth:
from ahriman.core.auth.oauth import OAuth
return OAuth(configuration, database)
case _:
return Auth(configuration)

async def check_credentials(self, username: str | None, password: str | None) -> bool:
"""
@@ -46,7 +46,7 @@ class Mapping(Auth):
"""
Auth.__init__(self, configuration, provider)
self.database = database
self.salt = configuration.get("auth", "salt")
self.salt = configuration.get("auth", "salt", fallback="")

async def check_credentials(self, username: str | None, password: str | None) -> bool:
"""
@@ -28,8 +28,8 @@ from ahriman.models.auth_settings import AuthSettings

class OAuth(Mapping):
"""
OAuth's user authorization.
It is required to create application first and put application credentials.
User authorization implementation via OAuth. It is required to create application first and put application
credentials.

Attributes:
client_id(str): application client id
@@ -81,7 +81,7 @@ class OAuth(Mapping):
type[aioauth_client.OAuth2Client]: loaded provider type

Raises:
InvalidOption: in case if invalid OAuth provider name supplied
OptionError: in case if invalid OAuth provider name supplied
"""
provider: type[aioauth_client.OAuth2Client] = getattr(aioauth_client, name)
try:
@ -36,9 +36,11 @@ class Sources(LazyLogging):
|
||||
Attributes:
|
||||
DEFAULT_BRANCH(str): (class attribute) default branch to process git repositories.
|
||||
Must be used only for local stored repositories, use RemoteSource descriptor instead for real packages
|
||||
DEFAULT_COMMIT_AUTHOR(tuple[str, str]): (class attribute) default commit author to be used if none set
|
||||
"""
|
||||
|
||||
DEFAULT_BRANCH = "master" # default fallback branch
|
||||
DEFAULT_COMMIT_AUTHOR = ("ahriman", "ahriman@localhost")
|
||||
|
||||
_check_output = check_output
|
||||
|
||||
@ -61,13 +63,13 @@ class Sources(LazyLogging):
|
||||
return [PkgbuildPatch("arch", list(architectures))]
|
||||
|
||||
@staticmethod
|
||||
def fetch(sources_dir: Path, remote: RemoteSource | None) -> None:
|
||||
def fetch(sources_dir: Path, remote: RemoteSource) -> None:
|
||||
"""
|
||||
either clone repository or update it to origin/``remote.branch``
|
||||
|
||||
Args:
|
||||
sources_dir(Path): local path to fetch
|
||||
remote(RemoteSource | None): remote target (from where to fetch)
|
||||
remote(RemoteSource): remote target (from where to fetch)
|
||||
"""
|
||||
instance = Sources()
|
||||
# local directory exists and there is .git directory
|
||||
@ -77,13 +79,14 @@ class Sources(LazyLogging):
|
||||
instance.logger.info("skip update at %s because there are no branches configured", sources_dir)
|
||||
return
|
||||
|
||||
branch = remote.branch if remote is not None else instance.DEFAULT_BRANCH
|
||||
branch = remote.branch or instance.DEFAULT_BRANCH
|
||||
if is_initialized_git:
|
||||
instance.logger.info("update HEAD to remote at %s using branch %s", sources_dir, branch)
|
||||
Sources._check_output("git", "fetch", "origin", branch, cwd=sources_dir, logger=instance.logger)
|
||||
elif remote is not None:
|
||||
Sources._check_output("git", "fetch", "--quiet", "origin", branch,
|
||||
cwd=sources_dir, logger=instance.logger)
|
||||
elif remote.git_url is not None:
|
||||
instance.logger.info("clone remote %s to %s using branch %s", remote.git_url, sources_dir, branch)
|
||||
Sources._check_output("git", "clone", "--branch", branch, "--single-branch",
|
||||
Sources._check_output("git", "clone", "--quiet", "--branch", branch, "--single-branch",
|
||||
remote.git_url, str(sources_dir), cwd=sources_dir.parent, logger=instance.logger)
|
||||
else:
|
||||
# it will cause an exception later
|
||||
@ -91,11 +94,12 @@ class Sources(LazyLogging):
|
||||
|
||||
# and now force reset to our branch
|
||||
Sources._check_output("git", "checkout", "--force", branch, cwd=sources_dir, logger=instance.logger)
|
||||
Sources._check_output("git", "reset", "--hard", f"origin/{branch}", cwd=sources_dir, logger=instance.logger)
|
||||
Sources._check_output("git", "reset", "--quiet", "--hard", f"origin/{branch}",
|
||||
cwd=sources_dir, logger=instance.logger)
|
||||
|
||||
# move content if required
|
||||
# we are using full path to source directory in order to make append possible
|
||||
pkgbuild_dir = remote.pkgbuild_dir if remote is not None else sources_dir.resolve()
|
||||
pkgbuild_dir = remote.pkgbuild_dir or sources_dir.resolve()
|
||||
instance.move((sources_dir / pkgbuild_dir).resolve(), sources_dir)
|
||||
|
||||
@staticmethod
|
||||
@ -122,14 +126,16 @@ class Sources(LazyLogging):
|
||||
sources_dir(Path): local path to sources
|
||||
"""
|
||||
instance = Sources()
|
||||
Sources._check_output("git", "init", "--initial-branch", instance.DEFAULT_BRANCH,
|
||||
cwd=sources_dir, logger=instance.logger)
|
||||
if not (sources_dir / ".git").is_dir():
|
||||
# skip initializing in case if it was already
|
||||
Sources._check_output("git", "init", "--quiet", "--initial-branch", instance.DEFAULT_BRANCH,
|
||||
cwd=sources_dir, logger=instance.logger)
|
||||
|
||||
# extract local files...
|
||||
files = ["PKGBUILD", ".SRCINFO"] + [str(path) for path in Package.local_files(sources_dir)]
|
||||
instance.add(sources_dir, *files)
|
||||
# ...and commit them
|
||||
instance.commit(sources_dir, author="ahriman <ahriman@localhost>")
|
||||
instance.commit(sources_dir)
|
||||
|
||||
@staticmethod
|
||||
def load(sources_dir: Path, package: Package, patches: list[PkgbuildPatch], paths: RepositoryPaths) -> None:
|
||||
@ -148,7 +154,7 @@ class Sources(LazyLogging):
|
||||
shutil.copytree(cache_dir, sources_dir, dirs_exist_ok=True)
|
||||
instance.fetch(sources_dir, package.remote)
|
||||
|
||||
patches.extend(instance.extend_architectures(sources_dir, paths.architecture))
|
||||
patches.extend(instance.extend_architectures(sources_dir, paths.repository_id.architecture))
|
||||
for patch in patches:
|
||||
instance.patch_apply(sources_dir, patch)
|
||||
|
||||
@ -170,7 +176,8 @@ class Sources(LazyLogging):
|
||||
return f"{diff}\n" # otherwise, patch will be broken
|
||||
|
||||
@staticmethod
|
||||
def push(sources_dir: Path, remote: RemoteSource, *pattern: str, commit_author: str | None = None) -> None:
|
||||
def push(sources_dir: Path, remote: RemoteSource, *pattern: str,
|
||||
commit_author: tuple[str, str] | None = None) -> None:
|
||||
"""
|
||||
commit selected changes and push files to the remote repository
|
||||
|
||||
@ -178,13 +185,15 @@ class Sources(LazyLogging):
|
||||
sources_dir(Path): local path to git repository
|
||||
remote(RemoteSource): remote target, branch and url
|
||||
*pattern(str): glob patterns
|
||||
commit_author(str | None, optional): commit author in form of git config (i.e. ``user <user@host>``)
|
||||
(Default value = None)
|
||||
commit_author(tuple[str, str] | None, optional): commit author if any (Default value = None)
|
||||
"""
|
||||
instance = Sources()
|
||||
instance.add(sources_dir, *pattern)
|
||||
instance.commit(sources_dir, author=commit_author)
|
||||
Sources._check_output("git", "push", remote.git_url, remote.branch, cwd=sources_dir, logger=instance.logger)
|
||||
if not instance.commit(sources_dir, commit_author=commit_author):
|
||||
return # no changes to push, just skip action
|
||||
|
||||
git_url, branch = remote.git_source()
|
||||
Sources._check_output("git", "push", "--quiet", git_url, branch, cwd=sources_dir, logger=instance.logger)
|
||||
|
||||
def add(self, sources_dir: Path, *pattern: str, intent_to_add: bool = False) -> None:
|
||||
"""
|
||||
@ -208,7 +217,8 @@ class Sources(LazyLogging):
|
||||
Sources._check_output("git", "add", *args, *[str(fn.relative_to(sources_dir)) for fn in found_files],
|
||||
cwd=sources_dir, logger=self.logger)
|
||||
|
||||
def commit(self, sources_dir: Path, message: str | None = None, author: str | None = None) -> None:
|
||||
def commit(self, sources_dir: Path, message: str | None = None,
|
||||
commit_author: tuple[str, str] | None = None) -> bool:
|
||||
"""
|
||||
commit changes
|
||||
|
||||
@ -216,14 +226,29 @@ class Sources(LazyLogging):
|
||||
sources_dir(Path): local path to git repository
|
||||
message(str | None, optional): optional commit message if any. If none set, message will be generated
|
||||
according to the current timestamp (Default value = None)
|
||||
author(str | None, optional): optional commit author if any (Default value = None)
|
||||
commit_author(tuple[str, str] | None, optional): optional commit author if any (Default value = None)
|
||||
|
||||
Returns:
|
||||
bool: True in case if changes have been committed and False otherwise
|
||||
"""
|
||||
if not self.has_changes(sources_dir):
|
||||
return False # nothing to commit
|
||||
|
||||
if message is None:
|
||||
message = f"Autogenerated commit at {utcnow()}"
|
||||
args = ["--allow-empty", "--message", message]
|
||||
if author is not None:
|
||||
args.extend(["--author", author])
|
||||
Sources._check_output("git", "commit", *args, cwd=sources_dir, logger=self.logger)
|
||||
args = ["--message", message]
|
||||
environment: dict[str, str] = {}
|
||||
|
||||
if commit_author is None:
|
||||
commit_author = self.DEFAULT_COMMIT_AUTHOR
|
||||
user, email = commit_author
|
||||
environment["GIT_AUTHOR_NAME"] = environment["GIT_COMMITTER_NAME"] = user
|
||||
environment["GIT_AUTHOR_EMAIL"] = environment["GIT_COMMITTER_EMAIL"] = email
|
||||
|
||||
Sources._check_output("git", "commit", "--quiet", *args,
|
||||
cwd=sources_dir, logger=self.logger, environment=environment)
|
||||
|
||||
return True
|
||||
|
||||
def diff(self, sources_dir: Path) -> str:
|
||||
"""
|
||||
@ -237,6 +262,20 @@ class Sources(LazyLogging):
|
||||
"""
|
||||
return Sources._check_output("git", "diff", cwd=sources_dir, logger=self.logger)
|
||||
|
||||
def has_changes(self, sources_dir: Path) -> bool:
|
||||
"""
|
||||
check if there are changes in current git tree
|
||||
|
||||
Args:
|
||||
sources_dir(Path): local path to git repository
|
||||
|
||||
Returns:
|
||||
bool: True if there are uncommitted changes and False otherwise
|
||||
"""
|
||||
# there is --exit-code argument to diff, however, there might be other process errors
|
||||
changes = Sources._check_output("git", "diff", "--cached", "--name-only", cwd=sources_dir, logger=self.logger)
|
||||
return bool(changes)
|
||||
|
||||
def move(self, pkgbuild_dir: Path, sources_dir: Path) -> None:
|
||||
"""
|
||||
move content from pkgbuild_dir to sources_dir
|
||||
|
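The reworked commit() above passes the author through git's standard environment variables instead of the --author flag. A rough standalone sketch of the same idea, assuming git is available on PATH; the path and message are placeholders:

    import os
    import subprocess

    # the environment variable names are the ones shown in the diff;
    # the values here are the DEFAULT_COMMIT_AUTHOR fallback
    user, email = ("ahriman", "ahriman@localhost")
    environment = {
        "GIT_AUTHOR_NAME": user, "GIT_AUTHOR_EMAIL": email,
        "GIT_COMMITTER_NAME": user, "GIT_COMMITTER_EMAIL": email,
    }

    # merge with the current environment so PATH and friends survive
    subprocess.run(["git", "commit", "--quiet", "--message", "Autogenerated commit"],
                   cwd="/path/to/sources", env={**os.environ, **environment}, check=True)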
@ -26,6 +26,7 @@ from ahriman.core.exceptions import BuildError
|
||||
from ahriman.core.log import LazyLogging
|
||||
from ahriman.core.util import check_output
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.pkgbuild_patch import PkgbuildPatch
|
||||
from ahriman.models.repository_paths import RepositoryPaths
|
||||
|
||||
|
||||
@ -34,6 +35,11 @@ class Task(LazyLogging):
|
||||
base package build task
|
||||
|
||||
Attributes:
|
||||
archbuild_flags(list[str]): command flags for archbuild command
|
||||
architecture(str): repository architecture
|
||||
build_command(str): build command
|
||||
makechroootpkg_flags(list[str]): command flags for makechrootpkg command
|
||||
makepkg_flags(list[str]): command flags for makepkg command
|
||||
package(Package): package definitions
|
||||
paths(RepositoryPaths): repository paths instance
|
||||
uid(int): uid of the repository owner user
|
||||
@ -41,18 +47,21 @@ class Task(LazyLogging):
|
||||
|
||||
_check_output = check_output
|
||||
|
||||
def __init__(self, package: Package, configuration: Configuration, paths: RepositoryPaths) -> None:
|
||||
def __init__(self, package: Package, configuration: Configuration, architecture: str,
|
||||
paths: RepositoryPaths) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
package(Package): package definitions
|
||||
configuration(Configuration): configuration instance
|
||||
architecture(str): repository architecture
|
||||
paths(RepositoryPaths): repository paths instance
|
||||
"""
|
||||
self.package = package
|
||||
self.paths = paths
|
||||
self.uid, _ = paths.root_owner
|
||||
self.architecture = architecture
|
||||
|
||||
self.archbuild_flags = configuration.getlist("build", "archbuild_flags", fallback=[])
|
||||
self.build_command = configuration.get("build", "build_command")
|
||||
@ -83,7 +92,7 @@ class Task(LazyLogging):
|
||||
|
||||
Task._check_output(
|
||||
*command,
|
||||
exception=BuildError(self.package.base),
|
||||
exception=BuildError.from_process(self.package.base),
|
||||
cwd=sources_dir,
|
||||
logger=self.logger,
|
||||
user=self.uid,
|
||||
@ -92,18 +101,29 @@ class Task(LazyLogging):
|
||||
# well it is not actually correct, but we can deal with it
|
||||
packages = Task._check_output(
|
||||
"makepkg", "--packagelist",
|
||||
exception=BuildError(self.package.base),
|
||||
exception=BuildError.from_process(self.package.base),
|
||||
cwd=sources_dir,
|
||||
logger=self.logger
|
||||
).splitlines()
|
||||
return [Path(package) for package in packages]
|
||||
|
||||
def init(self, sources_dir: Path, database: SQLite) -> None:
|
||||
def init(self, sources_dir: Path, database: SQLite, local_version: str | None) -> None:
|
||||
"""
|
||||
fetch package from git
|
||||
|
||||
Args:
|
||||
sources_dir(Path): local path to fetch
|
||||
database(SQLite): database instance
|
||||
local_version(str | None): local version of the package. If set and equal to current version, it will
|
||||
automatically bump pkgrel
|
||||
"""
|
||||
Sources.load(sources_dir, self.package, database.patches_get(self.package.base), self.paths)
|
||||
if local_version is None:
|
||||
return # there is no local package or pkgrel increment is disabled
|
||||
|
||||
# load fresh package
|
||||
loaded_package = Package.from_build(sources_dir, self.architecture, None)
|
||||
if (pkgrel := loaded_package.next_pkgrel(local_version)) is not None:
|
||||
self.logger.info("package %s is the same as in repo, bumping pkgrel to %s", self.package.base, pkgrel)
|
||||
patch = PkgbuildPatch("pkgrel", pkgrel)
|
||||
patch.write(sources_dir / "PKGBUILD")
|
||||
|
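The local_version handling above relies on Package.next_pkgrel to decide whether the rebuild needs a pkgrel bump. The helper below is a deliberately simplified stand-in for that call, only to illustrate the intent; ahriman's real increment rules may differ:

    # when the freshly built version matches what is already in the repository,
    # bump the pkgrel so pacman sees the rebuild as an update
    def bump_pkgrel(local_version: str, built_version: str) -> str | None:
        if local_version != built_version:
            return None  # versions differ, no bump required
        *_, pkgrel = built_version.rsplit("-", 1)  # full version is pkgver-pkgrel
        return f"{pkgrel}.1"  # e.g. 1 -> 1.1; the real rules may differ

    print(bump_pkgrel("1.0.0-1", "1.0.0-1"))  # 1.1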
@ -25,7 +25,9 @@ from collections.abc import Callable
|
||||
from pathlib import Path
|
||||
from typing import Any, Self
|
||||
|
||||
from ahriman.core.configuration.shell_interpolator import ShellInterpolator
|
||||
from ahriman.core.exceptions import InitializeError
|
||||
from ahriman.models.repository_id import RepositoryId
|
||||
from ahriman.models.repository_paths import RepositoryPaths
|
||||
|
||||
|
||||
@ -37,9 +39,9 @@ class Configuration(configparser.RawConfigParser):
|
||||
ARCHITECTURE_SPECIFIC_SECTIONS(list[str]): (class attribute) known sections which can be architecture specific.
|
||||
Required by dump and merging functions
|
||||
SYSTEM_CONFIGURATION_PATH(Path): (class attribute) default system configuration path distributed by package
|
||||
architecture(str | None): repository architecture
|
||||
includes(list[Path]): list of includes which were read
|
||||
path(Path | None): path to root configuration file
|
||||
repository_id(RepositoryId | None): repository unique identifier
|
||||
|
||||
Examples:
|
||||
Configuration class provides additional method in order to handle application configuration. Since this class is
|
||||
@ -48,7 +50,7 @@ class Configuration(configparser.RawConfigParser):
|
||||
|
||||
>>> from pathlib import Path
|
||||
>>>
|
||||
>>> configuration = Configuration.from_path(Path("/etc/ahriman.ini"), "x86_64", quiet=False)
|
||||
>>> configuration = Configuration.from_path(Path("/etc/ahriman.ini"), RepositoryId("x86_64", "aur-clone"))
|
||||
>>> repository_name = configuration.get("repository", "name")
|
||||
>>> makepkg_flags = configuration.getlist("build", "makepkg_flags")
|
||||
|
||||
@ -58,7 +60,7 @@ class Configuration(configparser.RawConfigParser):
|
||||
In order to get current settings, the ``check_loaded`` method can be used. This method will raise an
|
||||
``InitializeError`` in case if configuration was not yet loaded::
|
||||
|
||||
>>> path, architecture = configuration.check_loaded()
|
||||
>>> path, repository_id = configuration.check_loaded()
|
||||
"""
|
||||
|
||||
ARCHITECTURE_SPECIFIC_SECTIONS = ["alpm", "build", "sign", "web"]
|
||||
@ -73,11 +75,17 @@ class Configuration(configparser.RawConfigParser):
|
||||
allow_no_value(bool, optional): copies ``configparser.RawConfigParser`` behaviour. In case if it is set
|
||||
to ``True``, the keys without values will be allowed (Default value = False)
|
||||
"""
|
||||
configparser.RawConfigParser.__init__(self, allow_no_value=allow_no_value, converters={
|
||||
"list": shlex.split,
|
||||
"path": self._convert_path,
|
||||
})
|
||||
self.architecture: str | None = None
|
||||
configparser.RawConfigParser.__init__(
|
||||
self,
|
||||
allow_no_value=allow_no_value,
|
||||
interpolation=ShellInterpolator(),
|
||||
converters={
|
||||
"list": shlex.split,
|
||||
"path": self._convert_path,
|
||||
}
|
||||
)
|
||||
|
||||
self.repository_id: RepositoryId | None = None
|
||||
self.path: Path | None = None
|
||||
self.includes: list[Path] = []
|
||||
|
||||
@ -104,12 +112,13 @@ class Configuration(configparser.RawConfigParser):
|
||||
@property
|
||||
def repository_name(self) -> str:
|
||||
"""
|
||||
repository name as defined by configuration
|
||||
repository name for backward compatibility
|
||||
|
||||
Returns:
|
||||
str: repository name from configuration
|
||||
str: repository name
|
||||
"""
|
||||
return self.get("repository", "name")
|
||||
_, repository_id = self.check_loaded()
|
||||
return repository_id.name
|
||||
|
||||
@property
|
||||
def repository_paths(self) -> RepositoryPaths:
|
||||
@ -119,39 +128,60 @@ class Configuration(configparser.RawConfigParser):
|
||||
Returns:
|
||||
RepositoryPaths: repository paths instance
|
||||
"""
|
||||
_, architecture = self.check_loaded()
|
||||
return RepositoryPaths(self.getpath("repository", "root"), architecture)
|
||||
_, repository_id = self.check_loaded()
|
||||
return RepositoryPaths(self.getpath("repository", "root"), repository_id)
|
||||
|
||||
@classmethod
|
||||
def from_path(cls, path: Path, architecture: str) -> Self:
|
||||
def from_path(cls, path: Path, repository_id: RepositoryId) -> Self:
|
||||
"""
|
||||
constructor with full object initialization
|
||||
|
||||
Args:
|
||||
path(Path): path to root configuration file
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
|
||||
Returns:
|
||||
Self: configuration instance
|
||||
"""
|
||||
configuration = cls()
|
||||
configuration.load(path)
|
||||
configuration.merge_sections(architecture)
|
||||
configuration.merge_sections(repository_id)
|
||||
return configuration
|
||||
|
||||
@staticmethod
|
||||
def section_name(section: str, suffix: str) -> str:
|
||||
def override_sections(section: str, repository_id: RepositoryId) -> list[str]:
|
||||
"""
|
||||
extract override sections
|
||||
|
||||
Args:
|
||||
section(str): section name
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
|
||||
Returns:
|
||||
list[str]: architecture and repository specific sections in correct order
|
||||
"""
|
||||
# the valid order is global < per architecture < per repository < per repository and architecture
|
||||
return [
|
||||
Configuration.section_name(section, repository_id.architecture), # architecture specific override
|
||||
Configuration.section_name(section, repository_id.name),
|
||||
Configuration.section_name(section, repository_id.name, repository_id.architecture),
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def section_name(section: str, *suffixes: str) -> str:
|
||||
"""
|
||||
generate section name for sections which depends on context
|
||||
|
||||
Args:
|
||||
section(str): section name
|
||||
suffix(str): session suffix, e.g. repository architecture
|
||||
*suffixes(str): session suffix, e.g. repository architecture
|
||||
|
||||
Returns:
|
||||
str: correct section name for repository specific section
|
||||
"""
|
||||
return f"{section}:{suffix}"
|
||||
for suffix in suffixes:
|
||||
section = f"{section}:{suffix}"
|
||||
return section
|
||||
|
||||
def _convert_path(self, value: str) -> Path:
|
||||
"""
|
||||
@ -168,19 +198,19 @@ class Configuration(configparser.RawConfigParser):
|
||||
return path
|
||||
return self.path.parent / path
|
||||
|
||||
def check_loaded(self) -> tuple[Path, str]:
|
||||
def check_loaded(self) -> tuple[Path, RepositoryId]:
|
||||
"""
|
||||
check if service was actually loaded
|
||||
|
||||
Returns:
|
||||
tuple[Path, str]: configuration root path and architecture if loaded
|
||||
tuple[Path, RepositoryId]: configuration root path and architecture if loaded
|
||||
|
||||
Raises:
|
||||
InitializeError: in case if architecture and/or path are not set
|
||||
"""
|
||||
if self.path is None or self.architecture is None:
|
||||
raise InitializeError("Configuration path and/or architecture are not set")
|
||||
return self.path, self.architecture
|
||||
if self.path is None or self.repository_id is None:
|
||||
raise InitializeError("Configuration path and/or repository id are not set")
|
||||
return self.path, self.repository_id
|
||||
|
||||
def dump(self) -> dict[str, dict[str, str]]:
|
||||
"""
|
||||
@ -200,14 +230,14 @@ class Configuration(configparser.RawConfigParser):
|
||||
|
||||
def getpath(self, *args: Any, **kwargs: Any) -> Path: ... # type: ignore[empty-body]
|
||||
|
||||
def gettype(self, section: str, architecture: str, *, fallback: str | None = None) -> tuple[str, str]:
|
||||
def gettype(self, section: str, repository_id: RepositoryId, *, fallback: str | None = None) -> tuple[str, str]:
|
||||
"""
|
||||
get type variable with fallback to old logic. Despite the fact that it has same semantics as other get* methods,
|
||||
but it has different argument list
|
||||
|
||||
Args:
|
||||
section(str): section name
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
fallback(str | None, optional): optional fallback type if any. If set, second element of the tuple will
|
||||
be always set to this value (Default value = None)
|
||||
|
||||
@ -220,9 +250,9 @@ class Configuration(configparser.RawConfigParser):
|
||||
if (group_type := self.get(section, "type", fallback=fallback)) is not None:
|
||||
return section, group_type # new-style logic
|
||||
# okay lets check for the section with architecture name
|
||||
full_section = self.section_name(section, architecture)
|
||||
if self.has_section(full_section):
|
||||
return full_section, section
|
||||
for specific in self.override_sections(section, repository_id):
|
||||
if self.has_section(specific):
|
||||
return specific, section
|
||||
# okay lets just use section as type
|
||||
if self.has_section(section):
|
||||
return section, section
|
||||
@ -255,23 +285,24 @@ class Configuration(configparser.RawConfigParser):
|
||||
except (FileNotFoundError, configparser.NoOptionError, configparser.NoSectionError):
|
||||
pass
|
||||
|
||||
def merge_sections(self, architecture: str) -> None:
|
||||
def merge_sections(self, repository_id: RepositoryId) -> None:
|
||||
"""
|
||||
merge architecture specific sections into main configuration
|
||||
merge architecture and repository specific sections into main configuration
|
||||
|
||||
Args:
|
||||
architecture(str): repository architecture
|
||||
repository_id(RepositoryId): repository unique identifier
|
||||
"""
|
||||
self.architecture = architecture
|
||||
self.repository_id = repository_id
|
||||
|
||||
for section in self.ARCHITECTURE_SPECIFIC_SECTIONS:
|
||||
# get overrides
|
||||
specific = self.section_name(section, architecture)
|
||||
if self.has_section(specific):
|
||||
# if there is no such section it means that there is no overrides for this arch,
|
||||
# but we anyway will have to delete sections for others architectures
|
||||
for key, value in self[specific].items():
|
||||
self.set_option(section, key, value)
|
||||
# remove any arch specific section
|
||||
for specific in self.override_sections(section, repository_id):
|
||||
if self.has_section(specific):
|
||||
# if there is no such section it means that there is no overrides for this arch,
|
||||
# but we anyway will have to delete sections for others architectures
|
||||
for key, value in self[specific].items():
|
||||
self.set_option(section, key, value)
|
||||
|
||||
# remove any arch/repo specific section
|
||||
for foreign in self.sections():
|
||||
# we would like to use lambda filter here, but pylint is too dumb
|
||||
if not foreign.startswith(f"{section}:"):
|
||||
@ -282,11 +313,11 @@ class Configuration(configparser.RawConfigParser):
|
||||
"""
|
||||
reload configuration if possible or raise exception otherwise
|
||||
"""
|
||||
path, architecture = self.check_loaded()
|
||||
path, repository_id = self.check_loaded()
|
||||
for section in self.sections(): # clear current content
|
||||
self.remove_section(section)
|
||||
self.load(path)
|
||||
self.merge_sections(architecture)
|
||||
self.merge_sections(repository_id)
|
||||
|
||||
def set_option(self, section: str, option: str, value: str) -> None:
|
||||
"""
|
||||
|
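The override order introduced here (global, then per architecture, then per repository, then per repository and architecture) can be reproduced with a few lines outside the class; the repository name and architecture below are example values:

    # standalone illustration of section_name() and override_sections() above
    def section_name(section: str, *suffixes: str) -> str:
        for suffix in suffixes:
            section = f"{section}:{suffix}"
        return section

    def override_sections(section: str, name: str, architecture: str) -> list[str]:
        return [
            section_name(section, architecture),        # architecture specific override
            section_name(section, name),                 # repository specific override
            section_name(section, name, architecture),   # both
        ]

    print(override_sections("build", "aur-clone", "x86_64"))
    # ['build:x86_64', 'build:aur-clone', 'build:aur-clone:x86_64']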
@ -93,9 +93,12 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"type": "string",
|
||||
"oneof": [
|
||||
{"allowed": ["disabled"]},
|
||||
{"allowed": ["configuration", "mapping"], "dependencies": ["salt"]},
|
||||
{"allowed": ["configuration", "mapping"]},
|
||||
{"allowed": ["oauth"], "dependencies": [
|
||||
"client_id", "client_secret", "oauth_provider", "oauth_scopes", "salt"
|
||||
"client_id",
|
||||
"client_secret",
|
||||
"oauth_provider",
|
||||
"oauth_scopes",
|
||||
]},
|
||||
],
|
||||
},
|
||||
@ -180,7 +183,6 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"schema": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": True,
|
||||
},
|
||||
"root": {
|
||||
"type": "string",
|
||||
@ -225,6 +227,10 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"coerce": "list",
|
||||
"schema": {"type": "string"},
|
||||
},
|
||||
"enable_archive_upload": {
|
||||
"type": "boolean",
|
||||
"coerce": "boolean",
|
||||
},
|
||||
"host": {
|
||||
"type": "string",
|
||||
"is_ip_address": ["localhost"],
|
||||
@ -233,6 +239,11 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"type": "string",
|
||||
"is_url": ["http", "https"],
|
||||
},
|
||||
"max_body_size": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
},
|
||||
@ -254,6 +265,11 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"required": True,
|
||||
"path_exists": True,
|
||||
},
|
||||
"timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
"unix_socket": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
@ -265,6 +281,10 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"username": {
|
||||
"type": "string",
|
||||
},
|
||||
"wait_timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
51
src/ahriman/core/configuration/shell_interpolator.py
Normal file
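The new ShellInterpolator added below substitutes environment variables into option values with string.Template, leaving unknown placeholders such as $repo in alpm.mirror untouched. A minimal standalone sketch of that behaviour, using a hypothetical AHRIMAN_ROOT variable:

    import os
    from string import Template

    os.environ["AHRIMAN_ROOT"] = "/var/lib/ahriman"  # hypothetical variable
    value = "$AHRIMAN_ROOT/ahriman.db and $repo stays as-is"

    # expand $VARIABLE references from the process environment,
    # keep unknown placeholders for later per-repository substitution
    print(Template(value).safe_substitute(os.environ))
    # /var/lib/ahriman/ahriman.db and $repo stays as-is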
@ -0,0 +1,51 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import configparser
|
||||
import os
|
||||
|
||||
from collections.abc import Mapping, MutableMapping
|
||||
from string import Template
|
||||
|
||||
|
||||
class ShellInterpolator(configparser.Interpolation):
|
||||
"""
|
||||
custom string interpolator, because we cannot use defaults argument due to config validation
|
||||
"""
|
||||
|
||||
def before_get(self, parser: MutableMapping[str, Mapping[str, str]], section: str, option: str, value: str,
|
||||
defaults: Mapping[str, str]) -> str:
|
||||
"""
|
||||
interpolate option value
|
||||
|
||||
Args:
|
||||
parser(MutableMapping[str, Mapping[str, str]]): option parser
|
||||
section(str): section name
|
||||
option(str): option name
|
||||
value(str): source (not-converted) value
|
||||
defaults(Mapping[str, str]): default values
|
||||
|
||||
Returns:
|
||||
str: substituted value
|
||||
"""
|
||||
# At the moment it seems that it is the most legit way to handle environment variables
|
||||
# Template behaviour is literally the same as shell
|
||||
# In addition, we are using shell-like variables in some cases (see ``alpm.mirror`` option), thus we would like
|
||||
# to keep them alive
|
||||
return Template(value).safe_substitute(os.environ)
|
@ -136,31 +136,13 @@ class Validator(RootValidator):
|
||||
The rule's arguments are validated against this schema:
|
||||
{"type": "list", "schema": {"type": "string"}}
|
||||
"""
|
||||
url = urlparse(value) # it probably will never rise exceptions on parse
|
||||
if not url.scheme:
|
||||
self._error(field, f"Url scheme is not set for {value}")
|
||||
if not url.netloc and url.scheme not in ("file",):
|
||||
self._error(field, f"Location must be set for url {value} of scheme {url.scheme}")
|
||||
if constraint and url.scheme not in constraint:
|
||||
self._error(field, f"Url {value} scheme must be one of {constraint}")
|
||||
|
||||
def _validate_path_is_absolute(self, constraint: bool, field: str, value: Path) -> None:
|
||||
"""
|
||||
check if path is absolute or not
|
||||
|
||||
Args:
|
||||
constraint(bool): True in case if path must be absolute and False if it must be relative
|
||||
field(str): field name to be checked
|
||||
value(Path): value to be checked
|
||||
|
||||
Examples:
|
||||
The rule's arguments are validated against this schema:
|
||||
{"type": "boolean"}
|
||||
"""
|
||||
if constraint and not value.is_absolute():
|
||||
self._error(field, f"Path {value} must be absolute")
|
||||
if not constraint and value.is_absolute():
|
||||
self._error(field, f"Path {value} must be relative")
|
||||
match urlparse(value): # it probably will never rise exceptions on parse
|
||||
case url if not url.scheme:
|
||||
self._error(field, f"Url scheme is not set for {value}")
|
||||
case url if not url.netloc and url.scheme not in ("file",):
|
||||
self._error(field, f"Location must be set for url {value} of scheme {url.scheme}")
|
||||
case url if constraint and url.scheme not in constraint:
|
||||
self._error(field, f"Url {value} scheme must be one of {constraint}")
|
||||
|
||||
def _validate_path_exists(self, constraint: bool, field: str, value: Path) -> None:
|
||||
"""
|
||||
@ -175,7 +157,8 @@ class Validator(RootValidator):
|
||||
The rule's arguments are validated against this schema:
|
||||
{"type": "boolean"}
|
||||
"""
|
||||
if constraint and not value.exists():
|
||||
self._error(field, f"Path {value} must exist")
|
||||
if not constraint and value.exists():
|
||||
self._error(field, f"Path {value} must not exist")
|
||||
match value.exists():
|
||||
case True if not constraint:
|
||||
self._error(field, f"Path {value} must not exist")
|
||||
case False if constraint:
|
||||
self._error(field, f"Path {value} must exist")
|
||||
|
@ -70,7 +70,19 @@ def migrate_package_remotes(connection: Connection, paths: RepositoryPaths) -> N
|
||||
connection(Connection): database connection
|
||||
paths(RepositoryPaths): repository paths instance
|
||||
"""
|
||||
from ahriman.core.database.operations import PackageOperations
|
||||
from ahriman.core.alpm.remote import AUR
|
||||
from ahriman.models.package import Package
|
||||
|
||||
def get_packages() -> dict[str, Package]:
|
||||
return {
|
||||
row["package_base"]: Package(
|
||||
base=row["package_base"],
|
||||
version=row["version"],
|
||||
remote=RemoteSource.from_json(row),
|
||||
packages={},
|
||||
packager=row["packager"] or None,
|
||||
) for row in connection.execute("""select * from package_bases""")
|
||||
}
|
||||
|
||||
def insert_remote(base: str, remote: RemoteSource) -> None:
|
||||
connection.execute(
|
||||
@ -87,12 +99,15 @@ def migrate_package_remotes(connection: Connection, paths: RepositoryPaths) -> N
|
||||
}
|
||||
)
|
||||
|
||||
packages = PackageOperations._packages_get_select_package_bases(connection)
|
||||
for package_base, package in packages.items():
|
||||
for package_base, package in get_packages().items():
|
||||
local_cache = paths.cache_for(package_base)
|
||||
if local_cache.exists() and not package.is_vcs:
|
||||
continue # skip packages which are not VCS and with local cache
|
||||
remote_source = RemoteSource.from_source(PackageSource.AUR, package_base, "aur")
|
||||
if remote_source is None:
|
||||
continue # should never happen
|
||||
remote_source = RemoteSource(
|
||||
source=PackageSource.AUR,
|
||||
git_url=AUR.remote_git_url(package_base, "aur"),
|
||||
web_url=AUR.remote_web_url(package_base),
|
||||
path=".",
|
||||
branch="master",
|
||||
)
|
||||
insert_remote(package_base, remote_source)
|
||||
|
@ -61,12 +61,12 @@ def migrate_package_depends(connection: Connection, configuration: Configuration
|
||||
if not configuration.repository_paths.repository.is_dir():
|
||||
return
|
||||
|
||||
_, architecture = configuration.check_loaded()
|
||||
pacman = Pacman(architecture, configuration, refresh_database=PacmanSynchronization.Disabled)
|
||||
_, repository_id = configuration.check_loaded()
|
||||
pacman = Pacman(repository_id, configuration, refresh_database=PacmanSynchronization.Disabled)
|
||||
|
||||
package_list = []
|
||||
for full_path in filter(package_like, configuration.repository_paths.repository.iterdir()):
|
||||
base = Package.from_archive(full_path, pacman, remote=None)
|
||||
base = Package.from_archive(full_path, pacman)
|
||||
for package, description in base.packages.items():
|
||||
package_list.append({
|
||||
"make_depends": description.make_depends,
|
||||
|
@ -45,9 +45,9 @@ steps = [
|
||||
)
|
||||
""",
|
||||
"""
|
||||
insert into packages select * from packages_ where architecture is not null;
|
||||
insert into packages select * from packages_ where architecture is not null
|
||||
""",
|
||||
"""
|
||||
drop table packages_;
|
||||
drop table packages_
|
||||
""",
|
||||
]
|
||||
|
@ -58,12 +58,12 @@ def migrate_package_check_depends(connection: Connection, configuration: Configu
|
||||
if not configuration.repository_paths.repository.is_dir():
|
||||
return
|
||||
|
||||
_, architecture = configuration.check_loaded()
|
||||
pacman = Pacman(architecture, configuration, refresh_database=PacmanSynchronization.Disabled)
|
||||
_, repository_id = configuration.check_loaded()
|
||||
pacman = Pacman(repository_id, configuration, refresh_database=PacmanSynchronization.Disabled)
|
||||
|
||||
package_list = []
|
||||
for full_path in filter(package_like, configuration.repository_paths.repository.iterdir()):
|
||||
base = Package.from_archive(full_path, pacman, remote=None)
|
||||
base = Package.from_archive(full_path, pacman)
|
||||
for package, description in base.packages.items():
|
||||
package_list.append({
|
||||
"check_depends": description.check_depends,
|
||||
|
@ -64,12 +64,12 @@ def migrate_package_base_packager(connection: Connection, configuration: Configu
|
||||
if not configuration.repository_paths.repository.is_dir():
|
||||
return
|
||||
|
||||
_, architecture = configuration.check_loaded()
|
||||
pacman = Pacman(architecture, configuration, refresh_database=PacmanSynchronization.Disabled)
|
||||
_, repository_id = configuration.check_loaded()
|
||||
pacman = Pacman(repository_id, configuration, refresh_database=PacmanSynchronization.Disabled)
|
||||
|
||||
package_list = []
|
||||
for full_path in filter(package_like, configuration.repository_paths.repository.iterdir()):
|
||||
package = Package.from_archive(full_path, pacman, remote=None)
|
||||
package = Package.from_archive(full_path, pacman)
|
||||
package_list.append({
|
||||
"package_base": package.base,
|
||||
"packager": package.packager,
|
||||
|
27
src/ahriman/core/database/migrations/m009_local_source.py
Normal file
@ -0,0 +1,27 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
__all__ = ["steps"]
|
||||
|
||||
|
||||
steps = [
|
||||
"""
|
||||
update package_bases set source = 'local' where source is null
|
||||
""",
|
||||
]
|
@ -0,0 +1,36 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
__all__ = ["steps"]
|
||||
|
||||
|
||||
steps = [
|
||||
"""
|
||||
drop index logs_package_base_process_id
|
||||
""",
|
||||
"""
|
||||
alter table logs drop column process_id
|
||||
""",
|
||||
"""
|
||||
alter table logs add column version text not null default ''
|
||||
""",
|
||||
"""
|
||||
create index logs_package_base_version on logs (package_base, version)
|
||||
""",
|
||||
]
|
245
src/ahriman/core/database/migrations/m011_repository_name.py
Normal file
@ -0,0 +1,245 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from sqlite3 import Connection
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
|
||||
|
||||
__all__ = ["migrate_data", "steps"]
|
||||
|
||||
|
||||
steps = [
|
||||
# set correct types for schema
|
||||
"""
|
||||
alter table users rename to users_
|
||||
""",
|
||||
"""
|
||||
create table users (
|
||||
username text not null unique,
|
||||
access text not null,
|
||||
password text,
|
||||
packager_id text,
|
||||
key_id text
|
||||
)
|
||||
""",
|
||||
"""
|
||||
insert into users select * from users_
|
||||
""",
|
||||
"""
|
||||
drop table users_
|
||||
""",
|
||||
# update base tables
|
||||
# build_queue
|
||||
"""
|
||||
alter table build_queue add column repository text not null default ''
|
||||
""",
|
||||
"""
|
||||
alter table build_queue rename to build_queue_
|
||||
""",
|
||||
"""
|
||||
create table build_queue (
|
||||
package_base text not null,
|
||||
properties json not null,
|
||||
repository text not null,
|
||||
primary key (package_base, repository)
|
||||
)
|
||||
""",
|
||||
"""
|
||||
insert into build_queue select * from build_queue_
|
||||
""",
|
||||
"""
|
||||
drop table build_queue_
|
||||
""",
|
||||
# package_bases
|
||||
"""
|
||||
alter table package_bases add column repository text not null default ''
|
||||
""",
|
||||
"""
|
||||
alter table package_bases rename to package_bases_
|
||||
""",
|
||||
"""
|
||||
create table package_bases (
|
||||
package_base text not null,
|
||||
version text not null,
|
||||
branch text,
|
||||
git_url text,
|
||||
path text,
|
||||
web_url text,
|
||||
source text,
|
||||
packager text,
|
||||
repository text not null,
|
||||
primary key (package_base, repository)
|
||||
)
|
||||
""",
|
||||
"""
|
||||
insert into package_bases select * from package_bases_
|
||||
""",
|
||||
"""
|
||||
drop table package_bases_
|
||||
""",
|
||||
# package_statuses
|
||||
"""
|
||||
alter table package_statuses add column repository text not null default ''
|
||||
""",
|
||||
"""
|
||||
alter table package_statuses rename to package_statuses_
|
||||
""",
|
||||
"""
|
||||
create table package_statuses (
|
||||
package_base text not null,
|
||||
status text not null,
|
||||
last_updated integer,
|
||||
repository text not null,
|
||||
primary key (package_base, repository)
|
||||
)
|
||||
""",
|
||||
"""
|
||||
insert into package_statuses select * from package_statuses_
|
||||
""",
|
||||
"""
|
||||
drop table package_statuses_
|
||||
""",
|
||||
# packages
|
||||
"""
|
||||
alter table packages add column repository text not null default ''
|
||||
""",
|
||||
"""
|
||||
alter table packages rename to packages_
|
||||
""",
|
||||
"""
|
||||
create table packages (
|
||||
package text not null,
|
||||
package_base text not null,
|
||||
architecture text not null,
|
||||
archive_size integer,
|
||||
build_date integer,
|
||||
depends json,
|
||||
description text,
|
||||
filename text,
|
||||
"groups" json,
|
||||
installed_size integer,
|
||||
licenses json,
|
||||
provides json,
|
||||
url text,
|
||||
make_depends json,
|
||||
opt_depends json,
|
||||
check_depends json,
|
||||
repository text not null,
|
||||
primary key (package, architecture, repository)
|
||||
)
|
||||
""",
|
||||
"""
|
||||
insert into packages select * from packages_
|
||||
""",
|
||||
"""
|
||||
drop table packages_
|
||||
""",
|
||||
# patches
|
||||
"""
|
||||
alter table patches add column repository text not null default ''
|
||||
""",
|
||||
"""
|
||||
drop index patches_package_base_variable
|
||||
""",
|
||||
"""
|
||||
alter table patches rename to patches_
|
||||
""",
|
||||
"""
|
||||
create table patches (
|
||||
package_base text not null,
|
||||
variable text,
|
||||
patch blob not null,
|
||||
repository text not null
|
||||
)
|
||||
""",
|
||||
"""
|
||||
create unique index patches_package_base_variable_repository
|
||||
on patches (package_base, coalesce(variable, ''), repository)
|
||||
""",
|
||||
"""
|
||||
insert into patches select * from patches_
|
||||
""",
|
||||
"""
|
||||
drop table patches_
|
||||
""",
|
||||
# logs
|
||||
"""
|
||||
alter table logs add column repository text not null default ''
|
||||
""",
|
||||
"""
|
||||
drop index logs_package_base_version
|
||||
""",
|
||||
"""
|
||||
alter table logs rename to logs_
|
||||
""",
|
||||
"""
|
||||
create table logs (
|
||||
package_base text not null,
|
||||
created real not null,
|
||||
record text,
|
||||
version text not null,
|
||||
repository text not null
|
||||
)
|
||||
""",
|
||||
"""
|
||||
insert into logs select * from logs_
|
||||
""",
|
||||
"""
|
||||
create index logs_package_base_version on logs (package_base, version)
|
||||
""",
|
||||
"""
|
||||
drop table logs_
|
||||
""",
|
||||
]
|
||||
|
||||
|
||||
def migrate_data(connection: Connection, configuration: Configuration) -> None:
|
||||
"""
|
||||
perform data migration
|
||||
|
||||
Args:
|
||||
connection(Connection): database connection
|
||||
configuration(Configuration): configuration instance
|
||||
"""
|
||||
migrate_package_repository(connection, configuration)
|
||||
|
||||
|
||||
def migrate_package_repository(connection: Connection, configuration: Configuration) -> None:
|
||||
"""
|
||||
update repository name from current settings
|
||||
|
||||
Args:
|
||||
connection(Connection): database connection
|
||||
configuration(Configuration): configuration instance
|
||||
"""
|
||||
_, repository_id = configuration.check_loaded()
|
||||
|
||||
connection.execute("""update build_queue set repository = :repository""",
|
||||
{"repository": repository_id.name, })
|
||||
connection.execute("""update package_bases set repository = :repository""",
|
||||
{"repository": repository_id.name, })
|
||||
connection.execute("""update package_statuses set repository = :repository""",
|
||||
{"repository": repository_id.name, })
|
||||
connection.execute("""update packages set repository = :repository""",
|
||||
{"repository": repository_id.name, })
|
||||
connection.execute("""update patches set repository = :repository""",
|
||||
{"repository": repository_id.name, })
|
||||
connection.execute("""update logs set repository = :repository""",
|
||||
{"repository": repository_id.name, })
|
@@ -39,9 +39,9 @@ class BuildOperations(Operations):
            connection.execute(
                """
                delete from build_queue
-               where :package_base is null or package_base = :package_base
+               where (:package_base is null or package_base = :package_base) and repository = :repository
                """,
-               {"package_base": package_base})
+               {"package_base": package_base, "repository": self.repository_id.name})

        return self.with_connection(run, commit=True)

@@ -55,7 +55,10 @@ class BuildOperations(Operations):
        def run(connection: Connection) -> list[Package]:
            return [
                Package.from_json(row["properties"])
-               for row in connection.execute("""select * from build_queue""")
+               for row in connection.execute(
+                   """select properties from build_queue where repository = :repository""",
+                   {"repository": self.repository_id.name}
+               )
            ]

        return self.with_connection(run)

@@ -71,12 +74,12 @@ class BuildOperations(Operations):
            connection.execute(
                """
                insert into build_queue
-               (package_base, properties)
+               (package_base, properties, repository)
                values
-               (:package_base, :properties)
-               on conflict (package_base) do update set
+               (:package_base, :properties, :repository)
+               on conflict (package_base, repository) do update set
                properties = :properties
                """,
-               {"package_base": package.base, "properties": package.view()})
+               {"package_base": package.base, "properties": package.view(), "repository": self.repository_id.name})

        return self.with_connection(run, commit=True)
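The widened conflict target in build_queue_insert is what lets the same package base sit in the build queue of several repositories at once: the upsert only replaces the row whose (package_base, repository) pair already exists. A self-contained sketch of that behaviour, assuming a reduced table layout with only the columns used here:

import sqlite3

connection = sqlite3.connect(":memory:")
connection.execute("""
    create table build_queue (
        package_base text not null,
        properties text not null,
        repository text not null default '',
        unique (package_base, repository)
    )
""")

def build_queue_insert(package_base: str, properties: str, repository: str) -> None:
    # same upsert shape as in the diff above: conflicts are resolved per repository, not globally
    connection.execute(
        """
        insert into build_queue (package_base, properties, repository)
        values (:package_base, :properties, :repository)
        on conflict (package_base, repository) do update set properties = :properties
        """,
        {"package_base": package_base, "properties": properties, "repository": repository})

build_queue_insert("ahriman", "{}", "aur-clone")
build_queue_insert("ahriman", "{}", "another-repo")              # second repository keeps its own row
build_queue_insert("ahriman", '{"updated": true}', "aur-clone")  # only the aur-clone row is updated
print(connection.execute("select repository, properties from build_queue").fetchall())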
@@ -44,10 +44,11 @@ class LogsOperations(Operations):
                f"""[{pretty_datetime(row["created"])}] {row["record"]}"""
                for row in connection.execute(
                    """
-                   select created, record from logs where package_base = :package_base
+                   select created, record from logs
+                   where package_base = :package_base and repository = :repository
                    order by created
                    """,
-                   {"package_base": package_base})
+                   {"package_base": package_base, "repository": self.repository_id.name})
            ]

        records = self.with_connection(run)

@@ -66,36 +67,38 @@ class LogsOperations(Operations):
            connection.execute(
                """
                insert into logs
-               (package_base, process_id, created, record)
+               (package_base, version, created, record, repository)
                values
-               (:package_base, :process_id, :created, :record)
+               (:package_base, :version, :created, :record, :repository)
                """,
                {
                    "package_base": log_record_id.package_base,
-                   "process_id": log_record_id.process_id,
+                   "version": log_record_id.version,
                    "created": created,
                    "record": record,
+                   "repository": self.repository_id.name,
                }
            )

        return self.with_connection(run, commit=True)

-   def logs_remove(self, package_base: str, current_process_id: int | None) -> None:
+   def logs_remove(self, package_base: str, version: str | None) -> None:
        """
        remove log records for the specified package

        Args:
            package_base(str): package base to remove logs
-           current_process_id(int | None): current process id. If set it will remove only logs belonging to another
-               process
+           version(str | None): package version. If set it will remove only logs belonging to another
+               version
        """
        def run(connection: Connection) -> None:
            connection.execute(
                """
                delete from logs
-               where package_base = :package_base and (:process_id is null or process_id <> :process_id)
+               where package_base = :package_base and repository = :repository
+               and (:version is null or version <> :version)
                """,
-               {"package_base": package_base, "process_id": current_process_id}
+               {"package_base": package_base, "version": version, "repository": self.repository_id.name}
            )

        return self.with_connection(run, commit=True)
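The reworked logs_remove predicate treats the version argument as a keep filter: passing None drops every record of the package, while passing a concrete version drops only records written for other versions. A runnable sketch of the same predicate against a stand-in logs table; the table layout and values below are illustrative:

import sqlite3

connection = sqlite3.connect(":memory:")
connection.execute("create table logs (package_base text, version text, record text, repository text)")
connection.executemany(
    "insert into logs values (?, ?, ?, ?)",
    [("ahriman", "2.10.2-1", "old build", "aur-clone"), ("ahriman", "2.11.0-1", "new build", "aur-clone")])

def logs_remove(package_base: str, version: str | None, repository: str) -> None:
    # same predicate as in the diff above: None removes everything for the package,
    # a concrete version keeps only the records written for that version
    connection.execute(
        """
        delete from logs
        where package_base = :package_base and repository = :repository
            and (:version is null or version <> :version)
        """,
        {"package_base": package_base, "version": version, "repository": repository})

logs_remove("ahriman", "2.11.0-1", "aur-clone")
print(connection.execute("select version, record from logs").fetchall())  # only the 2.11.0-1 row remains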
@@ -24,6 +24,7 @@ from pathlib import Path
from typing import Any, TypeVar

from ahriman.core.log import LazyLogging
+from ahriman.models.repository_id import RepositoryId


T = TypeVar("T")

@@ -35,16 +36,19 @@ class Operations(LazyLogging):

    Attributes:
        path(Path): path to the database file
+       repository_id(RepositoryId): repository unique identifier to perform implicit filtering
    """

-   def __init__(self, path: Path) -> None:
+   def __init__(self, path: Path, repository_id: RepositoryId) -> None:
        """
        default constructor

        Args:
            path(Path): path to the database file
+           repository_id(RepositoryId): repository unique identifier
        """
        self.path = path
+       self.repository_id = repository_id

    @staticmethod
    def factory(cursor: sqlite3.Cursor, row: tuple[Any, ...]) -> dict[str, Any]:
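The constructor change is what makes the implicit filtering above possible: every operations class now carries the repository identifier and appends repository = :repository to its queries. A minimal sketch of that pattern; the RepositoryId stand-in and the build_queue_size helper are illustrative only (the diff above only shows that the real RepositoryId exposes a name attribute):

import sqlite3
from dataclasses import dataclass
from pathlib import Path

@dataclass(frozen=True)
class RepositoryId:
    # stand-in carrying just the attribute the queries above rely on
    architecture: str
    name: str

class Operations:
    def __init__(self, path: Path, repository_id: RepositoryId) -> None:
        self.path = path
        self.repository_id = repository_id

    def build_queue_size(self) -> int:
        # hypothetical helper; assumes the build_queue table from the migration above already exists
        with sqlite3.connect(self.path) as connection:
            cursor = connection.execute(
                "select count(*) from build_queue where repository = :repository",
                {"repository": self.repository_id.name})
            return int(cursor.fetchone()[0])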
Some files were not shown because too many files have changed in this diff.