Mirror of https://github.com/arcan1s/ahriman.git (synced 2025-07-16 15:29:56 +00:00)

Compare commits: 9 commits, 9ea3a911f7 ... 2.11.0
.pylintrc (804 lines changed)

@ -1,28 +1,78 @@
|
||||
[MASTER]
|
||||
[MAIN]
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
|
||||
# in a server-like mode.
|
||||
clear-cache-post-run=no
|
||||
|
||||
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||
# all available extensions.
|
||||
#enable-all-extensions=
|
||||
|
||||
# In error mode, messages with a category besides ERROR or FATAL are
|
||||
# suppressed, and no reports are done by default. Error mode is compatible with
|
||||
# disabling specific errors.
|
||||
#errors-only=
|
||||
|
||||
# Always return a 0 (non-error) status code, even if lint errors are found.
|
||||
# This is primarily useful in continuous integration scripts.
|
||||
#exit-zero=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-allow-list=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
|
||||
# for backward compatibility.)
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Specify a score threshold to be exceeded before program exits with error.
|
||||
fail-under=10.0
|
||||
# Return non-zero exit code if any of these messages/categories are detected,
|
||||
# even if score is above --fail-under value. Syntax same as enable. Messages
|
||||
# specified are enabled, while categories only check already-enabled messages.
|
||||
fail-on=
|
||||
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
# Specify a score threshold under which the program will exit with error.
|
||||
fail-under=10
|
||||
|
||||
# Interpret the stdin as a python script, whose filename needs to be passed as
|
||||
# the module_or_package argument.
|
||||
#from-stdin=
|
||||
|
||||
# Files or directories to be skipped. They should be base names, not paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regex patterns to the blacklist. The
|
||||
# regex matches against base names, not paths.
|
||||
ignore-patterns=
|
||||
# Add files or directories matching the regular expressions patterns to the
|
||||
# ignore-list. The regex matches against paths and can be in Posix or Windows
|
||||
# format. Because '\\' represents the directory delimiter on Windows systems,
|
||||
# it can't be used as an escape character.
|
||||
ignore-paths=
|
||||
|
||||
# Files or directories matching the regular expression patterns are skipped.
|
||||
# The regex matches against base names, not paths. The default value ignores
|
||||
# Emacs file locks
|
||||
ignore-patterns=^\.#
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use.
|
||||
jobs=0
|
||||
# number of processors available to use, and will cap the count on Windows to
|
||||
# avoid hangs.
|
||||
jobs=1
|
||||
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
# object. This can help the performance when dealing with large functions or
|
||||
@ -36,6 +86,19 @@ load-plugins=
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Minimum Python version to use for version dependent checks. Will default to
|
||||
# the version used to run pylint.
|
||||
py-version=3.11
|
||||
|
||||
# Discover python modules and packages in the file system subtree.
|
||||
recursive=no
|
||||
|
||||
# Add paths to the list of the source roots. Supports globbing patterns. The
|
||||
# source root is an absolute path or a path relative to the current working
|
||||
# directory used to determine a package namespace for modules located under the
|
||||
# source root.
|
||||
source-roots=
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
@ -44,120 +107,8 @@ suggestion-mode=yes
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
|
||||
confidence=
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
missing-module-docstring,
|
||||
line-too-long,
|
||||
no-name-in-module,
|
||||
import-outside-toplevel,
|
||||
invalid-name,
|
||||
raise-missing-from,
|
||||
wrong-import-order,
|
||||
too-few-public-methods,
|
||||
too-many-instance-attributes,
|
||||
broad-except,
|
||||
too-many-ancestors,
|
||||
fixme,
|
||||
too-many-arguments,
|
||||
duplicate-code,
|
||||
cyclic-import,
|
||||
confusing-with-statement,
|
||||
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
|
||||
# which contain the number of messages in each category, as well as 'statement'
|
||||
# which is the total number of statements analyzed. This score is used by the
|
||||
# global evaluation report (RP0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
#msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=text
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=400
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
# In verbose mode, extra non-checker-related info will be displayed.
|
||||
#verbose=
|
||||
|
||||
|
||||
[BASIC]
|
||||
@ -166,13 +117,15 @@ single-line-if-stmt=no
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style.
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
@ -192,20 +145,30 @@ bad-names-rgxs=
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style.
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style.
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
@ -217,7 +180,8 @@ docstring-min-length=-1
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style.
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
@ -239,21 +203,22 @@ include-naming-hint=no
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style.
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style.
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style.
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
@ -269,209 +234,56 @@ no-docstring-rgx=^_
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type alias names. If left empty, type
|
||||
# alias names will be checked with the set naming style.
|
||||
#typealias-rgx=
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style.
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
[CLASSES]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
asyncSetUp,
|
||||
__post_init__
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Ignore imports when computing similarities.
|
||||
ignore-imports=no
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
#notes-rgx=
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. Available dictionaries: none. To make it work,
|
||||
# install the python-enchant package.
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored. Default to name
|
||||
# with leading underscore.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=optparse,tkinter.tix
|
||||
|
||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
# not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report RP0402 must not be disabled).
|
||||
import-graph=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
# not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# List of regular expressions of class ancestor names to ignore when counting
|
||||
# public methods (see R0903)
|
||||
exclude-too-few-public-methods=
|
||||
|
||||
# List of qualified class names to ignore when counting class parents (see
|
||||
# R0901)
|
||||
ignored-parents=
|
||||
|
||||
# Maximum number of arguments for function / method.
|
||||
max-args=5
|
||||
|
||||
@ -503,35 +315,331 @@ max-statements=50
|
||||
min-public-methods=2
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
__post_init__
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,
|
||||
_fields,
|
||||
_replace,
|
||||
_source,
|
||||
_make
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=cls
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "BaseException, Exception".
|
||||
overgeneral-exceptions=builtins.BaseException,
|
||||
builtins.Exception
|
||||
# Exceptions that will emit a warning when caught.
|
||||
overgeneral-exceptions=builtins.BaseException,builtins.Exception
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1000
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow explicit reexports by alias from a package __init__.
|
||||
allow-reexport-from-package=no
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of external dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||
# external) dependencies to the given file (report RP0402 must not be
|
||||
# disabled).
|
||||
import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||
# UNDEFINED.
|
||||
confidence=HIGH,
|
||||
CONTROL_FLOW,
|
||||
INFERENCE,
|
||||
INFERENCE_FAILURE,
|
||||
UNDEFINED
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then re-enable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
missing-module-docstring,
|
||||
line-too-long,
|
||||
no-name-in-module,
|
||||
import-outside-toplevel,
|
||||
invalid-name,
|
||||
raise-missing-from,
|
||||
wrong-import-order,
|
||||
too-few-public-methods,
|
||||
too-many-instance-attributes,
|
||||
broad-except,
|
||||
fixme,
|
||||
too-many-arguments,
|
||||
duplicate-code,
|
||||
cyclic-import,
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[METHOD_ARGS]
|
||||
|
||||
# List of qualified names (i.e., library.method) which require a timeout
|
||||
# parameter e.g. 'requests.api.get,requests.api.post'
|
||||
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
notes-rgx=
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit,argparse.parse_error
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
|
||||
# 'convention', and 'info' which contain the number of messages in each
|
||||
# category, as well as 'statement' which is the total number of statements
|
||||
# analyzed. This score is used by the global evaluation report (RP0004).
|
||||
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
#output-format=
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Comments are removed from the similarity computation
|
||||
ignore-comments=yes
|
||||
|
||||
# Docstrings are removed from the similarity computation
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Imports are removed from the similarity computation
|
||||
ignore-imports=yes
|
||||
|
||||
# Signatures are removed from the similarity computation
|
||||
ignore-signatures=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. No available dictionaries : You need to install
|
||||
# both the python package and the system dependency for enchant to work..
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should be considered directives if they
|
||||
# appear at the beginning of a comment and should not be checked.
|
||||
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of symbolic message names to ignore for Mixin members.
|
||||
ignored-checks-for-mixins=no-member,
|
||||
not-async-context-manager,
|
||||
not-context-manager,
|
||||
attribute-defined-outside-init
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# Regex pattern to define which classes are considered mixins.
|
||||
mixin-class-rgx=.*[Mm]ixin
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of names allowed to shadow builtins
|
||||
allowed-redefined-builtins=
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
|
@ -46,7 +46,7 @@ Again, the most checks can be performed by `make check` command, though some add

        int: result

    Raises:
-       RuntimeException: a local function error occurs
+       RuntimeError: a local function error occurs

    Examples:
        Very informative example how to use this function, e.g.::

@ -85,7 +85,7 @@ Again, the most checks can be performed by `make check` command, though some add

    Args:
        *args(Any): positional arguments
        **kwargs(Any): keyword arguments
    """
    self.instance_attribute = ""
```
@ -130,6 +130,12 @@ Again, the most checks can be performed by `make check` command, though some add

* Configuration interactions must go through an `ahriman.core.configuration.Configuration` class instance.
* If instantiating a class requires additional actions, it is recommended to create a class method which can be used to instantiate it.
* The code must be exception safe, unless the end user explicitly asks otherwise. This means that most exceptions must be handled and printed to the log; no other action must be taken (e.g. raising another exception).
* Exceptions without parameters should be raised without parentheses, e.g.:

  ```python
  raise RuntimeError
  ```

* For running external commands the `ahriman.core.util.check_output` function must be used.
* Every temporary file/directory must be removed at the end of processing, no matter what. The `tempfile` module provides good ways to do it.
* Import order must be the following:
@ -11,6 +11,7 @@ ENV AHRIMAN_PACKAGER="ahriman bot <ahriman@example.com>"

ENV AHRIMAN_PACMAN_MIRROR=""
ENV AHRIMAN_PORT=""
ENV AHRIMAN_REPOSITORY="aur-clone"
ENV AHRIMAN_REPOSITORY_SERVER=""
ENV AHRIMAN_REPOSITORY_ROOT="/var/lib/ahriman/ahriman"
ENV AHRIMAN_UNIX_SOCKET=""
ENV AHRIMAN_USER="ahriman"
@ -43,6 +43,9 @@ fi

if [ -n "$AHRIMAN_PORT" ]; then
    AHRIMAN_SETUP_ARGS+=("--web-port" "$AHRIMAN_PORT")
fi
if [ -n "$AHRIMAN_REPOSITORY_SERVER" ]; then
    AHRIMAN_SETUP_ARGS+=("--server" "$AHRIMAN_REPOSITORY_SERVER")
fi
if [ -n "$AHRIMAN_UNIX_SOCKET" ]; then
    AHRIMAN_SETUP_ARGS+=("--web-unix-socket" "$AHRIMAN_UNIX_SOCKET")
fi
(Binary image diff suppressed because it is too large: 829 KiB before, 839 KiB after.)
@ -20,6 +20,14 @@ ahriman.core.configuration.schema module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.configuration.shell\_interpolator module
|
||||
-----------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.configuration.shell_interpolator
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.configuration.validator module
|
||||
-------------------------------------------
|
||||
|
||||
|
@ -84,6 +84,14 @@ ahriman.core.database.migrations.m009\_local\_source module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.database.migrations.m010\_version\_based\_logs\_removal module
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.database.migrations.m010_version_based_logs_removal
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
docs/ahriman.core.http.rst (new file, 21 lines)

@ -0,0 +1,21 @@
|
||||
ahriman.core.http package
|
||||
=========================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
ahriman.core.http.sync\_http\_client module
|
||||
-------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.http.sync_http_client
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: ahriman.core.http
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
@ -36,6 +36,14 @@ ahriman.core.report.jinja\_template module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.report.remote\_call module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.report.remote_call
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.report.report module
|
||||
---------------------------------
|
||||
|
||||
|
@ -14,6 +14,7 @@ Subpackages
|
||||
ahriman.core.database
|
||||
ahriman.core.formatters
|
||||
ahriman.core.gitremote
|
||||
ahriman.core.http
|
||||
ahriman.core.log
|
||||
ahriman.core.report
|
||||
ahriman.core.repository
|
||||
|
@ -20,6 +20,14 @@ ahriman.core.upload.http\_upload module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.upload.remote\_service module
|
||||
------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.core.upload.remote_service
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.core.upload.rsync module
|
||||
--------------------------------
|
||||
|
||||
|
@ -220,6 +220,14 @@ ahriman.models.user\_access module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.models.waiter module
|
||||
----------------------------
|
||||
|
||||
.. automodule:: ahriman.models.waiter
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
@ -36,6 +36,14 @@ ahriman.web.schemas.error\_schema module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.file\_schema module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.schemas.file_schema
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.internal\_status\_schema module
|
||||
---------------------------------------------------
|
||||
|
||||
@ -132,6 +140,22 @@ ahriman.web.schemas.pgp\_key\_schema module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.process\_id\_schema module
|
||||
----------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.schemas.process_id_schema
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.process\_schema module
|
||||
------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.schemas.process_schema
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.remote\_schema module
|
||||
-----------------------------------------
|
||||
|
||||
@ -156,6 +180,14 @@ ahriman.web.schemas.status\_schema module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.schemas.update\_flags\_schema module
|
||||
------------------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.schemas.update_flags_schema
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
@ -20,6 +20,14 @@ ahriman.web.views.service.pgp module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.views.service.process module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.views.service.process
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.views.service.rebuild module
|
||||
----------------------------------------
|
||||
|
||||
@ -60,6 +68,14 @@ ahriman.web.views.service.update module
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
ahriman.web.views.service.upload module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: ahriman.web.views.service.upload
|
||||
:members:
|
||||
:no-undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
|
@ -37,6 +37,7 @@ This package contains everything required for the most of application actions an
|
||||
* ``ahriman.core.database`` is everything including data and schema migrations for database.
|
||||
* ``ahriman.core.formatters`` package provides ``Printer`` sub-classes for printing data (e.g. package properties) to stdout which are used by some handlers.
|
||||
* ``ahriman.core.gitremote`` is a package with remote PKGBUILD triggers. Should not be called directly.
|
||||
* ``ahriman.core.http`` package provides HTTP clients which can be later used by other classes.
|
||||
* ``ahriman.core.log`` is a log utils package. It includes logger loader class, custom HTTP based logger and access logger for HTTP services with additional filters.
|
||||
* ``ahriman.core.report`` is a package with reporting triggers. Should not be called directly.
|
||||
* ``ahriman.core.repository`` contains several traits and base repository (``ahriman.core.repository.Repository`` class) implementation.
|
||||
|
@ -42,6 +42,7 @@ release = __version__
|
||||
extensions = [
|
||||
"sphinx.ext.autodoc",
|
||||
"sphinx.ext.napoleon",
|
||||
"sphinx_rtd_theme",
|
||||
"sphinxarg.ext",
|
||||
]
|
||||
|
||||
@ -66,7 +67,7 @@ exclude_patterns = []
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
html_theme = "default" if on_rtd else "alabaster"
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
|
@ -106,15 +106,19 @@ Web server settings. If any of ``host``/``port`` is not set, web integration will be disabled.

* ``debug`` - enable debug toolbar, boolean, optional, default ``no``.
* ``debug_check_host`` - check hosts to access debug toolbar, boolean, optional, default ``no``.
* ``debug_allowed_hosts`` - hosts allowed to access the debug toolbar, space separated list of strings, optional.
* ``enable_archive_upload`` - allow uploading packages via HTTP (i.e. calling the ``/api/v1/service/upload`` uri), boolean, optional, default ``no``.
* ``host`` - host to bind, string, optional.
* ``index_url`` - full url of the repository index page, string, optional.
* ``max_body_size`` - max body size in bytes to be validated for archive upload, integer, optional. If not set, validation will be disabled.
* ``password`` - password used to authorize in the web service in order to update service status, string, required if authorization is enabled.
* ``port`` - port to bind, int, optional.
* ``static_path`` - path to directory with static files, string, required.
* ``templates`` - path to templates directory, string, required.
* ``timeout`` - HTTP request timeout in seconds, int, optional, default is ``30``.
* ``unix_socket`` - path to the listening unix socket, string, optional. If set, the server will create the socket on the specified address, which can (and will) be used by the application. Note that unlike the usual host/port configuration, a unix socket allows performing requests without authorization.
* ``unix_socket_unsafe`` - set unsafe (o+w) permissions to unix socket, boolean, optional, default ``yes``. This option is enabled by default, because it is supposed that the unix socket is created in a safe environment (only the web service is supposed to be used in an unsafe one), but it can be disabled by configuration.
* ``username`` - username used to authorize in the web service in order to update service status, string, required if authorization is enabled.
* ``wait_timeout`` - wait timeout in seconds, the maximum amount of time to wait before the lock becomes free, int, optional.
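
For instance, a minimal sketch which only sets the bind address and port (the values below are illustrative) could look like:

.. code-block:: ini

    [web]
    host = 0.0.0.0
    port = 8080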
|
||||
|
||||
``keyring`` group
|
||||
--------------------
|
||||
@ -246,6 +250,17 @@ Section name must be either ``html`` (plus optional architecture name, e.g. ``html:x86_64``) or random name with ``type`` set.

* ``path`` - path to html report file, string, required.
* ``template_path`` - path to Jinja2 template, string, required.

``remote-call`` type
^^^^^^^^^^^^^^^^^^^^

Section name must be either ``remote-call`` (plus optional architecture name, e.g. ``remote-call:x86_64``) or random name with ``type`` set.

* ``type`` - type of the report, string, optional, must be set to ``remote-call`` if it exists.
* ``aur`` - check for AUR package updates, boolean, optional, default ``no``.
* ``local`` - check for local package updates, boolean, optional, default ``no``.
* ``manual`` - update manually built packages, boolean, optional, default ``no``.
* ``wait_timeout`` - maximum amount of time in seconds to wait before the remote process is terminated, int, optional, default ``-1``.
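
As an example, a worker node which asks this service to run a manual update and waits for it to complete could use the following configuration (the same snippet is used in the distributed builds FAQ):

.. code-block:: ini

    [report]
    target = remote-call

    [remote-call]
    manual = yes
    wait_timeout = 0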
|
||||
|
||||
``telegram`` type
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
@ -291,6 +306,14 @@ This feature requires Github key creation (see below). Section name must be either ``github`` (plus optional architecture name, e.g. ``github:x86_64``) or random name with ``type`` set.

* ``timeout`` - HTTP request timeout in seconds, int, optional, default is ``30``.
* ``username`` - Github authorization user, string, required. Basically the same as ``owner``.

``remote-service`` type
^^^^^^^^^^^^^^^^^^^^^^^

Section name must be either ``remote-service`` (plus optional architecture name, e.g. ``remote-service:x86_64``) or random name with ``type`` set.

* ``type`` - type of the upload, string, optional, must be set to ``remote-service`` if it exists.
* ``timeout`` - HTTP request timeout in seconds, int, optional, default is ``30``.
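
A minimal sketch which uploads built packages to the remote service (as in the distributed builds FAQ) looks like:

.. code-block:: ini

    [upload]
    target = remote-service

    [remote-service]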
|
||||
|
||||
``rsync`` type
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
|
docs/faq.rst (201 lines changed)
@ -396,6 +396,7 @@ The following environment variables are supported:
|
||||
* ``AHRIMAN_PACMAN_MIRROR`` - override pacman mirror server if set.
|
||||
* ``AHRIMAN_PORT`` - HTTP server port if any, default is empty.
|
||||
* ``AHRIMAN_REPOSITORY`` - repository name, default is ``aur-clone``.
|
||||
* ``AHRIMAN_REPOSITORY_SERVER`` - optional override for the repository url. Useful if you would like to download packages from a remote server instead of the local filesystem.
|
||||
* ``AHRIMAN_REPOSITORY_ROOT`` - repository root. Because of filesystem rights it is required to override default repository root. By default, it uses ``ahriman`` directory inside ahriman's home, which can be passed as mount volume.
|
||||
* ``AHRIMAN_UNIX_SOCKET`` - full path to unix socket which is used by web server, default is empty. Note that more likely you would like to put it inside ``AHRIMAN_REPOSITORY_ROOT`` directory (e.g. ``/var/lib/ahriman/ahriman/ahriman-web.sock``) or to ``/tmp``.
|
||||
* ``AHRIMAN_USER`` - ahriman user, usually must not be overwritten, default is ``ahriman``.
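
For instance, a web service container which overrides the port and the repository name could be started as follows (the values shown are illustrative):

.. code-block:: shell

    docker run --privileged -p 8080:8080 \
        -e AHRIMAN_PORT=8080 \
        -e AHRIMAN_REPOSITORY="custom-repo" \
        arcan1s/ahriman:latest web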
|
||||
@ -722,8 +723,7 @@ How to post build report to telegram
|
||||
#.
|
||||
Optionally (if you want to post message in chat):
|
||||
|
||||
|
||||
#. Create telegram channel.
|
||||
#. Invite your bot into the channel.
|
||||
#. Make your channel public.
|
||||
|
||||
@ -753,6 +753,203 @@ If you did everything fine you should receive the message with the next update.
|
||||
|
||||
(replace ``${CHAT_ID}`` and ``${API_KEY}`` with the values from configuration).
|
||||
|
||||
Distributed builds
|
||||
------------------
|
||||
|
||||
The service allows running builds on multiple machines and collecting the packages on the main node. There are multiple ways to achieve this; this section describes the officially supported methods.
|
||||
|
||||
Remote synchronization and remote server call
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
This setup requires at least two instances of the service:
|
||||
|
||||
#. Web service (with opt-in authorization enabled), referred to below as the ``master`` node.
#. Application instances responsible for builds, referred to below as ``worker`` nodes.
|
||||
|
||||
In this example the following settings are assumed:
|
||||
|
||||
* Repository architecture is ``x86_64``.
|
||||
* Master node address is ``master.example.com``.
|
||||
|
||||
Master node configuration
|
||||
"""""""""""""""""""""""""
|
||||
|
||||
The only requirement for the master node is that its API must be reachable by the worker nodes (e.g. the port must be exposed to the internet, or to the local network in case of a VPN, etc.) and file upload must be enabled:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[web]
|
||||
enable_archive_upload = yes
|
||||
|
||||
In addition, the following settings are recommended for the master node:
|
||||
|
||||
*
|
||||
As mentioned above, it is recommended to enable authentication (see `How to enable basic authorization`_) and to create a system user which will be used later. This user (if any) will be referred to as ``worker-user`` below.
|
||||
|
||||
*
|
||||
In order to be able to spawn multiple processes at the same time, wait timeout must be configured:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[web]
|
||||
wait_timeout = 0
|
||||
|
||||
Worker nodes configuration
|
||||
""""""""""""""""""""""""""
|
||||
|
||||
#.
|
||||
First of all, in this setup you need to split your repository into chunks manually, e.g. if the repository on the master node contains packages ``A``, ``B`` and ``C``, you need to split them between all available workers, for example:
|
||||
|
||||
* Worker #1: ``A``.
|
||||
* Worker #2: ``B`` and ``C``.
|
||||
|
||||
#.
|
||||
Each worker must be configured to upload files to master node:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[upload]
|
||||
target = remote-service
|
||||
|
||||
[remote-service]
|
||||
|
||||
#.
|
||||
Worker must be configured to access web on master node:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[web]
|
||||
address = master.example.com
|
||||
username = worker-user
|
||||
password = very-secure-password
|
||||
|
||||
As mentioned above, ``web.address`` must be reachable by the workers. If a unix socket is used, it can be passed via the ``web.unix_socket`` variable as usual. The optional ``web.username``/``web.password`` pair can be supplied if authentication is enabled on the master node.
|
||||
|
||||
#.
|
||||
Each worker must call master node on success:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[report]
|
||||
target = remote-call
|
||||
|
||||
[remote-call]
|
||||
manual = yes
|
||||
|
||||
After successful synchronization (see above), the built packages are placed into the directory from which they are read during the manual update, hence the ``remote-call.manual`` flag is required.
|
||||
|
||||
#.
|
||||
Change the order of trigger runs. This step is required because, by default, the report trigger is called before the upload trigger, and we would like to achieve the opposite:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[build]
|
||||
triggers = ahriman.core.gitremote.RemotePullTrigger ahriman.core.upload.UploadTrigger ahriman.core.report.ReportTrigger ahriman.core.gitremote.RemotePushTrigger
|
||||
|
||||
In addition, the following settings are recommended for workers:
|
||||
|
||||
*
|
||||
You might want to wait until the report trigger has completed; in this case the following option must be set:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[remote-call]
|
||||
wait_timeout = 0
|
||||
|
||||
Dependency management
"""""""""""""""""""""

By default worker nodes don't know anything about the master node's packages, thus each worker will try to build every dependency on its own. However, using the ``AHRIMAN_REPOSITORY_SERVER`` docker variable (or the ``--server`` flag of the setup command), it is possible to specify the address of the master node for the devtools configuration.

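For example, a worker container might be pointed at the master node repository as follows (a minimal sketch; the repository URL ``http://master.example.com/repo/x86_64`` is only an assumption here and depends on how the repository tree is actually served):

.. code-block:: shell

   # let devtools inside the worker resolve dependencies against the master node repository
   docker run --privileged \
       -e AHRIMAN_REPOSITORY_SERVER="http://master.example.com/repo/x86_64" \
       -v worker.ini:/etc/ahriman.ini.d/overrides.ini \
       arcan1s/ahriman:latest package-add ahriman --now
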
Repository and packages signing
"""""""""""""""""""""""""""""""

You can sign packages on the worker nodes, and the signatures will then be synced to the master node. In order to do so, you need to configure the worker node as follows, e.g.:

.. code-block:: ini

   [sign]
   target = package
   key = 8BE91E5A773FB48AC05CC1EDBED105AED6246B39

Note, however, that in this case the signatures will not be validated on the master node and will just be copied into the repository tree.

If you would like to sign only the database files (aka repository sign), it has to be configured on the master node only, as usual, e.g.:

.. code-block:: ini

   [sign]
   target = repository
   key = 8BE91E5A773FB48AC05CC1EDBED105AED6246B39

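The ``key`` value is the long id of the signing key available on the node. If you are not sure which id to use, it can be looked up with gpg (a generic sketch, not specific to ahriman):

.. code-block:: shell

   # list available secret keys together with their long ids
   gpg --list-secret-keys --keyid-format long
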
Double node minimal docker example
""""""""""""""""""""""""""""""""""

Master node config (``master.ini``) looks as follows:

.. code-block:: ini

   [auth]
   target = mapping

   [web]
   enable_archive_upload = yes
   wait_timeout = 0

Command to run master node:

.. code-block:: shell

   docker run --privileged -p 8080:8080 -e AHRIMAN_PORT=8080 -v master.ini:/etc/ahriman.ini.d/overrides.ini arcan1s/ahriman:latest web

The user ``worker-user`` has been created additionally. Worker node config (``worker.ini``) looks as follows:

.. code-block:: ini

   [web]
   address = http://172.17.0.1:8080
   username = worker-user
   password = very-secure-password

   [upload]
   target = remote-service

   [remote-service]

   [report]
   target = remote-call

   [remote-call]
   manual = yes
   wait_timeout = 0

   [build]
   triggers = ahriman.core.gitremote.RemotePullTrigger ahriman.core.upload.UploadTrigger ahriman.core.report.ReportTrigger ahriman.core.gitremote.RemotePushTrigger

The address above (``http://172.17.0.1:8080``) must be reachable from inside the worker container; here it is the default docker bridge gateway, i.e. the host on which the master node port has been published.

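If you are not sure which address the worker container can use to reach the master node, the default bridge network can be inspected (a quick sketch, assuming the containers run on the default docker ``bridge`` network):

.. code-block:: shell

   # the gateway address listed in the output is reachable from containers on this network
   docker network inspect bridge
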
Command to run worker node:

.. code-block:: shell

   docker run --privileged -v worker.ini:/etc/ahriman.ini.d/overrides.ini -it arcan1s/ahriman:latest package-add ahriman --now

The command above will successfully build the ``ahriman`` package, upload it to the master node and, finally, update the master node repository.

Addition of new package and repository update
"""""""""""""""""""""""""""""""""""""""""""""

Just run the command on the worker as usual; the built packages will be automatically uploaded to the master node. Note that the automatic update process must be disabled on the master node.

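For example (a minimal sketch, reusing the ``package-add`` subcommand from the docker example above; the package name is arbitrary):

.. code-block:: shell

   # run on the worker node; the report and upload triggers deliver the result to the master node
   ahriman package-add ahriman --now
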
Package removal
"""""""""""""""

This action must be done in two steps (a sketch of the commands is shown after this list):

#. Remove the package on the worker.
#. Remove the package on the master node.

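A minimal sketch of the flow (the package name ``ahriman`` is just an example):

.. code-block:: shell

   # step 1: remove the package on the worker which built it
   ahriman package-remove ahriman
   # step 2: remove the package from the master node repository as well
   ahriman package-remove ahriman
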
Maintenance packages
--------------------

|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
# Maintainer: Evgeniy Alekseev
|
||||
|
||||
pkgname='ahriman'
|
||||
pkgver=2.10.2
|
||||
pkgver=2.11.0
|
||||
pkgrel=1
|
||||
pkgdesc="ArcH linux ReposItory MANager"
|
||||
arch=('any')
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
_shtab_ahriman_subparsers=('aur-search' 'search' 'help' 'help-commands-unsafe' 'help-updates' 'help-version' 'version' 'package-add' 'add' 'package-update' 'package-remove' 'remove' 'package-status' 'status' 'package-status-remove' 'package-status-update' 'status-update' 'patch-add' 'patch-list' 'patch-remove' 'patch-set-add' 'repo-backup' 'repo-check' 'check' 'repo-create-keyring' 'repo-create-mirrorlist' 'repo-daemon' 'daemon' 'repo-rebuild' 'rebuild' 'repo-remove-unknown' 'remove-unknown' 'repo-report' 'report' 'repo-restore' 'repo-sign' 'sign' 'repo-status-update' 'repo-sync' 'sync' 'repo-tree' 'repo-triggers' 'repo-update' 'update' 'service-clean' 'clean' 'repo-clean' 'service-config' 'config' 'repo-config' 'service-config-validate' 'config-validate' 'repo-config-validate' 'service-key-import' 'key-import' 'service-setup' 'init' 'repo-init' 'repo-setup' 'setup' 'service-shell' 'shell' 'user-add' 'user-list' 'user-remove' 'web')
|
||||
|
||||
_shtab_ahriman_option_strings=('-h' '--help' '-a' '--architecture' '-c' '--configuration' '--force' '-l' '--lock' '--log-handler' '--report' '--no-report' '-q' '--quiet' '--unsafe' '-V' '--version')
|
||||
_shtab_ahriman_option_strings=('-h' '--help' '-a' '--architecture' '-c' '--configuration' '--force' '-l' '--lock' '--log-handler' '--report' '--no-report' '-q' '--quiet' '--unsafe' '--wait-timeout' '-V' '--version')
|
||||
_shtab_ahriman_aur_search_option_strings=('-h' '--help' '-e' '--exit-code' '--info' '--no-info' '--sort-by')
|
||||
_shtab_ahriman_search_option_strings=('-h' '--help' '-e' '--exit-code' '--info' '--no-info' '--sort-by')
|
||||
_shtab_ahriman_help_option_strings=('-h' '--help')
|
||||
@ -43,7 +43,7 @@ _shtab_ahriman_sign_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_status_update_option_strings=('-h' '--help' '-s' '--status')
|
||||
_shtab_ahriman_repo_sync_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_sync_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_tree_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_tree_option_strings=('-h' '--help' '-p' '--partitions')
|
||||
_shtab_ahriman_repo_triggers_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_repo_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
_shtab_ahriman_update_option_strings=('-h' '--help' '--aur' '--no-aur' '--dependencies' '--no-dependencies' '--dry-run' '-e' '--exit-code' '--increment' '--no-increment' '--local' '--no-local' '--manual' '--no-manual' '-u' '--username' '--vcs' '--no-vcs' '-y' '--refresh')
|
||||
@ -58,11 +58,11 @@ _shtab_ahriman_config_validate_option_strings=('-h' '--help' '-e' '--exit-code')
|
||||
_shtab_ahriman_repo_config_validate_option_strings=('-h' '--help' '-e' '--exit-code')
|
||||
_shtab_ahriman_service_key_import_option_strings=('-h' '--help' '--key-server')
|
||||
_shtab_ahriman_key_import_option_strings=('-h' '--help' '--key-server')
|
||||
_shtab_ahriman_service_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_repo_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_repo_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_service_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_repo_init_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_repo_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_setup_option_strings=('-h' '--help' '--build-as-user' '--build-command' '--from-configuration' '--generate-salt' '--no-generate-salt' '--makeflags-jobs' '--no-makeflags-jobs' '--mirror' '--multilib' '--no-multilib' '--packager' '--repository' '--server' '--sign-key' '--sign-target' '--web-port' '--web-unix-socket')
|
||||
_shtab_ahriman_service_shell_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_shell_option_strings=('-h' '--help')
|
||||
_shtab_ahriman_user_add_option_strings=('-h' '--help' '--key' '--packager' '-p' '--password' '-r' '--role')
|
||||
|
@ -1,9 +1,9 @@
|
||||
.TH AHRIMAN "1" "2023\-08\-07" "ahriman" "Generated Python Manual"
|
||||
.TH AHRIMAN "1" "2023\-08\-26" "ahriman" "Generated Python Manual"
|
||||
.SH NAME
|
||||
ahriman
|
||||
.SH SYNOPSIS
|
||||
.B ahriman
|
||||
[-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--log-handler {console,syslog,journald}] [--report | --no-report] [-q] [--unsafe] [-V] {aur-search,search,help,help-commands-unsafe,help-updates,help-version,version,package-add,add,package-update,package-remove,remove,package-status,status,package-status-remove,package-status-update,status-update,patch-add,patch-list,patch-remove,patch-set-add,repo-backup,repo-check,check,repo-create-keyring,repo-create-mirrorlist,repo-daemon,daemon,repo-rebuild,rebuild,repo-remove-unknown,remove-unknown,repo-report,report,repo-restore,repo-sign,sign,repo-status-update,repo-sync,sync,repo-tree,repo-triggers,repo-update,update,service-clean,clean,repo-clean,service-config,config,repo-config,service-config-validate,config-validate,repo-config-validate,service-key-import,key-import,service-setup,init,repo-init,repo-setup,setup,service-shell,shell,user-add,user-list,user-remove,web} ...
|
||||
[-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--log-handler {console,syslog,journald}] [--report | --no-report] [-q] [--unsafe] [--wait-timeout WAIT_TIMEOUT] [-V] {aur-search,search,help,help-commands-unsafe,help-updates,help-version,version,package-add,add,package-update,package-remove,remove,package-status,status,package-status-remove,package-status-update,status-update,patch-add,patch-list,patch-remove,patch-set-add,repo-backup,repo-check,check,repo-create-keyring,repo-create-mirrorlist,repo-daemon,daemon,repo-rebuild,rebuild,repo-remove-unknown,remove-unknown,repo-report,report,repo-restore,repo-sign,sign,repo-status-update,repo-sync,sync,repo-tree,repo-triggers,repo-update,update,service-clean,clean,repo-clean,service-config,config,repo-config,service-config-validate,config-validate,repo-config-validate,service-key-import,key-import,service-setup,init,repo-init,repo-setup,setup,service-shell,shell,user-add,user-list,user-remove,web} ...
|
||||
.SH DESCRIPTION
|
||||
ArcH linux ReposItory MANager
|
||||
|
||||
@ -40,6 +40,11 @@ force disable any logging
|
||||
\fB\-\-unsafe\fR
|
||||
allow to run ahriman as non\-ahriman user. Some actions might be unavailable
|
||||
|
||||
.TP
|
||||
\fB\-\-wait\-timeout\fR \fI\,WAIT_TIMEOUT\/\fR
|
||||
wait for lock to be free. Negative value will lead to immediate application run even if there is lock file. In case of
|
||||
zero value, the application will wait infinitely
|
||||
|
||||
.TP
|
||||
\fB\-V\fR, \fB\-\-version\fR
|
||||
show program's version number and exit
|
||||
@ -553,10 +558,15 @@ usage: ahriman repo\-sync [\-h]
|
||||
sync repository files to remote server according to current settings
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-tree'\/\fR
|
||||
usage: ahriman repo\-tree [\-h]
|
||||
usage: ahriman repo\-tree [\-h] [\-p PARTITIONS]
|
||||
|
||||
dump repository tree based on packages dependencies
|
||||
|
||||
.SH OPTIONS \fI\,'ahriman repo\-tree'\/\fR
|
||||
.TP
|
||||
\fB\-p\fR \fI\,PARTITIONS\/\fR, \fB\-\-partitions\fR \fI\,PARTITIONS\/\fR
|
||||
also divide packages by independent partitions
|
||||
|
||||
.SH COMMAND \fI\,'ahriman repo\-triggers'\/\fR
|
||||
usage: ahriman repo\-triggers [\-h] [trigger ...]
|
||||
|
||||
@ -684,7 +694,7 @@ key server for key import
|
||||
usage: ahriman service\-setup [\-h] [\-\-build\-as\-user BUILD_AS_USER] [\-\-build\-command BUILD_COMMAND]
|
||||
[\-\-from\-configuration FROM_CONFIGURATION] [\-\-generate\-salt | \-\-no\-generate\-salt]
|
||||
[\-\-makeflags\-jobs | \-\-no\-makeflags\-jobs] [\-\-mirror MIRROR] [\-\-multilib | \-\-no\-multilib]
|
||||
\-\-packager PACKAGER \-\-repository REPOSITORY [\-\-sign\-key SIGN_KEY]
|
||||
\-\-packager PACKAGER \-\-repository REPOSITORY [\-\-server SERVER] [\-\-sign\-key SIGN_KEY]
|
||||
[\-\-sign\-target {disabled,packages,repository}] [\-\-web\-port WEB_PORT]
|
||||
[\-\-web\-unix\-socket WEB_UNIX_SOCKET]
|
||||
|
||||
@ -727,6 +737,10 @@ packager name and email
|
||||
\fB\-\-repository\fR \fI\,REPOSITORY\/\fR
|
||||
repository name
|
||||
|
||||
.TP
|
||||
\fB\-\-server\fR \fI\,SERVER\/\fR
|
||||
server to be used for devtools. If none set, local files will be used
|
||||
|
||||
.TP
|
||||
\fB\-\-sign\-key\fR \fI\,SIGN_KEY\/\fR
|
||||
sign key id
|
||||
|
@ -85,6 +85,7 @@ _shtab_ahriman_options=(
|
||||
{--report,--no-report}"[force enable or disable reporting to web service (default\: True)]:report:"
|
||||
{-q,--quiet}"[force disable any logging (default\: False)]"
|
||||
"--unsafe[allow to run ahriman as non-ahriman user. Some actions might be unavailable (default\: False)]"
|
||||
"--wait-timeout[wait for lock to be free. Negative value will lead to immediate application run even if there is lock file. In case of zero value, the application will wait infinitely (default\: -1)]:wait_timeout:"
|
||||
"(- : *)"{-V,--version}"[show program\'s version number and exit]"
|
||||
)
|
||||
|
||||
@ -176,6 +177,7 @@ _shtab_ahriman_init_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
@ -346,6 +348,7 @@ _shtab_ahriman_repo_init_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
@ -389,6 +392,7 @@ _shtab_ahriman_repo_setup_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
@ -411,6 +415,7 @@ _shtab_ahriman_repo_sync_options=(
|
||||
|
||||
_shtab_ahriman_repo_tree_options=(
|
||||
"(- : *)"{-h,--help}"[show this help message and exit]"
|
||||
{-p,--partitions}"[also divide packages by independent partitions (default\: 1)]:partitions:"
|
||||
)
|
||||
|
||||
_shtab_ahriman_repo_triggers_options=(
|
||||
@ -481,6 +486,7 @@ _shtab_ahriman_service_setup_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
@ -503,6 +509,7 @@ _shtab_ahriman_setup_options=(
|
||||
{--multilib,--no-multilib}"[add or do not multilib repository (default\: True)]:multilib:"
|
||||
"--packager[packager name and email (default\: None)]:packager:"
|
||||
"--repository[repository name (default\: None)]:repository:"
|
||||
"--server[server to be used for devtools. If none set, local files will be used (default\: None)]:server:"
|
||||
"--sign-key[sign key id (default\: None)]:sign_key:"
|
||||
"*--sign-target[sign options (default\: None)]:sign_target:(disabled packages repository)"
|
||||
"--web-port[port of the web service (default\: None)]:web_port:"
|
||||
|
@ -17,4 +17,4 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
__version__ = "2.10.2"
|
||||
__version__ = "2.11.0"
|
||||
|
@ -84,6 +84,10 @@ def _parser() -> argparse.ArgumentParser:
|
||||
parser.add_argument("-q", "--quiet", help="force disable any logging", action="store_true")
|
||||
parser.add_argument("--unsafe", help="allow to run ahriman as non-ahriman user. Some actions might be unavailable",
|
||||
action="store_true")
|
||||
parser.add_argument("--wait-timeout", help="wait for lock to be free. Negative value will lead to "
|
||||
"immediate application run even if there is lock file. "
|
||||
"In case of zero value, the application will wait infinitely",
|
||||
type=int, default=-1)
|
||||
parser.add_argument("-V", "--version", action="version", version=__version__)
|
||||
|
||||
subparsers = parser.add_subparsers(title="command", help="command to run", dest="command", required=True)
|
||||
@ -711,6 +715,8 @@ def _set_repo_tree_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
parser = root.add_parser("repo-tree", help="dump repository tree",
|
||||
description="dump repository tree based on packages dependencies",
|
||||
formatter_class=_formatter)
|
||||
parser.add_argument("-p", "--partitions", help="also divide packages by independent partitions",
|
||||
type=int, default=1)
|
||||
parser.set_defaults(handler=handlers.Structure, lock=None, report=False, quiet=True, unsafe=True)
|
||||
return parser
|
||||
|
||||
@ -889,6 +895,7 @@ def _set_service_setup_parser(root: SubParserAction) -> argparse.ArgumentParser:
|
||||
action=argparse.BooleanOptionalAction, default=True)
|
||||
parser.add_argument("--packager", help="packager name and email", required=True)
|
||||
parser.add_argument("--repository", help="repository name", required=True)
|
||||
parser.add_argument("--server", help="server to be used for devtools. If none set, local files will be used")
|
||||
parser.add_argument("--sign-key", help="sign key id")
|
||||
parser.add_argument("--sign-target", help="sign options", action="append",
|
||||
type=SignSettings.from_option, choices=enum_values(SignSettings))
|
||||
|
@ -73,6 +73,9 @@ class ApplicationPackages(ApplicationProperties):
|
||||
|
||||
Args:
|
||||
source(str): path to local directory
|
||||
|
||||
Raises:
|
||||
UnknownPackageError: if specified package is unknown or doesn't exist
|
||||
"""
|
||||
local_dir = Path(source)
|
||||
if not local_dir.is_dir():
|
||||
@ -110,8 +113,10 @@ class ApplicationPackages(ApplicationProperties):
|
||||
|
||||
Args:
|
||||
source(str): remote URL of the package archive
|
||||
|
||||
Raises:
|
||||
UnknownPackageError: if specified package is unknown or doesn't exist
|
||||
"""
|
||||
dst = self.repository.paths.packages / Path(source).name # URL is path, is not it?
|
||||
# timeout=None to suppress pylint warns. Also suppress bandit warnings
|
||||
try:
|
||||
response = requests.get(source, stream=True, timeout=None) # nosec
|
||||
@ -119,6 +124,7 @@ class ApplicationPackages(ApplicationProperties):
|
||||
except Exception:
|
||||
raise UnknownPackageError(source)
|
||||
|
||||
dst = self.repository.paths.packages / Path(source).name # URL is path, is not it?
|
||||
with dst.open("wb") as local_file:
|
||||
for chunk in response.iter_content(chunk_size=1024):
|
||||
local_file.write(chunk)
|
||||
|
@ -61,7 +61,7 @@ class Handler:
|
||||
list[str]: list of architectures for which tree is created
|
||||
|
||||
Raises:
|
||||
MissingArchitecture: if no architecture set and automatic detection is not allowed or failed
|
||||
MissingArchitectureError: if no architecture set and automatic detection is not allowed or failed
|
||||
"""
|
||||
if not cls.ALLOW_AUTO_ARCHITECTURE_RUN and args.architecture is None:
|
||||
# for some parsers (e.g. config) we need to run with specific architecture
|
||||
@ -94,10 +94,13 @@ class Handler:
|
||||
"""
|
||||
try:
|
||||
configuration = Configuration.from_path(args.configuration, architecture)
|
||||
|
||||
log_handler = Log.handler(args.log_handler)
|
||||
Log.load(configuration, log_handler, quiet=args.quiet, report=args.report)
|
||||
|
||||
with Lock(args, architecture, configuration):
|
||||
cls.run(args, architecture, configuration, report=args.report)
|
||||
|
||||
return True
|
||||
except ExitCode:
|
||||
return False
|
||||
@ -118,7 +121,7 @@ class Handler:
|
||||
int: 0 on success, 1 otherwise
|
||||
|
||||
Raises:
|
||||
MultipleArchitectures: if more than one architecture supplied and no multi architecture supported
|
||||
MultipleArchitecturesError: if more than one architecture supplied and no multi architecture supported
|
||||
"""
|
||||
architectures = cls.architectures_extract(args)
|
||||
|
||||
@ -128,8 +131,7 @@ class Handler:
|
||||
raise MultipleArchitecturesError(args.command)
|
||||
|
||||
with Pool(len(architectures)) as pool:
|
||||
result = pool.starmap(
|
||||
cls.call, [(args, architecture) for architecture in architectures])
|
||||
result = pool.starmap(cls.call, [(args, architecture) for architecture in architectures])
|
||||
else:
|
||||
result = [cls.call(args, architectures.pop())]
|
||||
|
||||
@ -164,4 +166,4 @@ class Handler:
|
||||
ExitCode: if result is empty and check is enabled
|
||||
"""
|
||||
if enabled and predicate:
|
||||
raise ExitCode()
|
||||
raise ExitCode
|
||||
|
@ -51,16 +51,17 @@ class Patch(Handler):
|
||||
application = Application(architecture, configuration, report=report)
|
||||
application.on_start()
|
||||
|
||||
if args.action == Action.Update and args.variable is not None:
|
||||
patch = Patch.patch_create_from_function(args.variable, args.patch)
|
||||
Patch.patch_set_create(application, args.package, patch)
|
||||
elif args.action == Action.Update and args.variable is None:
|
||||
package_base, patch = Patch.patch_create_from_diff(args.package, architecture, args.track)
|
||||
Patch.patch_set_create(application, package_base, patch)
|
||||
elif args.action == Action.List:
|
||||
Patch.patch_set_list(application, args.package, args.variable, args.exit_code)
|
||||
elif args.action == Action.Remove:
|
||||
Patch.patch_set_remove(application, args.package, args.variable)
|
||||
match args.action:
|
||||
case Action.Update if args.variable is not None:
|
||||
patch = Patch.patch_create_from_function(args.variable, args.patch)
|
||||
Patch.patch_set_create(application, args.package, patch)
|
||||
case Action.Update:
|
||||
package_base, patch = Patch.patch_create_from_diff(args.package, architecture, args.track)
|
||||
Patch.patch_set_create(application, package_base, patch)
|
||||
case Action.List:
|
||||
Patch.patch_set_list(application, args.package, args.variable, args.exit_code)
|
||||
case Action.Remove:
|
||||
Patch.patch_set_remove(application, args.package, args.variable)
|
||||
|
||||
@staticmethod
|
||||
def patch_create_from_diff(sources_dir: Path, architecture: str, track: list[str]) -> tuple[str, PkgbuildPatch]:
|
||||
|
@ -81,7 +81,7 @@ class Search(Handler):
|
||||
list[AURPackage]: sorted list for packages
|
||||
|
||||
Raises:
|
||||
InvalidOption: if search fields is not in list of allowed ones
|
||||
OptionError: if search fields is not in list of allowed ones
|
||||
"""
|
||||
if sort_by not in Search.SORT_FIELDS:
|
||||
raise OptionError(sort_by)
|
||||
|
@ -48,7 +48,7 @@ class ServiceUpdates(Handler):
|
||||
application = Application(architecture, configuration, report=report)
|
||||
|
||||
remote = Package.from_aur("ahriman", application.repository.pacman, None)
|
||||
release = remote.version.rsplit("-", 1)[-1] # we don't store pkgrel locally, so we just append it
|
||||
_, release = remote.version.rsplit("-", 1) # we don't store pkgrel locally, so we just append it
|
||||
local_version = f"{__version__}-{release}"
|
||||
|
||||
# technically we would like to compare versions, but it is fine to raise an exception in case if locally
|
||||
|
@ -63,8 +63,9 @@ class Setup(Handler):
|
||||
|
||||
Setup.configuration_create_makepkg(args.packager, args.makeflags_jobs, application.repository.paths)
|
||||
Setup.executable_create(application.repository.paths, args.build_command, architecture)
|
||||
repository_server = f"file://{application.repository.paths.repository}" if args.server is None else args.server
|
||||
Setup.configuration_create_devtools(args.build_command, architecture, args.from_configuration, args.mirror,
|
||||
args.multilib, args.repository, application.repository.paths)
|
||||
args.multilib, args.repository, repository_server)
|
||||
Setup.configuration_create_sudo(application.repository.paths, args.build_command, architecture)
|
||||
|
||||
application.repository.repo.init()
|
||||
@ -134,7 +135,7 @@ class Setup(Handler):
|
||||
|
||||
@staticmethod
|
||||
def configuration_create_devtools(prefix: str, architecture: str, source: Path, mirror: str | None,
|
||||
multilib: bool, repository: str, paths: RepositoryPaths) -> None:
|
||||
multilib: bool, repository: str, repository_server: str) -> None:
|
||||
"""
|
||||
create configuration for devtools based on ``source`` configuration
|
||||
|
||||
@ -148,7 +149,7 @@ class Setup(Handler):
|
||||
mirror(str | None): link to package server mirror
|
||||
multilib(bool): add or do not multilib repository to the configuration
|
||||
repository(str): repository name
|
||||
paths(RepositoryPaths): repository paths instance
|
||||
repository_server(str): url of the repository
|
||||
"""
|
||||
# allow_no_value=True is required because pacman uses boolean configuration in which just keys present
|
||||
# (e.g. NoProgressBar) which will lead to exception
|
||||
@ -178,7 +179,7 @@ class Setup(Handler):
|
||||
|
||||
# add repository itself
|
||||
configuration.set_option(repository, "SigLevel", "Never") # we don't care
|
||||
configuration.set_option(repository, "Server", f"file://{paths.repository}")
|
||||
configuration.set_option(repository, "Server", repository_server)
|
||||
|
||||
target = source.parent / f"{prefix}-{architecture}.conf"
|
||||
with target.open("w") as devtools_configuration:
|
||||
|
@ -50,14 +50,14 @@ class Status(Handler):
|
||||
# we are using reporter here
|
||||
client = Application(architecture, configuration, report=True).repository.reporter
|
||||
if args.ahriman:
|
||||
service_status = client.get_internal()
|
||||
service_status = client.status_get()
|
||||
StatusPrinter(service_status.status).print(verbose=args.info)
|
||||
if args.package:
|
||||
packages: list[tuple[Package, BuildStatus]] = sum(
|
||||
(client.get(base) for base in args.package),
|
||||
(client.package_get(base) for base in args.package),
|
||||
start=[])
|
||||
else:
|
||||
packages = client.get(None)
|
||||
packages = client.package_get(None)
|
||||
|
||||
Status.check_if_empty(args.exit_code, not packages)
|
||||
|
||||
|
@ -46,13 +46,14 @@ class StatusUpdate(Handler):
|
||||
# we are using reporter here
|
||||
client = Application(architecture, configuration, report=True).repository.reporter
|
||||
|
||||
if args.action == Action.Update and args.package:
|
||||
# update packages statuses
|
||||
for package in args.package:
|
||||
client.update(package, args.status)
|
||||
elif args.action == Action.Update:
|
||||
# update service status
|
||||
client.update_self(args.status)
|
||||
elif args.action == Action.Remove:
|
||||
for package in args.package:
|
||||
client.remove(package)
|
||||
match args.action:
|
||||
case Action.Update if args.package:
|
||||
# update packages statuses
|
||||
for package in args.package:
|
||||
client.package_update(package, args.status)
|
||||
case Action.Update:
|
||||
# update service status
|
||||
client.status_update(args.status)
|
||||
case Action.Remove:
|
||||
for package in args.package:
|
||||
client.package_remove(package)
|
||||
|
@ -22,7 +22,7 @@ import argparse
|
||||
from ahriman.application.application import Application
|
||||
from ahriman.application.handlers import Handler
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.formatters import TreePrinter
|
||||
from ahriman.core.formatters import StringPrinter, TreePrinter
|
||||
from ahriman.core.tree import Tree
|
||||
|
||||
|
||||
@ -45,8 +45,14 @@ class Structure(Handler):
|
||||
report(bool): force enable or disable reporting
|
||||
"""
|
||||
application = Application(architecture, configuration, report=report)
|
||||
packages = application.repository.packages()
|
||||
partitions = Tree.partition(application.repository.packages(), count=args.partitions)
|
||||
|
||||
tree = Tree.resolve(packages)
|
||||
for num, level in enumerate(tree):
|
||||
TreePrinter(num, level).print(verbose=True, separator=" ")
|
||||
for partition_id, partition in enumerate(partitions):
|
||||
StringPrinter(f"partition #{partition_id}").print(verbose=False)
|
||||
|
||||
tree = Tree.resolve(partition)
|
||||
for num, level in enumerate(tree):
|
||||
TreePrinter(num, level).print(verbose=True, separator=" ")
|
||||
|
||||
# empty line
|
||||
StringPrinter("").print(verbose=False)
|
||||
|
@ -49,18 +49,19 @@ class Users(Handler):
|
||||
"""
|
||||
database = SQLite.load(configuration)
|
||||
|
||||
if args.action == Action.Update:
|
||||
user = Users.user_create(args)
|
||||
# if password is left blank we are not going to require salt to be set
|
||||
salt = configuration.get("auth", "salt", fallback="") if user.password else ""
|
||||
database.user_update(user.hash_password(salt))
|
||||
elif args.action == Action.List:
|
||||
users = database.user_list(args.username, args.role)
|
||||
Users.check_if_empty(args.exit_code, not users)
|
||||
for user in users:
|
||||
UserPrinter(user).print(verbose=True)
|
||||
elif args.action == Action.Remove:
|
||||
database.user_remove(args.username)
|
||||
match args.action:
|
||||
case Action.Update:
|
||||
user = Users.user_create(args)
|
||||
# if password is left blank we are not going to require salt to be set
|
||||
salt = configuration.get("auth", "salt", fallback="") if user.password else ""
|
||||
database.user_update(user.hash_password(salt))
|
||||
case Action.List:
|
||||
users = database.user_list(args.username, args.role)
|
||||
Users.check_if_empty(args.exit_code, not users)
|
||||
for user in users:
|
||||
UserPrinter(user).print(verbose=True)
|
||||
case Action.Remove:
|
||||
database.user_remove(args.username)
|
||||
|
||||
@staticmethod
|
||||
def user_create(args: argparse.Namespace) -> User:
|
||||
@ -72,6 +73,9 @@ class Users(Handler):
|
||||
|
||||
Returns:
|
||||
User: built user descriptor
|
||||
|
||||
Raises:
|
||||
PasswordError: password input is invalid
|
||||
"""
|
||||
def read_password() -> str:
|
||||
first_password = getpass.getpass()
|
||||
|
@ -33,7 +33,6 @@ class Web(Handler):
|
||||
|
||||
ALLOW_AUTO_ARCHITECTURE_RUN = False
|
||||
ALLOW_MULTI_ARCHITECTURE_RUN = False # required to be able to spawn external processes
|
||||
COMMAND_ARGS_WHITELIST = ["force", "log_handler", ""]
|
||||
|
||||
@classmethod
|
||||
def run(cls, args: argparse.Namespace, architecture: str, configuration: Configuration, *, report: bool) -> None:
|
||||
@ -89,3 +88,7 @@ class Web(Handler):
|
||||
yield "--quiet"
|
||||
if args.unsafe:
|
||||
yield "--unsafe"
|
||||
|
||||
# arguments from configuration
|
||||
if (wait_timeout := configuration.getint("web", "wait_timeout", fallback=None)) is not None:
|
||||
yield from ["--wait-timeout", str(wait_timeout)]
|
||||
|
@ -19,6 +19,7 @@
|
||||
#
|
||||
import argparse
|
||||
|
||||
from pathlib import Path
|
||||
from types import TracebackType
|
||||
from typing import Literal, Self
|
||||
|
||||
@ -29,6 +30,7 @@ from ahriman.core.log import LazyLogging
|
||||
from ahriman.core.status.client import Client
|
||||
from ahriman.core.util import check_user
|
||||
from ahriman.models.build_status import BuildStatusEnum
|
||||
from ahriman.models.waiter import Waiter
|
||||
|
||||
|
||||
class Lock(LazyLogging):
|
||||
@ -41,6 +43,7 @@ class Lock(LazyLogging):
|
||||
reporter(Client): build status reporter instance
|
||||
paths(RepositoryPaths): repository paths instance
|
||||
unsafe(bool): skip user check
|
||||
wait_timeout(int): wait in seconds until lock will free
|
||||
|
||||
Examples:
|
||||
Instance of this class except for controlling file-based lock is also required for basic applications checks.
|
||||
@ -65,9 +68,11 @@ class Lock(LazyLogging):
|
||||
architecture(str): repository architecture
|
||||
configuration(Configuration): configuration instance
|
||||
"""
|
||||
self.path = args.lock.with_stem(f"{args.lock.stem}_{architecture}") if args.lock is not None else None
|
||||
self.force = args.force
|
||||
self.unsafe = args.unsafe
|
||||
self.path: Path | None = \
|
||||
args.lock.with_stem(f"{args.lock.stem}_{architecture}") if args.lock is not None else None
|
||||
self.force: bool = args.force
|
||||
self.unsafe: bool = args.unsafe
|
||||
self.wait_timeout: int = args.wait_timeout
|
||||
|
||||
self.paths = configuration.repository_paths
|
||||
self.reporter = Client.load(configuration, report=args.report)
|
||||
@ -76,7 +81,7 @@ class Lock(LazyLogging):
|
||||
"""
|
||||
check web server version
|
||||
"""
|
||||
status = self.reporter.get_internal()
|
||||
status = self.reporter.status_get()
|
||||
if status.version is not None and status.version != __version__:
|
||||
self.logger.warning("status watcher version mismatch, our %s, their %s",
|
||||
__version__, status.version)
|
||||
@ -101,14 +106,27 @@ class Lock(LazyLogging):
|
||||
create lock file
|
||||
|
||||
Raises:
|
||||
DuplicateRun: if lock exists and no force flag supplied
|
||||
DuplicateRunError: if lock exists and no force flag supplied
|
||||
"""
|
||||
if self.path is None:
|
||||
return
|
||||
try:
|
||||
self.path.touch(exist_ok=self.force)
|
||||
except FileExistsError:
|
||||
raise DuplicateRunError()
|
||||
raise DuplicateRunError from None
|
||||
|
||||
def watch(self) -> None:
|
||||
"""
|
||||
watch until lock disappear
|
||||
"""
|
||||
# there are reasons why we are not using inotify here. First of all, if we would use it, it would bring to
|
||||
# race conditions because multiple processes will be notified in the same time. Secondly, it is good library,
|
||||
# but platform-specific, and we only need to check if file exists
|
||||
if self.path is None:
|
||||
return
|
||||
|
||||
waiter = Waiter(self.wait_timeout)
|
||||
waiter.wait(self.path.is_file)
|
||||
|
||||
def __enter__(self) -> Self:
|
||||
"""
|
||||
@ -117,16 +135,18 @@ class Lock(LazyLogging):
|
||||
1. Check user UID
|
||||
2. Check if there is lock file
|
||||
3. Check web status watcher status
|
||||
4. Create lock file and directory tree
|
||||
5. Report to status page if enabled
|
||||
4. Wait for lock file to be free
|
||||
5. Create lock file and directory tree
|
||||
6. Report to status page if enabled
|
||||
|
||||
Returns:
|
||||
Self: always instance of self
|
||||
"""
|
||||
self.check_user()
|
||||
self.check_version()
|
||||
self.watch()
|
||||
self.create()
|
||||
self.reporter.update_self(BuildStatusEnum.Building)
|
||||
self.reporter.status_update(BuildStatusEnum.Building)
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type: type[Exception] | None, exc_val: Exception | None,
|
||||
@ -144,5 +164,5 @@ class Lock(LazyLogging):
|
||||
"""
|
||||
self.clear()
|
||||
status = BuildStatusEnum.Success if exc_val is None else BuildStatusEnum.Failed
|
||||
self.reporter.update_self(status)
|
||||
self.reporter.status_update(status)
|
||||
return False
|
||||
|
@ -49,6 +49,16 @@ class Pacman(LazyLogging):
|
||||
self.__create_handle_fn: Callable[[], Handle] = lambda: self.__create_handle(
|
||||
architecture, configuration, refresh_database=refresh_database)
|
||||
|
||||
@cached_property
|
||||
def handle(self) -> Handle:
|
||||
"""
|
||||
pyalpm handle
|
||||
|
||||
Returns:
|
||||
Handle: generated pyalpm handle instance
|
||||
"""
|
||||
return self.__create_handle_fn()
|
||||
|
||||
def __create_handle(self, architecture: str, configuration: Configuration, *,
|
||||
refresh_database: PacmanSynchronization) -> Handle:
|
||||
"""
|
||||
@ -79,16 +89,6 @@ class Pacman(LazyLogging):
|
||||
|
||||
return handle
|
||||
|
||||
@cached_property
|
||||
def handle(self) -> Handle:
|
||||
"""
|
||||
pyalpm handle
|
||||
|
||||
Returns:
|
||||
Handle: generated pyalpm handle instance
|
||||
"""
|
||||
return self.__create_handle_fn()
|
||||
|
||||
def database_copy(self, handle: Handle, database: DB, pacman_root: Path, paths: RepositoryPaths, *,
|
||||
use_ahriman_cache: bool) -> None:
|
||||
"""
|
||||
@ -116,7 +116,7 @@ class Pacman(LazyLogging):
|
||||
src = repository_database(pacman_root)
|
||||
if not src.is_file():
|
||||
self.logger.warning("repository %s is set to be used, however, no working copy was found", database.name)
|
||||
return # database for some reasons deos not exist
|
||||
return # database for some reason deos not exist
|
||||
self.logger.info("copy pacman database from operating system root to ahriman's home")
|
||||
shutil.copy(src, dst)
|
||||
paths.chown(dst)
|
||||
|
@ -17,14 +17,11 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import requests
|
||||
|
||||
from typing import Any
|
||||
|
||||
from ahriman.core.alpm.pacman import Pacman
|
||||
from ahriman.core.alpm.remote import Remote
|
||||
from ahriman.core.exceptions import PackageInfoError, UnknownPackageError
|
||||
from ahriman.core.util import exception_response_text
|
||||
from ahriman.models.aur_package import AURPackage
|
||||
|
||||
|
||||
@ -36,13 +33,11 @@ class AUR(Remote):
|
||||
DEFAULT_AUR_URL(str): (class attribute) default AUR url
|
||||
DEFAULT_RPC_URL(str): (class attribute) default AUR RPC url
|
||||
DEFAULT_RPC_VERSION(str): (class attribute) default AUR RPC version
|
||||
DEFAULT_TIMEOUT(int): (class attribute) HTTP request timeout in seconds
|
||||
"""
|
||||
|
||||
DEFAULT_AUR_URL = "https://aur.archlinux.org"
|
||||
DEFAULT_RPC_URL = f"{DEFAULT_AUR_URL}/rpc"
|
||||
DEFAULT_RPC_VERSION = "5"
|
||||
DEFAULT_TIMEOUT = 30
|
||||
|
||||
@classmethod
|
||||
def remote_git_url(cls, package_base: str, repository: str) -> str:
|
||||
@ -83,7 +78,7 @@ class AUR(Remote):
|
||||
list[AURPackage]: list of parsed packages
|
||||
|
||||
Raises:
|
||||
InvalidPackageInfo: for error API response
|
||||
PackageInfoError: for error API response
|
||||
"""
|
||||
response_type = response["type"]
|
||||
if response_type == "error":
|
||||
@ -91,7 +86,7 @@ class AUR(Remote):
|
||||
raise PackageInfoError(error_details)
|
||||
return [AURPackage.from_json(package) for package in response["results"]]
|
||||
|
||||
def make_request(self, request_type: str, *args: str, **kwargs: str) -> list[AURPackage]:
|
||||
def aur_request(self, request_type: str, *args: str, **kwargs: str) -> list[AURPackage]:
|
||||
"""
|
||||
perform request to AUR RPC
|
||||
|
||||
@ -103,34 +98,20 @@ class AUR(Remote):
|
||||
Returns:
|
||||
list[AURPackage]: response parsed to package list
|
||||
"""
|
||||
query: dict[str, Any] = {
|
||||
"type": request_type,
|
||||
"v": self.DEFAULT_RPC_VERSION
|
||||
}
|
||||
query: list[tuple[str, str]] = [
|
||||
("type", request_type),
|
||||
("v", self.DEFAULT_RPC_VERSION),
|
||||
]
|
||||
|
||||
arg_query = "arg[]" if len(args) > 1 else "arg"
|
||||
query[arg_query] = list(args)
|
||||
for arg in args:
|
||||
query.append((arg_query, arg))
|
||||
|
||||
for key, value in kwargs.items():
|
||||
query[key] = value
|
||||
query.append((key, value))
|
||||
|
||||
try:
|
||||
response = requests.get(
|
||||
self.DEFAULT_RPC_URL,
|
||||
params=query,
|
||||
headers={"User-Agent": self.DEFAULT_USER_AGENT},
|
||||
timeout=self.DEFAULT_TIMEOUT)
|
||||
response.raise_for_status()
|
||||
return self.parse_response(response.json())
|
||||
except requests.HTTPError as e:
|
||||
self.logger.exception(
|
||||
"could not perform request by using type %s: %s",
|
||||
request_type,
|
||||
exception_response_text(e))
|
||||
raise
|
||||
except Exception:
|
||||
self.logger.exception("could not perform request by using type %s", request_type)
|
||||
raise
|
||||
response = self.make_request("GET", self.DEFAULT_RPC_URL, params=query)
|
||||
return self.parse_response(response.json())
|
||||
|
||||
def package_info(self, package_name: str, *, pacman: Pacman) -> AURPackage:
|
||||
"""
|
||||
@ -142,12 +123,15 @@ class AUR(Remote):
|
||||
|
||||
Returns:
|
||||
AURPackage: package which match the package name
|
||||
|
||||
Raises:
|
||||
UnknownPackageError: package doesn't exist
|
||||
"""
|
||||
packages = self.make_request("info", package_name)
|
||||
packages = self.aur_request("info", package_name)
|
||||
try:
|
||||
return next(package for package in packages if package.name == package_name)
|
||||
except StopIteration:
|
||||
raise UnknownPackageError(package_name)
|
||||
raise UnknownPackageError(package_name) from None
|
||||
|
||||
def package_search(self, *keywords: str, pacman: Pacman) -> list[AURPackage]:
|
||||
"""
|
||||
@ -160,4 +144,4 @@ class AUR(Remote):
|
||||
Returns:
|
||||
list[AURPackage]: list of packages which match the criteria
|
||||
"""
|
||||
return self.make_request("search", *keywords, by="name-desc")
|
||||
return self.aur_request("search", *keywords, by="name-desc")
|
||||
|
@ -17,14 +17,11 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import requests
|
||||
|
||||
from typing import Any
|
||||
|
||||
from ahriman.core.alpm.pacman import Pacman
|
||||
from ahriman.core.alpm.remote import Remote
|
||||
from ahriman.core.exceptions import PackageInfoError, UnknownPackageError
|
||||
from ahriman.core.util import exception_response_text
|
||||
from ahriman.models.aur_package import AURPackage
|
||||
|
||||
|
||||
@ -37,14 +34,12 @@ class Official(Remote):
|
||||
DEFAULT_ARCHLINUX_GIT_URL(str): (class attribute) default url for git packages
|
||||
DEFAULT_SEARCH_REPOSITORIES(list[str]): (class attribute) default list of repositories to search
|
||||
DEFAULT_RPC_URL(str): (class attribute) default archlinux repositories RPC url
|
||||
DEFAULT_TIMEOUT(int): (class attribute) HTTP request timeout in seconds
|
||||
"""
|
||||
|
||||
DEFAULT_ARCHLINUX_GIT_URL = "https://gitlab.archlinux.org"
|
||||
DEFAULT_ARCHLINUX_URL = "https://archlinux.org"
|
||||
DEFAULT_SEARCH_REPOSITORIES = ["Core", "Extra", "Multilib"]
|
||||
DEFAULT_RPC_URL = "https://archlinux.org/packages/search/json"
|
||||
DEFAULT_TIMEOUT = 30
|
||||
|
||||
@classmethod
|
||||
def remote_git_url(cls, package_base: str, repository: str) -> str:
|
||||
@ -85,13 +80,13 @@ class Official(Remote):
|
||||
list[AURPackage]: list of parsed packages
|
||||
|
||||
Raises:
|
||||
InvalidPackageInfo: for error API response
|
||||
PackageInfoError: for error API response
|
||||
"""
|
||||
if not response["valid"]:
|
||||
raise PackageInfoError("API validation error")
|
||||
return [AURPackage.from_repo(package) for package in response["results"]]
|
||||
|
||||
def make_request(self, *args: str, by: str) -> list[AURPackage]:
|
||||
def arch_request(self, *args: str, by: str) -> list[AURPackage]:
|
||||
"""
|
||||
perform request to official repositories RPC
|
||||
|
||||
@ -102,20 +97,15 @@ class Official(Remote):
|
||||
Returns:
|
||||
list[AURPackage]: response parsed to package list
|
||||
"""
|
||||
try:
|
||||
response = requests.get(
|
||||
self.DEFAULT_RPC_URL,
|
||||
params={by: args, "repo": self.DEFAULT_SEARCH_REPOSITORIES},
|
||||
headers={"User-Agent": self.DEFAULT_USER_AGENT},
|
||||
timeout=self.DEFAULT_TIMEOUT)
|
||||
response.raise_for_status()
|
||||
return self.parse_response(response.json())
|
||||
except requests.HTTPError as e:
|
||||
self.logger.exception("could not perform request: %s", exception_response_text(e))
|
||||
raise
|
||||
except Exception:
|
||||
self.logger.exception("could not perform request")
|
||||
raise
|
||||
query: list[tuple[str, str]] = [
|
||||
("repo", repository)
|
||||
for repository in self.DEFAULT_SEARCH_REPOSITORIES
|
||||
]
|
||||
for arg in args:
|
||||
query.append((by, arg))
|
||||
|
||||
response = self.make_request("GET", self.DEFAULT_RPC_URL, params=query)
|
||||
return self.parse_response(response.json())
|
||||
|
||||
def package_info(self, package_name: str, *, pacman: Pacman) -> AURPackage:
|
||||
"""
|
||||
@ -127,12 +117,15 @@ class Official(Remote):
|
||||
|
||||
Returns:
|
||||
AURPackage: package which match the package name
|
||||
|
||||
Raises:
|
||||
UnknownPackageError: package doesn't exist
|
||||
"""
|
||||
packages = self.make_request(package_name, by="name")
|
||||
packages = self.arch_request(package_name, by="name")
|
||||
try:
|
||||
return next(package for package in packages if package.name == package_name)
|
||||
except StopIteration:
|
||||
raise UnknownPackageError(package_name)
|
||||
raise UnknownPackageError(package_name) from None
|
||||
|
||||
def package_search(self, *keywords: str, pacman: Pacman) -> list[AURPackage]:
|
||||
"""
|
||||
@ -145,4 +138,4 @@ class Official(Remote):
|
||||
Returns:
|
||||
list[AURPackage]: list of packages which match the criteria
|
||||
"""
|
||||
return self.make_request(*keywords, by="q")
|
||||
return self.arch_request(*keywords, by="q")
|
||||
|
@ -48,8 +48,11 @@ class OfficialSyncdb(Official):
|
||||
|
||||
Returns:
|
||||
AURPackage: package which match the package name
|
||||
|
||||
Raises:
|
||||
UnknownPackageError: package doesn't exist
|
||||
"""
|
||||
try:
|
||||
return next(AURPackage.from_pacman(package) for package in pacman.package_get(package_name))
|
||||
except StopIteration:
|
||||
raise UnknownPackageError(package_name)
|
||||
raise UnknownPackageError(package_name) from None
|
||||
|
@ -17,19 +17,15 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from ahriman import __version__
|
||||
from ahriman.core.alpm.pacman import Pacman
|
||||
from ahriman.core.log import LazyLogging
|
||||
from ahriman.core.http import SyncHttpClient
|
||||
from ahriman.models.aur_package import AURPackage
|
||||
|
||||
|
||||
class Remote(LazyLogging):
|
||||
class Remote(SyncHttpClient):
|
||||
"""
|
||||
base class for remote package search
|
||||
|
||||
Attributes:
|
||||
DEFAULT_USER_AGENT(str): (class attribute) default user agent
|
||||
|
||||
Examples:
|
||||
These classes are designed to be used without instancing. In order to achieve it several class methods are
|
||||
provided: ``info``, ``multisearch`` and ``search``. Thus, the basic flow is the following::
|
||||
@ -43,8 +39,6 @@ class Remote(LazyLogging):
|
||||
directly, whereas ``multisearch`` splits search one by one and finds intersection between search results.
|
||||
"""
|
||||
|
||||
DEFAULT_USER_AGENT = f"ahriman/{__version__}"
|
||||
|
||||
@classmethod
|
||||
def info(cls, package_name: str, *, pacman: Pacman) -> AURPackage:
|
||||
"""
|
||||
|
@ -71,7 +71,7 @@ class Repo(LazyLogging):
|
||||
"""
|
||||
Repo._check_output(
|
||||
"repo-add", *self.sign_args, "-R", str(self.repo_path), str(path),
|
||||
exception=BuildError(path.name),
|
||||
exception=BuildError.from_process(path.name),
|
||||
cwd=self.paths.repository,
|
||||
logger=self.logger,
|
||||
user=self.uid)
|
||||
@ -98,7 +98,7 @@ class Repo(LazyLogging):
|
||||
# remove package from registry
|
||||
Repo._check_output(
|
||||
"repo-remove", *self.sign_args, str(self.repo_path), package,
|
||||
exception=BuildError(package),
|
||||
exception=BuildError.from_process(package),
|
||||
cwd=self.paths.repository,
|
||||
logger=self.logger,
|
||||
user=self.uid)
|
||||
|
@ -74,14 +74,15 @@ class Auth(LazyLogging):
|
||||
Returns:
|
||||
Auth: authorization module according to current settings
|
||||
"""
|
||||
provider = AuthSettings.from_option(configuration.get("auth", "target", fallback="disabled"))
|
||||
if provider == AuthSettings.Configuration:
|
||||
from ahriman.core.auth.mapping import Mapping
|
||||
return Mapping(configuration, database)
|
||||
if provider == AuthSettings.OAuth:
|
||||
from ahriman.core.auth.oauth import OAuth
|
||||
return OAuth(configuration, database)
|
||||
return Auth(configuration)
|
||||
match AuthSettings.from_option(configuration.get("auth", "target", fallback="disabled")):
|
||||
case AuthSettings.Configuration:
|
||||
from ahriman.core.auth.mapping import Mapping
|
||||
return Mapping(configuration, database)
|
||||
case AuthSettings.OAuth:
|
||||
from ahriman.core.auth.oauth import OAuth
|
||||
return OAuth(configuration, database)
|
||||
case _:
|
||||
return Auth(configuration)
|
||||
|
||||
async def check_credentials(self, username: str | None, password: str | None) -> bool:
|
||||
"""
|
||||
|
@ -28,8 +28,8 @@ from ahriman.models.auth_settings import AuthSettings
|
||||
|
||||
class OAuth(Mapping):
|
||||
"""
|
||||
OAuth's user authorization.
|
||||
It is required to create application first and put application credentials.
|
||||
User authorization implementation via OAuth. It is required to create application first and put application
|
||||
credentials.
|
||||
|
||||
Attributes:
|
||||
client_id(str): application client id
|
||||
@ -81,7 +81,7 @@ class OAuth(Mapping):
|
||||
type[aioauth_client.OAuth2Client]: loaded provider type
|
||||
|
||||
Raises:
|
||||
InvalidOption: in case if invalid OAuth provider name supplied
|
||||
OptionError: in case if invalid OAuth provider name supplied
|
||||
"""
|
||||
provider: type[aioauth_client.OAuth2Client] = getattr(aioauth_client, name)
|
||||
try:
|
||||
|
@ -82,10 +82,11 @@ class Sources(LazyLogging):
|
||||
branch = remote.branch or instance.DEFAULT_BRANCH
|
||||
if is_initialized_git:
|
||||
instance.logger.info("update HEAD to remote at %s using branch %s", sources_dir, branch)
|
||||
Sources._check_output("git", "fetch", "origin", branch, cwd=sources_dir, logger=instance.logger)
|
||||
Sources._check_output("git", "fetch", "--quiet", "origin", branch,
|
||||
cwd=sources_dir, logger=instance.logger)
|
||||
elif remote.git_url is not None:
|
||||
instance.logger.info("clone remote %s to %s using branch %s", remote.git_url, sources_dir, branch)
|
||||
Sources._check_output("git", "clone", "--branch", branch, "--single-branch",
|
||||
Sources._check_output("git", "clone", "--quiet", "--branch", branch, "--single-branch",
|
||||
remote.git_url, str(sources_dir), cwd=sources_dir.parent, logger=instance.logger)
|
||||
else:
|
||||
# it will cause an exception later
|
||||
@ -93,7 +94,8 @@ class Sources(LazyLogging):
|
||||
|
||||
# and now force reset to our branch
|
||||
Sources._check_output("git", "checkout", "--force", branch, cwd=sources_dir, logger=instance.logger)
|
||||
Sources._check_output("git", "reset", "--hard", f"origin/{branch}", cwd=sources_dir, logger=instance.logger)
|
||||
Sources._check_output("git", "reset", "--quiet", "--hard", f"origin/{branch}",
|
||||
cwd=sources_dir, logger=instance.logger)
|
||||
|
||||
# move content if required
|
||||
# we are using full path to source directory in order to make append possible
|
||||
@ -126,7 +128,7 @@ class Sources(LazyLogging):
|
||||
instance = Sources()
|
||||
if not (sources_dir / ".git").is_dir():
|
||||
# skip initializing in case if it was already
|
||||
Sources._check_output("git", "init", "--initial-branch", instance.DEFAULT_BRANCH,
|
||||
Sources._check_output("git", "init", "--quiet", "--initial-branch", instance.DEFAULT_BRANCH,
|
||||
cwd=sources_dir, logger=instance.logger)
|
||||
|
||||
# extract local files...
|
||||
@ -191,7 +193,7 @@ class Sources(LazyLogging):
|
||||
return # no changes to push, just skip action
|
||||
|
||||
git_url, branch = remote.git_source()
|
||||
Sources._check_output("git", "push", git_url, branch, cwd=sources_dir, logger=instance.logger)
|
||||
Sources._check_output("git", "push", "--quiet", git_url, branch, cwd=sources_dir, logger=instance.logger)
|
||||
|
||||
def add(self, sources_dir: Path, *pattern: str, intent_to_add: bool = False) -> None:
|
||||
"""
|
||||
@ -243,7 +245,8 @@ class Sources(LazyLogging):
|
||||
environment["GIT_AUTHOR_NAME"] = environment["GIT_COMMITTER_NAME"] = user
|
||||
environment["GIT_AUTHOR_EMAIL"] = environment["GIT_COMMITTER_EMAIL"] = email
|
||||
|
||||
Sources._check_output("git", "commit", *args, cwd=sources_dir, logger=self.logger, environment=environment)
|
||||
Sources._check_output("git", "commit", "--quiet", *args,
|
||||
cwd=sources_dir, logger=self.logger, environment=environment)
|
||||
|
||||
return True
|
||||
|
||||
|
@ -92,7 +92,7 @@ class Task(LazyLogging):
|
||||
|
||||
Task._check_output(
|
||||
*command,
|
||||
exception=BuildError(self.package.base),
|
||||
exception=BuildError.from_process(self.package.base),
|
||||
cwd=sources_dir,
|
||||
logger=self.logger,
|
||||
user=self.uid,
|
||||
@ -101,7 +101,7 @@ class Task(LazyLogging):
|
||||
# well it is not actually correct, but we can deal with it
|
||||
packages = Task._check_output(
|
||||
"makepkg", "--packagelist",
|
||||
exception=BuildError(self.package.base),
|
||||
exception=BuildError.from_process(self.package.base),
|
||||
cwd=sources_dir,
|
||||
logger=self.logger
|
||||
).splitlines()
|
||||
|
@ -228,6 +228,10 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"coerce": "list",
|
||||
"schema": {"type": "string"},
|
||||
},
|
||||
"enable_archive_upload": {
|
||||
"type": "boolean",
|
||||
"coerce": "boolean",
|
||||
},
|
||||
"host": {
|
||||
"type": "string",
|
||||
"is_ip_address": ["localhost"],
|
||||
@ -236,6 +240,11 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"type": "string",
|
||||
"is_url": ["http", "https"],
|
||||
},
|
||||
"max_body_size": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
},
|
||||
@ -257,6 +266,11 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"required": True,
|
||||
"path_exists": True,
|
||||
},
|
||||
"timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
"unix_socket": {
|
||||
"type": "path",
|
||||
"coerce": "absolute_path",
|
||||
@ -268,6 +282,10 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
|
||||
"username": {
|
||||
"type": "string",
|
||||
},
|
||||
"wait_timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -136,13 +136,13 @@ class Validator(RootValidator):
|
||||
The rule's arguments are validated against this schema:
|
||||
{"type": "list", "schema": {"type": "string"}}
|
||||
"""
|
||||
url = urlparse(value) # it probably will never rise exceptions on parse
|
||||
if not url.scheme:
|
||||
self._error(field, f"Url scheme is not set for {value}")
|
||||
if not url.netloc and url.scheme not in ("file",):
|
||||
self._error(field, f"Location must be set for url {value} of scheme {url.scheme}")
|
||||
if constraint and url.scheme not in constraint:
|
||||
self._error(field, f"Url {value} scheme must be one of {constraint}")
|
||||
match urlparse(value): # it probably will never rise exceptions on parse
|
||||
case url if not url.scheme:
|
||||
self._error(field, f"Url scheme is not set for {value}")
|
||||
case url if not url.netloc and url.scheme not in ("file",):
|
||||
self._error(field, f"Location must be set for url {value} of scheme {url.scheme}")
|
||||
case url if constraint and url.scheme not in constraint:
|
||||
self._error(field, f"Url {value} scheme must be one of {constraint}")
|
||||
|
||||
def _validate_path_exists(self, constraint: bool, field: str, value: Path) -> None:
|
||||
"""
|
||||
@ -157,7 +157,8 @@ class Validator(RootValidator):
|
||||
The rule's arguments are validated against this schema:
|
||||
{"type": "boolean"}
|
||||
"""
|
||||
if constraint and not value.exists():
|
||||
self._error(field, f"Path {value} must exist")
|
||||
if not constraint and value.exists():
|
||||
self._error(field, f"Path {value} must not exist")
|
||||
match value.exists():
|
||||
case True if not constraint:
|
||||
self._error(field, f"Path {value} must not exist")
|
||||
case False if constraint:
|
||||
self._error(field, f"Path {value} must exist")
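
The match statement keeps both error branches of the original if chain. A standalone sketch of the same logic, without the cerberus validator plumbing, for readers unfamiliar with guard patterns:

from pathlib import Path

def path_exists_error(value: Path, constraint: bool) -> str | None:
    # standalone equivalent of the match above; returns the error text instead of calling self._error
    match value.exists():
        case True if not constraint:
            return f"Path {value} must not exist"
        case False if constraint:
            return f"Path {value} must exist"
        case _:
            return None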
|
||||
|
@ -0,0 +1,36 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
__all__ = ["steps"]

steps = [
    """
    drop index logs_package_base_process_id
    """,
    """
    alter table logs drop column process_id
    """,
    """
    alter table logs add column version text not null default ''
    """,
    """
    create index logs_package_base_version on logs (package_base, version)
    """,
]
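
The migration is just an ordered list of SQL statements; a minimal sketch of how such a list could be applied with sqlite3. The actual ahriman migration runner is not part of this diff, so the function name below is an assumption:

import sqlite3

def apply_steps(database_path: str, statements: list[str]) -> None:
    # hypothetical runner: the real ahriman migration machinery is not shown here
    with sqlite3.connect(database_path) as connection:
        for statement in statements:
            connection.execute(statement)  # committed when the with block exits cleanly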
|
@ -66,13 +66,13 @@ class LogsOperations(Operations):
|
||||
connection.execute(
|
||||
"""
|
||||
insert into logs
|
||||
(package_base, process_id, created, record)
|
||||
(package_base, version, created, record)
|
||||
values
|
||||
(:package_base, :process_id, :created, :record)
|
||||
(:package_base, :version, :created, :record)
|
||||
""",
|
||||
{
|
||||
"package_base": log_record_id.package_base,
|
||||
"process_id": log_record_id.process_id,
|
||||
"version": log_record_id.version,
|
||||
"created": created,
|
||||
"record": record,
|
||||
}
|
||||
@ -80,22 +80,22 @@ class LogsOperations(Operations):
|
||||
|
||||
return self.with_connection(run, commit=True)
|
||||
|
||||
def logs_remove(self, package_base: str, current_process_id: int | None) -> None:
|
||||
def logs_remove(self, package_base: str, version: str | None) -> None:
|
||||
"""
|
||||
remove log records for the specified package
|
||||
|
||||
Args:
|
||||
package_base(str): package base to remove logs
|
||||
current_process_id(int | None): current process id. If set it will remove only logs belonging to another
|
||||
process
|
||||
version(str): package version. If set it will remove only logs belonging to another
|
||||
version
|
||||
"""
|
||||
def run(connection: Connection) -> None:
|
||||
connection.execute(
|
||||
"""
|
||||
delete from logs
|
||||
where package_base = :package_base and (:process_id is null or process_id <> :process_id)
|
||||
where package_base = :package_base and (:version is null or version <> :version)
|
||||
""",
|
||||
{"package_base": package_base, "process_id": current_process_id}
|
||||
{"package_base": package_base, "version": version}
|
||||
)
|
||||
|
||||
return self.with_connection(run, commit=True)
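
A short usage sketch of the new signatures, assuming an existing LogsOperations instance named database. Passing the current version keeps its records and removes the rest, while None clears everything for the package:

import time
from ahriman.models.log_record_id import LogRecordId

database.logs_insert(LogRecordId("ahriman", "2.11.0"), time.time(), "build started")
database.logs_remove("ahriman", "2.11.0")   # drops records of any other version
database.logs_remove("ahriman", None)       # drops all records for the package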
|
||||
|
@ -17,8 +17,11 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import subprocess
|
||||
|
||||
from collections.abc import Callable
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from typing import Any, Self
|
||||
|
||||
|
||||
class BuildError(RuntimeError):
|
||||
@ -26,14 +29,61 @@ class BuildError(RuntimeError):
|
||||
base exception for failed builds
|
||||
"""
|
||||
|
||||
def __init__(self, package_base: str) -> None:
|
||||
def __init__(self, package_base: str, stderr: str | None = None) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
package_base(str): package base raised exception
|
||||
stderr(str | None, optional): stderr of the process if available (Default value = None)
|
||||
"""
|
||||
RuntimeError.__init__(self, f"Package {package_base} build failed, check logs for details")
|
||||
message = f"Package {package_base} build failed,\n"
|
||||
if stderr is not None:
|
||||
message += f"process stderr:\n{stderr}\n"
|
||||
message += "check logs for details"
|
||||
|
||||
RuntimeError.__init__(self, message)
|
||||
|
||||
@classmethod
|
||||
def from_process(cls, package_base: str) -> Callable[[int, list[str], str, str], Self]:
|
||||
"""
|
||||
generate exception callable from process error
|
||||
|
||||
Args:
|
||||
package_base(str): package base raised exception
|
||||
|
||||
Returns:
|
||||
Callable[[int, list[str], str, str], Self]: exception generator to be passed to subprocess utils
|
||||
"""
|
||||
return lambda code, process, stdout, stderr: cls(package_base, stderr)
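
from_process returns a factory matching the (code, process, stdout, stderr) shape that the subprocess helper is expected to call on failure. A simplified stand-in for that helper, since check_output's real signature is not shown in this hunk:

import subprocess

def run_checked(*command: str, exception=None) -> str:
    # simplified sketch of a check_output-style wrapper which accepts an exception factory
    result = subprocess.run(command, capture_output=True, text=True, check=False)
    if result.returncode != 0:
        if exception is not None:
            raise exception(result.returncode, list(command), result.stdout, result.stderr)
        raise subprocess.CalledProcessError(result.returncode, list(command), result.stdout, result.stderr)
    return result.stdout

# run_checked("makepkg", exception=BuildError.from_process("ahriman"))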
|
||||
|
||||
|
||||
class CalledProcessError(subprocess.CalledProcessError):
|
||||
"""
|
||||
like ``subprocess.CalledProcessError``, but better
|
||||
"""
|
||||
|
||||
def __init__(self, status_code: int, process: list[str], stderr: str) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
status_code(int): process return code
|
||||
process(list[str]): process argument list
|
||||
stderr(str): stderr of the process
|
||||
"""
|
||||
subprocess.CalledProcessError.__init__(self, status_code, process, stderr=stderr)
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""
|
||||
string representation of the exception
|
||||
|
||||
Returns:
|
||||
str: string view of the exception
|
||||
"""
|
||||
return f"""{subprocess.CalledProcessError.__str__(self)}
|
||||
Process stderr:
|
||||
{self.stderr}"""
|
||||
|
||||
|
||||
class DuplicateRunError(RuntimeError):
|
||||
@ -194,6 +244,21 @@ class PasswordError(ValueError):
|
||||
ValueError.__init__(self, f"Password error: {details}")
|
||||
|
||||
|
||||
class PartitionError(RuntimeError):
|
||||
"""
|
||||
exception raised during packages partition actions
|
||||
"""
|
||||
|
||||
def __init__(self, count: int) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
count(int): count of partitions
|
||||
"""
|
||||
RuntimeError.__init__(self, f"Could not divide packages into {count} partitions")
|
||||
|
||||
|
||||
class PkgbuildGeneratorError(RuntimeError):
|
||||
"""
|
||||
exception class for support type triggers
|
||||
|
@ -38,7 +38,7 @@ class TreePrinter(StringPrinter):
|
||||
level(int): dependencies tree level
|
||||
packages(list[Package]): packages which belong to this level
|
||||
"""
|
||||
StringPrinter.__init__(self, f"level {level}")
|
||||
StringPrinter.__init__(self, f"level #{level}")
|
||||
self.packages = packages
|
||||
|
||||
def properties(self) -> list[Property]:
|
||||
|
@ -104,9 +104,12 @@ class RemotePull(LazyLogging):
|
||||
def run(self) -> None:
|
||||
"""
|
||||
run git pull action
|
||||
|
||||
Raises:
|
||||
GitRemoteError: pull processing error
|
||||
"""
|
||||
try:
|
||||
self.repo_clone()
|
||||
except Exception:
|
||||
self.logger.exception("git pull failed")
|
||||
raise GitRemoteError()
|
||||
raise GitRemoteError
|
||||
|
@ -118,6 +118,9 @@ class RemotePush(LazyLogging):
|
||||
|
||||
Args:
|
||||
result(Result): build result
|
||||
|
||||
Raises:
|
||||
GitRemoteError: push processing error
|
||||
"""
|
||||
try:
|
||||
with TemporaryDirectory(ignore_cleanup_errors=True) as dir_name:
|
||||
@ -127,4 +130,4 @@ class RemotePush(LazyLogging):
|
||||
commit_author=self.commit_author)
|
||||
except Exception:
|
||||
self.logger.exception("git push failed")
|
||||
raise GitRemoteError()
|
||||
raise GitRemoteError
|
||||
|
20
src/ahriman/core/http/__init__.py
Normal file
@ -0,0 +1,20 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from ahriman.core.http.sync_http_client import MultipartType, SyncHttpClient
|
137
src/ahriman/core/http/sync_http_client.py
Normal file
@ -0,0 +1,137 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import requests
|
||||
|
||||
from functools import cached_property
|
||||
from typing import Any, IO, Literal
|
||||
|
||||
from ahriman import __version__
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.log import LazyLogging
|
||||
|
||||
|
||||
# filename, file, content-type, headers
|
||||
MultipartType = tuple[str, IO[bytes], str, dict[str, str]]
|
||||
|
||||
|
||||
class SyncHttpClient(LazyLogging):
|
||||
"""
|
||||
wrapper around requests library to reduce boilerplate
|
||||
|
||||
Attributes:
|
||||
auth(tuple[str, str] | None): HTTP basic auth object if set
|
||||
suppress_errors(bool): suppress logging of request errors
|
||||
timeout(int): HTTP request timeout in seconds
|
||||
"""
|
||||
|
||||
def __init__(self, section: str | None = None, configuration: Configuration | None = None, *,
|
||||
suppress_errors: bool = False) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
section(str, optional): settings section name (Default value = None)
|
||||
configuration(Configuration | None): configuration instance (Default value = None)
|
||||
suppress_errors(bool, optional): suppress logging of request errors (Default value = False)
|
||||
"""
|
||||
if configuration is None:
|
||||
configuration = Configuration() # dummy configuration
|
||||
if section is None:
|
||||
section = configuration.default_section
|
||||
|
||||
username = configuration.get(section, "username", fallback=None)
|
||||
password = configuration.get(section, "password", fallback=None)
|
||||
self.auth = (username, password) if username and password else None
|
||||
|
||||
self.timeout = configuration.getint(section, "timeout", fallback=30)
|
||||
self.suppress_errors = suppress_errors
|
||||
|
||||
@cached_property
|
||||
def session(self) -> requests.Session:
|
||||
"""
|
||||
get or create session
|
||||
|
||||
Returns:
|
||||
request.Session: created session object
|
||||
"""
|
||||
session = requests.Session()
|
||||
session.headers["User-Agent"] = f"ahriman/{__version__}"
|
||||
|
||||
return session
|
||||
|
||||
@staticmethod
|
||||
def exception_response_text(exception: requests.exceptions.RequestException) -> str:
|
||||
"""
|
||||
safe response exception text generation
|
||||
|
||||
Args:
|
||||
exception(requests.exceptions.RequestException): exception raised
|
||||
|
||||
Returns:
|
||||
str: text of the response if it is not None and empty string otherwise
|
||||
"""
|
||||
result: str = exception.response.text if exception.response is not None else ""
|
||||
return result
|
||||
|
||||
def make_request(self, method: Literal["DELETE", "GET", "POST", "PUT"], url: str, *,
|
||||
headers: dict[str, str] | None = None,
|
||||
params: list[tuple[str, str]] | None = None,
|
||||
data: Any | None = None,
|
||||
json: dict[str, Any] | None = None,
|
||||
files: dict[str, MultipartType] | None = None,
|
||||
session: requests.Session | None = None,
|
||||
suppress_errors: bool | None = None) -> requests.Response:
|
||||
"""
|
||||
perform request with specified parameters
|
||||
|
||||
Args:
|
||||
method(Literal["DELETE", "GET", "POST", "PUT"]): HTTP method to call
|
||||
url(str): remote url to call
|
||||
headers(dict[str, str] | None, optional): request headers (Default value = None)
|
||||
params(list[tuple[str, str]] | None, optional): request query parameters (Default value = None)
|
||||
data(Any | None, optional): request raw data parameters (Default value = None)
|
||||
json(dict[str, Any] | None, optional): request json parameters (Default value = None)
|
||||
files(dict[str, MultipartType] | None, optional): multipart upload (Default value = None)
|
||||
session(requests.Session | None, optional): session object if any (Default value = None)
|
||||
suppress_errors(bool | None, optional): suppress logging errors (e.g. if no web server available). If none
|
||||
set, the instance-wide value will be used (Default value = None)
|
||||
|
||||
Returns:
|
||||
requests.Response: response object
|
||||
"""
|
||||
# defaults
|
||||
if suppress_errors is None:
|
||||
suppress_errors = self.suppress_errors
|
||||
if session is None:
|
||||
session = self.session
|
||||
|
||||
try:
|
||||
response = session.request(method, url, params=params, data=data, headers=headers, files=files, json=json,
|
||||
auth=self.auth, timeout=self.timeout)
|
||||
response.raise_for_status()
|
||||
return response
|
||||
except requests.HTTPError as ex:
|
||||
if not suppress_errors:
|
||||
self.logger.exception("could not perform http request: %s", self.exception_response_text(ex))
|
||||
raise
|
||||
except Exception:
|
||||
if not suppress_errors:
|
||||
self.logger.exception("could not perform http request")
|
||||
raise
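
A minimal usage sketch of the new client; with no arguments the constructor falls back to a dummy Configuration as shown above, and make_request raises on HTTP errors after optional logging. The URL is a placeholder:

client = SyncHttpClient()  # dummy configuration, default 30 second timeout
response = client.make_request("GET", "https://example.org/api/v1/info", suppress_errors=True)
print(response.status_code)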
|
@ -30,13 +30,14 @@ class FilteredAccessLogger(AccessLogger):
|
||||
LOG_PATH_REGEX(re.Pattern): (class attribute) regex for logs uri
|
||||
"""
|
||||
|
||||
# official packages have only ``[A-Za-z0-9_.+-]`` regex
|
||||
LOG_PATH_REGEX = re.compile(r"^/api/v1/packages/[A-Za-z0-9_.+%-]+/logs$")
|
||||
LOG_PATH_REGEX = re.compile(r"^/api/v1/packages/[^/]+/logs$")
|
||||
# technically process id is uuid, but we might change it later
|
||||
PROCESS_PATH_REGEX = re.compile(r"^/api/v1/service/process/[^/]+$")
|
||||
|
||||
@staticmethod
|
||||
def is_logs_post(request: BaseRequest) -> bool:
|
||||
"""
|
||||
check if request looks lie logs posting
|
||||
check if request looks like logs posting
|
||||
|
||||
Args:
|
||||
request(BaseRequest): http reqeust descriptor
|
||||
@ -46,6 +47,19 @@ class FilteredAccessLogger(AccessLogger):
|
||||
"""
|
||||
return request.method == "POST" and FilteredAccessLogger.LOG_PATH_REGEX.match(request.path) is not None
|
||||
|
||||
@staticmethod
|
||||
def is_process_get(request: BaseRequest) -> bool:
|
||||
"""
|
||||
check if request looks like process status request
|
||||
|
||||
Args:
|
||||
request(BaseRequest): http reqeust descriptor
|
||||
|
||||
Returns:
|
||||
bool: True in case if request looks like process status request and False otherwise
|
||||
"""
|
||||
return request.method == "GET" and FilteredAccessLogger.PROCESS_PATH_REGEX.match(request.path) is not None
|
||||
|
||||
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
||||
"""
|
||||
access log with enabled filter by request path
|
||||
@ -55,6 +69,7 @@ class FilteredAccessLogger(AccessLogger):
|
||||
response(StreamResponse): streaming response object
|
||||
time(float):
|
||||
"""
|
||||
if self.is_logs_post(request):
|
||||
if self.is_logs_post(request) \
|
||||
or self.is_process_get(request):
|
||||
return
|
||||
AccessLogger.log(self, request, response, time)
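
The filter only takes effect when the class is handed to the aiohttp runner; a hedged wiring sketch, since the real ahriman web setup is not included in this hunk:

from aiohttp import web

app = web.Application()
web.run_app(app, access_log_class=FilteredAccessLogger)  # assumed wiring; actual setup lives elsewhere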
|
||||
|
@ -81,12 +81,12 @@ class HttpLogHandler(logging.Handler):
|
||||
Args:
|
||||
record(logging.LogRecord): log record to log
|
||||
"""
|
||||
package_base = getattr(record, "package_base", None)
|
||||
if package_base is None:
|
||||
log_record_id = getattr(record, "package_id", None)
|
||||
if log_record_id is None:
|
||||
return # in case if no package base supplied we need just skip log message
|
||||
|
||||
try:
|
||||
self.reporter.logs(package_base, record)
|
||||
self.reporter.package_logs(log_record_id, record)
|
||||
except Exception:
|
||||
if self.suppress_errors:
|
||||
return
|
||||
|
@ -24,6 +24,8 @@ from collections.abc import Generator
|
||||
from functools import cached_property
|
||||
from typing import Any
|
||||
|
||||
from ahriman.models.log_record_id import LogRecordId
|
||||
|
||||
|
||||
class LazyLogging:
|
||||
"""
|
||||
@ -60,38 +62,40 @@ class LazyLogging:
|
||||
logging.setLogRecordFactory(logging.LogRecord)
|
||||
|
||||
@staticmethod
|
||||
def _package_logger_set(package_base: str) -> None:
|
||||
def _package_logger_set(package_base: str, version: str | None) -> None:
|
||||
"""
|
||||
set package base as extra info to the logger
|
||||
|
||||
Args:
|
||||
package_base(str): package base
|
||||
version(str | None): package version if available
|
||||
"""
|
||||
current_factory = logging.getLogRecordFactory()
|
||||
|
||||
def package_record_factory(*args: Any, **kwargs: Any) -> logging.LogRecord:
|
||||
record = current_factory(*args, **kwargs)
|
||||
record.package_base = package_base
|
||||
record.package_id = LogRecordId(package_base, version or "")
|
||||
return record
|
||||
|
||||
logging.setLogRecordFactory(package_record_factory)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def in_package_context(self, package_base: str) -> Generator[None, None, None]:
|
||||
def in_package_context(self, package_base: str, version: str | None) -> Generator[None, None, None]:
|
||||
"""
|
||||
execute function while setting package context
|
||||
|
||||
Args:
|
||||
package_base(str): package base to set context in
|
||||
version(str | None): package version if available
|
||||
|
||||
Examples:
|
||||
This function is designed to be called as context manager with ``package_base`` argument, e.g.:
|
||||
|
||||
>>> with self.in_package_context(package.base):
|
||||
>>> with self.in_package_context(package.base, package.version):
|
||||
>>> build_package(package)
|
||||
"""
|
||||
try:
|
||||
self._package_logger_set(package_base)
|
||||
self._package_logger_set(package_base, version)
|
||||
yield
|
||||
finally:
|
||||
self._package_logger_reset()
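
With the version threaded through, every record emitted inside the context now carries a full LogRecordId. A small sketch, assuming any LazyLogging subclass is enough to expose the logger property:

class Demo(LazyLogging):
    """any LazyLogging subclass; the mixin supplies the logger property"""

demo = Demo()
with demo.in_package_context("ahriman", "2.11.0"):
    # the record factory above attaches package_id=LogRecordId("ahriman", "2.11.0") to this record
    demo.logger.info("building package")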
|
||||
|
120
src/ahriman/core/report/remote_call.py
Normal file
@ -0,0 +1,120 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import requests
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.report.report import Report
|
||||
from ahriman.core.status.web_client import WebClient
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.result import Result
|
||||
from ahriman.models.waiter import Waiter
|
||||
|
||||
|
||||
class RemoteCall(Report):
|
||||
"""
|
||||
trigger implementation which call remote service with update
|
||||
|
||||
Attributes:
|
||||
client(WebClient): web client instance
|
||||
update_aur(bool): check for AUR updates
|
||||
update_local(bool): check for local packages update
|
||||
update_manual(bool): check for manually built packages
|
||||
wait_timeout(int): timeout to wait external process
|
||||
"""
|
||||
|
||||
def __init__(self, architecture: str, configuration: Configuration, section: str) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
architecture(str): repository architecture
|
||||
configuration(Configuration): configuration instance
|
||||
section(str): settings section name
|
||||
"""
|
||||
Report.__init__(self, architecture, configuration)
|
||||
|
||||
self.client = WebClient(configuration)
|
||||
|
||||
self.update_aur = configuration.getboolean(section, "aur", fallback=False)
|
||||
self.update_local = configuration.getboolean(section, "local", fallback=False)
|
||||
self.update_manual = configuration.getboolean(section, "manual", fallback=False)
|
||||
|
||||
self.wait_timeout = configuration.getint(section, "wait_timeout", fallback=-1)
|
||||
|
||||
def generate(self, packages: list[Package], result: Result) -> None:
|
||||
"""
|
||||
generate report for the specified packages
|
||||
|
||||
Args:
|
||||
packages(list[Package]): list of packages to generate report
|
||||
result(Result): build result
|
||||
"""
|
||||
process_id = self.remote_update()
|
||||
self.remote_wait(process_id)
|
||||
|
||||
def is_process_alive(self, process_id: str) -> bool:
|
||||
"""
|
||||
check if process is alive
|
||||
|
||||
Args:
|
||||
process_id(str): remote process id
|
||||
|
||||
Returns:
|
||||
bool: True in case if remote process is alive and False otherwise
|
||||
"""
|
||||
try:
|
||||
response = self.client.make_request("GET", f"/api/v1/service/process/{process_id}")
|
||||
except requests.HTTPError as ex:
|
||||
status_code = ex.response.status_code if ex.response is not None else None
|
||||
if status_code == 404:
|
||||
return False
|
||||
raise
|
||||
|
||||
response_json = response.json()
|
||||
is_alive: bool = response_json["is_alive"]
|
||||
|
||||
return is_alive
|
||||
|
||||
def remote_update(self) -> str:
|
||||
"""
|
||||
call remote server for update
|
||||
|
||||
Returns:
|
||||
str: remote process id
|
||||
"""
|
||||
response = self.client.make_request("POST", "/api/v1/service/update", json={
|
||||
"aur": self.update_aur,
|
||||
"local": self.update_local,
|
||||
"manual": self.update_manual,
|
||||
})
|
||||
response_json = response.json()
|
||||
|
||||
process_id: str = response_json["process_id"]
|
||||
return process_id
|
||||
|
||||
def remote_wait(self, process_id: str) -> None:
|
||||
"""
|
||||
wait for remote process termination
|
||||
|
||||
Args:
|
||||
process_id(str): remote process id
|
||||
"""
|
||||
waiter = Waiter(self.wait_timeout)
|
||||
waiter.wait(self.is_process_alive, process_id)
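
Waiter itself is not part of this diff; remote_wait only relies on a poll-until-false-or-timeout behaviour, roughly:

import time

def wait_for(predicate, *args, timeout: float, interval: float = 10.0) -> None:
    # simplified stand-in for ahriman.models.waiter.Waiter used above
    start = time.monotonic()
    while predicate(*args):
        if 0 < timeout < time.monotonic() - start:
            break
        time.sleep(interval)

# wait_for(report.is_process_alive, process_id, timeout=report.wait_timeout)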
|
@ -80,20 +80,24 @@ class Report(LazyLogging):
|
||||
Report: client according to current settings
|
||||
"""
|
||||
section, provider_name = configuration.gettype(target, architecture)
|
||||
provider = ReportSettings.from_option(provider_name)
|
||||
if provider == ReportSettings.HTML:
|
||||
from ahriman.core.report.html import HTML
|
||||
return HTML(architecture, configuration, section)
|
||||
if provider == ReportSettings.Email:
|
||||
from ahriman.core.report.email import Email
|
||||
return Email(architecture, configuration, section)
|
||||
if provider == ReportSettings.Console:
|
||||
from ahriman.core.report.console import Console
|
||||
return Console(architecture, configuration, section)
|
||||
if provider == ReportSettings.Telegram:
|
||||
from ahriman.core.report.telegram import Telegram
|
||||
return Telegram(architecture, configuration, section)
|
||||
return Report(architecture, configuration) # should never happen
|
||||
match ReportSettings.from_option(provider_name):
|
||||
case ReportSettings.HTML:
|
||||
from ahriman.core.report.html import HTML
|
||||
return HTML(architecture, configuration, section)
|
||||
case ReportSettings.Email:
|
||||
from ahriman.core.report.email import Email
|
||||
return Email(architecture, configuration, section)
|
||||
case ReportSettings.Console:
|
||||
from ahriman.core.report.console import Console
|
||||
return Console(architecture, configuration, section)
|
||||
case ReportSettings.Telegram:
|
||||
from ahriman.core.report.telegram import Telegram
|
||||
return Telegram(architecture, configuration, section)
|
||||
case ReportSettings.RemoteCall:
|
||||
from ahriman.core.report.remote_call import RemoteCall
|
||||
return RemoteCall(architecture, configuration, section)
|
||||
case _:
|
||||
return Report(architecture, configuration) # should never happen
|
||||
|
||||
def generate(self, packages: list[Package], result: Result) -> None:
|
||||
"""
|
||||
@ -113,10 +117,10 @@ class Report(LazyLogging):
|
||||
packages(list[Package]): list of packages to generate report
|
||||
|
||||
Raises:
|
||||
ReportFailed: in case of any report unmatched exception
|
||||
ReportError: in case of any report unmatched exception
|
||||
"""
|
||||
try:
|
||||
self.generate(packages, result)
|
||||
except Exception:
|
||||
self.logger.exception("report generation failed")
|
||||
raise ReportError()
|
||||
raise ReportError
|
||||
|
@ -191,6 +191,31 @@ class ReportTrigger(Trigger):
|
||||
},
|
||||
},
|
||||
},
|
||||
"remote-call": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"allowed": ["ahriman", "remote-call"],
|
||||
},
|
||||
"aur": {
|
||||
"type": "boolean",
|
||||
"coerce": "boolean",
|
||||
},
|
||||
"local": {
|
||||
"type": "boolean",
|
||||
"coerce": "boolean",
|
||||
},
|
||||
"manual": {
|
||||
"type": "boolean",
|
||||
"coerce": "boolean",
|
||||
},
|
||||
"wait_timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
def __init__(self, architecture: str, configuration: Configuration) -> None:
|
||||
|
@ -17,17 +17,15 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import requests # technically we could use python-telegram-bot, but it is just a single request, c'mon
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.http import SyncHttpClient
|
||||
from ahriman.core.report.jinja_template import JinjaTemplate
|
||||
from ahriman.core.report.report import Report
|
||||
from ahriman.core.util import exception_response_text
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.result import Result
|
||||
|
||||
|
||||
class Telegram(Report, JinjaTemplate):
|
||||
class Telegram(Report, JinjaTemplate, SyncHttpClient):
|
||||
"""
|
||||
telegram report generator
|
||||
|
||||
@ -38,7 +36,6 @@ class Telegram(Report, JinjaTemplate):
|
||||
chat_id(str): chat id to post message, either string with @ or integer
|
||||
template_path(Path): path to template for built packages
|
||||
template_type(str): template message type to be used in parse mode, one of MarkdownV2, HTML, Markdown
|
||||
timeout(int): HTTP request timeout in seconds
|
||||
"""
|
||||
|
||||
TELEGRAM_API_URL = "https://api.telegram.org"
|
||||
@ -55,12 +52,12 @@ class Telegram(Report, JinjaTemplate):
|
||||
"""
|
||||
Report.__init__(self, architecture, configuration)
|
||||
JinjaTemplate.__init__(self, section, configuration)
|
||||
SyncHttpClient.__init__(self, section, configuration)
|
||||
|
||||
self.api_key = configuration.get(section, "api_key")
|
||||
self.chat_id = configuration.get(section, "chat_id")
|
||||
self.template_path = configuration.getpath(section, "template_path")
|
||||
self.template_type = configuration.get(section, "template_type", fallback="HTML")
|
||||
self.timeout = configuration.getint(section, "timeout", fallback=30)
|
||||
|
||||
def _send(self, text: str) -> None:
|
||||
"""
|
||||
@ -69,18 +66,8 @@ class Telegram(Report, JinjaTemplate):
|
||||
Args:
|
||||
text(str): message body text
|
||||
"""
|
||||
try:
|
||||
response = requests.post(
|
||||
f"{self.TELEGRAM_API_URL}/bot{self.api_key}/sendMessage",
|
||||
data={"chat_id": self.chat_id, "text": text, "parse_mode": self.template_type},
|
||||
timeout=self.timeout)
|
||||
response.raise_for_status()
|
||||
except requests.HTTPError as e:
|
||||
self.logger.exception("could not perform request: %s", exception_response_text(e))
|
||||
raise
|
||||
except Exception:
|
||||
self.logger.exception("could not perform request")
|
||||
raise
|
||||
self.make_request("POST", f"{self.TELEGRAM_API_URL}/bot{self.api_key}/sendMessage",
|
||||
data={"chat_id": self.chat_id, "text": text, "parse_mode": self.template_type})
|
||||
|
||||
def generate(self, packages: list[Package], result: Result) -> None:
|
||||
"""
|
||||
@ -89,6 +76,9 @@ class Telegram(Report, JinjaTemplate):
|
||||
Args:
|
||||
packages(list[Package]): list of packages to generate report
|
||||
result(Result): build result
|
||||
|
||||
Raises:
|
||||
ValueError: impossible to split message by chunks
|
||||
"""
|
||||
if not result.success:
|
||||
return
|
||||
|
@ -93,7 +93,8 @@ class Executor(Cleaner):
|
||||
|
||||
result = Result()
|
||||
for single in updates:
|
||||
with self.in_package_context(single.base), TemporaryDirectory(ignore_cleanup_errors=True) as dir_name:
|
||||
with self.in_package_context(single.base, local_versions.get(single.base)), \
|
||||
TemporaryDirectory(ignore_cleanup_errors=True) as dir_name:
|
||||
try:
|
||||
packager = self.packager(packagers, single.base)
|
||||
build_single(single, Path(dir_name), packager.packager_id)
|
||||
@ -121,7 +122,7 @@ class Executor(Cleaner):
|
||||
self.database.build_queue_clear(package_base)
|
||||
self.database.patches_remove(package_base, [])
|
||||
self.database.logs_remove(package_base, None)
|
||||
self.reporter.remove(package_base) # we only update status page in case of base removal
|
||||
self.reporter.package_remove(package_base) # we only update status page in case of base removal
|
||||
except Exception:
|
||||
self.logger.exception("could not remove base %s", package_base)
|
||||
|
||||
@ -201,14 +202,16 @@ class Executor(Cleaner):
|
||||
package_path = self.paths.repository / safe_filename(name)
|
||||
self.repo.add(package_path)
|
||||
|
||||
current_packages = self.packages()
|
||||
current_packages = {package.base: package for package in self.packages()}
|
||||
local_versions = {package_base: package.version for package_base, package in current_packages.items()}
|
||||
|
||||
removed_packages: list[str] = [] # list of packages which have been removed from the base
|
||||
updates = self.load_archives(packages)
|
||||
packagers = packagers or Packagers()
|
||||
|
||||
result = Result()
|
||||
for local in updates:
|
||||
with self.in_package_context(local.base):
|
||||
with self.in_package_context(local.base, local_versions.get(local.base)):
|
||||
try:
|
||||
packager = self.packager(packagers, local.base)
|
||||
|
||||
@ -218,12 +221,9 @@ class Executor(Cleaner):
|
||||
self.reporter.set_success(local)
|
||||
result.add_success(local)
|
||||
|
||||
current_package_archives = {
|
||||
package
|
||||
for current in current_packages
|
||||
if current.base == local.base
|
||||
for package in current.packages
|
||||
}
|
||||
current_package_archives: set[str] = set()
|
||||
if local.base in current_packages:
|
||||
current_package_archives = set(current_packages[local.base].packages.keys())
|
||||
removed_packages.extend(current_package_archives.difference(local.packages))
|
||||
except Exception:
|
||||
self.reporter.set_failed(local.base)
|
||||
|
@ -40,7 +40,7 @@ class Repository(Executor, UpdateHandler):
|
||||
|
||||
Examples:
|
||||
This class along with traits provides access to local repository actions, e.g. remove packages, update packages,
|
||||
sync local repository to remote, generate report, etc::
|
||||
sync local repository to remote, generate report, etc.::
|
||||
|
||||
>>> from ahriman.core.configuration import Configuration
|
||||
>>> from ahriman.core.database import SQLite
|
||||
|
@ -66,10 +66,11 @@ class UpdateHandler(Cleaner):
|
||||
continue
|
||||
raise UnknownPackageError(package.base)
|
||||
|
||||
result: list[Package] = []
|
||||
local_versions = {package.base: package.version for package in self.packages()}
|
||||
|
||||
result: list[Package] = []
|
||||
for local in self.packages():
|
||||
with self.in_package_context(local.base):
|
||||
with self.in_package_context(local.base, local_versions.get(local.base)):
|
||||
if not local.remote.is_remote:
|
||||
continue # avoid checking local packages
|
||||
if local.base in self.ignore_list:
|
||||
@ -102,11 +103,12 @@ class UpdateHandler(Cleaner):
|
||||
Returns:
|
||||
list[Package]: list of local packages which are out-of-dated
|
||||
"""
|
||||
result: list[Package] = []
|
||||
packages = {local.base: local for local in self.packages()}
|
||||
local_versions = {package_base: package.version for package_base, package in packages.items()}
|
||||
|
||||
result: list[Package] = []
|
||||
for cache_dir in self.paths.cache.iterdir():
|
||||
with self.in_package_context(cache_dir.name):
|
||||
with self.in_package_context(cache_dir.name, local_versions.get(cache_dir.name)):
|
||||
try:
|
||||
source = RemoteSource(
|
||||
source=PackageSource.Local,
|
||||
|
@ -17,30 +17,26 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import requests
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.exceptions import BuildError
|
||||
from ahriman.core.log import LazyLogging
|
||||
from ahriman.core.util import check_output, exception_response_text
|
||||
from ahriman.core.http import SyncHttpClient
|
||||
from ahriman.core.util import check_output
|
||||
from ahriman.models.sign_settings import SignSettings
|
||||
|
||||
|
||||
class GPG(LazyLogging):
|
||||
class GPG(SyncHttpClient):
|
||||
"""
|
||||
gnupg wrapper
|
||||
|
||||
Attributes:
|
||||
DEFAULT_TIMEOUT(int): (class attribute) HTTP request timeout in seconds
|
||||
configuration(Configuration): configuration instance
|
||||
default_key(str | None): default PGP key ID to use
|
||||
targets(set[SignSettings]): list of targets to sign (repository, package etc)
|
||||
targets(set[SignSettings]): list of targets to sign (repository, package etc.)
|
||||
"""
|
||||
|
||||
_check_output = check_output
|
||||
DEFAULT_TIMEOUT = 30
|
||||
|
||||
def __init__(self, configuration: Configuration) -> None:
|
||||
"""
|
||||
@ -49,6 +45,7 @@ class GPG(LazyLogging):
|
||||
Args:
|
||||
configuration(Configuration): configuration instance
|
||||
"""
|
||||
SyncHttpClient.__init__(self)
|
||||
self.configuration = configuration
|
||||
self.targets, self.default_key = self.sign_options(configuration)
|
||||
|
||||
@ -101,28 +98,36 @@ class GPG(LazyLogging):
|
||||
default_key = configuration.get("sign", "key") if targets else None
|
||||
return targets, default_key
|
||||
|
||||
@staticmethod
|
||||
def signature(filepath: Path) -> Path:
|
||||
"""
|
||||
generate signature name for the file
|
||||
|
||||
Args:
|
||||
filepath(Path): path to the file which will be signed
|
||||
|
||||
Returns:
|
||||
str: path to signature file
|
||||
"""
|
||||
return filepath.parent / f"{filepath.name}.sig"
|
||||
|
||||
def key_download(self, server: str, key: str) -> str:
|
||||
"""
|
||||
download key from public PGP server
|
||||
|
||||
Args:
|
||||
server(str): public PGP server which will be used to download the key
|
||||
server(str): public PGP server which will be used to download data
|
||||
key(str): key ID to download
|
||||
|
||||
Returns:
|
||||
str: key as plain text
|
||||
"""
|
||||
key = key if key.startswith("0x") else f"0x{key}"
|
||||
try:
|
||||
response = requests.get(f"https://{server}/pks/lookup", params={
|
||||
"op": "get",
|
||||
"options": "mr",
|
||||
"search": key
|
||||
}, timeout=self.DEFAULT_TIMEOUT)
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
self.logger.exception("could not download key %s from %s: %s", key, server, exception_response_text(e))
|
||||
raise
|
||||
response = self.make_request("GET", f"https://{server}/pks/lookup", params=[
|
||||
("op", "get"),
|
||||
("options", "mr"),
|
||||
("search", key),
|
||||
])
|
||||
return response.text
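
Usage sketch for the refactored download, with placeholder server and key id values:

gpg = GPG(configuration)  # configuration is an existing Configuration instance
key_body = gpg.key_download("keyserver.ubuntu.com", "0x12345678")  # placeholder key id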
|
||||
|
||||
def key_export(self, key: str) -> str:
|
||||
@ -158,7 +163,7 @@ class GPG(LazyLogging):
|
||||
import key to current user and sign it locally
|
||||
|
||||
Args:
|
||||
server(str): public PGP server which will be used to download the key
|
||||
server(str): public PGP server which will be used to download data
|
||||
key(str): key ID to import
|
||||
"""
|
||||
key_body = self.key_download(server, key)
|
||||
@ -177,13 +182,13 @@ class GPG(LazyLogging):
|
||||
"""
|
||||
GPG._check_output(
|
||||
*GPG.sign_command(path, key),
|
||||
exception=BuildError(path.name),
|
||||
exception=BuildError.from_process(path.name),
|
||||
logger=self.logger)
|
||||
return [path, path.parent / f"{path.name}.sig"]
|
||||
return [path, self.signature(path)]
|
||||
|
||||
def process_sign_package(self, path: Path, packager_key: str | None) -> list[Path]:
|
||||
"""
|
||||
sign package if required by configuration
|
||||
sign package if required by configuration and signature doesn't exist
|
||||
|
||||
Args:
|
||||
path(Path): path to file to sign
|
||||
@ -192,6 +197,10 @@ class GPG(LazyLogging):
|
||||
Returns:
|
||||
list[Path]: list of generated files including original file
|
||||
"""
|
||||
if (signature := self.signature(path)).is_file():
|
||||
# the file was already signed before, just use its signature
|
||||
return [path, signature]
|
||||
|
||||
if SignSettings.Packages not in self.targets:
|
||||
return [path]
|
||||
|
||||
|
@ -20,6 +20,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from collections.abc import Callable, Iterable
|
||||
@ -38,7 +39,7 @@ class Spawn(Thread, LazyLogging):
|
||||
active(dict[str, Process]): map of active child processes required to avoid zombies
|
||||
architecture(str): repository architecture
|
||||
command_arguments(list[str]): base command line arguments
|
||||
queue(Queue[tuple[str, bool]]): multiprocessing queue to read updates from processes
|
||||
queue(Queue[tuple[str, bool, int]]): multiprocessing queue to read updates from processes
|
||||
"""
|
||||
|
||||
def __init__(self, args_parser: argparse.ArgumentParser, architecture: str, command_arguments: list[str]) -> None:
|
||||
@ -59,11 +60,25 @@ class Spawn(Thread, LazyLogging):
|
||||
self.lock = Lock()
|
||||
self.active: dict[str, Process] = {}
|
||||
# stupid pylint does not know that it is possible
|
||||
self.queue: Queue[tuple[str, bool] | None] = Queue() # pylint: disable=unsubscriptable-object
|
||||
self.queue: Queue[tuple[str, bool, int] | None] = Queue() # pylint: disable=unsubscriptable-object
|
||||
|
||||
@staticmethod
|
||||
def boolean_action_argument(name: str, value: bool) -> str:
|
||||
"""
|
||||
convert option of given name with value to boolean action argument
|
||||
|
||||
Args:
|
||||
name(str): command line argument name
|
||||
value(bool): command line argument value
|
||||
|
||||
Returns:
|
||||
str: if ``value`` is True, then returns positive flag and negative otherwise
|
||||
"""
|
||||
return name if value else f"no-{name}"
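
A two-line worked example of the helper; packages_update below uses these keys to build the spawned command line flags:

assert Spawn.boolean_action_argument("aur", True) == "aur"
assert Spawn.boolean_action_argument("aur", False) == "no-aur"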
|
||||
|
||||
@staticmethod
|
||||
def process(callback: Callable[[argparse.Namespace, str], bool], args: argparse.Namespace, architecture: str,
|
||||
process_id: str, queue: Queue[tuple[str, bool]]) -> None: # pylint: disable=unsubscriptable-object
|
||||
process_id: str, queue: Queue[tuple[str, bool, int]]) -> None: # pylint: disable=unsubscriptable-object
|
||||
"""
|
||||
helper to run external process
|
||||
|
||||
@ -72,12 +87,17 @@ class Spawn(Thread, LazyLogging):
|
||||
args(argparse.Namespace): command line arguments
|
||||
architecture(str): repository architecture
|
||||
process_id(str): process unique identifier
|
||||
queue(Queue[tuple[str, bool]]): output queue
|
||||
queue(Queue[tuple[str, bool, int]]): output queue
|
||||
"""
|
||||
start_time = time.monotonic()
|
||||
result = callback(args, architecture)
|
||||
queue.put((process_id, result))
|
||||
stop_time = time.monotonic()
|
||||
|
||||
def _spawn_process(self, command: str, *args: str, **kwargs: str | None) -> None:
|
||||
consumed_time = int(1000 * (stop_time - start_time))
|
||||
|
||||
queue.put((process_id, result, consumed_time))
|
||||
|
||||
def _spawn_process(self, command: str, *args: str, **kwargs: str | None) -> str:
|
||||
"""
|
||||
spawn external ahriman process with supplied arguments
|
||||
|
||||
@ -85,6 +105,9 @@ class Spawn(Thread, LazyLogging):
|
||||
command(str): subcommand to run
|
||||
*args(str): positional command arguments
|
||||
**kwargs(str): named command arguments
|
||||
|
||||
Returns:
|
||||
str: spawned process id
|
||||
"""
|
||||
# default arguments
|
||||
arguments = self.command_arguments[:]
|
||||
@ -111,19 +134,36 @@ class Spawn(Thread, LazyLogging):
|
||||
|
||||
with self.lock:
|
||||
self.active[process_id] = process
|
||||
return process_id
|
||||
|
||||
def key_import(self, key: str, server: str | None) -> None:
|
||||
def has_process(self, process_id: str) -> bool:
|
||||
"""
|
||||
check if given process is alive
|
||||
|
||||
Args:
|
||||
process_id(str): process id to be checked as returned by ``Spawn._spawn_process``
|
||||
|
||||
Returns:
|
||||
bool: True in case if process still counts as active and False otherwise
|
||||
"""
|
||||
with self.lock:
|
||||
return process_id in self.active
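
has_process is what a process status endpoint can poll; a hypothetical aiohttp handler shape. The real /api/v1/service/process view is not part of this hunk, and the application key is an assumption:

from aiohttp import web

async def get_process(request: web.Request) -> web.Response:
    process_id = request.match_info["process_id"]
    spawner = request.app["spawn"]  # assumed application key holding the Spawn instance
    if not spawner.has_process(process_id):
        raise web.HTTPNotFound()
    return web.json_response({"is_alive": True})  # matches what RemoteCall.is_process_alive reads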
|
||||
|
||||
def key_import(self, key: str, server: str | None) -> str:
|
||||
"""
|
||||
import key to service cache
|
||||
|
||||
Args:
|
||||
key(str): key to import
|
||||
server(str | None): PGP key server
|
||||
|
||||
Returns:
|
||||
str: spawned process id
|
||||
"""
|
||||
kwargs = {} if server is None else {"key-server": server}
|
||||
self._spawn_process("service-key-import", key, **kwargs)
|
||||
return self._spawn_process("service-key-import", key, **kwargs)
|
||||
|
||||
def packages_add(self, packages: Iterable[str], username: str | None, *, now: bool) -> None:
|
||||
def packages_add(self, packages: Iterable[str], username: str | None, *, now: bool) -> str:
|
||||
"""
|
||||
add packages
|
||||
|
||||
@ -131,48 +171,69 @@ class Spawn(Thread, LazyLogging):
|
||||
packages(Iterable[str]): packages list to add
|
||||
username(str | None): optional override of username for build process
|
||||
now(bool): build packages now
|
||||
|
||||
Returns:
|
||||
str: spawned process id
|
||||
"""
|
||||
kwargs = {"username": username}
|
||||
if now:
|
||||
kwargs["now"] = ""
|
||||
self._spawn_process("package-add", *packages, **kwargs)
|
||||
return self._spawn_process("package-add", *packages, **kwargs)
|
||||
|
||||
def packages_rebuild(self, depends_on: str, username: str | None) -> None:
|
||||
def packages_rebuild(self, depends_on: str, username: str | None) -> str:
|
||||
"""
|
||||
rebuild packages which depend on the specified package
|
||||
|
||||
Args:
|
||||
depends_on(str): packages dependency
|
||||
username(str | None): optional override of username for build process
|
||||
|
||||
Returns:
|
||||
str: spawned process id
|
||||
"""
|
||||
kwargs = {"depends-on": depends_on, "username": username}
|
||||
self._spawn_process("repo-rebuild", **kwargs)
|
||||
return self._spawn_process("repo-rebuild", **kwargs)
|
||||
|
||||
def packages_remove(self, packages: Iterable[str]) -> None:
|
||||
def packages_remove(self, packages: Iterable[str]) -> str:
|
||||
"""
|
||||
remove packages
|
||||
|
||||
Args:
|
||||
packages(Iterable[str]): packages list to remove
|
||||
"""
|
||||
self._spawn_process("package-remove", *packages)
|
||||
|
||||
def packages_update(self, username: str | None) -> None:
|
||||
Returns:
|
||||
str: spawned process id
|
||||
"""
|
||||
return self._spawn_process("package-remove", *packages)
|
||||
|
||||
def packages_update(self, username: str | None, *, aur: bool, local: bool, manual: bool) -> str:
|
||||
"""
|
||||
run full repository update
|
||||
|
||||
Args:
|
||||
username(str | None): optional override of username for build process
|
||||
aur(bool): check for aur updates
|
||||
local(bool): check for local packages updates
|
||||
manual(bool): check for manual packages
|
||||
|
||||
Returns:
|
||||
str: spawned process id
|
||||
"""
|
||||
kwargs = {"username": username}
|
||||
self._spawn_process("repo-update", **kwargs)
|
||||
kwargs = {
|
||||
"username": username,
|
||||
self.boolean_action_argument("aur", aur): "",
|
||||
self.boolean_action_argument("local", local): "",
|
||||
self.boolean_action_argument("manual", manual): "",
|
||||
}
|
||||
return self._spawn_process("repo-update", **kwargs)
|
||||
|
||||
def run(self) -> None:
|
||||
"""
|
||||
thread run method
|
||||
"""
|
||||
for process_id, status in iter(self.queue.get, None):
|
||||
self.logger.info("process %s has been terminated with status %s", process_id, status)
|
||||
for process_id, status, consumed_time in iter(self.queue.get, None):
|
||||
self.logger.info("process %s has been terminated with status %s, consumed time %s",
|
||||
process_id, status, consumed_time / 1000)
|
||||
|
||||
with self.lock:
|
||||
process = self.active.pop(process_id, None)
|
||||
|
@ -24,6 +24,7 @@ import logging
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
|
||||
from ahriman.models.internal_status import InternalStatus
|
||||
from ahriman.models.log_record_id import LogRecordId
|
||||
from ahriman.models.package import Package
|
||||
|
||||
|
||||
@ -60,7 +61,7 @@ class Client:
|
||||
return WebClient(configuration)
|
||||
return Client()
|
||||
|
||||
def add(self, package: Package, status: BuildStatusEnum) -> None:
|
||||
def package_add(self, package: Package, status: BuildStatusEnum) -> None:
|
||||
"""
|
||||
add new package with status
|
||||
|
||||
@ -69,7 +70,7 @@ class Client:
|
||||
status(BuildStatusEnum): current package build status
|
||||
"""
|
||||
|
||||
def get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
|
||||
def package_get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
|
||||
"""
|
||||
get package status
|
||||
|
||||
@ -82,25 +83,16 @@ class Client:
|
||||
del package_base
|
||||
return []
|
||||
|
||||
def get_internal(self) -> InternalStatus:
|
||||
"""
|
||||
get internal service status
|
||||
|
||||
Returns:
|
||||
InternalStatus: current internal (web) service status
|
||||
"""
|
||||
return InternalStatus(status=BuildStatus())
|
||||
|
||||
def logs(self, package_base: str, record: logging.LogRecord) -> None:
|
||||
def package_logs(self, log_record_id: LogRecordId, record: logging.LogRecord) -> None:
|
||||
"""
|
||||
post log record
|
||||
|
||||
Args:
|
||||
package_base(str) package base
|
||||
log_record_id(LogRecordId): log record id
|
||||
record(logging.LogRecord): log record to post to api
|
||||
"""
|
||||
|
||||
def remove(self, package_base: str) -> None:
|
||||
def package_remove(self, package_base: str) -> None:
|
||||
"""
|
||||
remove packages from watcher
|
||||
|
||||
@ -108,7 +100,7 @@ class Client:
|
||||
package_base(str): package base to remove
|
||||
"""
|
||||
|
||||
def update(self, package_base: str, status: BuildStatusEnum) -> None:
|
||||
def package_update(self, package_base: str, status: BuildStatusEnum) -> None:
|
||||
"""
|
||||
update package build status. Unlike ``add`` it does not update package properties
|
||||
|
||||
@ -117,14 +109,6 @@ class Client:
|
||||
status(BuildStatusEnum): current package build status
|
||||
"""
|
||||
|
||||
def update_self(self, status: BuildStatusEnum) -> None:
|
||||
"""
|
||||
update ahriman status itself
|
||||
|
||||
Args:
|
||||
status(BuildStatusEnum): current ahriman status
|
||||
"""
|
||||
|
||||
def set_building(self, package_base: str) -> None:
|
||||
"""
|
||||
set package status to building
|
||||
@ -132,7 +116,7 @@ class Client:
|
||||
Args:
|
||||
package_base(str): package base to update
|
||||
"""
|
||||
return self.update(package_base, BuildStatusEnum.Building)
|
||||
return self.package_update(package_base, BuildStatusEnum.Building)
|
||||
|
||||
def set_failed(self, package_base: str) -> None:
|
||||
"""
|
||||
@ -141,7 +125,7 @@ class Client:
|
||||
Args:
|
||||
package_base(str): package base to update
|
||||
"""
|
||||
return self.update(package_base, BuildStatusEnum.Failed)
|
||||
return self.package_update(package_base, BuildStatusEnum.Failed)
|
||||
|
||||
def set_pending(self, package_base: str) -> None:
|
||||
"""
|
||||
@ -150,7 +134,7 @@ class Client:
|
||||
Args:
|
||||
package_base(str): package base to update
|
||||
"""
|
||||
return self.update(package_base, BuildStatusEnum.Pending)
|
||||
return self.package_update(package_base, BuildStatusEnum.Pending)
|
||||
|
||||
def set_success(self, package: Package) -> None:
|
||||
"""
|
||||
@ -159,7 +143,7 @@ class Client:
|
||||
Args:
|
||||
package(Package): current package properties
|
||||
"""
|
||||
return self.add(package, BuildStatusEnum.Success)
|
||||
return self.package_add(package, BuildStatusEnum.Success)
|
||||
|
||||
def set_unknown(self, package: Package) -> None:
|
||||
"""
|
||||
@ -168,4 +152,21 @@ class Client:
|
||||
Args:
|
||||
package(Package): current package properties
|
||||
"""
|
||||
return self.add(package, BuildStatusEnum.Unknown)
|
||||
return self.package_add(package, BuildStatusEnum.Unknown)
|
||||
|
||||
def status_get(self) -> InternalStatus:
|
||||
"""
|
||||
get internal service status
|
||||
|
||||
Returns:
|
||||
InternalStatus: current internal (web) service status
|
||||
"""
|
||||
return InternalStatus(status=BuildStatus())
|
||||
|
||||
def status_update(self, status: BuildStatusEnum) -> None:
|
||||
"""
|
||||
update ahriman status itself
|
||||
|
||||
Args:
|
||||
status(BuildStatusEnum): current ahriman status
|
||||
"""
|
||||
|
@ -17,8 +17,6 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import os
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.database import SQLite
|
||||
from ahriman.core.exceptions import UnknownPackageError
|
||||
@ -59,7 +57,7 @@ class Watcher(LazyLogging):
|
||||
self.status = BuildStatus()
|
||||
|
||||
# special variables for updating logs
|
||||
self._last_log_record_id = LogRecordId("", os.getpid())
|
||||
self._last_log_record_id = LogRecordId("", "")
|
||||
|
||||
@property
|
||||
def packages(self) -> list[tuple[Package, BuildStatus]]:
|
||||
@ -71,36 +69,6 @@ class Watcher(LazyLogging):
|
||||
"""
|
||||
return list(self.known.values())
|
||||
|
||||
def get(self, package_base: str) -> tuple[Package, BuildStatus]:
|
||||
"""
|
||||
get current package base build status
|
||||
|
||||
Args:
|
||||
package_base(str): package base
|
||||
|
||||
Returns:
|
||||
tuple[Package, BuildStatus]: package and its status
|
||||
|
||||
Raises:
|
||||
UnknownPackage: if no package found
|
||||
"""
|
||||
try:
|
||||
return self.known[package_base]
|
||||
except KeyError:
|
||||
raise UnknownPackageError(package_base)
|
||||
|
||||
def get_logs(self, package_base: str) -> str:
|
||||
"""
|
||||
extract logs for the package base
|
||||
|
||||
Args:
|
||||
package_base(str): package base
|
||||
|
||||
Returns:
|
||||
str: package logs
|
||||
"""
|
||||
return self.database.logs_get(package_base)
|
||||
|
||||
def load(self) -> None:
|
||||
"""
|
||||
load packages from local repository. In case if last status is known, it will use it
|
||||
@ -117,7 +85,62 @@ class Watcher(LazyLogging):
|
||||
if package.base in self.known:
|
||||
self.known[package.base] = (package, status)
|
||||
|
||||
def remove(self, package_base: str) -> None:
|
||||
def logs_get(self, package_base: str) -> str:
|
||||
"""
|
||||
extract logs for the package base
|
||||
|
||||
Args:
|
||||
package_base(str): package base
|
||||
|
||||
Returns:
|
||||
str: package logs
|
||||
"""
|
||||
return self.database.logs_get(package_base)
|
||||
|
||||
def logs_remove(self, package_base: str, version: str | None) -> None:
|
||||
"""
|
||||
remove package related logs
|
||||
|
||||
Args:
|
||||
package_base(str): package base
|
||||
version(str): package versio
|
||||
"""
|
||||
self.database.logs_remove(package_base, version)
|
||||
|
||||
def logs_update(self, log_record_id: LogRecordId, created: float, record: str) -> None:
|
||||
"""
|
||||
make new log record into database
|
||||
|
||||
Args:
|
||||
log_record_id(LogRecordId): log record id
|
||||
created(float): log created timestamp
|
||||
record(str): log record
|
||||
"""
|
||||
if self._last_log_record_id != log_record_id:
|
||||
# there is new log record, so we remove old ones
|
||||
self.logs_remove(log_record_id.package_base, log_record_id.version)
|
||||
self._last_log_record_id = log_record_id
|
||||
self.database.logs_insert(log_record_id, created, record)
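
Because the record id now includes the version, switching to a new build of the same package implicitly rotates its logs; a sketch assuming an existing Watcher instance:

import time
from ahriman.models.log_record_id import LogRecordId

watcher.logs_update(LogRecordId("ahriman", "2.11.0"), time.time(), "first record of this build")
# a different version triggers logs_remove("ahriman", "2.11.1") first, dropping the 2.11.0 records
watcher.logs_update(LogRecordId("ahriman", "2.11.1"), time.time(), "new build, old logs rotated out")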
|
||||
|
||||
def package_get(self, package_base: str) -> tuple[Package, BuildStatus]:
|
||||
"""
|
||||
get current package base build status
|
||||
|
||||
Args:
|
||||
package_base(str): package base
|
||||
|
||||
Returns:
|
||||
tuple[Package, BuildStatus]: package and its status
|
||||
|
||||
Raises:
|
||||
UnknownPackageError: if no package found
|
||||
"""
|
||||
try:
|
||||
return self.known[package_base]
|
||||
except KeyError:
|
||||
raise UnknownPackageError(package_base) from None
|
||||
|
||||
def package_remove(self, package_base: str) -> None:
|
||||
"""
|
||||
remove package base from known list if any
|
||||
|
||||
@ -126,19 +149,9 @@ class Watcher(LazyLogging):
|
||||
"""
|
||||
self.known.pop(package_base, None)
|
||||
self.database.package_remove(package_base)
|
||||
self.remove_logs(package_base, None)
|
||||
self.logs_remove(package_base, None)
|
||||
|
||||
def remove_logs(self, package_base: str, current_process_id: int | None) -> None:
|
||||
"""
|
||||
remove package related logs
|
||||
|
||||
Args:
|
||||
package_base(str): package base
|
||||
current_process_id(int | None): current process id
|
||||
"""
|
||||
self.database.logs_remove(package_base, current_process_id)
|
||||
|
||||
def update(self, package_base: str, status: BuildStatusEnum, package: Package | None) -> None:
|
||||
def package_update(self, package_base: str, status: BuildStatusEnum, package: Package | None) -> None:
|
||||
"""
|
||||
update package status and description
|
||||
|
||||
@ -148,33 +161,18 @@ class Watcher(LazyLogging):
|
||||
package(Package | None): optional package description. In case if not set current properties will be used
|
||||
|
||||
Raises:
|
||||
UnknownPackage: if no package found
|
||||
UnknownPackageError: if no package found
|
||||
"""
|
||||
if package is None:
|
||||
try:
|
||||
package, _ = self.known[package_base]
|
||||
except KeyError:
|
||||
raise UnknownPackageError(package_base)
|
||||
raise UnknownPackageError(package_base) from None
|
||||
full_status = BuildStatus(status)
|
||||
self.known[package_base] = (package, full_status)
|
||||
self.database.package_update(package, full_status)
|
||||
|
||||
def update_logs(self, log_record_id: LogRecordId, created: float, record: str) -> None:
|
||||
"""
|
||||
make new log record into database
|
||||
|
||||
Args:
|
||||
log_record_id(LogRecordId): log record id
|
||||
created(float): log created record
|
||||
record(str): log record
|
||||
"""
|
||||
if self._last_log_record_id != log_record_id:
|
||||
# there is new log record, so we remove old ones
|
||||
self.remove_logs(log_record_id.package_base, log_record_id.process_id)
|
||||
self._last_log_record_id = log_record_id
|
||||
self.database.logs_insert(log_record_id, created, record)
|
||||
|
||||
def update_self(self, status: BuildStatusEnum) -> None:
|
||||
def status_update(self, status: BuildStatusEnum) -> None:
|
||||
"""
|
||||
update service status
|
||||
|
||||
|
@ -21,28 +21,26 @@ import contextlib
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from collections.abc import Generator
|
||||
from functools import cached_property
|
||||
from urllib.parse import quote_plus as urlencode
|
||||
|
||||
from ahriman import __version__
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.log import LazyLogging
|
||||
from ahriman.core.http import SyncHttpClient
|
||||
from ahriman.core.status.client import Client
|
||||
from ahriman.core.util import exception_response_text
|
||||
from ahriman.models.build_status import BuildStatus, BuildStatusEnum
|
||||
from ahriman.models.internal_status import InternalStatus
|
||||
from ahriman.models.log_record_id import LogRecordId
|
||||
from ahriman.models.package import Package
|
||||
from ahriman.models.user import User
|
||||
|
||||
|
||||
class WebClient(Client, LazyLogging):
|
||||
class WebClient(Client, SyncHttpClient):
|
||||
"""
|
||||
build status reporter web client
|
||||
|
||||
Attributes:
|
||||
address(str): address of the web service
|
||||
suppress_errors(bool): suppress logging errors (e.g. if no web server available)
|
||||
user(User | None): web service user descriptor
|
||||
use_unix_socket(bool): whether to use unix socket or not
|
||||
"""
|
||||
|
||||
def __init__(self, configuration: Configuration) -> None:
|
||||
@ -52,33 +50,20 @@ class WebClient(Client, LazyLogging):
|
||||
Args:
|
||||
configuration(Configuration): configuration instance
|
||||
"""
|
||||
self.address, use_unix_socket = self.parse_address(configuration)
|
||||
self.user = User.from_option(
|
||||
configuration.get("web", "username", fallback=None),
|
||||
configuration.get("web", "password", fallback=None))
|
||||
self.suppress_errors = configuration.getboolean("settings", "suppress_http_log_errors", fallback=False)
|
||||
suppress_errors = configuration.getboolean("settings", "suppress_http_log_errors", fallback=False)
|
||||
SyncHttpClient.__init__(self, "web", configuration, suppress_errors=suppress_errors)
|
||||
|
||||
self.__session = self._create_session(use_unix_socket=use_unix_socket)
|
||||
self.address, self.use_unix_socket = self.parse_address(configuration)
|
||||
|
||||
@property
|
||||
def _login_url(self) -> str:
|
||||
@cached_property
|
||||
def session(self) -> requests.Session:
|
||||
"""
|
||||
get url for the login api
|
||||
get or create session
|
||||
|
||||
Returns:
|
||||
str: full url for web service to log in
|
||||
requests.Session: created session object
|
||||
"""
|
||||
return f"{self.address}/api/v1/login"
|
||||
|
||||
@property
|
||||
def _status_url(self) -> str:
|
||||
"""
|
||||
get url for the status api
|
||||
|
||||
Returns:
|
||||
str: full url for web service for status
|
||||
"""
|
||||
return f"{self.address}/api/v1/status"
|
||||
return self._create_session(use_unix_socket=self.use_unix_socket)
|
||||
|
||||
@staticmethod
|
||||
def parse_address(configuration: Configuration) -> tuple[str, bool]:
|
||||
@ -102,32 +87,6 @@ class WebClient(Client, LazyLogging):
|
||||
address = f"http://{host}:{port}"
|
||||
return address, False
|
||||
|
||||
@contextlib.contextmanager
|
||||
def __get_session(self, session: requests.Session | None = None) -> Generator[requests.Session, None, None]:
|
||||
"""
|
||||
execute request and handle exceptions
|
||||
|
||||
Args:
|
||||
session(requests.Session | None, optional): session to be used or stored instance property otherwise
|
||||
(Default value = None)
|
||||
|
||||
Yields:
|
||||
requests.Session: session for requests
|
||||
"""
|
||||
try:
|
||||
if session is not None:
|
||||
yield session # use session from arguments
|
||||
else:
|
||||
yield self.__session # use instance generated session
|
||||
except requests.RequestException as e:
|
||||
if self.suppress_errors:
|
||||
return
|
||||
self.logger.exception("could not perform http request: %s", exception_response_text(e))
|
||||
except Exception:
|
||||
if self.suppress_errors:
|
||||
return
|
||||
self.logger.exception("could not perform http request")
|
||||
|
||||
def _create_session(self, *, use_unix_socket: bool) -> requests.Session:
|
||||
"""
|
||||
generate new request session
|
||||
@ -157,17 +116,25 @@ class WebClient(Client, LazyLogging):
|
||||
Args:
|
||||
session(requests.Session): request session to login
|
||||
"""
|
||||
if self.user is None:
|
||||
if self.auth is None:
|
||||
return # no auth configured
|
||||
|
||||
username, password = self.auth
|
||||
payload = {
|
||||
"username": self.user.username,
|
||||
"password": self.user.password
|
||||
"username": username,
|
||||
"password": password,
|
||||
}
|
||||
with contextlib.suppress(Exception):
|
||||
self.make_request("POST", self._login_url(), json=payload, session=session)
|
||||
|
||||
with self.__get_session(session):
|
||||
response = session.post(self._login_url, json=payload)
|
||||
response.raise_for_status()
|
||||
def _login_url(self) -> str:
|
||||
"""
|
||||
get url for the login api
|
||||
|
||||
Returns:
|
||||
str: full url for web service to log in
|
||||
"""
|
||||
return f"{self.address}/api/v1/login"
|
||||
|
||||
def _logs_url(self, package_base: str) -> str:
|
||||
"""
|
||||
@ -195,7 +162,16 @@ class WebClient(Client, LazyLogging):
|
||||
suffix = f"/{package_base}" if package_base else ""
|
||||
return f"{self.address}/api/v1/packages{suffix}"
|
||||
|
||||
def add(self, package: Package, status: BuildStatusEnum) -> None:
|
||||
def _status_url(self) -> str:
|
||||
"""
|
||||
get url for the status api
|
||||
|
||||
Returns:
|
||||
str: full url for web service for status
|
||||
"""
|
||||
return f"{self.address}/api/v1/status"
|
||||
|
||||
def package_add(self, package: Package, status: BuildStatusEnum) -> None:
|
||||
"""
|
||||
add new package with status
|
||||
|
||||
@ -207,12 +183,10 @@ class WebClient(Client, LazyLogging):
|
||||
"status": status.value,
|
||||
"package": package.view()
|
||||
}
|
||||
with contextlib.suppress(Exception):
|
||||
self.make_request("POST", self._package_url(package.base), json=payload)
|
||||
|
||||
with self.__get_session() as session:
|
||||
response = session.post(self._package_url(package.base), json=payload)
|
||||
response.raise_for_status()
|
||||
|
||||
def get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
|
||||
def package_get(self, package_base: str | None) -> list[tuple[Package, BuildStatus]]:
|
||||
"""
|
||||
get package status
|
||||
|
||||
@ -222,66 +196,47 @@ class WebClient(Client, LazyLogging):
|
||||
Returns:
|
||||
list[tuple[Package, BuildStatus]]: list of current package description and status if it has been found
|
||||
"""
|
||||
with self.__get_session() as session:
|
||||
response = session.get(self._package_url(package_base or ""))
|
||||
response.raise_for_status()
|
||||
with contextlib.suppress(Exception):
|
||||
response = self.make_request("GET", self._package_url(package_base or ""))
|
||||
response_json = response.json()
|
||||
|
||||
status_json = response.json()
|
||||
return [
|
||||
(Package.from_json(package["package"]), BuildStatus.from_json(package["status"]))
|
||||
for package in status_json
|
||||
for package in response_json
|
||||
]
|
||||
|
||||
# noinspection PyUnreachableCode
|
||||
return []
|
||||
|
||||
def get_internal(self) -> InternalStatus:
|
||||
"""
|
||||
get internal service status
|
||||
|
||||
Returns:
|
||||
InternalStatus: current internal (web) service status
|
||||
"""
|
||||
with self.__get_session() as session:
|
||||
response = session.get(self._status_url)
|
||||
response.raise_for_status()
|
||||
|
||||
status_json = response.json()
|
||||
return InternalStatus.from_json(status_json)
|
||||
|
||||
# noinspection PyUnreachableCode
|
||||
return InternalStatus(status=BuildStatus())
|
||||
|
||||
def logs(self, package_base: str, record: logging.LogRecord) -> None:
|
||||
def package_logs(self, log_record_id: LogRecordId, record: logging.LogRecord) -> None:
|
||||
"""
|
||||
post log record
|
||||
|
||||
Args:
|
||||
package_base(str) package base
|
||||
log_record_id(LogRecordId): log record id
|
||||
record(logging.LogRecord): log record to post to api
|
||||
"""
|
||||
payload = {
|
||||
"created": record.created,
|
||||
"message": record.getMessage(),
|
||||
"process_id": record.process,
|
||||
"version": log_record_id.version,
|
||||
}
|
||||
|
||||
# in this method exception has to be handled outside in logger handler
|
||||
response = self.__session.post(self._logs_url(package_base), json=payload)
|
||||
response.raise_for_status()
|
||||
# this is a special case: we do not want to suppress the exception here,
# because, if raised, it will be handled by the upstream HttpLogHandler.
# On the other hand, we force suppression of all http logs to avoid cyclic reporting
|
||||
self.make_request("POST", self._logs_url(log_record_id.package_base), json=payload, suppress_errors=True)
|
||||
|
||||
def remove(self, package_base: str) -> None:
|
||||
def package_remove(self, package_base: str) -> None:
|
||||
"""
|
||||
remove packages from watcher
|
||||
|
||||
Args:
|
||||
package_base(str): basename to remove
|
||||
"""
|
||||
with self.__get_session() as session:
|
||||
response = session.delete(self._package_url(package_base))
|
||||
response.raise_for_status()
|
||||
with contextlib.suppress(Exception):
|
||||
self.make_request("DELETE", self._package_url(package_base))
|
||||
|
||||
def update(self, package_base: str, status: BuildStatusEnum) -> None:
|
||||
def package_update(self, package_base: str, status: BuildStatusEnum) -> None:
|
||||
"""
|
||||
update package build status. Unlike ``add`` it does not update package properties
|
||||
|
||||
@ -290,12 +245,25 @@ class WebClient(Client, LazyLogging):
|
||||
status(BuildStatusEnum): current package build status
|
||||
"""
|
||||
payload = {"status": status.value}
|
||||
with contextlib.suppress(Exception):
|
||||
self.make_request("POST", self._package_url(package_base), json=payload)
|
||||
|
||||
with self.__get_session() as session:
|
||||
response = session.post(self._package_url(package_base), json=payload)
|
||||
response.raise_for_status()
|
||||
def status_get(self) -> InternalStatus:
|
||||
"""
|
||||
get internal service status
|
||||
|
||||
def update_self(self, status: BuildStatusEnum) -> None:
|
||||
Returns:
|
||||
InternalStatus: current internal (web) service status
|
||||
"""
|
||||
with contextlib.suppress(Exception):
|
||||
response = self.make_request("GET", self._status_url())
|
||||
response_json = response.json()
|
||||
|
||||
return InternalStatus.from_json(response_json)
|
||||
|
||||
return InternalStatus(status=BuildStatus())
|
||||
|
||||
def status_update(self, status: BuildStatusEnum) -> None:
|
||||
"""
|
||||
update ahriman status itself
|
||||
|
||||
@ -303,7 +271,5 @@ class WebClient(Client, LazyLogging):
|
||||
status(BuildStatusEnum): current ahriman status
|
||||
"""
|
||||
payload = {"status": status.value}
|
||||
|
||||
with self.__get_session() as session:
|
||||
response = session.post(self._status_url, json=payload)
|
||||
response.raise_for_status()
|
||||
with contextlib.suppress(Exception):
|
||||
self.make_request("POST", self._status_url(), json=payload)
|
||||
|
@ -140,6 +140,9 @@ class KeyringGenerator(PkgbuildGenerator):
|
||||
|
||||
Args:
|
||||
source_path(Path): destination of the file content
|
||||
|
||||
Raises:
|
||||
PkgbuildGeneratorError: no trusted keys available
|
||||
"""
|
||||
if not self.trusted:
|
||||
raise PkgbuildGeneratorError
|
||||
|
@ -21,9 +21,10 @@ from __future__ import annotations
|
||||
|
||||
import functools
|
||||
|
||||
from collections.abc import Callable, Iterable
|
||||
from collections.abc import Iterable
|
||||
|
||||
from ahriman.core.util import partition
|
||||
from ahriman.core.exceptions import PartitionError
|
||||
from ahriman.core.util import minmax, partition
|
||||
from ahriman.models.package import Package
|
||||
|
||||
|
||||
@ -92,7 +93,7 @@ class Tree:
|
||||
dependency tree implementation
|
||||
|
||||
Attributes:
|
||||
leaves[list[Leaf]): list of tree leaves
|
||||
leaves(list[Leaf]): list of tree leaves
|
||||
|
||||
Examples:
|
||||
The most important feature here is to generate tree levels one by one which can be achieved by using class
|
||||
@ -128,6 +129,75 @@ class Tree:
|
||||
"""
|
||||
self.leaves = leaves
|
||||
|
||||
@staticmethod
|
||||
def balance(partitions: list[list[Leaf]]) -> list[list[Leaf]]:
|
||||
"""
|
||||
balance partitions. This method tries to find the longest and the shortest lists and move free leaves between
them if possible. In case if there are no free packages (i.e. the ones which don't depend on any other package in
the partition and are not a dependency of any), the partition is left as it is. This method is guaranteed to
produce the same unsorted sequences for the same unsorted input
|
||||
|
||||
Args:
|
||||
partitions(list[list[Leaf]]): source unbalanced partitions
|
||||
|
||||
Returns:
|
||||
list[list[Leaf]]: balanced partitions
|
||||
"""
|
||||
# to make sure that we will have same sequences after balance we need to ensure that list is sorted
|
||||
partitions = [
|
||||
sorted(part, key=lambda leaf: leaf.package.base)
|
||||
for part in partitions if part
|
||||
]
|
||||
|
||||
while True:
|
||||
min_part, max_part = minmax(partitions, key=len)
|
||||
if len(max_part) - len(min_part) <= 1: # there is nothing to balance
|
||||
break
|
||||
|
||||
# find first package from max list which is not dependency and doesn't depend on any other package
|
||||
free_index = next(
|
||||
(
|
||||
index
|
||||
for index, leaf in enumerate(max_part)
|
||||
if not leaf.is_dependency(max_part) and leaf.is_root(max_part)
|
||||
),
|
||||
None
|
||||
)
|
||||
if free_index is None: # impossible to balance between the shortest and the longest
|
||||
break
|
||||
|
||||
min_part.append(max_part.pop(free_index))
|
||||
|
||||
return partitions
|
||||
|
||||
@staticmethod
|
||||
def partition(packages: Iterable[Package], *, count: int) -> list[list[Package]]:
|
||||
"""
|
||||
partition tree into independent chunks of more or less equal amount of packages. The packages in produced
|
||||
partitions don't depend on any package from other partitions
|
||||
|
||||
Args:
|
||||
packages(Iterable[Package]): packages list
|
||||
count(int): maximal amount of partitions
|
||||
|
||||
Returns:
|
||||
list[list[Package]]: list of package lists based on their dependencies. The amount of produced sublists is less
than or equal to ``count``
|
||||
|
||||
Raises:
|
||||
PartitionError: in case if it is impossible to divide tree by specified amount of partitions
|
||||
"""
|
||||
if count < 1:
|
||||
raise PartitionError(count)
|
||||
|
||||
# special case
|
||||
if count == 1:
|
||||
return [sorted(packages, key=lambda package: package.base)]
|
||||
|
||||
leaves = [Leaf(package) for package in packages]
|
||||
instance = Tree(leaves)
|
||||
return instance.partitions(count=count)
|
||||
|
||||
@staticmethod
|
||||
def resolve(packages: Iterable[Package]) -> list[list[Package]]:
|
||||
"""
|
||||
@ -143,6 +213,22 @@ class Tree:
|
||||
instance = Tree(leaves)
|
||||
return instance.levels()
|
||||
|
||||
@staticmethod
|
||||
def sort(leaves: list[list[Leaf]]) -> list[list[Package]]:
|
||||
"""
|
||||
sort given list of leaves by package base
|
||||
|
||||
Args:
|
||||
leaves(list[list[Leaf]]): leaves to sort
|
||||
|
||||
Returns:
|
||||
list[list[Package]]: sorted list of packages on each level
|
||||
"""
|
||||
return [
|
||||
sorted([leaf.package for leaf in level], key=lambda package: package.base)
|
||||
for level in leaves if level
|
||||
]
|
||||
|
||||
def levels(self) -> list[list[Package]]:
|
||||
"""
|
||||
get build levels starting from the packages which do not require any other package to build
|
||||
@ -155,8 +241,10 @@ class Tree:
|
||||
# build initial tree
|
||||
unprocessed = self.leaves[:]
|
||||
while unprocessed:
|
||||
unsorted.append([leaf for leaf in unprocessed if leaf.is_root(unprocessed)])
|
||||
unprocessed = [leaf for leaf in unprocessed if not leaf.is_root(unprocessed)]
|
||||
# additional workaround with partial in order to hide cell-var-from-loop pylint warning
|
||||
predicate = functools.partial(Leaf.is_root, packages=unprocessed)
|
||||
new_level, unprocessed = partition(unprocessed, predicate)
|
||||
unsorted.append(new_level)
|
||||
|
||||
# move leaves to the end if they are not required at the next level
|
||||
for current_num, current_level in enumerate(unsorted[:-1]):
|
||||
@ -164,13 +252,47 @@ class Tree:
|
||||
next_level = unsorted[next_num]
|
||||
|
||||
# change lists inside the collection
|
||||
# additional workaround with partial in order to hide cell-var-from-loop pylint warning
|
||||
predicate = functools.partial(Leaf.is_dependency, packages=next_level)
|
||||
unsorted[current_num], to_be_moved = partition(current_level, predicate)
|
||||
unsorted[next_num].extend(to_be_moved)
|
||||
|
||||
comparator: Callable[[Package], str] = lambda package: package.base
|
||||
return [
|
||||
sorted([leaf.package for leaf in level], key=comparator)
|
||||
for level in unsorted if level
|
||||
]
|
||||
return self.sort(unsorted)
|
||||
|
||||
def partitions(self, *, count: int) -> list[list[Package]]:
|
||||
"""
|
||||
partition tree into (more or less) equal chunks of packages which don't depend on each other
|
||||
|
||||
Args:
|
||||
count(int): maximal amount of partitions
|
||||
|
||||
Returns:
|
||||
list[list[Package]]: sorted list of packages partitions
|
||||
"""
|
||||
unsorted: list[list[Leaf]] = [[] for _ in range(count)]
|
||||
|
||||
# in order to keep result stable we will need to sort packages all times
|
||||
unprocessed = sorted(self.leaves, key=lambda leaf: leaf.package.base)
|
||||
while unprocessed:
|
||||
# pick one and append it to the most free partition and build chunk
|
||||
leaf = unprocessed.pop()
|
||||
chunk = [leaf]
|
||||
|
||||
while True: # python doesn't allow to use walrus operator to unpack tuples
|
||||
# get packages which depend on packages in chunk
|
||||
predicate = functools.partial(Leaf.is_root, packages=chunk)
|
||||
unprocessed, new_dependent = partition(unprocessed, predicate)
|
||||
chunk.extend(new_dependent)
|
||||
|
||||
# get packages which are dependency of packages in chunk
|
||||
predicate = functools.partial(Leaf.is_dependency, packages=chunk)
|
||||
new_dependencies, unprocessed = partition(unprocessed, predicate)
|
||||
chunk.extend(new_dependencies)
|
||||
|
||||
if not new_dependent and not new_dependencies:
|
||||
break
|
||||
|
||||
part = min(unsorted, key=len)
|
||||
part.extend(chunk)
|
||||
|
||||
balanced = self.balance(unsorted)
|
||||
return self.sort(balanced)
|
||||
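The new ``partition`` and ``balance`` methods shown above complement the existing ``resolve`` helper. A short usage sketch — assuming the module lives at ``ahriman.core.tree`` and ``packages`` is an already loaded list of ``Package`` instances — could look like::

    from ahriman.core.tree import Tree

    levels = Tree.resolve(packages)             # build levels, each level only depends on the previous ones
    chunks = Tree.partition(packages, count=4)  # at most 4 chunks which don't depend on each other

    for number, chunk in enumerate(chunks):
        print(number, [package.base for package in chunk])

Partitions are then balanced by moving leaves which are neither a dependency nor a dependent package inside their chunk, which keeps the chunk sizes close to each other without breaking the independence guarantee.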
|
@ -158,13 +158,13 @@ class TriggerLoader(LazyLogging):
|
||||
ModuleType: module loaded from the imported module
|
||||
|
||||
Raises:
|
||||
InvalidExtension: in case if module cannot be loaded from specified package
|
||||
ExtensionError: in case if module cannot be loaded from specified package
|
||||
"""
|
||||
self.logger.info("load module from package %s", package)
|
||||
try:
|
||||
return import_module(package)
|
||||
except ModuleNotFoundError:
|
||||
raise ExtensionError(f"Module {package} not found")
|
||||
raise ExtensionError(f"Module {package} not found") from None
|
||||
|
||||
def load_trigger(self, module_path: str, architecture: str, configuration: Configuration) -> Trigger:
|
||||
"""
|
||||
@ -179,7 +179,7 @@ class TriggerLoader(LazyLogging):
|
||||
Trigger: loaded trigger based on settings
|
||||
|
||||
Raises:
|
||||
InvalidExtension: in case if trigger could not be instantiated
|
||||
ExtensionError: in case if trigger could not be instantiated
|
||||
"""
|
||||
trigger_type = self.load_trigger_class(module_path)
|
||||
try:
|
||||
@ -200,7 +200,7 @@ class TriggerLoader(LazyLogging):
|
||||
type[Trigger]: loaded trigger type by module path
|
||||
|
||||
Raises:
|
||||
InvalidExtension: in case if module cannot be loaded from the specified module path or is not a trigger
|
||||
ExtensionError: in case if module cannot be loaded from the specified module path or is not a trigger
|
||||
"""
|
||||
*package_path_parts, class_name = module_path.split(".")
|
||||
package_or_path = ".".join(package_path_parts)
|
||||
|
@ -31,11 +31,11 @@ from ahriman.models.package import Package
|
||||
|
||||
class Github(HttpUpload):
|
||||
"""
|
||||
upload files to github releases
|
||||
upload files to GitHub releases
|
||||
|
||||
Attributes:
|
||||
github_owner(str): github repository owner
|
||||
github_repository(str): github repository name
|
||||
github_owner(str): GitHub repository owner
|
||||
github_repository(str): GitHub repository name
|
||||
"""
|
||||
|
||||
def __init__(self, architecture: str, configuration: Configuration, section: str) -> None:
|
||||
@ -61,7 +61,7 @@ class Github(HttpUpload):
|
||||
"""
|
||||
try:
|
||||
asset = next(asset for asset in release["assets"] if asset["name"] == name)
|
||||
self._request("DELETE", asset["url"])
|
||||
self.make_request("DELETE", asset["url"])
|
||||
except StopIteration:
|
||||
self.logger.info("no asset %s found in release %s", name, release["name"])
|
||||
|
||||
@ -81,7 +81,7 @@ class Github(HttpUpload):
|
||||
headers = {"Content-Type": mime} if mime is not None else {"Content-Type": "application/octet-stream"}
|
||||
|
||||
with path.open("rb") as archive:
|
||||
self._request("POST", url, params={"name": path.name}, data=archive, headers=headers)
|
||||
self.make_request("POST", url, params=[("name", path.name)], data=archive, headers=headers)
|
||||
|
||||
def get_local_files(self, path: Path) -> dict[Path, str]:
|
||||
"""
|
||||
@ -100,7 +100,7 @@ class Github(HttpUpload):
|
||||
|
||||
def files_remove(self, release: dict[str, Any], local_files: dict[Path, str], remote_files: dict[str, str]) -> None:
|
||||
"""
|
||||
remove files from github
|
||||
remove files from GitHub
|
||||
|
||||
Args:
|
||||
release(dict[str, Any]): release object
|
||||
@ -115,7 +115,7 @@ class Github(HttpUpload):
|
||||
|
||||
def files_upload(self, release: dict[str, Any], local_files: dict[Path, str], remote_files: dict[str, str]) -> None:
|
||||
"""
|
||||
upload files to github
|
||||
upload files to GitHub
|
||||
|
||||
Args:
|
||||
release(dict[str, Any]): release object
|
||||
@ -133,10 +133,10 @@ class Github(HttpUpload):
|
||||
create empty release
|
||||
|
||||
Returns:
|
||||
dict[str, Any]: github API release object for the new release
|
||||
dict[str, Any]: GitHub API release object for the new release
|
||||
"""
|
||||
url = f"https://api.github.com/repos/{self.github_owner}/{self.github_repository}/releases"
|
||||
response = self._request("POST", url, json={"tag_name": self.architecture, "name": self.architecture})
|
||||
response = self.make_request("POST", url, json={"tag_name": self.architecture, "name": self.architecture})
|
||||
release: dict[str, Any] = response.json()
|
||||
return release
|
||||
|
||||
@ -145,15 +145,15 @@ class Github(HttpUpload):
|
||||
get release object if any
|
||||
|
||||
Returns:
|
||||
dict[str, Any] | None: github API release object if release found and None otherwise
|
||||
dict[str, Any] | None: GitHub API release object if release found and None otherwise
|
||||
"""
|
||||
url = f"https://api.github.com/repos/{self.github_owner}/{self.github_repository}/releases/tags/{self.architecture}"
|
||||
try:
|
||||
response = self._request("GET", url)
|
||||
response = self.make_request("GET", url)
|
||||
release: dict[str, Any] = response.json()
|
||||
return release
|
||||
except requests.HTTPError as e:
|
||||
status_code = e.response.status_code if e.response is not None else None
|
||||
except requests.HTTPError as ex:
|
||||
status_code = ex.response.status_code if ex.response is not None else None
|
||||
if status_code == 404:
|
||||
return None
|
||||
raise
|
||||
@ -166,7 +166,7 @@ class Github(HttpUpload):
|
||||
release(dict[str, Any]): release object
|
||||
body(str): new release body
|
||||
"""
|
||||
self._request("POST", release["url"], json={"body": body})
|
||||
self.make_request("POST", release["url"], json={"body": body})
|
||||
|
||||
def sync(self, path: Path, built_packages: list[Package]) -> None:
|
||||
"""
|
||||
|
@ -18,23 +18,17 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import hashlib
|
||||
import requests
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.http import SyncHttpClient
|
||||
from ahriman.core.upload.upload import Upload
|
||||
from ahriman.core.util import exception_response_text
|
||||
|
||||
|
||||
class HttpUpload(Upload):
|
||||
class HttpUpload(Upload, SyncHttpClient):
|
||||
"""
|
||||
helper for the http based uploads
|
||||
|
||||
Attributes:
|
||||
auth(tuple[str, str] | None): HTTP auth object if set
|
||||
timeout(int): HTTP request timeout in seconds
|
||||
"""
|
||||
|
||||
def __init__(self, architecture: str, configuration: Configuration, section: str) -> None:
|
||||
@ -47,10 +41,7 @@ class HttpUpload(Upload):
|
||||
section(str): configuration section name
|
||||
"""
|
||||
Upload.__init__(self, architecture, configuration)
|
||||
password = configuration.get(section, "password", fallback=None)
|
||||
username = configuration.get(section, "username", fallback=None)
|
||||
self.auth = (password, username) if password and username else None
|
||||
self.timeout = configuration.getint(section, "timeout", fallback=30)
|
||||
SyncHttpClient.__init__(self, section, configuration)
|
||||
|
||||
@staticmethod
|
||||
def calculate_hash(path: Path) -> str:
|
||||
@ -96,23 +87,3 @@ class HttpUpload(Upload):
|
||||
file, md5 = line.split()
|
||||
files[file] = md5
|
||||
return files
|
||||
|
||||
def _request(self, method: str, url: str, **kwargs: Any) -> requests.Response:
|
||||
"""
|
||||
request wrapper
|
||||
|
||||
Args:
|
||||
method(str): request method
|
||||
url(str): request url
|
||||
**kwargs(Any): request parameters to be passed as is
|
||||
|
||||
Returns:
|
||||
requests.Response: request response object
|
||||
"""
|
||||
try:
|
||||
response = requests.request(method, url, auth=self.auth, timeout=self.timeout, **kwargs)
|
||||
response.raise_for_status()
|
||||
except requests.HTTPError as e:
|
||||
self.logger.exception("could not perform %s request to %s: %s", method, url, exception_response_text(e))
|
||||
raise
|
||||
return response
|
||||
|
106
src/ahriman/core/upload/remote_service.py
Normal file
@ -0,0 +1,106 @@
|
||||
#
|
||||
# Copyright (c) 2021-2023 ahriman team.
|
||||
#
|
||||
# This file is part of ahriman
|
||||
# (see https://github.com/arcan1s/ahriman).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import requests
|
||||
|
||||
from functools import cached_property
|
||||
from pathlib import Path
|
||||
|
||||
from ahriman.core.configuration import Configuration
|
||||
from ahriman.core.http import MultipartType
|
||||
from ahriman.core.sign.gpg import GPG
|
||||
from ahriman.core.status.web_client import WebClient
|
||||
from ahriman.core.upload.http_upload import HttpUpload
|
||||
from ahriman.models.package import Package
|
||||
|
||||
|
||||
class RemoteService(HttpUpload):
|
||||
"""
|
||||
upload files to another server instance
|
||||
|
||||
Attributes:
|
||||
client(WebClient): web client instance
|
||||
"""
|
||||
|
||||
def __init__(self, architecture: str, configuration: Configuration, section: str) -> None:
|
||||
"""
|
||||
default constructor
|
||||
|
||||
Args:
|
||||
architecture(str): repository architecture
|
||||
configuration(Configuration): configuration instance
|
||||
section(str): settings section name
|
||||
"""
|
||||
HttpUpload.__init__(self, architecture, configuration, section)
|
||||
self.client = WebClient(configuration)
|
||||
|
||||
@cached_property
|
||||
def session(self) -> requests.Session:
|
||||
"""
|
||||
get or create session
|
||||
|
||||
Returns:
|
||||
requests.Session: created session object
|
||||
"""
|
||||
return self.client.session
|
||||
|
||||
def package_upload(self, path: Path, package: Package) -> None:
|
||||
"""
|
||||
upload single package to remote
|
||||
|
||||
Args:
|
||||
path(Path): local path to sync
|
||||
package(Package): package to upload
|
||||
"""
|
||||
def upload(package_path: Path, signature_path: Path | None) -> None:
|
||||
files: dict[str, MultipartType] = {}
|
||||
|
||||
try:
|
||||
# package part always persists
|
||||
files["package"] = package_path.name, package_path.open("rb"), "application/octet-stream", {}
|
||||
# signature part is optional
|
||||
if signature_path is not None:
|
||||
files["signature"] = signature_path.name, signature_path.open("rb"), "application/octet-stream", {}
|
||||
|
||||
self.make_request("POST", f"{self.client.address}/api/v1/service/upload", files=files)
|
||||
finally:
|
||||
for _, fd, _, _ in files.values():
|
||||
fd.close()
|
||||
|
||||
for key, descriptor in package.packages.items():
|
||||
if descriptor.filename is None:
|
||||
self.logger.warning("package %s of %s doesn't have filename set", key, package.base)
|
||||
continue
|
||||
|
||||
archive = path / descriptor.filename
|
||||
maybe_signature_path = GPG.signature(archive)
|
||||
signature = maybe_signature_path if maybe_signature_path.is_file() else None
|
||||
|
||||
upload(archive, signature)
|
||||
|
||||
def sync(self, path: Path, built_packages: list[Package]) -> None:
|
||||
"""
|
||||
sync data to remote server
|
||||
|
||||
Args:
|
||||
path(Path): local path to sync
|
||||
built_packages(list[Package]): list of packages which has just been built
|
||||
"""
|
||||
for package in built_packages:
|
||||
self.package_upload(path, package)
|
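The ``RemoteService`` upload above posts each package (and optionally its signature) as a multipart request to the ``/api/v1/service/upload`` endpoint of another ahriman instance. A standalone sketch of the same request, with a hypothetical server address and file name and without authentication (the real implementation reuses the authenticated ``WebClient`` session)::

    import requests

    address = "http://build-server:8080"  # assumed address of the target ahriman web service
    filename = "ahriman-2.11.0-1-any.pkg.tar.zst"  # hypothetical package archive

    with open(filename, "rb") as package:
        response = requests.post(
            f"{address}/api/v1/service/upload",
            files={"package": (filename, package, "application/octet-stream", {})},
            timeout=30,
        )
    response.raise_for_status()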
@ -51,8 +51,8 @@ class Upload(LazyLogging):
|
||||
|
||||
>>> try:
|
||||
>>> upload.sync(configuration.repository_paths.repository, [])
|
||||
>>> except Exception as exception:
|
||||
>>> handle_exceptions(exception)
|
||||
>>> except Exception as ex:
|
||||
>>> handle_exceptions(ex)
|
||||
"""
|
||||
|
||||
def __init__(self, architecture: str, configuration: Configuration) -> None:
|
||||
@ -80,17 +80,21 @@ class Upload(LazyLogging):
|
||||
Upload: client according to current settings
|
||||
"""
|
||||
section, provider_name = configuration.gettype(target, architecture)
|
||||
provider = UploadSettings.from_option(provider_name)
|
||||
if provider == UploadSettings.Rsync:
|
||||
from ahriman.core.upload.rsync import Rsync
|
||||
return Rsync(architecture, configuration, section)
|
||||
if provider == UploadSettings.S3:
|
||||
from ahriman.core.upload.s3 import S3
|
||||
return S3(architecture, configuration, section)
|
||||
if provider == UploadSettings.Github:
|
||||
from ahriman.core.upload.github import Github
|
||||
return Github(architecture, configuration, section)
|
||||
return Upload(architecture, configuration) # should never happen
|
||||
match UploadSettings.from_option(provider_name):
|
||||
case UploadSettings.Rsync:
|
||||
from ahriman.core.upload.rsync import Rsync
|
||||
return Rsync(architecture, configuration, section)
|
||||
case UploadSettings.S3:
|
||||
from ahriman.core.upload.s3 import S3
|
||||
return S3(architecture, configuration, section)
|
||||
case UploadSettings.GitHub:
|
||||
from ahriman.core.upload.github import Github
|
||||
return Github(architecture, configuration, section)
|
||||
case UploadSettings.RemoteService:
|
||||
from ahriman.core.upload.remote_service import RemoteService
|
||||
return RemoteService(architecture, configuration, section)
|
||||
case _:
|
||||
return Upload(architecture, configuration) # should never happen
|
||||
|
||||
def run(self, path: Path, built_packages: list[Package]) -> None:
|
||||
"""
|
||||
@ -101,13 +105,13 @@ class Upload(LazyLogging):
|
||||
built_packages(list[Package]): list of packages which have just been built
|
||||
|
||||
Raises:
|
||||
SyncFailed: in case of any synchronization unmatched exception
|
||||
SynchronizationError: in case of any synchronization unmatched exception
|
||||
"""
|
||||
try:
|
||||
self.sync(path, built_packages)
|
||||
except Exception:
|
||||
self.logger.exception("remote sync failed")
|
||||
raise SynchronizationError()
|
||||
raise SynchronizationError
|
||||
|
||||
def sync(self, path: Path, built_packages: list[Package]) -> None:
|
||||
"""
|
||||
|
@ -92,6 +92,20 @@ class UploadTrigger(Trigger):
|
||||
},
|
||||
},
|
||||
},
|
||||
"remote-service": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"allowed": ["ahriman", "remote-service"],
|
||||
},
|
||||
"timeout": {
|
||||
"type": "integer",
|
||||
"coerce": "integer",
|
||||
"min": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
"s3": {
|
||||
"type": "dict",
|
||||
"schema": {
|
||||
|
@ -24,7 +24,6 @@ import itertools
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
import selectors
|
||||
import subprocess
|
||||
|
||||
@ -35,7 +34,7 @@ from pathlib import Path
|
||||
from pwd import getpwuid
|
||||
from typing import Any, IO, TypeVar
|
||||
|
||||
from ahriman.core.exceptions import OptionError, UnsafeRunError
|
||||
from ahriman.core.exceptions import CalledProcessError, OptionError, UnsafeRunError
|
||||
from ahriman.models.repository_paths import RepositoryPaths
|
||||
|
||||
|
||||
@ -44,10 +43,10 @@ __all__ = [
|
||||
"check_user",
|
||||
"dataclass_view",
|
||||
"enum_values",
|
||||
"exception_response_text",
|
||||
"extract_user",
|
||||
"filter_json",
|
||||
"full_version",
|
||||
"minmax",
|
||||
"package_like",
|
||||
"parse_version",
|
||||
"partition",
|
||||
@ -65,7 +64,9 @@ __all__ = [
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
def check_output(*args: str, exception: Exception | None = None, cwd: Path | None = None, input_data: str | None = None,
|
||||
# pylint: disable=too-many-locals
|
||||
def check_output(*args: str, exception: Exception | Callable[[int, list[str], str, str], Exception] | None = None,
|
||||
cwd: Path | None = None, input_data: str | None = None,
|
||||
logger: logging.Logger | None = None, user: int | None = None,
|
||||
environment: dict[str, str] | None = None) -> str:
|
||||
"""
|
||||
@ -73,8 +74,9 @@ def check_output(*args: str, exception: Exception | None = None, cwd: Path | Non
|
||||
|
||||
Args:
|
||||
*args(str): command line arguments
|
||||
exception(Exception | None, optional): exception which has to be reraised instead of default subprocess
|
||||
exception (Default value = None)
|
||||
exception(Exception | Callable[[int, list[str], str, str], Exception] | None, optional): exception which has to be
raised instead of the default subprocess exception. If a callable is supplied, the ``subprocess.CalledProcessError``
arguments will be passed to it (Default value = None)
|
||||
cwd(Path | None, optional): current working directory (Default value = None)
|
||||
input_data(str | None, optional): data which will be written to command stdin (Default value = None)
|
||||
logger(logging.Logger | None, optional): logger to log command result if required (Default value = None)
|
||||
@ -85,7 +87,7 @@ def check_output(*args: str, exception: Exception | None = None, cwd: Path | Non
|
||||
str: command output
|
||||
|
||||
Raises:
|
||||
subprocess.CalledProcessError: if subprocess ended with status code different from 0 and no exception supplied
|
||||
CalledProcessError: if subprocess ended with status code different from 0 and no exception supplied
|
||||
|
||||
Examples:
|
||||
Simply call the function::
|
||||
@ -110,7 +112,7 @@ def check_output(*args: str, exception: Exception | None = None, cwd: Path | Non
|
||||
return channel if channel is not None else io.StringIO()
|
||||
|
||||
# wrapper around selectors polling
|
||||
def poll(sel: selectors.BaseSelector) -> Generator[str, None, None]:
|
||||
def poll(sel: selectors.BaseSelector) -> Generator[tuple[str, str], None, None]:
|
||||
for key, _ in sel.select(): # we don't need to check mask here because we have only subscribed on reading
|
||||
line = key.fileobj.readline() # type: ignore[union-attr]
|
||||
if not line: # in case of empty line we remove selector as there is no data here anymore
|
||||
@ -121,8 +123,7 @@ def check_output(*args: str, exception: Exception | None = None, cwd: Path | Non
|
||||
if logger is not None:
|
||||
logger.debug(line)
|
||||
|
||||
if key.data == "stdout":
|
||||
yield line # yield only stdout data
|
||||
yield key.data, line
|
||||
|
||||
environment = environment or {}
|
||||
if user is not None:
|
||||
@ -138,18 +139,26 @@ def check_output(*args: str, exception: Exception | None = None, cwd: Path | Non
|
||||
selector.register(get_io(process, "stdout"), selectors.EVENT_READ, data="stdout")
|
||||
selector.register(get_io(process, "stderr"), selectors.EVENT_READ, data="stderr")
|
||||
|
||||
result: list[str] = []
|
||||
result: dict[str, list[str]] = {
|
||||
"stdout": [],
|
||||
"stderr": [],
|
||||
}
|
||||
while selector.get_map(): # while there are unread selectors, keep reading
|
||||
result.extend(poll(selector))
|
||||
for key_data, output in poll(selector):
|
||||
result[key_data].append(output)
|
||||
|
||||
stdout = "\n".join(result["stdout"]).rstrip("\n") # remove newline at the end of any
|
||||
stderr = "\n".join(result["stderr"]).rstrip("\n")
|
||||
|
||||
process.terminate() # make sure that process is terminated
|
||||
status_code = process.wait()
|
||||
if status_code != 0:
|
||||
if exception is not None:
|
||||
if isinstance(exception, Exception):
|
||||
raise exception
|
||||
raise subprocess.CalledProcessError(status_code, process.args)
|
||||
if callable(exception):
|
||||
raise exception(status_code, list(args), stdout, stderr)
|
||||
raise CalledProcessError(status_code, list(args), stderr)
|
||||
|
||||
return "\n".join(result).rstrip("\n") # remove newline at the end of any
|
||||
return stdout
|
||||
|
||||
|
||||
def check_user(paths: RepositoryPaths, *, unsafe: bool) -> None:
|
||||
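With the change above ``check_output`` collects stdout and stderr separately: only stdout is returned, while stderr is attached to the raised ``CalledProcessError``. A minimal call sketch (the working directory is only an example)::

    from pathlib import Path

    from ahriman.core.exceptions import CalledProcessError
    from ahriman.core.util import check_output

    try:
        srcinfo = check_output("makepkg", "--printsrcinfo", cwd=Path("/var/lib/ahriman/sources/ahriman"))
    except CalledProcessError as ex:
        print(ex)  # the exception now carries the exit code and the captured stderr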
@ -161,7 +170,7 @@ def check_user(paths: RepositoryPaths, *, unsafe: bool) -> None:
|
||||
unsafe(bool): if set no user check will be performed before path creation
|
||||
|
||||
Raises:
|
||||
UnsafeRun: if root uid differs from current uid and check is enabled
|
||||
UnsafeRunError: if root uid differs from current uid and check is enabled
|
||||
|
||||
Examples:
|
||||
Simply run function with arguments::
|
||||
@ -204,20 +213,6 @@ def enum_values(enum: type[Enum]) -> list[str]:
|
||||
return [str(key.value) for key in enum] # explicit str conversion for typing
|
||||
|
||||
|
||||
def exception_response_text(exception: requests.exceptions.RequestException) -> str:
|
||||
"""
|
||||
safe response exception text generation
|
||||
|
||||
Args:
|
||||
exception(requests.exceptions.RequestException): exception raised
|
||||
|
||||
Returns:
|
||||
str: text of the response if it is not None and empty string otherwise
|
||||
"""
|
||||
result: str = exception.response.text if exception.response is not None else ""
|
||||
return result
|
||||
|
||||
|
||||
def extract_user() -> str | None:
|
||||
"""
|
||||
extract user from system environment
|
||||
@ -269,6 +264,22 @@ def full_version(epoch: str | int | None, pkgver: str, pkgrel: str) -> str:
|
||||
return f"{prefix}{pkgver}-{pkgrel}"
|
||||
|
||||
|
||||
def minmax(source: Iterable[T], *, key: Callable[[T], Any] | None = None) -> tuple[T, T]:
|
||||
"""
|
||||
get min and max value from iterable
|
||||
|
||||
Args:
|
||||
source(Iterable[T]): source list to find min and max values
|
||||
key(Callable[[T], Any] | None, optional): key to sort (Default value = None)
|
||||
|
||||
Returns:
|
||||
tuple[T, T]: min and max values for sequence
|
||||
"""
|
||||
first_iter, second_iter = itertools.tee(source)
|
||||
# typing doesn't expose SupportLessThan, so we just ignore this in typecheck
|
||||
return min(first_iter, key=key), max(second_iter, key=key) # type: ignore
|
||||
|
||||
|
||||
def package_like(filename: Path) -> bool:
|
||||
"""
|
||||
check if file looks like package
|
||||
@ -280,7 +291,7 @@ def package_like(filename: Path) -> bool:
|
||||
bool: True in case if name contains ``.pkg.`` and not signature, False otherwise
|
||||
"""
|
||||
name = filename.name
|
||||
return ".pkg." in name and not name.endswith(".sig")
|
||||
return not name.startswith(".") and ".pkg." in name and not name.endswith(".sig")
|
||||
|
||||
|
||||
def parse_version(version: str) -> tuple[str | None, str, str]:
|
||||
@ -302,12 +313,12 @@ def parse_version(version: str) -> tuple[str | None, str, str]:
|
||||
return epoch, pkgver, pkgrel
|
||||
|
||||
|
||||
def partition(source: list[T], predicate: Callable[[T], bool]) -> tuple[list[T], list[T]]:
|
||||
def partition(source: Iterable[T], predicate: Callable[[T], bool]) -> tuple[list[T], list[T]]:
|
||||
"""
|
||||
partition list into two based on predicate, based on https://docs.python.org/dev/library/itertools.html#itertools-recipes
|
||||
|
||||
Args:
|
||||
source(list[T]): source list to be partitioned
|
||||
source(Iterable[T]): source list to be partitioned
|
||||
predicate(Callable[[T], bool]): filter function
|
||||
|
||||
Returns:
|
||||
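Both helpers touched here are small pure functions: ``minmax`` returns the smallest and the largest element of an iterable, and ``partition`` splits an iterable into the items matching the predicate and the rest, in that order, as used by ``Tree`` above. A quick sketch::

    from ahriman.core.util import minmax, partition

    smallest, largest = minmax(["ahriman", "pacman", "git"], key=len)
    # smallest == "git", largest == "ahriman"

    even, odd = partition([3, 1, 4, 1, 5, 9, 2, 6], lambda number: number % 2 == 0)
    # even == [4, 2, 6], odd == [3, 1, 1, 5, 9]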
@ -340,24 +351,26 @@ def pretty_size(size: float | None, level: int = 0) -> str:
|
||||
|
||||
Args:
|
||||
size(float | None): size to convert
|
||||
level(int, optional): represents current units, 0 is B, 1 is KiB, etc (Default value = 0)
|
||||
level(int, optional): represents current units, 0 is B, 1 is KiB, etc. (Default value = 0)
|
||||
|
||||
Returns:
|
||||
str: pretty printable size as string
|
||||
|
||||
Raises:
|
||||
InvalidOption: if size is more than 1TiB
|
||||
OptionError: if size is more than 1TiB
|
||||
"""
|
||||
def str_level() -> str:
|
||||
if level == 0:
|
||||
return "B"
|
||||
if level == 1:
|
||||
return "KiB"
|
||||
if level == 2:
|
||||
return "MiB"
|
||||
if level == 3:
|
||||
return "GiB"
|
||||
raise OptionError(level) # must never happen actually
|
||||
match level:
|
||||
case 0:
|
||||
return "B"
|
||||
case 1:
|
||||
return "KiB"
|
||||
case 2:
|
||||
return "MiB"
|
||||
case 3:
|
||||
return "GiB"
|
||||
case _:
|
||||
raise OptionError(level) # must never happen actually
|
||||
|
||||
if size is None:
|
||||
return ""
|
||||
@ -395,7 +408,7 @@ def srcinfo_property(key: str, srcinfo: dict[str, Any], package_srcinfo: dict[st
|
||||
returned
|
||||
|
||||
Args:
|
||||
key(str): key to extract from srcinfo
|
||||
key(str): key to extract
|
||||
srcinfo(dict[str, Any]): root structure of SRCINFO
|
||||
package_srcinfo(dict[str, Any]): package specific SRCINFO
|
||||
default(Any, optional): the default value for the specified key (Default value = None)
|
||||
@ -414,7 +427,7 @@ def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: di
|
||||
append it at the end of result
|
||||
|
||||
Args:
|
||||
key(str): key to extract from srcinfo
|
||||
key(str): key to extract
|
||||
srcinfo(dict[str, Any]): root structure of SRCINFO
|
||||
package_srcinfo(dict[str, Any]): package specific SRCINFO
|
||||
architecture(str | None, optional): package architecture if set (Default value = None)
|
||||
@ -430,9 +443,9 @@ def srcinfo_property_list(key: str, srcinfo: dict[str, Any], package_srcinfo: di
|
||||
|
||||
def trim_package(package_name: str) -> str:
|
||||
"""
|
||||
remove version bound and description from package name. Pacman allows to specify version bound (=, <=, >= etc) for
|
||||
packages in dependencies and also allows to specify description (via :); this function removes trailing parts and
|
||||
return exact package name
|
||||
remove version bound and description from package name. Pacman allows to specify version bound (=, <=, >= etc.) for
|
||||
packages in dependencies and also allows to specify description (via ``:``); this function removes trailing parts
|
||||
and return exact package name
|
||||
|
||||
Args:
|
||||
package_name(str): source package name
|
||||
|
@ -44,9 +44,7 @@ class AuthSettings(str, Enum):
|
||||
Returns:
|
||||
bool: False in case if authorization is disabled and True otherwise
|
||||
"""
|
||||
if self == AuthSettings.Disabled:
|
||||
return False
|
||||
return True
|
||||
return self != AuthSettings.Disabled
|
||||
|
||||
@staticmethod
|
||||
def from_option(value: str) -> AuthSettings:
|
||||
@ -59,8 +57,10 @@ class AuthSettings(str, Enum):
|
||||
Returns:
|
||||
AuthSettings: parsed value
|
||||
"""
|
||||
if value.lower() in ("configuration", "mapping"):
|
||||
return AuthSettings.Configuration
|
||||
if value.lower() in ("oauth", "oauth2"):
|
||||
return AuthSettings.OAuth
|
||||
return AuthSettings.Disabled
|
||||
match value.lower():
|
||||
case "configuration" | "mapping":
|
||||
return AuthSettings.Configuration
|
||||
case "oauth" | "oauth2":
|
||||
return AuthSettings.OAuth
|
||||
case _:
|
||||
return AuthSettings.Disabled
|
||||
|
@ -27,8 +27,8 @@ class LogRecordId:
|
||||
|
||||
Attributes:
|
||||
package_base(str): package base for which log record belongs
|
||||
process_id(int): process id from which log record was emitted
|
||||
version(str): package version for which log record belongs
|
||||
"""
|
||||
|
||||
package_base: str
|
||||
process_id: int
|
||||
version: str
|
||||
|
@ -258,7 +258,7 @@ class Package(LazyLogging):
|
||||
Self: package properties
|
||||
|
||||
Raises:
|
||||
InvalidPackageInfo: if there are parsing errors
|
||||
PackageInfoError: if there are parsing errors
|
||||
"""
|
||||
srcinfo_source = Package._check_output("makepkg", "--printsrcinfo", cwd=path)
|
||||
srcinfo, errors = parse_srcinfo(srcinfo_source)
|
||||
@ -360,6 +360,9 @@ class Package(LazyLogging):
|
||||
Returns:
|
||||
Generator[Path, None, None]: list of paths of files which belong to the package and distributed together
|
||||
with this tarball. All paths are relative to the ``path``
|
||||
|
||||
Raises:
|
||||
PackageInfoError: if there are parsing errors
|
||||
"""
|
||||
srcinfo_source = Package._check_output("makepkg", "--printsrcinfo", cwd=path)
|
||||
srcinfo, errors = parse_srcinfo(srcinfo_source)
|
||||
@ -396,7 +399,7 @@ class Package(LazyLogging):
|
||||
set[str]: list of package supported architectures
|
||||
|
||||
Raises:
|
||||
InvalidPackageInfo: if there are parsing errors
|
||||
PackageInfoError: if there are parsing errors
|
||||
"""
|
||||
srcinfo_source = Package._check_output("makepkg", "--printsrcinfo", cwd=path)
|
||||
srcinfo, errors = parse_srcinfo(srcinfo_source)
|
||||
@ -435,7 +438,7 @@ class Package(LazyLogging):
|
||||
str: package version if package is not VCS and current version according to VCS otherwise
|
||||
|
||||
Raises:
|
||||
InvalidPackageInfo: if there are parsing errors
|
||||
PackageInfoError: if there are parsing errors
|
||||
"""
|
||||
if not self.is_vcs:
|
||||
return self.version
|
||||
|
@ -64,7 +64,7 @@ class RemoteSource:
|
||||
@property
|
||||
def pkgbuild_dir(self) -> Path | None:
|
||||
"""
|
||||
get path to directory with package sources (PKGBUILD etc)
|
||||
get path to directory with package sources (PKGBUILD etc.)
|
||||
|
||||
Returns:
|
||||
Path | None: path to directory with package sources based on settings if available
|
||||
|
@ -32,6 +32,7 @@ class ReportSettings(str, Enum):
|
||||
Email(ReportSettings): (class attribute) email report generation
|
||||
Console(ReportSettings): (class attribute) print result to console
|
||||
Telegram(ReportSettings): (class attribute) markdown report to telegram channel
|
||||
RemoteCall(ReportSettings): (class attribute) remote ahriman server call
|
||||
"""
|
||||
|
||||
Disabled = "disabled" # for testing purpose
|
||||
@ -39,6 +40,7 @@ class ReportSettings(str, Enum):
|
||||
Email = "email"
|
||||
Console = "console"
|
||||
Telegram = "telegram"
|
||||
RemoteCall = "remote-call"
|
||||
|
||||
@staticmethod
|
||||
def from_option(value: str) -> ReportSettings:
|
||||
@ -51,12 +53,16 @@ class ReportSettings(str, Enum):
|
||||
Returns:
|
||||
ReportSettings: parsed value
|
||||
"""
|
||||
if value.lower() in ("html",):
|
||||
return ReportSettings.HTML
|
||||
if value.lower() in ("email",):
|
||||
return ReportSettings.Email
|
||||
if value.lower() in ("console",):
|
||||
return ReportSettings.Console
|
||||
if value.lower() in ("telegram",):
|
||||
return ReportSettings.Telegram
|
||||
return ReportSettings.Disabled
|
||||
match value.lower():
|
||||
case "html":
|
||||
return ReportSettings.HTML
|
||||
case "email":
|
||||
return ReportSettings.Email
|
||||
case "console":
|
||||
return ReportSettings.Console
|
||||
case "telegram":
|
||||
return ReportSettings.Telegram
|
||||
case "ahriman" | "remote-call":
|
||||
return ReportSettings.RemoteCall
|
||||
case _:
|
||||
return ReportSettings.Disabled
|
||||
|
@ -32,8 +32,8 @@ class RepositoryPaths:
|
||||
repository paths holder. For the most operations with paths you want to use this object
|
||||
|
||||
Attributes:
|
||||
root(Path): repository root (i.e. ahriman home)
|
||||
architecture(str): repository architecture
|
||||
root(Path): repository root (i.e. ahriman home)
|
||||
|
||||
Examples:
|
||||
This class can be used in order to access the repository tree structure::
|
||||
@ -166,7 +166,7 @@ class RepositoryPaths:
|
||||
path(Path): path to be chown
|
||||
|
||||
Raises:
|
||||
InvalidPath: if path does not belong to root
|
||||
PathError: if path does not belong to root
|
||||
"""
|
||||
def set_owner(current: Path) -> None:
|
||||
uid, gid = self.owner(current)
|
||||
|
@ -104,7 +104,7 @@ class Result:
|
||||
Result: updated instance
|
||||
|
||||
Raises:
|
||||
SuccessFailed: if there is previously failed package which is masked as success
|
||||
UnprocessedPackageStatusError: if there is previously failed package which is masked as success
|
||||
"""
|
||||
for base, package in other._failed.items():
|
||||
if base in self._success:
|
||||
|
@ -47,8 +47,10 @@ class SignSettings(str, Enum):
|
||||
Returns:
|
||||
SignSettings: parsed value
|
||||
"""
|
||||
if value.lower() in ("package", "packages", "sign-package"):
|
||||
return SignSettings.Packages
|
||||
if value.lower() in ("repository", "sign-repository"):
|
||||
return SignSettings.Repository
|
||||
return SignSettings.Disabled
|
||||
match value.lower():
|
||||
case "package" | "packages" | "sign-package":
|
||||
return SignSettings.Packages
|
||||
case "repository" | "sign-repository":
|
||||
return SignSettings.Repository
|
||||
case _:
|
||||
return SignSettings.Disabled
|
||||
|
@ -47,8 +47,10 @@ class SmtpSSLSettings(str, Enum):
|
||||
Returns:
|
||||
SmtpSSLSettings: parsed value
|
||||
"""
|
||||
if value.lower() in ("ssl", "ssl/tls"):
|
||||
return SmtpSSLSettings.SSL
|
||||
if value.lower() in ("starttls",):
|
||||
return SmtpSSLSettings.STARTTLS
|
||||
return SmtpSSLSettings.Disabled
|
||||
match value.lower():
|
||||
case "ssl" | "ssl/tls":
|
||||
return SmtpSSLSettings.SSL
|
||||
case "starttls":
|
||||
return SmtpSSLSettings.STARTTLS
|
||||
case _:
|
||||
return SmtpSSLSettings.Disabled
|
||||
|
@ -30,13 +30,15 @@ class UploadSettings(str, Enum):
|
||||
Disabled(UploadSettings): (class attribute) no sync will be performed, required for testing purpose
|
||||
Rsync(UploadSettings): (class attribute) sync via rsync
|
||||
S3(UploadSettings): (class attribute) sync to Amazon S3
|
||||
Github(UploadSettings): (class attribute) sync to github releases page
|
||||
GitHub(UploadSettings): (class attribute) sync to GitHub releases page
|
||||
RemoteService(UploadSettings): (class attribute) sync to another ahriman instance
|
||||
"""
|
||||
|
||||
Disabled = "disabled" # for testing purpose
|
||||
Rsync = "rsync"
|
||||
S3 = "s3"
|
||||
Github = "github"
|
||||
GitHub = "github"
|
||||
RemoteService = "remote-service"
|
||||
|
||||
@staticmethod
|
||||
def from_option(value: str) -> UploadSettings:
|
||||
@ -49,10 +51,14 @@ class UploadSettings(str, Enum):
|
||||
Returns:
|
||||
UploadSettings: parsed value
|
||||
"""
|
||||
if value.lower() in ("rsync",):
|
||||
return UploadSettings.Rsync
|
||||
if value.lower() in ("s3",):
|
||||
return UploadSettings.S3
|
||||
if value.lower() in ("github",):
|
||||
return UploadSettings.Github
|
||||
return UploadSettings.Disabled
|
||||
match value.lower():
|
||||
case "rsync":
|
||||
return UploadSettings.Rsync
|
||||
case "s3":
|
||||
return UploadSettings.S3
|
||||
case "github":
|
||||
return UploadSettings.GitHub
|
||||
case "ahriman" | "remote-service":
|
||||
return UploadSettings.RemoteService
|
||||
case _:
|
||||
return UploadSettings.Disabled
|
||||
|
@@ -75,24 +75,6 @@ class User:
         object.__setattr__(self, "packager_id", self.packager_id or None)
         object.__setattr__(self, "key", self.key or None)

-    @classmethod
-    def from_option(cls, username: str | None, password: str | None,
-                    access: UserAccess = UserAccess.Read) -> Self | None:
-        """
-        build user descriptor from configuration options
-
-        Args:
-            username(str | None): username
-            password(str | None): password as string
-            access(UserAccess, optional): optional user access (Default value = UserAccess.Read)
-
-        Returns:
-            Self | None: generated user descriptor if all options are supplied and None otherwise
-        """
-        if username is None or password is None:
-            return None
-        return cls(username=username, password=password, access=access, packager_id=None, key=None)
-
     @staticmethod
     def generate_password(length: int) -> str:
         """
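With the from_option helper removed, a user descriptor would presumably be built through the dataclass constructor directly, passing the same fields the deleted method used to fill in; a sketch (module paths assumed, field names taken from the removed return statement):

    from ahriman.models.user import User
    from ahriman.models.user_access import UserAccess

    user = User(username="demo", password="demo", access=UserAccess.Read, packager_id=None, key=None)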
src/ahriman/models/waiter.py (new file, 72 lines)
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time

from collections.abc import Callable
from dataclasses import dataclass, field
from typing import ParamSpec


Params = ParamSpec("Params")

@dataclass(frozen=True)
class Waiter:
    """
    simple waiter implementation

    Attributes:
        interval(int): interval in seconds between checks
        start_time(float): monotonic time of the waiter start; normally should not be assigned explicitly
        wait_timeout(int): timeout in seconds to wait for. Negative value will result in immediate exit. Zero value
            means infinite timeout
    """

    wait_timeout: int
    start_time: float = field(default_factory=time.monotonic, kw_only=True)
    interval: int = field(default=10, kw_only=True)

    def is_timed_out(self) -> bool:
        """
        check if the timer has run out

        Returns:
            bool: True if the time elapsed since ``Waiter.start_time`` exceeds ``Waiter.wait_timeout`` and
                ``Waiter.wait_timeout`` is not equal to 0
        """
        since_start: float = time.monotonic() - self.start_time
        return self.wait_timeout != 0 and since_start > self.wait_timeout

    def wait(self, in_progress: Callable[Params, bool], *args: Params.args, **kwargs: Params.kwargs) -> float:
        """
        wait until the in_progress check no longer passes or the timeout is reached

        Args:
            in_progress(Callable[Params, bool]): function to check if timer should wait for another cycle
            *args(Params.args): positional arguments for check call
            **kwargs(Params.kwargs): keyword arguments for check call

        Returns:
            float: consumed time in seconds
        """
        while not self.is_timed_out() and in_progress(*args, **kwargs):
            time.sleep(self.interval)

        return time.monotonic() - self.start_time
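A usage sketch for the new helper: poll a check callable until it reports completion or the timeout expires (the lock-file condition below is made up for illustration):

    from pathlib import Path

    from ahriman.models.waiter import Waiter

    waiter = Waiter(600, interval=5)  # give up after 10 minutes, poll every 5 seconds
    elapsed = waiter.wait(lambda: Path("/tmp/build.lock").exists())  # hypothetical "still running" check
    print(f"done (or timed out) after {elapsed:.1f} seconds")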
@@ -102,6 +102,7 @@ def _auth_handler(allow_read_only: bool) -> MiddlewareType:
            permission = UserAccess.Unauthorized if isinstance(handler_instance, StaticResource) else UserAccess.Full
        else:
            permission = UserAccess.Full

        if permission == UserAccess.Unauthorized:  # explicit if elif else for better code coverage
            pass
        elif allow_read_only and UserAccess.Read.permits(permission):
@@ -52,36 +52,39 @@ def exception_handler(logger: logging.Logger) -> MiddlewareType:

     Returns:
         MiddlewareType: built middleware
+
+    Raises:
+        HTTPNoContent: OPTIONS method response
     """
     @middleware
     async def handle(request: Request, handler: HandlerType) -> StreamResponse:
         try:
             return await handler(request)
-        except HTTPUnauthorized as e:
+        except HTTPUnauthorized as ex:
             if _is_templated_unauthorized(request):
-                context = {"code": e.status_code, "reason": e.reason}
-                return aiohttp_jinja2.render_template("error.jinja2", request, context, status=e.status_code)
-            return json_response(data={"error": e.reason}, status=e.status_code)
-        except HTTPMethodNotAllowed as e:
-            if e.method == "OPTIONS":
+                context = {"code": ex.status_code, "reason": ex.reason}
+                return aiohttp_jinja2.render_template("error.jinja2", request, context, status=ex.status_code)
+            return json_response(data={"error": ex.reason}, status=ex.status_code)
+        except HTTPMethodNotAllowed as ex:
+            if ex.method == "OPTIONS":
                 # automatically handle OPTIONS method, idea comes from
                 # https://github.com/arcan1s/ffxivbis/blob/master/src/main/scala/me/arcanis/ffxivbis/http/api/v1/HttpHandler.scala#L32
-                raise HTTPNoContent(headers={"Allow": ",".join(sorted(e.allowed_methods))})
-            if e.method == "HEAD":
+                raise HTTPNoContent(headers={"Allow": ",".join(sorted(ex.allowed_methods))})
+            if ex.method == "HEAD":
                 # since we have special autogenerated HEAD method, we need to remove it from list of available
-                e.allowed_methods = {method for method in e.allowed_methods if method != "HEAD"}
-                e.headers["Allow"] = ",".join(sorted(e.allowed_methods))
-                raise e
+                ex.allowed_methods = {method for method in ex.allowed_methods if method != "HEAD"}
+                ex.headers["Allow"] = ",".join(sorted(ex.allowed_methods))
+                raise ex
             raise
-        except HTTPClientError as e:
-            return json_response(data={"error": e.reason}, status=e.status_code)
-        except HTTPServerError as e:
+        except HTTPClientError as ex:
+            return json_response(data={"error": ex.reason}, status=ex.status_code)
+        except HTTPServerError as ex:
             logger.exception("server exception during performing request to %s", request.path)
-            return json_response(data={"error": e.reason}, status=e.status_code)
+            return json_response(data={"error": ex.reason}, status=ex.status_code)
         except HTTPException:  # just raise 2xx and 3xx codes
             raise
-        except Exception as e:
+        except Exception as ex:
             logger.exception("unknown exception during performing request to %s", request.path)
-            return json_response(data={"error": str(e)}, status=500)
+            return json_response(data={"error": str(ex)}, status=500)

     return handle
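Aside from the rename of the exception variable to ex, the behaviour is unchanged: client and server errors are serialized to a uniform JSON error payload. A sketch of how such a middleware factory is attached to an aiohttp application (setup simplified; ahriman wires this up during web service initialization):

    import logging

    from aiohttp import web

    # exception_handler is the factory shown in the hunk above (import path omitted here)
    application = web.Application(middlewares=[exception_handler(logging.getLogger("http"))])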
@@ -25,11 +25,13 @@ from ahriman.web.views.api.swagger import SwaggerView
 from ahriman.web.views.index import IndexView
 from ahriman.web.views.service.add import AddView
 from ahriman.web.views.service.pgp import PGPView
+from ahriman.web.views.service.process import ProcessView
 from ahriman.web.views.service.rebuild import RebuildView
 from ahriman.web.views.service.remove import RemoveView
 from ahriman.web.views.service.request import RequestView
 from ahriman.web.views.service.search import SearchView
 from ahriman.web.views.service.update import UpdateView
+from ahriman.web.views.service.upload import UploadView
 from ahriman.web.views.status.logs import LogsView
 from ahriman.web.views.status.package import PackageView
 from ahriman.web.views.status.packages import PackagesView
@@ -60,10 +62,12 @@ def setup_routes(application: Application, static_path: Path) -> None:
     application.router.add_view("/api/v1/service/add", AddView)
     application.router.add_view("/api/v1/service/pgp", PGPView)
     application.router.add_view("/api/v1/service/rebuild", RebuildView)
+    application.router.add_view("/api/v1/service/process/{process_id}", ProcessView)
     application.router.add_view("/api/v1/service/remove", RemoveView)
     application.router.add_view("/api/v1/service/request", RequestView)
     application.router.add_view("/api/v1/service/search", SearchView)
     application.router.add_view("/api/v1/service/update", UpdateView)
+    application.router.add_view("/api/v1/service/upload", UploadView)

     application.router.add_view("/api/v1/packages", PackagesView)
     application.router.add_view("/api/v1/packages/{package}", PackageView)
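The two new routes back the upload and process-status features added in this release. A hypothetical client poll of the process endpoint (host, port and id are made up; response shape per the ProcessSchema below):

    import requests

    process_id = "ff456814-5669-4de6-9143-44dbf6f68607"
    response = requests.get(f"http://localhost:8080/api/v1/service/process/{process_id}")
    print(response.json())  # e.g. {"is_alive": true}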
@@ -21,6 +21,7 @@ from ahriman.web.schemas.aur_package_schema import AURPackageSchema
 from ahriman.web.schemas.auth_schema import AuthSchema
 from ahriman.web.schemas.counters_schema import CountersSchema
 from ahriman.web.schemas.error_schema import ErrorSchema
+from ahriman.web.schemas.file_schema import FileSchema
 from ahriman.web.schemas.internal_status_schema import InternalStatusSchema
 from ahriman.web.schemas.log_schema import LogSchema
 from ahriman.web.schemas.login_schema import LoginSchema
@@ -33,6 +34,9 @@ from ahriman.web.schemas.package_schema import PackageSchema
from ahriman.web.schemas.package_status_schema import PackageStatusSimplifiedSchema, PackageStatusSchema
from ahriman.web.schemas.pgp_key_id_schema import PGPKeyIdSchema
from ahriman.web.schemas.pgp_key_schema import PGPKeySchema
from ahriman.web.schemas.process_id_schema import ProcessIdSchema
from ahriman.web.schemas.process_schema import ProcessSchema
from ahriman.web.schemas.remote_schema import RemoteSchema
from ahriman.web.schemas.search_schema import SearchSchema
from ahriman.web.schemas.status_schema import StatusSchema
from ahriman.web.schemas.update_flags_schema import UpdateFlagsSchema
src/ahriman/web/schemas/file_schema.py (new file, 30 lines)
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from marshmallow import Schema, fields


class FileSchema(Schema):
    """
    request file upload schema
    """

    archive = fields.Field(required=True, metadata={
        "description": "Package archive to be uploaded",
    })
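A rough client-side sketch of uploading a package archive to the new endpoint (host, port and file name are made up; the multipart field name matches the schema's archive field):

    import requests

    with open("demo-package-1.0.0-1-any.pkg.tar.zst", "rb") as archive:
        requests.post("http://localhost:8080/api/v1/service/upload", files={"archive": archive})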
@@ -19,6 +19,8 @@
 #
 from marshmallow import Schema, fields

+from ahriman import __version__
+

 class LogSchema(Schema):
     """
@@ -29,9 +31,9 @@ class LogSchema(Schema):
         "description": "Log record timestamp",
         "example": 1680537091.233495,
     })
-    process_id = fields.Integer(required=True, metadata={
-        "description": "Current process id",
-        "example": 42,
+    version = fields.Integer(required=True, metadata={
+        "description": "Package version to tag",
+        "example": __version__,
     })
     message = fields.String(required=True, metadata={
         "description": "Log message",
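With this change a serialized log record carries the package version being built rather than the emitting process id; an illustrative payload (values are made up, keys follow the schema):

    {"created": 1680537091.233495, "version": "1.0.0-1", "message": "package build started"}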
src/ahriman/web/schemas/process_id_schema.py (new file, 31 lines)
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from marshmallow import Schema, fields


class ProcessIdSchema(Schema):
    """
    request and response spawned process id schema
    """

    process_id = fields.String(required=True, metadata={
        "description": "Spawned process unique ID",
        "example": "ff456814-5669-4de6-9143-44dbf6f68607",
    })
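Since these are plain marshmallow schemas, validation works the usual way; a standalone sketch (not taken from ahriman code):

    from marshmallow import ValidationError

    from ahriman.web.schemas.process_id_schema import ProcessIdSchema

    schema = ProcessIdSchema()
    schema.load({"process_id": "ff456814-5669-4de6-9143-44dbf6f68607"})  # passes validation
    try:
        schema.load({})  # process_id is required
    except ValidationError as ex:
        print(ex.messages)  # {'process_id': ['Missing data for required field.']}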
src/ahriman/web/schemas/process_schema.py (new file, 30 lines)
#
# Copyright (c) 2021-2023 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from marshmallow import Schema, fields


class ProcessSchema(Schema):
    """
    process status response schema
    """

    is_alive = fields.Bool(required=True, metadata={
        "description": "Is process alive or not",
    })
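Dumping a status object through the response schema is equally direct (sketch):

    from ahriman.web.schemas.process_schema import ProcessSchema

    ProcessSchema().dump({"is_alive": True})  # {'is_alive': True}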
Some files were not shown because too many files have changed in this diff.