Mirror of https://github.com/arcan1s/ahriman.git, synced 2025-07-08 19:45:46 +00:00

Compare commits (20 commits):

435375721d
4c5caba6b7
b83df9d2c5
f2ea76aab9
471b1c1331
bd770aac2f
6abe35ef8c
fdc27a9ebf
b729096a25
390b9da29e
256376df85
939a94d889
2b1b17a1a3
9e6705056a
b3a3a81f70
3e5dbbd6cd
f41e44895d
765bbf486f
a3c54afb82
7f223ecc0a

.github/workflows/release.yml (vendored, 12 changed lines)

@@ -13,7 +13,15 @@ jobs:
 
     runs-on: ubuntu-latest
 
+    container:
+      image: archlinux:base
+      options: -w /build
+      volumes:
+        - ${{ github.workspace }}:/build
+
     steps:
+      - run: pacman --noconfirm -Syu base-devel git python-tox
+
       - uses: actions/checkout@v4
 
       - name: Extract version
@@ -27,10 +35,6 @@ jobs:
           token: ${{ secrets.GITHUB_TOKEN }}
          filter: 'Release \d+\.\d+\.\d+'
 
-      - uses: ConorMacBride/install-package@v1.1.0
-        with:
-          apt: tox
-
       - name: Create archive
         run: tox -e archive
         env:

@@ -1,10 +1,10 @@
 [tool.pylint.main]
-init-hook = "sys.path.append('pylint_plugins')"
+init-hook = "sys.path.append('tools')"
 load-plugins = [
     "pylint.extensions.docparams",
     "pylint.extensions.bad_builtin",
-    "definition_order",
-    "import_order",
+    "pylint_plugins.definition_order",
+    "pylint_plugins.import_order",
 ]
 
 [tool.pylint.classes]

.pytest.ini (new file, 5 lines)

@@ -0,0 +1,5 @@
+[pytest]
+addopts = --cov=ahriman --cov-report=term-missing:skip-covered --no-cov-on-fail --cov-fail-under=100 --spec
+asyncio_default_fixture_loop_scope = function
+asyncio_mode = auto
+spec_test_format = {result} {docstring_summary}

docs/_static/architecture.dot (vendored, 9 changed lines)

@@ -64,7 +64,7 @@ digraph G {
     ahriman_core_alpm_remote_aur [fillcolor="blue",fontcolor="white",label="ahriman\.\ncore\.\nalpm\.\nremote\.\naur",shape="box"];
     ahriman_core_alpm_remote_official [fillcolor="blue",fontcolor="white",label="ahriman\.\ncore\.\nalpm\.\nremote\.\nofficial",shape="box"];
     ahriman_core_alpm_remote_official_syncdb [fillcolor="blue",fontcolor="white",label="ahriman\.\ncore\.\nalpm\.\nremote\.\nofficial_syncdb",shape="box"];
-    ahriman_core_alpm_remote_remote [fillcolor="#ae441e",fontcolor="#ffffff",label="ahriman\.\ncore\.\nalpm\.\nremote\.\nremote"];
+    ahriman_core_alpm_remote_remote [fillcolor="#a5401d",fontcolor="#ffffff",label="ahriman\.\ncore\.\nalpm\.\nremote\.\nremote"];
     ahriman_core_alpm_repo [fillcolor="#994d33",fontcolor="#ffffff",label="ahriman\.\ncore\.\nalpm\.\nrepo"];
     ahriman_core_auth [fillcolor="blue",fontcolor="white",label="ahriman\.\ncore\.\nauth",shape="box"];
     ahriman_core_auth_auth [fillcolor="blue",fontcolor="white",label="ahriman\.\ncore\.\nauth\.\nauth",shape="box"];
@@ -509,9 +509,9 @@ digraph G {
     ahriman_core_alpm_remote_official -> ahriman_core_alpm_remote [fillcolor="blue",minlen="0",weight="4"];
     ahriman_core_alpm_remote_official -> ahriman_core_alpm_remote_official_syncdb [fillcolor="blue",minlen="0",weight="4"];
     ahriman_core_alpm_remote_official_syncdb -> ahriman_core_alpm_remote [fillcolor="blue",minlen="0",weight="4"];
-    ahriman_core_alpm_remote_remote -> ahriman_core_alpm_remote [fillcolor="#ae441e",minlen="0",weight="4"];
+    ahriman_core_alpm_remote_remote -> ahriman_core_alpm_remote [fillcolor="#a5401d",minlen="0",weight="4"];
-    ahriman_core_alpm_remote_remote -> ahriman_core_alpm_remote_aur [fillcolor="#ae441e",minlen="0",weight="4"];
+    ahriman_core_alpm_remote_remote -> ahriman_core_alpm_remote_aur [fillcolor="#a5401d",minlen="0",weight="4"];
-    ahriman_core_alpm_remote_remote -> ahriman_core_alpm_remote_official [fillcolor="#ae441e",minlen="0",weight="4"];
+    ahriman_core_alpm_remote_remote -> ahriman_core_alpm_remote_official [fillcolor="#a5401d",minlen="0",weight="4"];
     ahriman_core_alpm_repo -> ahriman_core_repository_repository_properties [fillcolor="#994d33",minlen="2",weight="2"];
     ahriman_core_auth -> ahriman_web_keys [fillcolor="blue",minlen="2"];
     ahriman_core_auth -> ahriman_web_middlewares_auth_handler [fillcolor="blue",minlen="3"];
@@ -710,6 +710,7 @@ digraph G {
     ahriman_core_exceptions -> ahriman_core_alpm_remote_aur [fillcolor="#ef4306",minlen="2",weight="2"];
     ahriman_core_exceptions -> ahriman_core_alpm_remote_official [fillcolor="#ef4306",minlen="2",weight="2"];
     ahriman_core_exceptions -> ahriman_core_alpm_remote_official_syncdb [fillcolor="#ef4306",minlen="2",weight="2"];
+    ahriman_core_exceptions -> ahriman_core_alpm_remote_remote [fillcolor="#ef4306",minlen="2",weight="2"];
     ahriman_core_exceptions -> ahriman_core_alpm_repo [fillcolor="#ef4306",minlen="2",weight="2"];
     ahriman_core_exceptions -> ahriman_core_auth_oauth [fillcolor="#ef4306",minlen="2",weight="2"];
     ahriman_core_exceptions -> ahriman_core_auth_pam [fillcolor="#ef4306",minlen="2",weight="2"];

@@ -140,6 +140,14 @@ ahriman.web.schemas.logs\_schema module
     :no-undoc-members:
     :show-inheritance:
 
+ahriman.web.schemas.logs\_search\_schema module
+-----------------------------------------------
+
+.. automodule:: ahriman.web.schemas.logs_search_schema
+    :members:
+    :no-undoc-members:
+    :show-inheritance:
+
 ahriman.web.schemas.oauth2\_schema module
 -----------------------------------------
 

@@ -166,6 +166,7 @@ Reporting to web service related settings. In most cases there is fallback to we
 Web server settings. This feature requires ``aiohttp`` libraries to be installed.
 
 * ``address`` - optional address in form ``proto://host:port`` (``port`` can be omitted in case of default ``proto`` ports), will be used instead of ``http://{host}:{port}`` in case if set, string, optional. This option is required in case if ``OAuth`` provider is used.
+* ``autorefresh_intervals`` - enable page auto refresh options, space separated list of integers, optional. The first defined interval will be used as default. If no intervals set, the auto refresh buttons will be disabled. If first element of the list equals ``0``, auto refresh will be disabled by default.
 * ``enable_archive_upload`` - allow to upload packages via HTTP (i.e. call of ``/api/v1/service/upload`` uri), boolean, optional, default ``no``.
 * ``host`` - host to bind, string, optional.
 * ``index_url`` - full URL of the repository index page, string, optional.

@@ -2,7 +2,7 @@
 
 pkgbase='ahriman'
 pkgname=('ahriman' 'ahriman-core' 'ahriman-triggers' 'ahriman-web')
-pkgver=2.18.3
+pkgver=2.19.0
 pkgrel=1
 pkgdesc="ArcH linux ReposItory MANager"
 arch=('any')

@@ -28,6 +28,10 @@ allow_read_only = yes
 ; External address of the web service. Will be used for some features like OAuth. If none set will be generated as
 ; address = http://${web:host}:${web:port}
 ;address = http://${web:host}:${web:port}
+; Enable page auto refresh. Intervals are given in seconds. Default interval is always the first element of the list.
+; If no intervals set, auto refresh will be disabled. 0 can only be the first element and will disable auto refresh
+; by default.
+autorefresh_intervals = 5 1 10 30 60
 ; Enable file upload endpoint used by some triggers.
 ;enable_archive_upload = no
 ; Address to bind the server.

@@ -55,6 +55,11 @@
                 <i class="bi bi-play"></i> update
             </button>
         </li>
+        <li>
+            <button id="update-repositories-button" class="btn dropdown-item" onclick="refreshDatabases()">
+                <i class="bi bi-arrow-down-circle"></i> update pacman databases
+            </button>
+        </li>
         <li>
             <button id="package-rebuild-button" class="btn dropdown-item" data-bs-toggle="modal" data-bs-target="#package-rebuild-modal">
                 <i class="bi bi-arrow-clockwise"></i> rebuild
@@ -75,10 +80,28 @@
         <button type="button" class="btn btn-secondary" onclick="reload()">
             <i class="bi bi-arrow-clockwise"></i><span class="d-none d-sm-inline"> reload</span>
         </button>
+
+        {% if autorefresh_intervals %}
+            <div class="btn-group">
+                <input id="table-autoreload-button" type="checkbox" class="btn-check" autocomplete="off" onclick="toggleTableAutoReload()" checked>
+                <label for="table-autoreload-button" class="btn btn-outline-secondary" title="toggle auto reload"><i class="bi bi-clock"></i></label>
+                <button type="button" class="btn btn-outline-secondary dropdown-toggle dropdown-toggle-split" data-bs-toggle="dropdown" aria-expanded="false">
+                    <span class="visually-hidden">select interval</span>
+                </button>
+                <ul id="table-autoreload-input" class="dropdown-menu">
+                    {% for interval in autorefresh_intervals %}
+                        <li><a class="dropdown-item {{ "active" if interval.is_active }}" onclick="toggleTableAutoReload({{ interval.interval }})" data-interval="{{ interval.interval }}">{{ interval.text }}</a></li>
+                    {% endfor %}
+                </ul>
+            </div>
+        {% endif %}
     </div>
 
     <table id="packages"
            data-classes="table table-hover"
+           data-cookie="true"
+           data-cookie-id-table="ahriman-packages"
+           data-cookie-storage="localStorage"
           data-export-options='{"fileName": "packages"}'
           data-filter-control="true"
           data-filter-control-visible="false"
@@ -97,8 +120,8 @@
           data-sortable="true"
           data-sort-name="base"
           data-sort-order="asc"
-          data-toggle="table"
-          data-toolbar="#toolbar">
+          data-toolbar="#toolbar"
+          data-unique-id="id">
        <thead class="table-primary">
            <tr>
                <th data-checkbox="true"></th>

@@ -3,7 +3,9 @@
 
     function createAlert(title, message, clz, action, id) {
         id ??= md5(title + message); // MD5 id from the content
-        if (alertPlaceholder.querySelector(`#alert-${id}`)) return; // check if there are duplicates
+        if (alertPlaceholder.querySelector(`#alert-${id}`)) {
+            return; // check if there are duplicates
+        }
 
         const wrapper = document.createElement("div");
         wrapper.id = `alert-${id}`;

@@ -51,6 +51,87 @@
     const dashboardPackagesCountChartCanvas = document.getElementById("dashboard-packages-count-chart");
     let dashboardPackagesCountChart = null;
 
+    function statusLoad() {
+        const badgeClass = status => {
+            if (status === "pending") return "btn-outline-warning";
+            if (status === "building") return "btn-outline-warning";
+            if (status === "failed") return "btn-outline-danger";
+            if (status === "success") return "btn-outline-success";
+            return "btn-outline-secondary";
+        };
+
+        makeRequest(
+            "/api/v1/status",
+            {
+                query: {
+                    architecture: repository.architecture,
+                    repository: repository.repository,
+                },
+                convert: response => response.json(),
+            },
+            data => {
+                versionBadge.innerHTML = `<i class="bi bi-github"></i> ahriman ${safe(data.version)}`;
+
+                dashboardButton.classList.remove(...dashboardButton.classList);
+                dashboardButton.classList.add("btn");
+                dashboardButton.classList.add(badgeClass(data.status.status));
+
+                dashboardModalHeader.classList.remove(...dashboardModalHeader.classList);
+                dashboardModalHeader.classList.add("modal-header");
+                headerClass(data.status.status).forEach(clz => dashboardModalHeader.classList.add(clz));
+
+                dashboardName.textContent = data.repository;
+                dashboardArchitecture.textContent = data.architecture;
+                dashboardStatus.textContent = data.status.status;
+                dashboardStatusTimestamp.textContent = new Date(1000 * data.status.timestamp).toISOStringShort();
+
+                if (dashboardPackagesStatusesChart) {
+                    const labels = [
+                        "unknown",
+                        "pending",
+                        "building",
+                        "failed",
+                        "success",
+                    ];
+                    dashboardPackagesStatusesChart.config.data = {
+                        labels: labels,
+                        datasets: [{
+                            label: "packages in status",
+                            data: labels.map(label => data.packages[label]),
+                            backgroundColor: [
+                                "rgb(55, 58, 60)",
+                                "rgb(255, 117, 24)",
+                                "rgb(255, 117, 24)",
+                                "rgb(255, 0, 57)",
+                                "rgb(63, 182, 24)", // copy-paste from current style
+                            ],
+                        }],
+                    };
+                    dashboardPackagesStatusesChart.update();
+                }
+
+                if (dashboardPackagesCountChart) {
+                    dashboardPackagesCountChart.config.data = {
+                        labels: ["packages"],
+                        datasets: [
+                            {
+                                label: "archives",
+                                data: [data.stats.packages],
+                            },
+                            {
+                                label: "bases",
+                                data: [data.stats.bases],
+                            },
+                        ],
+                    };
+                    dashboardPackagesCountChart.update();
+                }
+
+                dashboardCanvas.hidden = data.status.total > 0;
+            },
+        );
+    }
+
     ready(_ => {
         dashboardPackagesStatusesChart = new Chart(dashboardPackagesStatusesChartCanvas, {
             type: "pie",
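
Note: statusLoad() reads only a handful of fields from the /api/v1/status response. The object below is reconstructed purely from the properties the new code dereferences (data.version, data.repository, data.architecture, data.status.*, data.packages, data.stats); it is an illustrative sketch, not an API reference, and the example values are made up.

    // hypothetical response object, shaped after the fields statusLoad() touches
    const exampleStatus = {
        architecture: "x86_64",
        repository: "aur-clone",
        version: "2.19.0",
        status: {status: "success", timestamp: 1719692400, total: 0}, // total > 0 hides the dashboard canvas
        packages: {unknown: 0, pending: 1, building: 0, failed: 2, success: 39},
        stats: {packages: 120, bases: 42},
    };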

@@ -24,6 +24,13 @@
                 <datalist id="package-add-known-packages-dlist"></datalist>
             </div>
         </div>
+        <div class="form-group row">
+            <label class="col-3 col-form-label"></label>
+            <div class="col-9">
+                <input id="package-add-refresh-input" type="checkbox" class="form-check-input" value="" checked>
+                <label for="package-add-refresh-input" class="form-check-label">update pacman databases</label>
+            </div>
+        </div>
         <div class="form-group row">
             <div class="col-12">
                 <button id="package-add-variable-button" type="button" class="form-control btn btn-light rounded" onclick="packageAddVariableInputCreate()"><i class="bi bi-plus"></i> add environment variable </button>
@@ -50,6 +57,8 @@
 
     const packageAddVariablesDiv = document.getElementById("package-add-variables-div");
 
+    const packageAddRefreshInput = document.getElementById("package-add-refresh-input");
+
     function packageAddVariableInputCreate() {
         const variableInput = document.createElement("div");
         variableInput.classList.add("input-group");
@@ -99,16 +108,18 @@
         return {patches: patches};
     }
 
-    function packagesAdd(packages, patches, repository) {
+    function packagesAdd(packages, patches, repository, data) {
         packages = packages ?? packageAddInput.value;
         patches = patches ?? patchesParse();
         repository = repository ?? getRepositorySelector(packageAddRepositoryInput);
+        data = data ?? {refresh: packageAddRefreshInput.checked};
 
         if (packages) {
             bootstrap.Modal.getOrCreateInstance(packageAddModal).hide();
             const onSuccess = update => `Packages ${update} have been added`;
             const onFailure = error => `Package addition failed: ${error}`;
-            doPackageAction("/api/v1/service/add", [packages], repository, onSuccess, onFailure, patches);
+            const parameters = Object.assign({}, data, patches);
+            doPackageAction("/api/v1/service/add", [packages], repository, onSuccess, onFailure, parameters);
         }
     }
 

@@ -80,8 +80,7 @@
           data-classes="table table-hover"
           data-sortable="true"
           data-sort-name="timestamp"
-          data-sort-order="desc"
-          data-toggle="table">
+          data-sort-order="desc">
        <thead class="table-primary">
            <tr>
                <th data-align="right" data-field="timestamp">date</th>

@@ -95,10 +94,27 @@
         </div>
         <div class="modal-footer">
             {% if not auth.enabled or auth.username is not none %}
-                <button id="package-info-update-button" type="submit" class="btn btn-success" onclick="packageInfoUpdate()" data-bs-dismiss="modal"><i class="bi bi-play"></i><span class="d-none d-sm-inline"> update</span></button>
+                <input id="package-info-refresh-input" type="checkbox" class="form-check-input" value="" checked>
+                <label for="package-info-refresh-input" class="form-check-label">update pacman databases</label>
+
+                <button id="package-info-update-button" type="submit" class="btn btn-success" onclick="packageInfoUpdate()"><i class="bi bi-play"></i><span class="d-none d-sm-inline"> update</span></button>
                 <button id="package-info-remove-button" type="submit" class="btn btn-danger" onclick="packageInfoRemove()" data-bs-dismiss="modal"><i class="bi bi-trash"></i><span class="d-none d-sm-inline"> remove</span></button>
             {% endif %}
-            <button type="button" class="btn btn-secondary" onclick="showPackageInfo()"><i class="bi bi-arrow-clockwise"></i><span class="d-none d-sm-inline"> reload</span></button>
+            {% if autorefresh_intervals %}
+                <button type="button" class="btn btn-secondary" onclick="showPackageInfo()"><i class="bi bi-arrow-clockwise"></i><span class="d-none d-sm-inline"> reload</span></button>
+                <div class="btn-group dropup">
+                    <input id="package-info-autoreload-button" type="checkbox" class="btn-check" autocomplete="off" onclick="togglePackageInfoAutoReload()" checked>
+                    <label for="package-info-autoreload-button" class="btn btn-outline-secondary" title="toggle auto reload"><i class="bi bi-clock"></i></label>
+                    <button type="button" class="btn btn-outline-secondary dropdown-toggle dropdown-toggle-split" data-bs-toggle="dropdown" aria-expanded="false">
+                        <span class="visually-hidden">select interval</span>
+                    </button>
+                    <ul id="package-info-autoreload-input" class="dropdown-menu">
+                        {% for interval in autorefresh_intervals %}
+                            <li><a class="dropdown-item {{ "active" if interval.is_active }}" onclick="togglePackageInfoAutoReload({{ interval.interval }})" data-interval="{{ interval.interval }}">{{ interval.text }}</a></li>
+                        {% endfor %}
+                    </ul>
+                </div>
+            {% endif %}
             <button type="button" class="btn btn-primary" data-bs-dismiss="modal"><i class="bi bi-x"></i><span class="d-none d-sm-inline"> close</span></button>
         </div>
     </div>

@@ -135,6 +151,12 @@
     const packageInfoVariablesBlock = document.getElementById("package-info-variables-block");
     const packageInfoVariablesDiv = document.getElementById("package-info-variables-div");
 
+    const packageInfoRefreshInput = document.getElementById("package-info-refresh-input");
+
+    const packageInfoAutoReloadButton = document.getElementById("package-info-autoreload-button");
+    const packageInfoAutoReloadInput = document.getElementById("package-info-autoreload-input");
+    let packageInfoAutoReloadTask = null;
+
     function clearChart() {
         packageInfoEventsUpdateChartCanvas.hidden = true;
         if (packageInfoEventsUpdateChart) {

@@ -143,6 +165,13 @@
         }
     }
 
+    function convertLogs(data, filter) {
+        return data
+            .filter((filter || Boolean))
+            .map(log_record => `[${new Date(1000 * log_record.created).toISOString()}] ${log_record.message}`)
+            .join("\n");
+    }
+
     async function copyChanges() {
         const changes = packageInfoChangesInput.textContent;
         await copyToClipboard(changes, packageInfoChangesCopyButton);

@@ -286,6 +315,69 @@
     }
 
     function loadLogs(packageBase, onFailure) {
+        const sortFn = (left, right) => left.process_id.localeCompare(right.process_id) || left.version.localeCompare(right.version);
+        const compareFn = (left, right) => left.process_id === right.process_id && left.version === right.version;
+
+        makeRequest(
+            `/api/v2/packages/${packageBase}/logs`,
+            {
+                query: {
+                    architecture: repository.architecture,
+                    head: true,
+                    repository: repository.repository,
+                },
+                convert: response => response.json(),
+            },
+            data => {
+                const currentVersions = Array.from(packageInfoLogsVersions.children)
+                    .map(el => {
+                        return {
+                            process_id: el.dataset.processId,
+                            version: el.dataset.version,
+                        };
+                    })
+                    .sort(sortFn);
+                const newVersions = data
+                    .map(el => {
+                        return {
+                            process_id: el.process_id,
+                            version: el.version,
+                        };
+                    })
+                    .sort(sortFn);
+
+                if (currentVersions.equals(newVersions, compareFn))
+                    loadLogsActive(packageBase);
+                else
+                    loadLogsAll(packageBase, onFailure);
+            },
+        )
+    }
+
+    function loadLogsActive(packageBase) {
+        const activeLogSelector = packageInfoLogsVersions.querySelector(".active");
+
+        if (activeLogSelector) {
+            makeRequest(
+                `/api/v2/packages/${packageBase}/logs`,
+                {
+                    query: {
+                        architecture: repository.architecture,
+                        repository: repository.repository,
+                        version: activeLogSelector.dataset.version,
+                        process_id: activeLogSelector.dataset.processId,
+                    },
+                    convert: response => response.json(),
+                },
+                data => {
+                    activeLogSelector.dataset.logs = convertLogs(data);
+                    activeLogSelector.click();
+                },
+            );
+        }
+    }
+
+    function loadLogsAll(packageBase, onFailure) {
         makeRequest(
             `/api/v2/packages/${packageBase}/logs`,
             {
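
Side note: the version comparison above relies on currentVersions.equals(newVersions, compareFn). That Array helper is defined elsewhere in this template and is not part of the hunks shown here; a minimal sketch of what such an element-wise comparator could look like (hypothetical, for orientation only):

    // hypothetical helper, not taken from this diff: element-wise comparison with a custom predicate
    Array.prototype.equals = function (other, compareFn) {
        return this.length === other.length
            && this.every((value, index) => compareFn(value, other[index]));
    };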

@@ -314,15 +406,19 @@
                 const link = document.createElement("a");
                 link.classList.add("dropdown-item");
+
+                link.dataset.version = version.version;
+                link.dataset.processId = version.process_id;
+                link.dataset.logs = convertLogs(data, log_record => log_record.version === version.version && log_record.process_id === version.process_id);
+
                 link.textContent = new Date(1000 * version.created).toISOStringShort();
                 link.href = "#";
                 link.onclick = _ => {
-                    const logs = data
-                        .filter(log_record => log_record.version === version.version && log_record.process_id === version.process_id)
-                        .map(log_record => `[${new Date(1000 * log_record.created).toISOString()}] ${log_record.message}`);
+                    // check if we are at the bottom of the code block
+                    const isScrolledToBottom = packageInfoLogsInput.scrollTop + packageInfoLogsInput.clientHeight >= packageInfoLogsInput.scrollHeight;
+                    packageInfoLogsInput.textContent = link.dataset.logs;
 
-                    packageInfoLogsInput.textContent = logs.join("\n");
                     highlight(packageInfoLogsInput);
+                    if (isScrolledToBottom)
+                        packageInfoLogsInput.scrollTop = packageInfoLogsInput.scrollHeight; // scroll to the new end
 
                     Array.from(packageInfoLogsVersions.children).forEach(el => el.classList.remove("active"));
                     link.classList.add("active");
|
|||||||
}
|
}
|
||||||
|
|
||||||
function packageInfoRemove() {
|
function packageInfoRemove() {
|
||||||
const packageBase = packageInfoModal.package;
|
const packageBase = packageInfoModal.dataset.package;
|
||||||
packagesRemove([packageBase]);
|
packagesRemove([packageBase]);
|
||||||
}
|
}
|
||||||
|
|
||||||
function packageInfoUpdate() {
|
function packageInfoUpdate() {
|
||||||
const packageBase = packageInfoModal.package;
|
const packageBase = packageInfoModal.dataset.package;
|
||||||
packagesAdd(packageBase, [], repository);
|
packagesAdd(packageBase, [], repository, {refresh: packageInfoRefreshInput.checked});
|
||||||
}
|
}
|
||||||
|
|
||||||
function showPackageInfo(packageBase) {
|
function showPackageInfo(packageBase) {
|
||||||
const isPackageBaseSet = packageBase !== undefined;
|
const isPackageBaseSet = packageBase !== undefined;
|
||||||
if (isPackageBaseSet) {
|
if (isPackageBaseSet) {
|
||||||
// set package base as currently used
|
// set package base as currently used
|
||||||
packageInfoModal.package = packageBase;
|
packageInfoModal.dataset.package = packageBase;
|
||||||
} else {
|
} else {
|
||||||
// read package base from the current window attribute
|
// read package base from the current window attribute
|
||||||
packageBase = packageInfoModal.package;
|
packageBase = packageInfoModal.dataset.package;
|
||||||
}
|
}
|
||||||
|
|
||||||
const onFailure = error => {
|
const onFailure = error => {
|
||||||

@@ -433,10 +529,27 @@
 
         if (isPackageBaseSet) {
             bootstrap.Modal.getOrCreateInstance(packageInfoModal).show();
+            {% if autorefresh_intervals %}
+                togglePackageInfoAutoReload();
+            {% endif %}
         }
     }
 
+    function togglePackageInfoAutoReload(interval) {
+        clearInterval(packageInfoAutoReloadTask);
+        packageInfoAutoReloadTask = toggleAutoReload(packageInfoAutoReloadButton, interval, packageInfoAutoReloadInput, _ => {
+            if (!hasActiveSelection()) {
+                const packageBase = packageInfoModal.dataset.package;
+                // we only poll status and logs here
+                loadPackage(packageBase);
+                loadLogs(packageBase);
+            }
+        });
+    }
+
     ready(_ => {
+        packageInfoEventsTable.bootstrapTable({});
+
         packageInfoEventsUpdateChart = new Chart(packageInfoEventsUpdateChartCanvas, {
             type: "line",
             data: {},

@@ -463,6 +576,11 @@
             packageInfoChangesInput.textContent = "";
             packageInfoEventsTable.bootstrapTable("load", []);
             clearChart();
+
+            clearInterval(packageInfoAutoReloadTask);
+            packageInfoAutoReloadTask = null; // not really required (?) but lets clear everything
         });
+
+        restoreAutoReloadSettings(packageInfoAutoReloadButton, packageInfoAutoReloadInput);
     });
 </script>

@@ -10,6 +10,10 @@
     const dashboardButton = document.getElementById("dashboard-button");
     const versionBadge = document.getElementById("badge-version");
 
+    const tableAutoReloadButton = document.getElementById("table-autoreload-button");
+    const tableAutoReloadInput = document.getElementById("table-autoreload-input");
+    let tableAutoReloadTask = null;
+
     function doPackageAction(uri, packages, repository, successText, failureText, data) {
         makeRequest(
             uri,

@@ -55,6 +59,41 @@
         return table.bootstrapTable("getSelections").map(row => row.id);
     }
 
+    function packagesLoad(onFailure) {
+        makeRequest(
+            "/api/v1/packages",
+            {
+                query: {
+                    architecture: repository.architecture,
+                    repository: repository.repository,
+                },
+                convert: response => response.json(),
+            },
+            data => {
+                const payload = data
+                    .map(description => {
+                        const package_base = description.package.base;
+                        const web_url = description.package.remote.web_url;
+                        return {
+                            id: package_base,
+                            base: web_url ? safeLink(web_url, package_base, package_base).outerHTML : safe(package_base),
+                            version: safe(description.package.version),
+                            packager: description.package.packager ? safe(description.package.packager) : "",
+                            packages: listToTable(Object.keys(description.package.packages)),
+                            groups: listToTable(extractListProperties(description.package, "groups")),
+                            licenses: listToTable(extractListProperties(description.package, "licenses")),
+                            timestamp: new Date(1000 * description.status.timestamp).toISOStringShort(),
+                            status: description.status.status,
+                        };
+                    });
+
+                updateTable(table, payload);
+                table.bootstrapTable("hideLoading");
+            },
+            onFailure,
+        );
+    }
+
     function packagesRemove(packages) {
         packages = packages ?? getSelection();
         const onSuccess = update => `Packages ${update} have been removed`;

@@ -73,132 +112,37 @@
         doPackageAction(url, currentSelection, repository, onSuccess, onFailure);
     }
 
-    function reload() {
-        table.bootstrapTable("showLoading");
-        const badgeClass = status => {
-            if (status === "pending") return "btn-outline-warning";
-            if (status === "building") return "btn-outline-warning";
-            if (status === "failed") return "btn-outline-danger";
-            if (status === "success") return "btn-outline-success";
-            return "btn-outline-secondary";
+    function refreshDatabases() {
+        const onSuccess = _ => "Pacman database update has been requested";
+        const onFailure = error => `Could not update pacman databases: ${error}`;
+        const parameters = {
+            refresh: true,
+            aur: false,
+            local: false,
+            manual: false,
         };
 
-        makeRequest(
-            "/api/v1/packages",
-            {
-                query: {
-                    architecture: repository.architecture,
-                    repository: repository.repository,
-                },
-                convert: response => response.json(),
-            },
-            data => {
-                const payload = data.map(description => {
-                    const package_base = description.package.base;
-                    const web_url = description.package.remote.web_url;
-                    return {
-                        id: package_base,
-                        base: web_url ? safeLink(web_url, package_base, package_base).outerHTML : safe(package_base),
-                        version: safe(description.package.version),
-                        packager: description.package.packager ? safe(description.package.packager) : "",
-                        packages: listToTable(Object.keys(description.package.packages)),
-                        groups: listToTable(extractListProperties(description.package, "groups")),
-                        licenses: listToTable(extractListProperties(description.package, "licenses")),
-                        timestamp: new Date(1000 * description.status.timestamp).toISOStringShort(),
-                        status: description.status.status,
-                    };
-                });
+        doPackageAction("/api/v1/service/update", [], repository, onSuccess, onFailure, parameters);
+    }
 
-                table.bootstrapTable("load", payload);
-                table.bootstrapTable("uncheckAll");
+    function reload() {
+        table.bootstrapTable("showLoading");
+        const onFailure = error => {
+            if ((error.status === 401) || (error.status === 403)) {
+                // authorization error
+                const text = "In order to see statuses you must login first.";
+                table.find("tr.unauthorized").remove();
+                table.find("tbody").append(`<tr class="unauthorized"><td colspan="100%">${safe(text)}</td></tr>`);
                 table.bootstrapTable("hideLoading");
-            },
-            error => {
-                if ((error.status === 401) || (error.status === 403)) {
-                    // authorization error
-                    const text = "In order to see statuses you must login first.";
-                    table.find("tr.unauthorized").remove();
-                    table.find("tbody").append(`<tr class="unauthorized"><td colspan="100%">${safe(text)}</td></tr>`);
-                    table.bootstrapTable("hideLoading");
-                } else {
-                    // other errors
-                    const message = details => `Could not load list of packages: ${details}`;
-                    showFailure("Load failure", message, error);
-                }
-            },
-        );
+            } else {
+                // other errors
+                const message = details => `Could not load list of packages: ${details}`;
+                showFailure("Load failure", message, error);
+            }
+        };
 
-        makeRequest(
-            "/api/v1/status",
-            {
-                query: {
-                    architecture: repository.architecture,
-                    repository: repository.repository,
-                },
-                convert: response => response.json(),
-            },
-            data => {
-                versionBadge.innerHTML = `<i class="bi bi-github"></i> ahriman ${safe(data.version)}`;
-
-                dashboardButton.classList.remove(...dashboardButton.classList);
-                dashboardButton.classList.add("btn");
-                dashboardButton.classList.add(badgeClass(data.status.status));
-
-                dashboardModalHeader.classList.remove(...dashboardModalHeader.classList);
-                dashboardModalHeader.classList.add("modal-header");
-                headerClass(data.status.status).forEach(clz => dashboardModalHeader.classList.add(clz));
-
-                dashboardName.textContent = data.repository;
-                dashboardArchitecture.textContent = data.architecture;
-                dashboardStatus.textContent = data.status.status;
-                dashboardStatusTimestamp.textContent = new Date(1000 * data.status.timestamp).toISOStringShort();
-
-                if (dashboardPackagesStatusesChart) {
-                    const labels = [
-                        "unknown",
-                        "pending",
-                        "building",
-                        "failed",
-                        "success",
-                    ];
-                    dashboardPackagesStatusesChart.config.data = {
-                        labels: labels,
-                        datasets: [{
-                            label: "packages in status",
-                            data: labels.map(label => data.packages[label]),
-                            backgroundColor: [
-                                "rgb(55, 58, 60)",
-                                "rgb(255, 117, 24)",
-                                "rgb(255, 117, 24)",
-                                "rgb(255, 0, 57)",
-                                "rgb(63, 182, 24)", // copy-paste from current style
-                            ],
-                        }],
-                    };
-                    dashboardPackagesStatusesChart.update();
-                }
-
-                if (dashboardPackagesCountChart) {
-                    dashboardPackagesCountChart.config.data = {
-                        labels: ["packages"],
-                        datasets: [
-                            {
-                                label: "archives",
-                                data: [data.stats.packages],
-                            },
-                            {
-                                label: "bases",
-                                data: [data.stats.bases],
-                            },
-                        ],
-                    };
-                    dashboardPackagesCountChart.update();
-                }
-
-                dashboardCanvas.hidden = data.status.total > 0;
-            },
-        );
+        packagesLoad(onFailure);
+        statusLoad();
     }
 
     function selectRepository() {

@@ -217,7 +161,24 @@
         return {classes: cellClass(value)};
     }
 
+    function toggleTableAutoReload(interval) {
+        clearInterval(tableAutoReloadTask);
+        tableAutoReloadTask = toggleAutoReload(tableAutoReloadButton, interval, tableAutoReloadInput, _ => {
+            if (!dashboardModal.classList.contains("show") &&
+                !hasActiveDropdown()) {
+                packagesLoad();
+                statusLoad();
+            }
+        });
+    }
+
     ready(_ => {
+        const onCheckFunction = function () {
+            if (packageRemoveButton) {
+                packageRemoveButton.disabled = !getSelection().length;
+            }
+        };
+
         document.querySelectorAll("#repositories a").forEach(element => {
             element.onclick = _ => {
                 repository = {

@@ -232,49 +193,55 @@
             };
         });
 
-        table.on("check.bs.table uncheck.bs.table check-all.bs.table uncheck-all.bs.table", _ => {
-            if (packageRemoveButton) {
-                packageRemoveButton.disabled = !table.bootstrapTable("getSelections").length;
-            }
-        });
-        table.on("click-row.bs.table", (self, data, row, cell) => {
-            if (0 === cell || "base" === cell) {
-                const method = data[0] === true ? "uncheckBy" : "checkBy"; // fck javascript
-                table.bootstrapTable(method, {field: "id", values: [data.id]});
-            } else showPackageInfo(data.id);
-        });
-        table.on("created-controls.bs.table", _ => {
-            new easepick.create({
-                element: document.querySelector(".bootstrap-table-filter-control-timestamp"),
-                css: [
-                    "https://cdn.jsdelivr.net/npm/@easepick/bundle@1.2.1/dist/index.css",
-                ],
-                grid: 2,
-                calendars: 2,
-                autoApply: false,
-                locale: {
-                    cancel: "Clear",
-                },
-                RangePlugin: {
-                    tooltip: false,
-                },
-                plugins: [
-                    "RangePlugin",
-                ],
-                setup: picker => {
-                    picker.on("select", _ => { table.bootstrapTable("triggerSearch"); });
-                    // replace "Cancel" behaviour to "Clear"
-                    picker.onClickCancelButton = element => {
-                        if (picker.isCancelButton(element)) {
-                            picker.clear();
-                            picker.hide();
-                            table.bootstrapTable("triggerSearch");
-                        }
-                    };
-                },
-            });
+        table.bootstrapTable({
+            onCheck: onCheckFunction,
+            onCheckAll: onCheckFunction,
+            onClickRow: (data, row, cell) => {
+                if (0 === cell || "base" === cell) {
+                    const method = data[0] === true ? "uncheckBy" : "checkBy"; // fck javascript
+                    table.bootstrapTable(method, {field: "id", values: [data.id]});
+                } else showPackageInfo(data.id);
+            },
+            onCreatedControls: _ => {
+                new easepick.create({
+                    element: document.querySelector(".bootstrap-table-filter-control-timestamp"),
+                    css: [
+                        "https://cdn.jsdelivr.net/npm/@easepick/bundle@1.2.1/dist/index.css",
+                    ],
+                    grid: 2,
+                    calendars: 2,
+                    autoApply: false,
+                    locale: {
+                        cancel: "Clear",
+                    },
+                    RangePlugin: {
+                        tooltip: false,
+                    },
+                    plugins: [
+                        "RangePlugin",
+                    ],
+                    setup: picker => {
+                        picker.on("select", _ => { table.bootstrapTable("triggerSearch"); });
+                        // replace "Cancel" behaviour to "Clear"
+                        picker.onClickCancelButton = element => {
+                            if (picker.isCancelButton(element)) {
+                                picker.clear();
+                                picker.hide();
+                                table.bootstrapTable("triggerSearch");
+                            }
+                        };
+                    },
+                });
+            },
+            onUncheck: onCheckFunction,
+            onUncheckAll: onCheckFunction,
         });
 
+        restoreAutoReloadSettings(tableAutoReloadButton, tableAutoReloadInput);
+
         selectRepository();
+        {% if autorefresh_intervals %}
+            toggleTableAutoReload();
+        {% endif %}
     });
 </script>

@@ -53,8 +53,7 @@ SigLevel = Database{% if has_repo_signed %}Required{% else %}Never{% endif %} Pa
           data-show-search-clear-button="true"
           data-sortable="true"
           data-sort-name="base"
-          data-sort-order="asc"
-          data-toggle="table">
+          data-sort-order="asc">
        <thead class="table-primary">
            <tr>
                <th data-sortable="true" data-switchable="false" data-field="name" data-filter-control="input" data-filter-control-placeholder="(any package)">package</th>

@@ -128,36 +127,38 @@ SigLevel = Database{% if has_repo_signed %}Required{% else %}Never{% endif %} Pa
     }
 
     ready(_ => {
-        table.on("created-controls.bs.table", _ => {
-            new easepick.create({
-                element: document.querySelector(".bootstrap-table-filter-control-timestamp"),
-                css: [
-                    "https://cdn.jsdelivr.net/npm/@easepick/bundle@1.2.1/dist/index.css",
-                ],
-                grid: 2,
-                calendars: 2,
-                autoApply: false,
-                locale: {
-                    cancel: "Clear",
-                },
-                RangePlugin: {
-                    tooltip: false,
-                },
-                plugins: [
-                    "RangePlugin",
-                ],
-                setup: picker => {
-                    picker.on("select", _ => { table.bootstrapTable("triggerSearch"); });
-                    // replace "Cancel" behaviour to "Clear"
-                    picker.onClickCancelButton = element => {
-                        if (picker.isCancelButton(element)) {
-                            picker.clear();
-                            picker.hide();
-                            table.bootstrapTable("triggerSearch");
-                        }
-                    };
-                },
-            });
+        table.bootstrapTable({
+            onCreatedControls: _ => {
+                new easepick.create({
+                    element: document.querySelector(".bootstrap-table-filter-control-timestamp"),
+                    css: [
+                        "https://cdn.jsdelivr.net/npm/@easepick/bundle@1.2.1/dist/index.css",
+                    ],
+                    grid: 2,
+                    calendars: 2,
+                    autoApply: false,
+                    locale: {
+                        cancel: "Clear",
+                    },
+                    RangePlugin: {
+                        tooltip: false,
+                    },
+                    plugins: [
+                        "RangePlugin",
+                    ],
+                    setup: picker => {
+                        picker.on("select", _ => { table.bootstrapTable("triggerSearch"); });
+                        // replace "Cancel" behaviour to "Clear"
+                        picker.onClickCancelButton = element => {
+                            if (picker.isCancelButton(element)) {
+                                picker.clear();
+                                picker.hide();
+                                table.bootstrapTable("triggerSearch");
+                            }
+                        };
+                    },
+                });
+            },
         });
     });
 </script>

@@ -1,23 +1,24 @@
 <script src="https://cdn.jsdelivr.net/npm/jquery@3.7.1/dist/jquery.min.js" crossorigin="anonymous" type="application/javascript"></script>
 <script src="https://cdn.jsdelivr.net/npm/js-md5@0.8.3/src/md5.min.js" crossorigin="anonymous" type="application/javascript"></script>
 
-<script src="https://cdn.jsdelivr.net/npm/tableexport.jquery.plugin@1.30.0/tableExport.min.js" crossorigin="anonymous" type="application/javascript"></script>
+<script src="https://cdn.jsdelivr.net/npm/tableexport.jquery.plugin@1.33.0/tableExport.min.js" crossorigin="anonymous" type="application/javascript"></script>
 
 <script src="https://cdn.jsdelivr.net/npm/jquery-resizable-columns@0.2.3/dist/jquery.resizableColumns.min.js" crossorigin="anonymous" type="application/javascript"></script>
 
 <script src="https://cdn.jsdelivr.net/npm/@popperjs/core@2.11.8/dist/umd/popper.min.js" crossorigin="anonymous" type="application/javascript"></script>
-<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.min.js" crossorigin="anonymous" type="application/javascript"></script>
-<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.2/dist/bootstrap-table.min.js" crossorigin="anonymous" type="application/javascript"></script>
+<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.7/dist/js/bootstrap.min.js" crossorigin="anonymous" type="application/javascript"></script>
+<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.24.1/dist/bootstrap-table.min.js" crossorigin="anonymous" type="application/javascript"></script>
 
-<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.2/dist/extensions/export/bootstrap-table-export.min.js" crossorigin="anonymous" type="application/javascript"></script>
-<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.2/dist/extensions/resizable/bootstrap-table-resizable.js" crossorigin="anonymous" type="application/javascript"></script>
-<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.2/dist/extensions/filter-control/bootstrap-table-filter-control.js" crossorigin="anonymous" type="application/javascript"></script>
+<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.24.1/dist/extensions/export/bootstrap-table-export.min.js" crossorigin="anonymous" type="application/javascript"></script>
+<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.24.1/dist/extensions/resizable/bootstrap-table-resizable.js" crossorigin="anonymous" type="application/javascript"></script>
+<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.24.1/dist/extensions/filter-control/bootstrap-table-filter-control.js" crossorigin="anonymous" type="application/javascript"></script>
+<script src="https://cdn.jsdelivr.net/npm/bootstrap-table@1.24.1/dist/extensions/cookie/bootstrap-table-cookie.min.js" crossorigin="anonymous" type="application/javascript"></script>
 
 <script src="https://cdn.jsdelivr.net/npm/@easepick/bundle@1.2.1/dist/index.umd.min.js" crossorigin="anonymous" type="application/javascript"></script>
 
-<script src="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.10.0/build/highlight.min.js" crossorigin="anonymous" type="application/javascript"></script>
+<script src="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.11.1/build/highlight.min.js" crossorigin="anonymous" type="application/javascript"></script>
 
-<script src="https://cdn.jsdelivr.net/npm/chart.js@4.4.4/dist/chart.umd.min.js" crossorigin="anonymous" type="application/javascript"></script>
+<script src="https://cdn.jsdelivr.net/npm/chart.js@4.5.0/dist/chart.umd.min.js" crossorigin="anonymous" type="application/javascript"></script>
 
 <script>
     async function copyToClipboard(text, button) {

@@ -58,6 +59,15 @@
         return value.includes(dataList[index].toLowerCase());
     }
 
+    function hasActiveSelection() {
+        return !document.getSelection().isCollapsed; // not sure if it is a valid way, but I guess so
+    }
+
+    function hasActiveDropdown() {
+        return Array.from(document.querySelectorAll(".dropdown-menu"))
+            .some(el => el.classList.contains("show"));
+    }
+
     function headerClass(status) {
         if (status === "pending") return ["bg-warning"];
         if (status === "building") return ["bg-warning"];

@@ -106,6 +116,12 @@
             .catch(error => onFailure && onFailure(error));
     }
 
+    function readOptional(extractor, callback) {
+        for (let value = extractor(); !!value; value = null) {
+            callback(value);
+        }
+    }
+
     function ready(fn) {
         if (document.readyState === "complete" || document.readyState === "interactive") {
             setTimeout(fn, 1);
@@ -114,6 +130,11 @@
        }
    }

+    function restoreAutoReloadSettings(toggle, intervalSelector) {
+        readOptional(() => localStorage.getItem(`ahriman-${toggle.id}-refresh-enabled`), value => toggle.checked = value === "true");
+        readOptional(() => localStorage.getItem(`ahriman-${toggle.id}-refresh-interval`), value => toggleActiveElement(intervalSelector, "interval", value));
+    }
+
    function safe(string) {
        return String(string)
            .replace(/&/g, "&amp;")
@@ -133,7 +154,86 @@
        return element;
    }

-    Date.prototype.toISOStringShort = function() {
+    function toggleActiveElement(selector, dataType, value) {
+        const targetElement = selector.querySelector(`a[data-${dataType}="${value}"]`);
+        if (targetElement?.classList?.contains("active")) {
+            return;  // element is already active, skip processing
+        }
+
+        Array.from(selector.children).forEach(il => {
+            Array.from(il.children).forEach(el => el.classList.remove("active"));
+        });
+        targetElement?.classList?.add("active");
+    }
+
+    function toggleAutoReload(toggle, interval, intervalSelector, callback) {
+        if (interval) {
+            toggle.checked = true;  // toggle reload
+        } else {
+            interval = intervalSelector.querySelector(".active")?.dataset?.interval;  // find active element
+        }
+
+        let intervalId = null;
+        if (interval) {
+            if (toggle.checked) {
+                // refresh UI
+                toggleActiveElement(intervalSelector, "interval", interval);
+                // finally create timer task
+                intervalId = setInterval(callback, interval);
+            }
+        } else {
+            toggle.checked = false;  // no active interval found, disable toggle
+        }
+
+        localStorage.setItem(`ahriman-${toggle.id}-refresh-enabled`, toggle.checked);
+        localStorage.setItem(`ahriman-${toggle.id}-refresh-interval`, interval);
+        return intervalId;
+    }
+
+    function updateTable(table, rows) {
+        // instead of using load method here, we just update rows manually to avoid table reinitialization
+        const currentData = table.bootstrapTable("getData").reduce((accumulator, row) => {
+            accumulator[row.id] = row["0"];
+            return accumulator;
+        }, {});
+        // insert or update rows
+        rows.forEach(row => {
+            if (Object.hasOwn(currentData, row.id)) {
+                row["0"] = currentData[row.id];  // copy checkbox state
+                table.bootstrapTable("updateByUniqueId", {
+                    id: row.id,
+                    row: row,
+                    replace: true,
+                });
+            } else {
+                table.bootstrapTable("insertRow", {index: 0, row: row});
+            }
+        });
+        // remove old rows
+        const newData = rows.map(value => value.id);
+        Object.keys(currentData).forEach(id => {
+            if (!newData.includes(id)) {
+                table.bootstrapTable("removeByUniqueId", id);
+            }
+        });
+    }
+
+    Array.prototype.equals = function (right, comparator) {
+        let index = this.length;
+        if (index !== right.length) {
+            return false;
+        }
+
+        while (index--) {
+            if (!comparator(this[index], right[index])) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    Date.prototype.toISOStringShort = function () {
        const pad = number => String(number).padStart(2, "0");
        return `${this.getFullYear()}-${pad(this.getMonth() + 1)}-${pad(this.getDate())} ${pad(this.getHours())}:${pad(this.getMinutes())}:${pad(this.getSeconds())}`;
    }
@@ -1,15 +1,15 @@
-<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" crossorigin="anonymous" type="text/css">
+<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.7/dist/css/bootstrap.min.css" crossorigin="anonymous" type="text/css">
-<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.1/font/bootstrap-icons.css" crossorigin="anonymous" type="text/css">
+<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.13.1/font/bootstrap-icons.css" crossorigin="anonymous" type="text/css">

-<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.2/dist/bootstrap-table.min.css" crossorigin="anonymous" type="text/css">
+<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-table@1.24.1/dist/bootstrap-table.min.css" crossorigin="anonymous" type="text/css">

 <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/jquery-resizable-columns@0.2.3/dist/jquery.resizableColumns.css" crossorigin="anonymous" type="text/css">

-<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-table@1.23.2/dist/extensions/filter-control/bootstrap-table-filter-control.css" crossorigin="anonymous" type="text/css">
+<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-table@1.24.1/dist/extensions/filter-control/bootstrap-table-filter-control.css" crossorigin="anonymous" type="text/css">

-<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootswatch@5.3.3/dist/cosmo/bootstrap.min.css" crossorigin="anonymous" type="text/css">
+<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootswatch@5.3.7/dist/cosmo/bootstrap.min.css" crossorigin="anonymous" type="text/css">

-<link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.10.0/build/styles/github.min.css" crossorigin="anonymous" type="text/css">
+<link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.11.1/build/styles/github.min.css" crossorigin="anonymous" type="text/css">

 <style>
     .pre-scrollable {

@@ -1,6 +1,6 @@
-.TH AHRIMAN "1" "2025\-06\-16" "ahriman" "Generated Python Manual"
+.TH AHRIMAN "1" "2025\-06\-29" "ahriman 2.19.0" "ArcH linux ReposItory MANager"
 .SH NAME
-ahriman
+ahriman \- ArcH linux ReposItory MANager
 .SH SYNOPSIS
 .B ahriman
 [-h] [-a ARCHITECTURE] [-c CONFIGURATION] [--force] [-l LOCK] [--log-handler {console,syslog,journald}] [-q] [--report | --no-report] [-r REPOSITORY] [--unsafe] [-V] [--wait-timeout WAIT_TIMEOUT] {add,aur-search,check,clean,config,config-validate,copy,daemon,help,help-commands-unsafe,help-updates,help-version,init,key-import,package-add,package-changes,package-changes-remove,package-copy,package-remove,package-status,package-status-remove,package-status-update,package-update,patch-add,patch-list,patch-remove,patch-set-add,rebuild,remove,remove-unknown,repo-backup,repo-check,repo-clean,repo-config,repo-config-validate,repo-create-keyring,repo-create-mirrorlist,repo-daemon,repo-init,repo-rebuild,repo-remove-unknown,repo-report,repo-restore,repo-setup,repo-sign,repo-statistics,repo-status-update,repo-sync,repo-tree,repo-triggers,repo-update,report,run,search,service-clean,service-config,service-config-validate,service-key-import,service-repositories,service-run,service-setup,service-shell,service-tree-migrate,setup,shell,sign,status,status-update,sync,update,user-add,user-list,user-remove,version,web} ...
@@ -58,23 +58,23 @@ web = [
     "aiohttp_cors",
     "aiohttp_jinja2",
 ]
-web_api-docs = [
-    "ahriman[web]",
-    "aiohttp-apispec",
-    "setuptools", # required by aiohttp-apispec
-]
-web_auth = [
+web-auth = [
     "ahriman[web]",
     "aiohttp_session",
     "aiohttp_security",
     "cryptography",
 ]
-web_metrics = [
+web-docs = [
+    "ahriman[web]",
+    "aiohttp-apispec",
+    "setuptools", # required by aiohttp-apispec
+]
+web-metrics = [
     "ahriman[web]",
     "aiohttp-openmetrics",
 ]
-web_oauth2 = [
-    "ahriman[web_auth]",
+web-oauth2 = [
+    "ahriman[web-auth]",
     "aioauth-client",
 ]
@@ -17,4 +17,4 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
-__version__ = "2.18.3"
+__version__ = "2.19.0"

@@ -133,18 +133,18 @@ class Application(ApplicationPackages, ApplicationRepository):
        if not process_dependencies or not packages:
            return packages

-        def missing_dependencies(source: Iterable[Package]) -> dict[str, str | None]:
+        def missing_dependencies(sources: Iterable[Package]) -> dict[str, str | None]:
            # append list of known packages with packages which are in current sources
            satisfied_packages = known_packages | {
                single
-                for package in source
-                for single in package.packages_full
+                for source in sources
+                for single in source.packages_full
            }

            return {
-                dependency: package.packager
-                for package in source
-                for dependency in package.depends_build
+                dependency: source.packager
+                for source in sources
+                for dependency in source.depends_build
                if dependency not in satisfied_packages
            }

@@ -156,7 +156,7 @@ class Application(ApplicationPackages, ApplicationRepository):
                # there is local cache, load package from it
                leaf = Package.from_build(source_dir, self.repository.architecture, packager)
            else:
-                leaf = Package.from_aur(package_name, packager)
+                leaf = Package.from_aur(package_name, packager, include_provides=True)
            portion[leaf.base] = leaf

            # register package in the database
@@ -255,3 +255,20 @@ class Pacman(LazyLogging):
            result.update(trim_package(provides) for provides in package.provides)

        return result
+
+    def provided_by(self, package_name: str) -> Generator[Package, None, None]:
+        """
+        search through databases and emit packages which provides the ``package_name``
+
+        Args:
+            package_name(str): package name to search
+
+        Yields:
+            Package: list of packages which were returned by the query
+        """
+        def is_package_provided(package: Package) -> bool:
+            provides = [trim_package(name) for name in package.provides]
+            return package_name in provides
+
+        for database in self.handle.get_syncdbs():
+            yield from filter(is_package_provided, database.search(package_name))
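A minimal usage sketch for the new provided_by helper above; it is not part of the change itself, and it assumes that "pacman" is an already configured ahriman.core.alpm.pacman.Pacman instance and that the package name is illustrative:

def candidates_for(pacman, virtual_name: str) -> list[str]:
    # provided_by() walks the configured sync databases and yields pyalpm packages
    # whose trimmed "provides" entries contain the requested name
    return sorted({package.name for package in pacman.provided_by(virtual_name)})

# candidates_for(pacman, "java-runtime") might return ["jre-openjdk", "jre17-openjdk"],
# depending on which repositories are enabled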
@@ -97,20 +97,17 @@ class AUR(Remote):

        Returns:
            list[AURPackage]: response parsed to package list
+
+        Raises:
+            PackageInfoError: if multiple arguments are passed
        """
-        query: list[tuple[str, str]] = [
-            ("type", request_type),
-            ("v", self.DEFAULT_RPC_VERSION),
-        ]
-
-        arg_query = "arg[]" if len(args) > 1 else "arg"
-        for arg in args:
-            query.append((arg_query, arg))
-
-        for key, value in kwargs.items():
-            query.append((key, value))
-
-        response = self.make_request("GET", self.DEFAULT_RPC_URL, params=query)
+        if len(args) != 1:
+            raise PackageInfoError("AUR API requires exactly one argument to search")
+
+        url = f"{self.DEFAULT_RPC_URL}/v{self.DEFAULT_RPC_VERSION}/{request_type}/{args[0]}"
+        query = list(kwargs.items())
+
+        response = self.make_request("GET", url, params=query)
        return self.parse_response(response.json())

    def package_info(self, package_name: str, *, pacman: Pacman | None) -> AURPackage:
|
|||||||
except StopIteration:
|
except StopIteration:
|
||||||
raise UnknownPackageError(package_name) from None
|
raise UnknownPackageError(package_name) from None
|
||||||
|
|
||||||
def package_search(self, *keywords: str, pacman: Pacman | None) -> list[AURPackage]:
|
def package_provided_by(self, package_name: str, *, pacman: Pacman | None) -> list[AURPackage]:
|
||||||
|
"""
|
||||||
|
get package list which provide the specified package name
|
||||||
|
|
||||||
|
Args:
|
||||||
|
package_name(str): package name to search
|
||||||
|
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[AURPackage]: list of packages which match the criteria
|
||||||
|
"""
|
||||||
|
return [
|
||||||
|
package
|
||||||
|
# search api provides reduced models
|
||||||
|
for stub in self.package_search(package_name, pacman=pacman, search_by="provides")
|
||||||
|
# verity that found package actually provides it
|
||||||
|
if package_name in (package := self.package_info(stub.name, pacman=pacman)).provides
|
||||||
|
]
|
||||||
|
|
||||||
|
def package_search(self, *keywords: str, pacman: Pacman | None, search_by: str | None) -> list[AURPackage]:
|
||||||
"""
|
"""
|
||||||
search package in AUR web
|
search package in AUR web
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
*keywords(str): keywords to search
|
*keywords(str): keywords to search
|
||||||
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||||
|
search_by(str | None): search by keywords
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list[AURPackage]: list of packages which match the criteria
|
list[AURPackage]: list of packages which match the criteria
|
||||||
"""
|
"""
|
||||||
return self.aur_request("search", *keywords, by="name-desc")
|
search_by = search_by or "name-desc"
|
||||||
|
return self.aur_request("search", *keywords, by=search_by)
|
||||||
|
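A hedged sketch of how the two new AUR calls fit together; the package name and the no-pacman setup below are illustrative only, and the calls hit the live AUR RPC:

from ahriman.core.alpm.remote.aur import AUR

aur = AUR()
# raw search by the "provides" field returns reduced models only
stubs = aur.package_search("ahriman", pacman=None, search_by="provides")
# package_provided_by() re-reads full info for each stub and keeps real providers
providers = aur.package_provided_by("ahriman", pacman=None)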
@@ -127,15 +127,17 @@ class Official(Remote):
        except StopIteration:
            raise UnknownPackageError(package_name) from None

-    def package_search(self, *keywords: str, pacman: Pacman | None) -> list[AURPackage]:
+    def package_search(self, *keywords: str, pacman: Pacman | None, search_by: str | None) -> list[AURPackage]:
        """
        search package in AUR web

        Args:
            *keywords(str): keywords to search
            pacman(Pacman | None): alpm wrapper instance, required for official repositories search
+            search_by(str | None): search by keywords

        Returns:
            list[AURPackage]: list of packages which match the criteria
        """
-        return self.arch_request(*keywords, by="q")
+        search_by = search_by or "q"
+        return self.arch_request(*keywords, by=search_by)

@@ -59,3 +59,22 @@ class OfficialSyncdb(Official):
            return next(AURPackage.from_pacman(package) for package in pacman.package(package_name))
        except StopIteration:
            raise UnknownPackageError(package_name) from None
+
+    def package_provided_by(self, package_name: str, *, pacman: Pacman | None) -> list[AURPackage]:
+        """
+        get package list which provide the specified package name
+
+        Args:
+            package_name(str): package name to search
+            pacman(Pacman | None): alpm wrapper instance, required for official repositories search
+
+        Returns:
+            list[AURPackage]: list of packages which match the criteria
+        """
+        if pacman is None:
+            return []
+
+        return [
+            AURPackage.from_pacman(package)
+            for package in pacman.provided_by(package_name)
+        ]

@@ -18,6 +18,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 from ahriman.core.alpm.pacman import Pacman
+from ahriman.core.exceptions import UnknownPackageError
 from ahriman.core.http import SyncHttpClient
 from ahriman.models.aur_package import AURPackage

|
|||||||
"""
|
"""
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def info(cls, package_name: str, *, pacman: Pacman | None = None) -> AURPackage:
|
def info(cls, package_name: str, *, pacman: Pacman | None = None, include_provides: bool = False) -> AURPackage:
|
||||||
"""
|
"""
|
||||||
get package info by its name
|
get package info by its name. If ``include_provides`` is set to ``True``, then, in addition, this method
|
||||||
|
will perform search by :attr:`ahriman.models.aur_package.AURPackage.provides` and return first package found.
|
||||||
|
Note, however, that in this case some implementation might not provide this method and search result will might
|
||||||
|
not be stable
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_name(str): package name to search
|
package_name(str): package name to search
|
||||||
pacman(Pacman | None, optional): alpm wrapper instance, required for official repositories search
|
pacman(Pacman | None, optional): alpm wrapper instance, required for official repositories search
|
||||||
(Default value = None)
|
(Default value = None)
|
||||||
|
include_provides(bool, optional): search by provides if no exact match found (Default value = False)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
AURPackage: package which match the package name
|
AURPackage: package which match the package name
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
UnknownPackageError: if requested package not found
|
||||||
"""
|
"""
|
||||||
return cls().package_info(package_name, pacman=pacman)
|
instance = cls()
|
||||||
|
try:
|
||||||
|
return instance.package_info(package_name, pacman=pacman)
|
||||||
|
except UnknownPackageError:
|
||||||
|
if include_provides and (provided_by := instance.package_provided_by(package_name, pacman=pacman)):
|
||||||
|
return next(iter(provided_by))
|
||||||
|
raise
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def multisearch(cls, *keywords: str, pacman: Pacman | None = None) -> list[AURPackage]:
|
def multisearch(cls, *keywords: str, pacman: Pacman | None = None,
|
||||||
|
search_by: str | None = None) -> list[AURPackage]:
|
||||||
"""
|
"""
|
||||||
search in remote repository by using API with multiple words. This method is required in order to handle
|
search in remote repository by using API with multiple words. This method is required in order to handle
|
||||||
https://bugs.archlinux.org/task/49133. In addition, short words will be dropped
|
https://bugs.archlinux.org/task/49133. In addition, short words will be dropped
|
||||||
@ -65,6 +80,7 @@ class Remote(SyncHttpClient):
|
|||||||
*keywords(str): search terms, e.g. "ahriman", "is", "cool"
|
*keywords(str): search terms, e.g. "ahriman", "is", "cool"
|
||||||
pacman(Pacman | None, optional): alpm wrapper instance, required for official repositories search
|
pacman(Pacman | None, optional): alpm wrapper instance, required for official repositories search
|
||||||
(Default value = None)
|
(Default value = None)
|
||||||
|
search_by(str | None, optional): search by keywords (Default value = None)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list[AURPackage]: list of packages each of them matches all search terms
|
list[AURPackage]: list of packages each of them matches all search terms
|
||||||
@ -72,12 +88,21 @@ class Remote(SyncHttpClient):
|
|||||||
instance = cls()
|
instance = cls()
|
||||||
packages: dict[str, AURPackage] = {}
|
packages: dict[str, AURPackage] = {}
|
||||||
for term in filter(lambda word: len(word) >= 3, keywords):
|
for term in filter(lambda word: len(word) >= 3, keywords):
|
||||||
portion = instance.search(term, pacman=pacman)
|
portion = instance.package_search(term, pacman=pacman, search_by=search_by)
|
||||||
packages = {
|
packages = {
|
||||||
package.name: package # not mistake to group them by name
|
package.name: package # not mistake to group them by name
|
||||||
for package in portion
|
for package in portion
|
||||||
if package.name in packages or not packages
|
if package.name in packages or not packages
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# simple check for duplicates. This method will remove all packages under base if there is
|
||||||
|
# a package named exactly as its base
|
||||||
|
packages = {
|
||||||
|
package.name: package
|
||||||
|
for package in packages.values()
|
||||||
|
if package.package_base not in packages or package.package_base == package.name
|
||||||
|
}
|
||||||
|
|
||||||
return list(packages.values())
|
return list(packages.values())
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@ -114,7 +139,7 @@ class Remote(SyncHttpClient):
|
|||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def search(cls, *keywords: str, pacman: Pacman | None = None) -> list[AURPackage]:
|
def search(cls, *keywords: str, pacman: Pacman | None = None, search_by: str | None = None) -> list[AURPackage]:
|
||||||
"""
|
"""
|
||||||
search package in AUR web
|
search package in AUR web
|
||||||
|
|
||||||
@ -122,11 +147,12 @@ class Remote(SyncHttpClient):
|
|||||||
*keywords(str): search terms, e.g. "ahriman", "is", "cool"
|
*keywords(str): search terms, e.g. "ahriman", "is", "cool"
|
||||||
pacman(Pacman | None, optional): alpm wrapper instance, required for official repositories search
|
pacman(Pacman | None, optional): alpm wrapper instance, required for official repositories search
|
||||||
(Default value = None)
|
(Default value = None)
|
||||||
|
search_by(str | None, optional): search by keywords (Default value = None)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list[AURPackage]: list of packages which match the criteria
|
list[AURPackage]: list of packages which match the criteria
|
||||||
"""
|
"""
|
||||||
return cls().package_search(*keywords, pacman=pacman)
|
return cls().package_search(*keywords, pacman=pacman, search_by=search_by)
|
||||||
|
|
||||||
def package_info(self, package_name: str, *, pacman: Pacman | None) -> AURPackage:
|
def package_info(self, package_name: str, *, pacman: Pacman | None) -> AURPackage:
|
||||||
"""
|
"""
|
||||||
@ -144,13 +170,28 @@ class Remote(SyncHttpClient):
|
|||||||
"""
|
"""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def package_search(self, *keywords: str, pacman: Pacman | None) -> list[AURPackage]:
|
def package_provided_by(self, package_name: str, *, pacman: Pacman | None) -> list[AURPackage]:
|
||||||
|
"""
|
||||||
|
get package list which provide the specified package name
|
||||||
|
|
||||||
|
Args:
|
||||||
|
package_name(str): package name to search
|
||||||
|
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[AURPackage]: list of packages which match the criteria
|
||||||
|
"""
|
||||||
|
del package_name, pacman
|
||||||
|
return []
|
||||||
|
|
||||||
|
def package_search(self, *keywords: str, pacman: Pacman | None, search_by: str | None) -> list[AURPackage]:
|
||||||
"""
|
"""
|
||||||
search package in AUR web
|
search package in AUR web
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
*keywords(str): keywords to search
|
*keywords(str): keywords to search
|
||||||
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
pacman(Pacman | None): alpm wrapper instance, required for official repositories search
|
||||||
|
search_by(str | None): search by keywords
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list[AURPackage]: list of packages which match the criteria
|
list[AURPackage]: list of packages which match the criteria
|
||||||
|
@@ -17,6 +17,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
+# pylint: disable=too-many-public-methods
 import configparser
 import shlex
 import sys

@@ -85,9 +86,10 @@ class Configuration(configparser.RawConfigParser):
            empty_lines_in_values=not allow_multi_key,
            interpolation=ShellInterpolator(),
            converters={
+                "intlist": lambda value: list(map(int, shlex.split(value))),
                "list": shlex.split,
                "path": self._convert_path,
-                "pathlist": lambda value: [self._convert_path(element) for element in shlex.split(value)],
+                "pathlist": lambda value: list(map(self._convert_path, shlex.split(value))),
            },
        )

@@ -236,6 +238,8 @@ class Configuration(configparser.RawConfigParser):

    # pylint and mypy are too stupid to find these methods
    # pylint: disable=missing-function-docstring,unused-argument
+    def getintlist(self, *args: Any, **kwargs: Any) -> list[int]: ...  # type: ignore[empty-body]
+
    def getlist(self, *args: Any, **kwargs: Any) -> list[str]: ...  # type: ignore[empty-body]

    def getpath(self, *args: Any, **kwargs: Any) -> Path: ...  # type: ignore[empty-body]

@@ -324,6 +324,15 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
            "empty": False,
            "is_url": ["http", "https"],
        },
+        "autorefresh_intervals": {
+            "type": "list",
+            "coerce": "list",
+            "schema": {
+                "type": "integer",
+                "coerce": "integer",
+                "min": 0,
+            },
+        },
        "enable_archive_upload": {
            "type": "boolean",
            "coerce": "boolean",
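The new intlist converter and the autorefresh_intervals schema entry are meant to be used together; a minimal sketch under the assumption that the option holds space-separated seconds (the values below are illustrative, not shipped defaults):

from ahriman.core.configuration import Configuration

configuration = Configuration()
configuration.read_string("""
[web]
autorefresh_intervals = 10 60 300
""")
# getintlist() splits the value with shlex and converts every token to int
print(configuration.getintlist("web", "autorefresh_intervals", fallback=[]))  # [10, 60, 300]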
@@ -203,6 +203,8 @@ def migrate_package_repository(connection: Connection, configurat
        configuration(Configuration): configuration instance
    """
    _, repository_id = configuration.check_loaded()
+    if repository_id.is_empty:
+        return  # no repository available yet
+
    connection.execute("""update build_queue set repository = :repository""", {"repository": repository_id.id})
    connection.execute("""update package_bases set repository = :repository""", {"repository": repository_id.id})

@@ -29,13 +29,15 @@ class LogsOperations(Operations):
    logs operations
    """

-    def logs_get(self, package_base: str, limit: int = -1, offset: int = 0,
-                 repository_id: RepositoryId | None = None) -> list[LogRecord]:
+    def logs_get(self, package_base: str, version: str | None = None, process_id: str | None = None,
+                 limit: int = -1, offset: int = 0, repository_id: RepositoryId | None = None) -> list[LogRecord]:
        """
        extract logs for specified package base

        Args:
            package_base(str): package base to extract logs
+            version(str | None, optional): package version to filter (Default value = None)
+            process_id(str | None, optional): process identifier to filter (Default value = None)
            limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
            offset(int, optional): records offset (Default value = 0)
            repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)

@@ -52,12 +54,17 @@ class LogsOperations(Operations):
            """
            select created, message, version, process_id from (
                select * from logs
-                where package_base = :package_base and repository = :repository
+                where package_base = :package_base
+                    and repository = :repository
+                    and (:version is null or version = :version)
+                    and (:process_id is null or process_id = :process_id)
                order by created desc limit :limit offset :offset
            ) order by created asc
            """,
            {
                "package_base": package_base,
+                "version": version,
+                "process_id": process_id,
                "repository": repository_id.id,
                "limit": limit,
                "offset": offset,
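A hedged sketch of the extended call; passing None for the new arguments keeps the old unfiltered behaviour because of the ":version is null or version = :version" clauses above, and the surrounding objects are assumed to exist:

from ahriman.core.database import SQLite
from ahriman.models.log_record import LogRecord
from ahriman.models.repository_id import RepositoryId

def logs_for_version(database: SQLite, repository_id: RepositoryId,
                     package_base: str, version: str | None) -> list[LogRecord]:
    # positional order follows the new signature: version and process_id come
    # right after the package base, before limit/offset
    return database.logs_get(package_base, version, None, -1, 0, repository_id)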
@@ -18,6 +18,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 import requests
+import sys

 from functools import cached_property
 from typing import Any, IO, Literal

@@ -70,7 +71,10 @@ class SyncHttpClient(LazyLogging):
            request.Session: created session object
        """
        session = requests.Session()
-        session.headers["User-Agent"] = f"ahriman/{__version__}"
+        python_version = ".".join(map(str, sys.version_info[:3]))  # just major.minor.patch
+        session.headers["User-Agent"] = f"ahriman/{__version__} " \
+                                        f"{requests.utils.default_user_agent()} " \
+                                        f"python/{python_version}"

        return session

|
|||||||
"""
|
"""
|
||||||
# this method does not raise NotImplementedError because it is actively used as dummy client for http log
|
# this method does not raise NotImplementedError because it is actively used as dummy client for http log
|
||||||
|
|
||||||
def package_logs_get(self, package_base: str, limit: int = -1, offset: int = 0) -> list[LogRecord]:
|
def package_logs_get(self, package_base: str, version: str | None = None, process_id: str | None = None,
|
||||||
|
limit: int = -1, offset: int = 0) -> list[LogRecord]:
|
||||||
"""
|
"""
|
||||||
get package logs
|
get package logs
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_base(str): package base
|
package_base(str): package base
|
||||||
|
version(str | None, optional): package version to search (Default value = None)
|
||||||
|
process_id(str | None, optional): process identifier to search (Default value = None)
|
||||||
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
|
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
|
||||||
offset(int, optional): records offset (Default value = 0)
|
offset(int, optional): records offset (Default value = 0)
|
||||||
|
|
||||||
|
@ -152,19 +152,22 @@ class LocalClient(Client):
|
|||||||
"""
|
"""
|
||||||
self.database.logs_insert(log_record, self.repository_id)
|
self.database.logs_insert(log_record, self.repository_id)
|
||||||
|
|
||||||
def package_logs_get(self, package_base: str, limit: int = -1, offset: int = 0) -> list[LogRecord]:
|
def package_logs_get(self, package_base: str, version: str | None = None, process_id: str | None = None,
|
||||||
|
limit: int = -1, offset: int = 0) -> list[LogRecord]:
|
||||||
"""
|
"""
|
||||||
get package logs
|
get package logs
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_base(str): package base
|
package_base(str): package base
|
||||||
|
version(str | None, optional): package version to search (Default value = None)
|
||||||
|
process_id(str | None, optional): process identifier to search (Default value = None)
|
||||||
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
|
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
|
||||||
offset(int, optional): records offset (Default value = 0)
|
offset(int, optional): records offset (Default value = 0)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list[LogRecord]: package logs
|
list[LogRecord]: package logs
|
||||||
"""
|
"""
|
||||||
return self.database.logs_get(package_base, limit, offset, self.repository_id)
|
return self.database.logs_get(package_base, version, process_id, limit, offset, self.repository_id)
|
||||||
|
|
||||||
def package_logs_remove(self, package_base: str, version: str | None) -> None:
|
def package_logs_remove(self, package_base: str, version: str | None) -> None:
|
||||||
"""
|
"""
|
||||||
|
@ -109,7 +109,7 @@ class Watcher(LazyLogging):
|
|||||||
|
|
||||||
package_logs_add: Callable[[LogRecord], None]
|
package_logs_add: Callable[[LogRecord], None]
|
||||||
|
|
||||||
package_logs_get: Callable[[str, int, int], list[LogRecord]]
|
package_logs_get: Callable[[str, str | None, str | None, int, int], list[LogRecord]]
|
||||||
|
|
||||||
package_logs_remove: Callable[[str, str | None], None]
|
package_logs_remove: Callable[[str, str | None], None]
|
||||||
|
|
||||||
|
@ -326,12 +326,15 @@ class WebClient(Client, SyncAhrimanClient):
|
|||||||
self.make_request("POST", self._logs_url(log_record.log_record_id.package_base),
|
self.make_request("POST", self._logs_url(log_record.log_record_id.package_base),
|
||||||
params=self.repository_id.query(), json=payload, suppress_errors=True)
|
params=self.repository_id.query(), json=payload, suppress_errors=True)
|
||||||
|
|
||||||
def package_logs_get(self, package_base: str, limit: int = -1, offset: int = 0) -> list[LogRecord]:
|
def package_logs_get(self, package_base: str, version: str | None = None, process_id: str | None = None,
|
||||||
|
limit: int = -1, offset: int = 0) -> list[LogRecord]:
|
||||||
"""
|
"""
|
||||||
get package logs
|
get package logs
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
package_base(str): package base
|
package_base(str): package base
|
||||||
|
version(str | None, optional): package version to search (Default value = None)
|
||||||
|
process_id(str | None, optional): process identifier to search (Default value = None)
|
||||||
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
|
limit(int, optional): limit records to the specified count, -1 means unlimited (Default value = -1)
|
||||||
offset(int, optional): records offset (Default value = 0)
|
offset(int, optional): records offset (Default value = 0)
|
||||||
|
|
||||||
@ -339,6 +342,10 @@ class WebClient(Client, SyncAhrimanClient):
|
|||||||
list[LogRecord]: package logs
|
list[LogRecord]: package logs
|
||||||
"""
|
"""
|
||||||
query = self.repository_id.query() + [("limit", str(limit)), ("offset", str(offset))]
|
query = self.repository_id.query() + [("limit", str(limit)), ("offset", str(offset))]
|
||||||
|
if version is not None:
|
||||||
|
query.append(("version", version))
|
||||||
|
if process_id is not None:
|
||||||
|
query.append(("process_id", process_id))
|
||||||
|
|
||||||
with contextlib.suppress(Exception):
|
with contextlib.suppress(Exception):
|
||||||
response = self.make_request("GET", self._logs_url(package_base), params=query)
|
response = self.make_request("GET", self._logs_url(package_base), params=query)
|
||||||
|
@@ -33,6 +33,7 @@ class Leaf:

    Attributes:
        dependencies(set[str]): list of package dependencies
+        items(list[str]): list of packages in this leaf including provides
        package(Package): leaf package properties
    """

@@ -42,17 +43,9 @@ class Leaf:
            package(Package): package properties
        """
        self.package = package
+        # store frequently used properties
        self.dependencies = package.depends_build
+        self.items = self.package.packages_full
-
-    @property
-    def items(self) -> Iterable[str]:
-        """
-        extract all packages from the leaf
-
-        Returns:
-            Iterable[str]: packages containing in this leaf
-        """
-        return self.package.packages.keys()

    def is_dependency(self, packages: Iterable[Leaf]) -> bool:
        """

@@ -51,6 +51,7 @@ __all__ = [
    "parse_version",
    "partition",
    "pretty_datetime",
+    "pretty_interval",
    "pretty_size",
    "safe_filename",
    "srcinfo_property",

@@ -136,7 +137,8 @@ def check_output(*args: str, exception: Exception | Callable[[int, list[str], str
    } | environment

    with subprocess.Popen(args, cwd=cwd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                          user=user, env=full_environment, text=True, encoding="utf8", bufsize=1) as process:
+                          user=user, env=full_environment, text=True, encoding="utf8", errors="backslashreplace",
+                          bufsize=1) as process:
        if input_data is not None:
            input_channel = get_io(process, "stdin")
            input_channel.write(input_data)
|
|||||||
return timestamp.strftime("%Y-%m-%d %H:%M:%S")
|
return timestamp.strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
|
||||||
|
|
||||||
|
def pretty_interval(interval: int) -> str:
|
||||||
|
"""
|
||||||
|
convert time interval to string
|
||||||
|
|
||||||
|
Args:
|
||||||
|
interval(int): time interval in seconds
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: pretty printable interval as string
|
||||||
|
"""
|
||||||
|
minutes, seconds = divmod(interval, 60)
|
||||||
|
hours, minutes = divmod(minutes, 60)
|
||||||
|
return " ".join([
|
||||||
|
f"{value} {description}{"s" if value > 1 else ""}"
|
||||||
|
for value, description in [
|
||||||
|
(hours, "hour"),
|
||||||
|
(minutes, "minute"),
|
||||||
|
(seconds, "second"),
|
||||||
|
] if value > 0
|
||||||
|
])
|
||||||
|
|
||||||
|
|
||||||
def pretty_size(size: float | None, level: int = 0) -> str:
|
def pretty_size(size: float | None, level: int = 0) -> str:
|
||||||
"""
|
"""
|
||||||
convert size to string
|
convert size to string
|
||||||
|
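The helper added above produces human readable intervals; a few example values, derived from the divmod logic and assuming the import path stays ahriman.core.utils:

from ahriman.core.utils import pretty_interval

print(pretty_interval(30))    # "30 seconds"
print(pretty_interval(90))    # "1 minute 30 seconds"
print(pretty_interval(3660))  # "1 hour 1 minute"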
@@ -25,7 +25,7 @@ from dataclasses import dataclass, field, fields
 from pyalpm import Package  # type: ignore[import-not-found]
 from typing import Any, Self

-from ahriman.core.utils import filter_json, full_version
+from ahriman.core.utils import filter_json, full_version, trim_package


 @dataclass(frozen=True, kw_only=True)

@@ -103,6 +103,17 @@ class AURPackage:
    keywords: list[str] = field(default_factory=list)
    groups: list[str] = field(default_factory=list)

+    def __post_init__(self) -> None:
+        """
+        update packages lists accordingly
+        """
+        object.__setattr__(self, "depends", [trim_package(package) for package in self.depends])
+        object.__setattr__(self, "make_depends", [trim_package(package) for package in self.make_depends])
+        object.__setattr__(self, "opt_depends", [trim_package(package) for package in self.opt_depends])
+        object.__setattr__(self, "check_depends", [trim_package(package) for package in self.check_depends])
+        object.__setattr__(self, "conflicts", [trim_package(package) for package in self.conflicts])
+        object.__setattr__(self, "provides", [trim_package(package) for package in self.provides])
+
    @classmethod
    def from_json(cls, dump: dict[str, Any]) -> Self:
        """
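The new __post_init__ applies the same normalisation PackageDescription already performs; a hedged illustration of the intended effect, assuming trim_package() strips version bounds from dependency specifications:

from ahriman.core.utils import trim_package

# version constraints are dropped, so dependency and provides lists can be
# compared by bare package name
print(trim_package("gcc-libs>=15.1"))   # "gcc-libs"
print(trim_package("python-requests"))  # unchanged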
@@ -213,18 +213,19 @@ class Package(LazyLogging):
        )

    @classmethod
-    def from_aur(cls, name: str, packager: str | None = None) -> Self:
+    def from_aur(cls, name: str, packager: str | None = None, *, include_provides: bool = False) -> Self:
        """
        construct package properties from AUR page

        Args:
            name(str): package name (either base or normal name)
            packager(str | None, optional): packager to be used for this build (Default value = None)
+            include_provides(bool, optional): search by provides if no exact match found (Default value = False)

        Returns:
            Self: package properties
        """
-        package = AUR.info(name)
+        package = AUR.info(name, include_provides=include_provides)

        remote = RemoteSource(
            source=PackageSource.AUR,

@@ -310,7 +311,8 @@ class Package(LazyLogging):
        )

    @classmethod
-    def from_official(cls, name: str, pacman: Pacman, packager: str | None = None, *, use_syncdb: bool = True) -> Self:
+    def from_official(cls, name: str, pacman: Pacman, packager: str | None = None, *, use_syncdb: bool = True,
+                      include_provides: bool = False) -> Self:
        """
        construct package properties from official repository page

@@ -319,11 +321,13 @@ class Package(LazyLogging):
            pacman(Pacman): alpm wrapper instance
            packager(str | None, optional): packager to be used for this build (Default value = None)
            use_syncdb(bool, optional): use pacman databases instead of official repositories RPC (Default value = True)
+            include_provides(bool, optional): search by provides if no exact match found (Default value = False)

        Returns:
            Self: package properties
        """
-        package = OfficialSyncdb.info(name, pacman=pacman) if use_syncdb else Official.info(name)
+        impl = OfficialSyncdb if use_syncdb else Official
+        package = impl.info(name, pacman=pacman, include_provides=include_provides)

        remote = RemoteSource(
            source=PackageSource.Repository,
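A usage sketch for the extended constructors; the package name below is illustrative and the call requires network access to the AUR:

from ahriman.models.package import Package

# if no package is named exactly "some-virtual-package", the lookup retries by
# the provides field and builds properties from the first provider found
package = Package.from_aur("some-virtual-package", packager=None, include_provides=True)
print(package.base, package.version)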
@@ -83,12 +83,13 @@ class PackageDescription:

    def __post_init__(self) -> None:
        """
-        update dependencies list accordingly
+        update packages lists accordingly
        """
        self.depends = [trim_package(package) for package in self.depends]
-        self.opt_depends = [trim_package(package) for package in self.opt_depends]
        self.make_depends = [trim_package(package) for package in self.make_depends]
+        self.opt_depends = [trim_package(package) for package in self.opt_depends]
        self.check_depends = [trim_package(package) for package in self.check_depends]
+        self.provides = [trim_package(package) for package in self.provides]

    @property
    def filepath(self) -> Path | None:

@@ -34,6 +34,7 @@ from ahriman.web.schemas.log_schema import LogSchema
 from ahriman.web.schemas.login_schema import LoginSchema
 from ahriman.web.schemas.logs_rotate_schema import LogsRotateSchema
 from ahriman.web.schemas.logs_schema import LogsSchema
+from ahriman.web.schemas.logs_search_schema import LogsSearchSchema
 from ahriman.web.schemas.oauth2_schema import OAuth2Schema
 from ahriman.web.schemas.package_name_schema import PackageNameSchema
 from ahriman.web.schemas.package_names_schema import PackageNamesSchema
39 src/ahriman/web/schemas/logs_search_schema.py Normal file
@@ -0,0 +1,39 @@
+#
+# Copyright (c) 2021-2025 ahriman team.
+#
+# This file is part of ahriman
+# (see https://github.com/arcan1s/ahriman).
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+from ahriman import __version__
+from ahriman.web.apispec import fields
+from ahriman.web.schemas.pagination_schema import PaginationSchema
+
+
+class LogsSearchSchema(PaginationSchema):
+    """
+    request log search schema
+    """
+
+    head = fields.Boolean(metadata={
+        "description": "Return versions only without fetching logs themselves",
+    })
+    version = fields.String(metadata={
+        "description": "Package version to search",
+        "example": __version__,
+    })
+    process_id = fields.String(metadata={
+        "description": "Process unique identifier to search",
+    })
@@ -22,6 +22,7 @@ import aiohttp_jinja2
 from typing import Any, ClassVar

 from ahriman.core.auth.helpers import authorized_userid
+from ahriman.core.utils import pretty_interval
 from ahriman.models.user_access import UserAccess
 from ahriman.web.apispec import aiohttp_apispec
 from ahriman.web.views.base import BaseView

@@ -37,6 +38,10 @@ class IndexView(BaseView):
        * control - HTML to insert for login control, HTML string, required
        * enabled - whether authorization is enabled by configuration or not, boolean, required
        * username - authenticated username if any, string, null means not authenticated
+    * autorefresh_intervals - auto refresh intervals, optional
+        * interval - auto refresh interval in milliseconds, integer, required
+        * is_active - is current interval active or not, boolean, required
+        * text - text representation of the interval (e.g. "30 seconds"), string, required
    * docs_enabled - indicates if api docs is enabled, boolean, required
    * index_url - url to the repository index, string, optional
    * repositories - list of repositories unique identifiers, required
|
|||||||
"username": auth_username,
|
"username": auth_username,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
autorefresh_intervals = [
|
||||||
|
{
|
||||||
|
"interval": interval * 1000, # milliseconds
|
||||||
|
"is_active": index == 0, # first element is always default
|
||||||
|
"text": pretty_interval(interval),
|
||||||
|
}
|
||||||
|
for index, interval in enumerate(self.configuration.getintlist("web", "autorefresh_intervals", fallback=[]))
|
||||||
|
if interval > 0 # special case if 0 exists and first, refresh will not be turned on by default
|
||||||
|
]
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"auth": auth,
|
"auth": auth,
|
||||||
|
"autorefresh_intervals": sorted(autorefresh_intervals, key=lambda interval: interval["interval"]),
|
||||||
"docs_enabled": aiohttp_apispec is not None,
|
"docs_enabled": aiohttp_apispec is not None,
|
||||||
"index_url": self.configuration.get("web", "index_url", fallback=None),
|
"index_url": self.configuration.get("web", "index_url", fallback=None),
|
||||||
"repositories": [
|
"repositories": [
|
||||||
|
@@ -90,7 +90,7 @@ class LogsView(StatusViewGuard, BaseView):

        try:
            _, status = self.service().package_get(package_base)
-            logs = self.service(package_base=package_base).package_logs_get(package_base, -1, 0)
+            logs = self.service(package_base=package_base).package_logs_get(package_base, None, None, -1, 0)
        except UnknownPackageError:
            raise HTTPNotFound(reason=f"Package {package_base} is unknown")

@@ -70,7 +70,7 @@ class SearchView(BaseView):
        if not packages:
            raise HTTPNotFound(reason=f"No packages found for terms: {search}")

-        comparator: Callable[[AURPackage], str] = lambda item: str(item.package_base)
+        comparator: Callable[[AURPackage], str] = lambda item: item.package_base
        response = [
            {
                "package": package.package_base,
@@ -17,18 +17,22 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
+import itertools
+
 from aiohttp.web import Response, json_response
+from dataclasses import replace
 from typing import ClassVar

 from ahriman.models.user_access import UserAccess
 from ahriman.web.apispec.decorators import apidocs
-from ahriman.web.schemas import LogSchema, PackageNameSchema, PaginationSchema
+from ahriman.web.schemas import LogSchema, LogsSearchSchema, PackageNameSchema
 from ahriman.web.views.base import BaseView
 from ahriman.web.views.status_view_guard import StatusViewGuard


 class LogsView(StatusViewGuard, BaseView):
     """
     package logs web view

     Attributes:
@@ -47,7 +51,7 @@ class LogsView(StatusViewGuard, BaseView):
         error_404_description="Package base and/or repository are unknown",
         schema=LogSchema(many=True),
         match_schema=PackageNameSchema,
-        query_schema=PaginationSchema,
+        query_schema=LogsSearchSchema,
     )
     async def get(self) -> Response:
         """
@@ -61,8 +65,19 @@ class LogsView(StatusViewGuard, BaseView):
         """
         package_base = self.request.match_info["package"]
         limit, offset = self.page()
+        version = self.request.query.get("version", None)
+        process = self.request.query.get("process_id", None)

-        logs = self.service(package_base=package_base).package_logs_get(package_base, limit, offset)
+        logs = self.service(package_base=package_base).package_logs_get(package_base, version, process, limit, offset)
+
+        head = self.request.query.get("head", "false")
+        # pylint: disable=protected-access
+        if self.configuration._convert_to_boolean(head):  # type: ignore[attr-defined]
+            # logs should be sorted already
+            logs = [
+                replace(next(log_records), message="")  # remove messages
+                for _, log_records in itertools.groupby(logs, lambda log_record: log_record.log_record_id)
+            ]

         response = [log_record.view() for log_record in logs]
         return json_response(response)
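With the LogsSearchSchema swap above, the handler now recognises version, process_id and head in the query string on top of the existing pagination parameters. A hedged client-side sketch (host, package name and values are made up; only the parameter and response field names come from the handler and the tests further below):

    import requests

    response = requests.get(
        "http://localhost:8080/api/v2/packages/ahriman/logs",  # hypothetical instance and package
        params={"version": "2.19.0-1", "head": "true", "limit": 10, "offset": 0},
    )
    for record in response.json():
        print(record["version"], record["created"], record["message"])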
@@ -72,7 +72,7 @@ def _create_socket(configuration: Configuration, application: Application) -> so
     async def remove_socket(_: Application) -> None:
         unix_socket.unlink(missing_ok=True)

-    application.on_shutdown.append(remove_socket)
+    application.on_shutdown.append(remove_socket)  # type: ignore[arg-type]

     return sock

@@ -142,8 +142,8 @@ def setup_server(configuration: Configuration, spawner: Spawn, repositories: lis
         InitializeError: if no repositories set
     """
     application = Application(logger=logging.getLogger(__name__))
-    application.on_shutdown.append(_on_shutdown)
-    application.on_startup.append(_on_startup)
+    application.on_shutdown.append(_on_shutdown)  # type: ignore[arg-type]
+    application.on_startup.append(_on_startup)  # type: ignore[arg-type]

     application.middlewares.append(normalize_path_middleware(append_slash=False, remove_slash=True))
     application.middlewares.append(exception_handler(application.logger))
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2021-2024 ahriman team.
+# Copyright (c) 2021-2025 ahriman team.
 #
 # This file is part of ahriman
 # (see https://github.com/arcan1s/ahriman).
@@ -1,4 +1,6 @@
+from pathlib import Path
 from pytest_mock import MockerFixture
+from typing import Any
 from unittest.mock import MagicMock, call as MockCall

 from ahriman.application.application import Application
@@ -73,6 +75,10 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
         mock.packages_full = [package_base]
         return mock

+    def get_package(name: str | Path, *args: Any, **kwargs: Any) -> Package:
+        name = name if isinstance(name, str) else name.name
+        return packages[name]
+
     package_python_schedule.packages = {
         package_python_schedule.base: package_python_schedule.packages[package_python_schedule.base]
     }
@@ -87,10 +93,8 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
     }

     mocker.patch("pathlib.Path.is_dir", autospec=True, side_effect=lambda p: p.name == "python")
-    package_aur_mock = mocker.patch("ahriman.models.package.Package.from_aur",
-                                    side_effect=lambda *args: packages[args[0]])
-    package_local_mock = mocker.patch("ahriman.models.package.Package.from_build",
-                                      side_effect=lambda *args: packages[args[0].name])
+    package_aur_mock = mocker.patch("ahriman.models.package.Package.from_aur", side_effect=get_package)
+    package_local_mock = mocker.patch("ahriman.models.package.Package.from_build", side_effect=get_package)
     packages_mock = mocker.patch("ahriman.application.application.Application._known_packages",
                                  return_value={"devtools", "python-build", "python-pytest"})
     status_client_mock = mocker.patch("ahriman.core.status.Client.set_unknown")
@@ -98,8 +102,8 @@ def test_with_dependencies(application: Application, package_ahriman: Package, p
     result = application.with_dependencies([package_ahriman], process_dependencies=True)
     assert {package.base: package for package in result} == packages
     package_aur_mock.assert_has_calls([
-        MockCall(package_python_schedule.base, package_ahriman.packager),
-        MockCall("python-installer", package_ahriman.packager),
+        MockCall(package_python_schedule.base, package_ahriman.packager, include_provides=True),
+        MockCall("python-installer", package_ahriman.packager, include_provides=True),
     ], any_order=True)
     package_local_mock.assert_has_calls([
         MockCall(application.repository.paths.cache_for("python"), "x86_64", package_ahriman.packager),
@@ -4,7 +4,7 @@ import requests

 from pathlib import Path
 from pytest_mock import MockerFixture
-from unittest.mock import MagicMock
+from unittest.mock import MagicMock, call as MockCall

 from ahriman.core.alpm.remote import AUR
 from ahriman.core.exceptions import PackageInfoError, UnknownPackageError
@@ -76,24 +76,18 @@ def test_aur_request(aur: AUR, aur_package_ahriman: AURPackage,
     request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.make_request", return_value=response_mock)

     assert aur.aur_request("info", "ahriman") == [aur_package_ahriman]
-    request_mock.assert_called_once_with(
-        "GET", "https://aur.archlinux.org/rpc",
-        params=[("type", "info"), ("v", "5"), ("arg", "ahriman")])
+    request_mock.assert_called_once_with("GET", "https://aur.archlinux.org/rpc/v5/info/ahriman", params=[])


-def test_aur_request_multi_arg(aur: AUR, aur_package_ahriman: AURPackage,
-                               mocker: MockerFixture, resource_path_root: Path) -> None:
+def test_aur_request_multi_arg(aur: AUR) -> None:
     """
-    must perform request to AUR with multiple args
+    must raise PackageInfoError if invalid amount of arguments supplied
     """
-    response_mock = MagicMock()
-    response_mock.json.return_value = json.loads(_get_response(resource_path_root))
-    request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.make_request", return_value=response_mock)
+    with pytest.raises(PackageInfoError):
+        aur.aur_request("search", "ahriman", "is", "cool")

-    assert aur.aur_request("search", "ahriman", "is", "cool") == [aur_package_ahriman]
-    request_mock.assert_called_once_with(
-        "GET", "https://aur.archlinux.org/rpc",
-        params=[("type", "search"), ("v", "5"), ("arg[]", "ahriman"), ("arg[]", "is"), ("arg[]", "cool")])
+    with pytest.raises(PackageInfoError):
+        aur.aur_request("search")


 def test_aur_request_with_kwargs(aur: AUR, aur_package_ahriman: AURPackage,
@@ -106,9 +100,8 @@ def test_aur_request_with_kwargs(aur: AUR, aur_package_ahriman: AURPackage,
     request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.make_request", return_value=response_mock)

     assert aur.aur_request("search", "ahriman", by="name") == [aur_package_ahriman]
-    request_mock.assert_called_once_with(
-        "GET", "https://aur.archlinux.org/rpc",
-        params=[("type", "search"), ("v", "5"), ("arg", "ahriman"), ("by", "name")])
+    request_mock.assert_called_once_with("GET", "https://aur.archlinux.org/rpc/v5/search/ahriman",
+                                         params=[("by", "name")])


 def test_aur_request_failed(aur: AUR, mocker: MockerFixture) -> None:
@@ -139,17 +132,46 @@ def test_package_info(aur: AUR, aur_package_ahriman: AURPackage, mocker: MockerF

 def test_package_info_not_found(aur: AUR, aur_package_ahriman: AURPackage, mocker: MockerFixture) -> None:
     """
-    must raise UnknownPackage exception in case if no package was found
+    must raise UnknownPackageError in case if no package was found
     """
     mocker.patch("ahriman.core.alpm.remote.AUR.aur_request", return_value=[])
     with pytest.raises(UnknownPackageError, match=aur_package_ahriman.name):
         assert aur.package_info(aur_package_ahriman.name, pacman=None)


+def test_package_provided_by(aur: AUR, aur_package_ahriman: AURPackage, aur_package_akonadi: AURPackage,
+                             mocker: MockerFixture) -> None:
+    """
+    must search for packages which provide required one
+    """
+    aur_package_ahriman.provides.append(aur_package_ahriman.name)
+    search_mock = mocker.patch("ahriman.core.alpm.remote.AUR.package_search", return_value=[
+        aur_package_ahriman, aur_package_akonadi
+    ])
+    info_mock = mocker.patch("ahriman.core.alpm.remote.AUR.package_info", side_effect=[
+        aur_package_ahriman, aur_package_akonadi
+    ])
+
+    assert aur.package_provided_by(aur_package_ahriman.name, pacman=None) == [aur_package_ahriman]
+    search_mock.assert_called_once_with(aur_package_ahriman.name, pacman=None, search_by="provides")
+    info_mock.assert_has_calls([
+        MockCall(aur_package_ahriman.name, pacman=None), MockCall(aur_package_akonadi.name, pacman=None)
+    ])
+
+
 def test_package_search(aur: AUR, aur_package_ahriman: AURPackage, mocker: MockerFixture) -> None:
     """
     must make request for search
     """
     request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.aur_request", return_value=[aur_package_ahriman])
-    assert aur.package_search(aur_package_ahriman.name, pacman=None) == [aur_package_ahriman]
+    assert aur.package_search(aur_package_ahriman.name, pacman=None, search_by=None) == [aur_package_ahriman]
     request_mock.assert_called_once_with("search", aur_package_ahriman.name, by="name-desc")
+
+
+def test_package_search_provides(aur: AUR, aur_package_ahriman: AURPackage, mocker: MockerFixture) -> None:
+    """
+    must make request for search with custom field
+    """
+    request_mock = mocker.patch("ahriman.core.alpm.remote.AUR.aur_request")
+    aur.package_search(aur_package_ahriman.name, pacman=None, search_by="provides")
+    request_mock.assert_called_once_with("search", aur_package_ahriman.name, by="provides")
@@ -106,7 +106,7 @@ def test_package_info(official: Official, aur_package_akonadi: AURPackage, mocke

 def test_package_info_not_found(official: Official, aur_package_ahriman: AURPackage, mocker: MockerFixture) -> None:
     """
-    must raise UnknownPackage exception in case if no package was found
+    must raise UnknownPackageError in case if no package was found
     """
     mocker.patch("ahriman.core.alpm.remote.Official.arch_request", return_value=[])
     with pytest.raises(UnknownPackageError, match=aur_package_ahriman.name):
@@ -119,5 +119,16 @@ def test_package_search(official: Official, aur_package_akonadi: AURPackage, moc
     """
     request_mock = mocker.patch("ahriman.core.alpm.remote.Official.arch_request",
                                 return_value=[aur_package_akonadi])
-    assert official.package_search(aur_package_akonadi.name, pacman=None) == [aur_package_akonadi]
+    assert official.package_search(aur_package_akonadi.name, pacman=None, search_by=None) == [
+        aur_package_akonadi,
+    ]
     request_mock.assert_called_once_with(aur_package_akonadi.name, by="q")
+
+
+def test_package_search_name(official: Official, aur_package_akonadi: AURPackage, mocker: MockerFixture) -> None:
+    """
+    must make request for search with custom field
+    """
+    request_mock = mocker.patch("ahriman.core.alpm.remote.Official.arch_request")
+    official.package_search(aur_package_akonadi.name, pacman=None, search_by="name")
+    request_mock.assert_called_once_with(aur_package_akonadi.name, by="name")
@@ -16,18 +16,14 @@ def test_package_info(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURP
     mocker.patch("ahriman.models.aur_package.AURPackage.from_pacman", return_value=aur_package_akonadi)
     get_mock = mocker.patch("ahriman.core.alpm.pacman.Pacman.package", return_value=[aur_package_akonadi])

-    package = official_syncdb.package_info(aur_package_akonadi.name, pacman=pacman)
+    assert official_syncdb.package_info(aur_package_akonadi.name, pacman=pacman) == aur_package_akonadi
     get_mock.assert_called_once_with(aur_package_akonadi.name)
-    assert package == aur_package_akonadi


-def test_package_info_no_pacman(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURPackage,
-                                mocker: MockerFixture) -> None:
+def test_package_info_no_pacman(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURPackage) -> None:
     """
     must raise UnknownPackageError if no pacman set
     """
-    mocker.patch("ahriman.core.alpm.pacman.Pacman.package", return_value=[aur_package_akonadi])
-
     with pytest.raises(UnknownPackageError, match=aur_package_akonadi.name):
         official_syncdb.package_info(aur_package_akonadi.name, pacman=None)

@@ -40,3 +36,22 @@ def test_package_info_not_found(official_syncdb: OfficialSyncdb, aur_package_ako
     mocker.patch("ahriman.core.alpm.pacman.Pacman.package", return_value=[])
     with pytest.raises(UnknownPackageError, match=aur_package_akonadi.name):
         assert official_syncdb.package_info(aur_package_akonadi.name, pacman=pacman)
+
+
+def test_package_provided_by(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURPackage, pacman: Pacman,
+                             mocker: MockerFixture) -> None:
+    """
+    must search by provides in database
+    """
+    mocker.patch("ahriman.models.aur_package.AURPackage.from_pacman", return_value=aur_package_akonadi)
+    get_mock = mocker.patch("ahriman.core.alpm.pacman.Pacman.provided_by", return_value=[aur_package_akonadi])
+
+    assert official_syncdb.package_provided_by(aur_package_akonadi.name, pacman=pacman) == [aur_package_akonadi]
+    get_mock.assert_called_once_with(aur_package_akonadi.name)
+
+
+def test_package_provided_by_no_pacman(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURPackage) -> None:
+    """
+    must return empty list if no pacman set
+    """
+    assert official_syncdb.package_provided_by(aur_package_akonadi.name, pacman=None) == []
@@ -1,20 +1,58 @@
 import pytest

+from dataclasses import replace
 from pytest_mock import MockerFixture
 from unittest.mock import call as MockCall

 from ahriman.core.alpm.pacman import Pacman
 from ahriman.core.alpm.remote import Remote
+from ahriman.core.exceptions import UnknownPackageError
 from ahriman.models.aur_package import AURPackage


-def test_info(pacman: Pacman, mocker: MockerFixture) -> None:
+def test_info(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: MockerFixture) -> None:
     """
     must call info method
     """
-    info_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_info")
-    Remote.info("ahriman", pacman=pacman)
-    info_mock.assert_called_once_with("ahriman", pacman=pacman)
+    info_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_info", return_value=aur_package_ahriman)
+    assert Remote.info(aur_package_ahriman.name, pacman=pacman) == aur_package_ahriman
+    info_mock.assert_called_once_with(aur_package_ahriman.name, pacman=pacman)
+
+
+def test_info_not_found(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: MockerFixture) -> None:
+    """
+    must raise UnknownPackageError if no package found and search by provides is disabled
+    """
+    mocker.patch("ahriman.core.alpm.remote.Remote.package_info",
+                 side_effect=UnknownPackageError(aur_package_ahriman.name))
+    with pytest.raises(UnknownPackageError):
+        Remote.info(aur_package_ahriman.name, pacman=pacman)
+
+
+def test_info_include_provides(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: MockerFixture) -> None:
+    """
+    must perform search through provides list is set
+    """
+    mocker.patch("ahriman.core.alpm.remote.Remote.package_info",
+                 side_effect=UnknownPackageError(aur_package_ahriman.name))
+    provided_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_provided_by",
+                                 return_value=[aur_package_ahriman])
+
+    assert Remote.info(aur_package_ahriman.name, pacman=pacman, include_provides=True) == aur_package_ahriman
+    provided_mock.assert_called_once_with(aur_package_ahriman.name, pacman=pacman)
+
+
+def test_info_include_provides_not_found(aur_package_ahriman: AURPackage, pacman: Pacman,
+                                         mocker: MockerFixture) -> None:
+    """
+    must raise UnknownPackageError if no package found and search by provides returns empty list
+    """
+    mocker.patch("ahriman.core.alpm.remote.Remote.package_info",
+                 side_effect=UnknownPackageError(aur_package_ahriman.name))
+    mocker.patch("ahriman.core.alpm.remote.Remote.package_provided_by", return_value=[])
+
+    with pytest.raises(UnknownPackageError):
+        Remote.info("ahriman", pacman=pacman, include_provides=True)


 def test_multisearch(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: MockerFixture) -> None:
@@ -22,10 +60,13 @@ def test_multisearch(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: Mo
     must search in AUR with multiple words
     """
     terms = ["ahriman", "is", "cool"]
-    search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.search", return_value=[aur_package_ahriman])
+    search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_search", return_value=[aur_package_ahriman])

-    assert Remote.multisearch(*terms, pacman=pacman) == [aur_package_ahriman]
-    search_mock.assert_has_calls([MockCall("ahriman", pacman=pacman), MockCall("cool", pacman=pacman)])
+    assert Remote.multisearch(*terms, pacman=pacman, search_by="name") == [aur_package_ahriman]
+    search_mock.assert_has_calls([
+        MockCall("ahriman", pacman=pacman, search_by="name"),
+        MockCall("cool", pacman=pacman, search_by="name"),
+    ])


 def test_multisearch_empty(pacman: Pacman, mocker: MockerFixture) -> None:
@@ -33,7 +74,7 @@ def test_multisearch_empty(pacman: Pacman, mocker: MockerFixture) -> None:
     must return empty list if no long terms supplied
     """
     terms = ["it", "is"]
-    search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.search")
+    search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_search")

     assert Remote.multisearch(*terms, pacman=pacman) == []
     search_mock.assert_not_called()
@@ -43,9 +84,20 @@ def test_multisearch_single(aur_package_ahriman: AURPackage, pacman: Pacman, moc
     """
     must search in AUR with one word
     """
-    search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.search", return_value=[aur_package_ahriman])
+    search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_search", return_value=[aur_package_ahriman])
     assert Remote.multisearch("ahriman", pacman=pacman) == [aur_package_ahriman]
-    search_mock.assert_called_once_with("ahriman", pacman=pacman)
+    search_mock.assert_called_once_with("ahriman", pacman=pacman, search_by=None)
+
+
+def test_multisearch_remove_duplicates(aur_package_ahriman: AURPackage, pacman: Pacman, mocker: MockerFixture) -> None:
+    """
+    must remove duplicates from search result
+    """
+    package1 = replace(aur_package_ahriman)
+    package2 = replace(aur_package_ahriman, name="ahriman-triggers")
+    mocker.patch("ahriman.core.alpm.remote.Remote.package_search", return_value=[package1, package2])
+
+    assert Remote.multisearch("ahriman", pacman=pacman) == [package1]


 def test_remote_git_url(remote: Remote) -> None:
@@ -69,8 +121,8 @@ def test_search(pacman: Pacman, mocker: MockerFixture) -> None:
     must call search method
     """
     search_mock = mocker.patch("ahriman.core.alpm.remote.Remote.package_search")
-    Remote.search("ahriman", pacman=pacman)
-    search_mock.assert_called_once_with("ahriman", pacman=pacman)
+    Remote.search("ahriman", pacman=pacman, search_by="name")
+    search_mock.assert_called_once_with("ahriman", pacman=pacman, search_by="name")


 def test_package_info(remote: Remote, pacman: Pacman) -> None:
@@ -81,9 +133,16 @@ def test_package_info(remote: Remote, pacman: Pacman) -> None:
     remote.package_info("package", pacman=pacman)


+def test_package_provided_by(remote: Remote, pacman: Pacman) -> None:
+    """
+    must return empty list for provides method
+    """
+    assert remote.package_provided_by("package", pacman=pacman) == []
+
+
 def test_package_search(remote: Remote, pacman: Pacman) -> None:
     """
     must raise NotImplemented for missing package search method
     """
     with pytest.raises(NotImplementedError):
-        remote.package_search("package", pacman=pacman)
+        remote.package_search("package", pacman=pacman, search_by=None)
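Taken together, these tests pin down the lookup order behind the new include_provides flag: a direct info lookup first, then an optional fallback through the provides search. A minimal sketch of that behaviour (the wrapper function is illustrative only, not the project's actual implementation):

    from ahriman.core.exceptions import UnknownPackageError


    def resolve_package(remote, package_name, *, pacman, include_provides=False):
        # direct lookup first; fall back to the provides search only when requested
        try:
            return remote.package_info(package_name, pacman=pacman)
        except UnknownPackageError:
            if include_provides:
                if packages := remote.package_provided_by(package_name, pacman=pacman):
                    return packages[0]  # first match wins
            raise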
@@ -282,3 +282,11 @@ def test_packages_with_provides(pacman: Pacman) -> None:
     """
     assert "sh" in pacman.packages()
     assert "mysql" in pacman.packages()  # mariadb
+
+
+def test_package_provided_by(pacman: Pacman) -> None:
+    """
+    must search through the provides lists
+    """
+    assert list(pacman.provided_by("sh"))
+    assert list(pacman.provided_by("libacl.so"))  # case with exact version
@@ -133,6 +133,14 @@ def test_dump_architecture_specific(configuration: Configuration) -> None:
     assert dump["build"]["archbuild_flags"] == "hello flag"


+def test_getintlist(configuration: Configuration) -> None:
+    """
+    must extract list of integers
+    """
+    configuration.set_option("build", "test_int_list", "1 42 3")
+    assert configuration.getintlist("build", "test_int_list") == [1, 42, 3]
+
+
 def test_getlist(configuration: Configuration) -> None:
     """
     must return list of string correctly
@@ -6,6 +6,7 @@ from unittest.mock import call as MockCall

 from ahriman.core.configuration import Configuration
 from ahriman.core.database.migrations.m011_repository_name import migrate_data, migrate_package_repository, steps
+from ahriman.models.repository_id import RepositoryId


 def test_migration_repository_name() -> None:
@@ -37,3 +38,13 @@ def test_migrate_package_repository(connection: Connection, configuration: Confi
         MockCall(pytest.helpers.anyvar(str, strict=True), {"repository": configuration.repository_id.id}),
         MockCall(pytest.helpers.anyvar(str, strict=True), {"repository": configuration.repository_id.id}),
     ])
+
+
+def test_migrate_package_repository_empty_id(connection: Connection, configuration: Configuration,
+                                             mocker: MockerFixture) -> None:
+    """
+    must do nothing on empty repository id
+    """
+    mocker.patch("ahriman.core.configuration.Configuration.check_loaded", return_value=("", RepositoryId("", "")))
+    migrate_package_repository(connection, configuration)
+    connection.execute.assert_not_called()
@@ -71,11 +71,35 @@ def test_logs_insert_get_pagination(database: SQLite, package_ahriman: Package)
     """
     database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "1"), 42.0, "message 1"))
     database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "1"), 43.0, "message 2"))
-    assert database.logs_get(package_ahriman.base, 1, 1) == [
+    assert database.logs_get(package_ahriman.base, None, None, 1, 1) == [
         LogRecord(LogRecordId(package_ahriman.base, "1"), 42.0, "message 1"),
     ]


+def test_logs_insert_get_filter_by_version(database: SQLite, package_ahriman: Package) -> None:
+    """
+    must insert and get package logs with pagination
+    """
+    database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "1", "p1"), 42.0, "message 1"))
+    database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "1", "p2"), 43.0, "message 2"))
+    database.logs_insert(LogRecord(LogRecordId(package_ahriman.base, "2", "p1"), 44.0, "message 3"))
+
+    assert database.logs_get(package_ahriman.base, "1", None) == [
+        LogRecord(LogRecordId(package_ahriman.base, "1", "p1"), 42.0, "message 1"),
+        LogRecord(LogRecordId(package_ahriman.base, "1", "p2"), 43.0, "message 2"),
+    ]
+    assert database.logs_get(package_ahriman.base, "2", None) == [
+        LogRecord(LogRecordId(package_ahriman.base, "2", "p1"), 44.0, "message 3"),
+    ]
+    assert database.logs_get(package_ahriman.base, None, "p1") == [
+        LogRecord(LogRecordId(package_ahriman.base, "1", "p1"), 42.0, "message 1"),
+        LogRecord(LogRecordId(package_ahriman.base, "2", "p1"), 44.0, "message 3"),
+    ]
+    assert database.logs_get(package_ahriman.base, "1", "p1") == [
+        LogRecord(LogRecordId(package_ahriman.base, "1", "p1"), 42.0, "message 1"),
+    ]
+
+
 def test_logs_insert_get_multi(database: SQLite, package_ahriman: Package) -> None:
     """
     must insert and get package logs for multiple repositories
@@ -124,8 +124,9 @@ def test_package_logs_get(local_client: LocalClient, package_ahriman: Package, m
     must retrieve package logs
     """
     logs_mock = mocker.patch("ahriman.core.database.SQLite.logs_get")
-    local_client.package_logs_get(package_ahriman.base, 1, 2)
-    logs_mock.assert_called_once_with(package_ahriman.base, 1, 2, local_client.repository_id)
+    local_client.package_logs_get(package_ahriman.base, package_ahriman.version, "process", 1, 2)
+    logs_mock.assert_called_once_with(package_ahriman.base, package_ahriman.version, "process", 1, 2,
+                                      local_client.repository_id)


 def test_package_logs_remove(local_client: LocalClient, package_ahriman: Package, mocker: MockerFixture) -> None:
@@ -658,7 +658,7 @@ def test_package_logs_get(web_client: WebClient, package_ahriman: Package, mocke

     requests_mock = mocker.patch("ahriman.core.status.web_client.WebClient.make_request", return_value=response_obj)

-    result = web_client.package_logs_get(package_ahriman.base, 1, 2)
+    result = web_client.package_logs_get(package_ahriman.base, None, None, 1, 2)
     requests_mock.assert_called_once_with("GET", pytest.helpers.anyvar(str, True),
                                           params=web_client.repository_id.query() + [("limit", "1"), ("offset", "2")])
     assert result == [
@@ -666,6 +666,21 @@ def test_package_logs_get(web_client: WebClient, package_ahriman: Package, mocke
     ]


+def test_package_logs_get_filter(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must get logs with version and process id filter
+    """
+    requests_mock = mocker.patch("ahriman.core.status.web_client.WebClient.make_request")
+    web_client.package_logs_get(package_ahriman.base, package_ahriman.version, LogRecordId.DEFAULT_PROCESS_ID, 1, 2)
+    requests_mock.assert_called_once_with("GET", pytest.helpers.anyvar(str, True),
+                                          params=web_client.repository_id.query() + [
+                                              ("limit", "1"),
+                                              ("offset", "2"),
+                                              ("version", package_ahriman.version),
+                                              ("process_id", LogRecordId.DEFAULT_PROCESS_ID),
+                                          ])
+
+
 def test_package_logs_get_failed(web_client: WebClient, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must suppress any exception happened during logs fetch
@@ -195,6 +195,32 @@ def test_tree_levels_sorted() -> None:
     assert third == [leaf2.package, leaf4.package]


+def test_tree_levels_provides() -> None:
+    """
+    must build tree according to provides list
+    """
+    leaf1 = Leaf(
+        Package(
+            base="package1",
+            version="1.0.0",
+            remote=RemoteSource(source=PackageSource.AUR),
+            packages={"package1": PackageDescription(depends=["package3"])},
+        )
+    )
+    leaf2 = Leaf(
+        Package(
+            base="package2",
+            version="1.0.0",
+            remote=RemoteSource(source=PackageSource.AUR),
+            packages={"package2": PackageDescription(provides=["package3"])},
+        )
+    )
+
+    first, second = Tree([leaf1, leaf2]).levels()
+    assert first == [leaf2.package]
+    assert second == [leaf1.package]
+
+
 def test_tree_partitions() -> None:
     """
     must divide tree into partitions
@@ -10,8 +10,8 @@ from unittest.mock import call as MockCall

 from ahriman.core.exceptions import BuildError, CalledProcessError, OptionError, UnsafeRunError
 from ahriman.core.utils import check_output, check_user, dataclass_view, enum_values, extract_user, filter_json, \
-    full_version, minmax, package_like, parse_version, partition, pretty_datetime, pretty_size, safe_filename, \
-    srcinfo_property, srcinfo_property_list, trim_package, utcnow, walk
+    full_version, minmax, package_like, parse_version, partition, pretty_datetime, pretty_interval, pretty_size, \
+    safe_filename, srcinfo_property, srcinfo_property_list, trim_package, utcnow, walk
 from ahriman.models.package import Package
 from ahriman.models.package_source import PackageSource
 from ahriman.models.repository_id import RepositoryId
@@ -150,6 +150,13 @@ def test_check_output_empty_line(mocker: MockerFixture) -> None:
     logger_mock.assert_has_calls([MockCall(""), MockCall("hello")])


+def test_check_output_encoding_error(resource_path_root: Path) -> None:
+    """
+    must correctly process unicode encoding error in command output
+    """
+    assert check_output("cat", str(resource_path_root / "models" / "package_pacman-static_pkgbuild"))
+
+
 def test_check_user(repository_id: RepositoryId, mocker: MockerFixture) -> None:
     """
     must check user correctly
@@ -334,6 +341,18 @@ def test_pretty_datetime_empty() -> None:
     assert pretty_datetime(None) == ""


+def test_pretty_interval() -> None:
+    """
+    must generate string from interval
+    """
+    assert pretty_interval(1) == "1 second"
+    assert pretty_interval(42) == "42 seconds"
+    assert pretty_interval(62) == "1 minute 2 seconds"
+    assert pretty_interval(121) == "2 minutes 1 second"
+    assert pretty_interval(3600) == "1 hour"
+    assert pretty_interval(7242) == "2 hours 42 seconds"
+
+
 def test_pretty_size_bytes() -> None:
     """
     must generate bytes string for bytes value
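The expected strings above fully determine the formatting rules for the new helper; a sketch consistent with them (not necessarily the project's exact implementation):

    def pretty_interval(interval: int) -> str:
        # split into hours/minutes/seconds, pluralize each unit and drop zero-valued parts
        minutes, seconds = divmod(interval, 60)
        hours, minutes = divmod(minutes, 60)
        parts = [
            f"{amount} {unit}" if amount == 1 else f"{amount} {unit}s"
            for amount, unit in ((hours, "hour"), (minutes, "minute"), (seconds, "second"))
            if amount > 0
        ]
        return " ".join(parts)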
@@ -2,7 +2,7 @@ import datetime
 import json
 import pyalpm  # typing: ignore

-from dataclasses import asdict, fields
+from dataclasses import asdict, fields, replace
 from pathlib import Path
 from pytest_mock import MockerFixture
 from typing import Any
@@ -38,6 +38,25 @@ def _get_official_data(resource_path_root: Path) -> dict[str, Any]:
     return json.loads(response)["results"][0]


+def test_post_init(aur_package_ahriman: AURPackage) -> None:
+    """
+    must trim versions and descriptions from packages list
+    """
+    package = replace(
+        aur_package_ahriman,
+        depends=["a=1"],
+        make_depends=["b>=3"],
+        opt_depends=["c: a description"],
+        check_depends=["d=4"],
+        provides=["e=5"],
+    )
+    assert package.depends == ["a"]
+    assert package.make_depends == ["b"]
+    assert package.opt_depends == ["c"]
+    assert package.check_depends == ["d"]
+    assert package.provides == ["e"]
+
+
 def test_from_json(aur_package_ahriman: AURPackage, resource_path_root: Path) -> None:
     """
     must load package from json
@@ -167,15 +167,26 @@ def test_from_aur(package_ahriman: Package, aur_package_ahriman: AURPackage, moc
     """
     must construct package from aur
     """
-    mocker.patch("ahriman.core.alpm.remote.AUR.info", return_value=aur_package_ahriman)
+    info_mock = mocker.patch("ahriman.core.alpm.remote.AUR.info", return_value=aur_package_ahriman)

     package = Package.from_aur(package_ahriman.base, package_ahriman.packager)
+    info_mock.assert_called_once_with(package_ahriman.base, include_provides=False)
     assert package_ahriman.base == package.base
     assert package_ahriman.version == package.version
     assert package_ahriman.packages.keys() == package.packages.keys()
     assert package_ahriman.packager == package.packager


+def test_from_aur_include_provides(package_ahriman: Package, aur_package_ahriman: AURPackage,
+                                   mocker: MockerFixture) -> None:
+    """
+    must construct package from aur by using provides list
+    """
+    info_mock = mocker.patch("ahriman.core.alpm.remote.AUR.info", return_value=aur_package_ahriman)
+    Package.from_aur(package_ahriman.base, package_ahriman.packager, include_provides=True)
+    info_mock.assert_called_once_with(package_ahriman.base, include_provides=True)
+
+
 def test_from_build(package_ahriman: Package, mocker: MockerFixture, resource_path_root: Path) -> None:
     """
     must construct package from PKGBUILD
@@ -269,14 +280,25 @@ def test_from_json_view_3(package_tpacpi_bat_git: Package) -> None:
     assert Package.from_json(package_tpacpi_bat_git.view()) == package_tpacpi_bat_git


+def test_from_official_include_provides(package_ahriman: Package, aur_package_ahriman: AURPackage, pacman: Pacman,
+                                        mocker: MockerFixture) -> None:
+    """
+    must construct package from official repository
+    """
+    info_mock = mocker.patch("ahriman.core.alpm.remote.Official.info", return_value=aur_package_ahriman)
+    Package.from_official(package_ahriman.base, pacman, package_ahriman.packager, include_provides=True)
+    info_mock.assert_called_once_with(package_ahriman.base, pacman=pacman, include_provides=True)
+
+
 def test_from_official(package_ahriman: Package, aur_package_ahriman: AURPackage, pacman: Pacman,
                        mocker: MockerFixture) -> None:
     """
     must construct package from official repository
     """
-    mocker.patch("ahriman.core.alpm.remote.Official.info", return_value=aur_package_ahriman)
+    info_mock = mocker.patch("ahriman.core.alpm.remote.Official.info", return_value=aur_package_ahriman)

     package = Package.from_official(package_ahriman.base, pacman, package_ahriman.packager)
+    info_mock.assert_called_once_with(package_ahriman.base, pacman=pacman, include_provides=False)
     assert package_ahriman.base == package.base
     assert package_ahriman.version == package.version
     assert package_ahriman.packages.keys() == package.packages.keys()
@@ -6,10 +6,15 @@ from ahriman.models.package_description import PackageDescription

 def test_post_init() -> None:
     """
-    must trim versions and descriptions from dependencies list
+    must trim versions and descriptions from packages list
     """
-    assert PackageDescription(depends=["a=1"], make_depends=["b>=3"], opt_depends=["c: a description"]) == \
-        PackageDescription(depends=["a"], make_depends=["b"], opt_depends=["c"])
+    assert PackageDescription(
+        depends=["a=1"],
+        make_depends=["b>=3"],
+        opt_depends=["c: a description"],
+        check_depends=["d=4"],
+        provides=["e=5"]
+    ) == PackageDescription(depends=["a"], make_depends=["b"], opt_depends=["c"], check_depends=["d"], provides=["e"])


 def test_filepath(package_description_ahriman: PackageDescription) -> None:
1 tests/ahriman/web/schemas/test_logs_search_schema.py Normal file
@@ -0,0 +1 @@
+# schema testing goes in view class tests
@@ -86,6 +86,31 @@ async def test_get_with_pagination(client: TestClient, package_ahriman: Package)
     ]


+async def test_get_with_filter(client: TestClient, package_ahriman: Package) -> None:
+    """
+    must get logs with filter by version and process identifier
+    """
+    await client.post(f"/api/v1/packages/{package_ahriman.base}",
+                      json={"status": BuildStatusEnum.Success.value, "package": package_ahriman.view()})
+    await client.post(f"/api/v1/packages/{package_ahriman.base}/logs",
+                      json={"created": 42.0, "message": "message 1", "version": "42"})
+    await client.post(f"/api/v1/packages/{package_ahriman.base}/logs",
+                      json={"created": 43.0, "message": "message 2", "version": "43"})
+    request_schema = pytest.helpers.schema_request(LogsView.get, location="querystring")
+    response_schema = pytest.helpers.schema_response(LogsView.get)
+
+    payload = {"version": "42", "process_id": LogRecordId.DEFAULT_PROCESS_ID}
+    assert not request_schema.validate(payload)
+    response = await client.get(f"/api/v2/packages/{package_ahriman.base}/logs", params=payload)
+    assert response.status == 200
+
+    logs = await response.json()
+    assert not response_schema.validate(logs)
+    assert logs == [
+        {"created": 42.0, "message": "message 1", "version": "42", "process_id": LogRecordId.DEFAULT_PROCESS_ID},
+    ]
+
+
 async def test_get_bad_request(client: TestClient, package_ahriman: Package) -> None:
     """
     must return bad request for invalid query parameters
@@ -114,3 +139,41 @@ async def test_get_not_found(client: TestClient, package_ahriman: Package) -> No
     response = await client.get(f"/api/v2/packages/{package_ahriman.base}/logs")
     assert response.status == 404
     assert not response_schema.validate(await response.json())
+
+
+async def test_get_head(client: TestClient, package_ahriman: Package) -> None:
+    """
+    must return only versions if head parameter is set
+    """
+    await client.post(f"/api/v1/packages/{package_ahriman.base}",
+                      json={"status": BuildStatusEnum.Success.value, "package": package_ahriman.view()})
+    await client.post(f"/api/v1/packages/{package_ahriman.base}/logs",
+                      json={"created": 42.0, "message": "message 1", "version": "42"})
+    await client.post(f"/api/v1/packages/{package_ahriman.base}/logs",
+                      json={"created": 43.0, "message": "message 2", "version": "42"})
+    await client.post(f"/api/v1/packages/{package_ahriman.base}/logs",
+                      json={"created": 44.0, "message": "message 3", "version": "43"})
+    request_schema = pytest.helpers.schema_request(LogsView.get, location="querystring")
+    response_schema = pytest.helpers.schema_response(LogsView.get)
+
+    payload = {"head": "true"}
+    assert not request_schema.validate(payload)
+    response = await client.get(f"/api/v2/packages/{package_ahriman.base}/logs", params=payload)
+    assert response.status == 200
+
+    logs = await response.json()
+    assert not response_schema.validate(logs)
+    assert logs == [
+        {
+            "created": 42.0,
+            "message": "",
+            "version": "42",
+            "process_id": LogRecordId.DEFAULT_PROCESS_ID,
+        },
+        {
+            "created": 44.0,
+            "message": "",
+            "version": "43",
+            "process_id": LogRecordId.DEFAULT_PROCESS_ID,
+        },
+    ]
19 tools/__init__.py Normal file
@@ -0,0 +1,19 @@
+#
+# Copyright (c) 2021-2025 ahriman team.
+#
+# This file is part of ahriman
+# (see https://github.com/arcan1s/ahriman).
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2021-2023 ahriman team.
+# Copyright (c) 2021-2025 ahriman team.
 #
 # This file is part of ahriman
 # (see https://github.com/arcan1s/ahriman).
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2021-2023 ahriman team.
+# Copyright (c) 2021-2025 ahriman team.
 #
 # This file is part of ahriman
 # (see https://github.com/arcan1s/ahriman).
123
tox.ini
123
tox.ini
@ -1,123 +0,0 @@
|
|||||||
[tox]
envlist = check, tests
isolated_build = true
labels =
    release = version, docs, publish
dependencies = -e .[journald,pacman,reports,s3,shell,stats,unixsocket,validator,web,web_api-docs,web_auth,web_oauth2,web_metrics]
project_name = ahriman

[flags]
autopep8 = --max-line-length 120 -aa --in-place
bandit = --configfile .bandit.yml
manpage = --author "ahriman team" --author-email "" --description "ArcH linux ReposItory MANager" --manual-title "ArcH linux ReposItory MANager" --project-name ahriman --url https://github.com/arcan1s/ahriman
mypy = --implicit-reexport --strict --allow-untyped-decorators --allow-subclassing-any
pydeps = --no-config --cluster
pylint = --rcfile .pylint.toml
shtab = --prefix ahriman --prog ahriman ahriman.application.ahriman._parser

[pytest]
addopts = --cov=ahriman --cov-report=term-missing:skip-covered --no-cov-on-fail --cov-fail-under=100 --spec
asyncio_default_fixture_loop_scope = function
asyncio_mode = auto
spec_test_format = {result} {docstring_summary}

[testenv:archive]
description = Create source files tarball
deps =
    build
commands =
    python -m build --sdist

[testenv:check]
description = Run common checks like linter, mypy, etc
dependency_groups =
    check
deps =
    {[tox]dependencies}
pip_pre = true
setenv =
    CFLAGS="-Wno-unterminated-string-initialization"
    MYPYPATH=src
commands =
    autopep8 {[flags]autopep8} --exit-code --jobs 0 --recursive "src/{[tox]project_name}" "tests/{[tox]project_name}"
    pylint {[flags]pylint} "src/{[tox]project_name}"
    bandit {[flags]bandit} --recursive "src/{[tox]project_name}"
    bandit {[flags]bandit} --skip B101,B105,B106 --recursive "tests/{[tox]project_name}"
    mypy {[flags]mypy} --install-types --non-interactive --package "{[tox]project_name}"

[testenv:docs]
description = Generate source files for documentation
allowlist_externals =
    bash
    find
dependency_groups =
    docs
depends =
    version
deps =
    {[tox]dependencies}
    uv
pip_pre = true
setenv =
    PYTHONPATH=src
    SPHINX_APIDOC_OPTIONS=members,no-undoc-members,show-inheritance
commands =
    bash -c 'shtab {[flags]shtab} --shell bash > package/share/bash-completion/completions/_ahriman'
    bash -c 'shtab {[flags]shtab} --shell zsh > package/share/zsh/site-functions/_ahriman'
    argparse-manpage {[flags]manpage} --module ahriman.application.ahriman --function _parser --output ../package/share/man/man1/ahriman.1
    pydeps {[flags]pydeps} --no-output --show-dot --dot-output {tox_root}{/}docs/_static/architecture.dot src/ahriman
    # remove autogenerated modules rst files
    find docs -type f -name "{[tox]project_name}*.rst" -delete
    sphinx-apidoc --output-dir docs src
    # compile list of dependencies for rtd.io
    uv pip compile --group pyproject.toml:docs --extra s3 --extra validator --extra web --output-file docs/requirements.txt --quiet pyproject.toml

[testenv:html]
description = Generate html documentation
dependency_groups =
    docs
deps =
    {[tox]dependencies}
pip_pre = true
recreate = true
commands =
    sphinx-build --builder html --write-all --jobs auto --fail-on-warning docs {envtmpdir}{/}html

[testenv:publish]
description = Create and publish release to GitHub
allowlist_externals =
    git
depends =
    docs
passenv =
    SSH_AUTH_SOCK
commands =
    git add package/archlinux/PKGBUILD src/ahriman/__init__.py docs/_static/architecture.dot package/share/man/man1/ahriman.1 package/share/bash-completion/completions/_ahriman package/share/zsh/site-functions/_ahriman
    git commit -m "Release {posargs}"
    git tag "{posargs}"
    git push
    git push --tags

[testenv:tests]
description = Run tests
dependency_groups =
    tests
deps =
    {[tox]dependencies}
pip_pre = true
setenv =
    CFLAGS="-Wno-unterminated-string-initialization"
commands =
    pytest {posargs}

[testenv:version]
description = Bump package version
allowlist_externals =
    sed
deps =
    packaging
commands =
    # check if version is set and validate it
    {envpython} -c 'from packaging.version import Version; Version("{posargs}")'
    sed -i 's/^__version__ = .*/__version__ = "{posargs}"/' src/ahriman/__init__.py
    sed -i "s/pkgver=.*/pkgver={posargs}/" package/archlinux/PKGBUILD
310
tox.toml
Normal file
@ -0,0 +1,310 @@
env_list = [
    "check",
    "tests",
]
isolated_build = true
labels.release = [
    "version",
    "docs",
    "publish",
]

[flags]
autopep8 = [
    "--max-line-length", "120",
    "-aa",
]
bandit = [
    "--configfile", ".bandit.yml",
]
manpage = [
    "--author", "{[project]name} team",
    "--author-email", "",
    "--description", "ArcH linux ReposItory MANager",
    "--manual-title", "ArcH linux ReposItory MANager",
    "--project-name", "{[project]name}",
    "--version", "{env:VERSION}",
    "--url", "https://github.com/arcan1s/ahriman",
]
mypy = [
    "--implicit-reexport",
    "--strict",
    "--allow-untyped-decorators",
    "--allow-subclassing-any",
]
pydeps = [
    "--no-config",
    "--cluster",
]
pylint = [
    "--rcfile", ".pylint.toml",
]
shtab = [
    "--prefix", "{[project]name}",
    "--prog", "{[project]name}",
    "ahriman.application.ahriman._parser",
]

[project]
extras = [
    "journald",
    "pacman",
    "reports",
    "s3",
    "shell",
    "stats",
    "unixsocket",
    "validator",
    "web",
    "web-auth",
    "web-docs",
    "web-oauth2",
    "web-metrics",
]
name = "ahriman"

[env.archive]
description = "Create source files tarball"
deps = [
    "build",
]
commands = [
    [
        "{envpython}",
        "-m", "build",
        "--sdist",
    ],
]

[env.check]
description = "Run common checks like linter, mypy, etc"
dependency_groups = [
    "check",
]
extras = [
    { replace = "ref", of = ["project", "extras"], extend = true },
]
pip_pre = true
set_env.CFLAGS = "-Wno-unterminated-string-initialization"
set_env.MYPYPATH = "src"
commands = [
    [
        "autopep8",
        { replace = "ref", of = ["flags", "autopep8"], extend = true },
        "--exit-code",
        "--in-place",
        "--jobs", "0",
        "--recursive",
        "src/{[project]name}",
        "tests/{[project]name}",
    ],
    [
        "pylint",
        { replace = "ref", of = ["flags", "pylint"], extend = true },
        "src/{[project]name}",
    ],
    [
        "bandit",
        { replace = "ref", of = ["flags", "bandit"], extend = true },
        "--recursive",
        "src/{[project]name}",
    ],
    [
        "bandit",
        { replace = "ref", of = ["flags", "bandit"], extend = true },
        "--skip", "B101,B105,B106",
        "--recursive",
        "src/{[project]name}",
    ],
    [
        "mypy",
        { replace = "ref", of = ["flags", "mypy"], extend = true },
        "--install-types",
        "--non-interactive",
        "--package", "{[project]name}",
    ],
]

[env.docs]
description = "Generate source files for documentation"
dependency_groups = [
    "docs",
]
depends = [
    "version",
]
deps = [
    "uv",
]
dynamic_version = "{[project]name}.__version__"
extras = [
    { replace = "ref", of = ["project", "extras"], extend = true },
]
# TODO: streamline shlex usage after https://github.com/iterative/shtab/pull/192 merge
handle_redirect = true
pip_pre = true
set_env.PYTHONPATH = "src"
set_env.SPHINX_APIDOC_OPTIONS = "members,no-undoc-members,show-inheritance"
commands = [
    [
        "shtab",
        { replace = "ref", of = ["flags", "shtab"], extend = true },
        "--shell",
        "bash",
        ">",
        "package/share/bash-completion/completions/_ahriman",
    ],
    [
        "shtab",
        { replace = "ref", of = ["flags", "shtab"], extend = true },
        "--shell",
        "zsh",
        ">",
        "package/share/zsh/site-functions/_ahriman",
    ],
    [
        "argparse-manpage",
        { replace = "ref", of = ["flags", "manpage"], extend = true },
        "--module", "ahriman.application.ahriman",
        "--function", "_parser",
        "--output", "package/share/man/man1/ahriman.1",
    ],
    [
        "pydeps",
        { replace = "ref", of = ["flags", "pydeps"], extend = true },
        "--dot-output", "{tox_root}/docs/_static/architecture.dot",
        "--no-output",
        "--show-dot",
        "src/ahriman",
    ],
    [
        "sphinx-apidoc",
        "--force",
        "--no-toc",
        "--output-dir", "docs",
        "src",
    ],
    # compile list of dependencies for rtd.io
    [
        "uv",
        "pip",
        "compile",
        "--group", "pyproject.toml:docs",
        "--extra", "s3",
        "--extra", "validator",
        "--extra", "web",
        "--output-file", "docs/requirements.txt",
        "--quiet",
        "pyproject.toml",
    ],
]

[env.html]
description = "Generate html documentation"
dependency_groups = [
    "docs",
]
extras = [
    { replace = "ref", of = ["project", "extras"], extend = true },
]
pip_pre = true
recreate = true
commands = [
    [
        "sphinx-build",
        "--builder", "html",
        "--fail-on-warning",
        "--jobs", "auto",
        "--write-all",
        "docs",
        "{envtmpdir}/html",
    ],
]

[env.publish]
description = "Create and publish release to GitHub"
allowlist_externals = [
    "git",
]
depends = [
    "docs",
]
pass_env = [
    "SSH_AUTH_SOCK",
]
commands = [
    [
        "git",
        "add",
        "package/archlinux/PKGBUILD",
        "src/ahriman/__init__.py",
        "docs/_static/architecture.dot",
        "package/share/man/man1/ahriman.1",
        "package/share/bash-completion/completions/_ahriman",
        "package/share/zsh/site-functions/_ahriman",
    ],
    [
        "git",
        "commit",
        "--message", "Release {posargs}",
    ],
    [
        "git",
        "tag",
        "{posargs}",
    ],
    [
        "git",
        "push",
    ],
    [
        "git",
        "push",
        "--tags",
    ],
]

[env.tests]
description = "Run tests"
dependency_groups = [
    "tests",
]
extras = [
    { replace = "ref", of = ["project", "extras"], extend = true },
]
pip_pre = true
set_env.CFLAGS = "-Wno-unterminated-string-initialization"
commands = [
    [
        "pytest",
        { replace = "posargs", extend = true },
    ],
]

[env.version]
description = "Bump package version"
allowlist_externals = [
    "sed",
]
deps = [
    "packaging",
]
commands = [
    # check if version is set and validate it
    [
        "{envpython}",
        "-c", "from packaging.version import Version; Version('{posargs}')",
    ],
    [
        "sed",
        "--in-place",
        "s/^__version__ = .*/__version__ = \"{posargs}\"/",
        "src/ahriman/__init__.py",
    ],
    [
        "sed",
        "--in-place",
        "s/pkgver=.*/pkgver={posargs}/",
        "package/archlinux/PKGBUILD",
    ],
]
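# --- sketch (not part of tox.toml) ----------------------------------------------
# The validation step in [env.version] above runs "{envpython} -c ..." with the
# release version passed as posargs; the packaging library rejects anything that
# is not PEP 440 compliant. The version strings below are illustrative only.
from packaging.version import InvalidVersion, Version

for candidate in ("2.19.0", "not-a-version"):
    try:
        Version(candidate)
        print(f"{candidate}: accepted")
    except InvalidVersion:
        print(f"{candidate}: rejected")
# --- end sketch ------------------------------------------------------------------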
128
toxfile.py
Normal file
@ -0,0 +1,128 @@
#
# Copyright (c) 2021-2025 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import importlib
import shlex
import sys

from tox.config.sets import EnvConfigSet
from tox.config.types import Command
from tox.plugin import impl
from tox.session.state import State
from tox.tox_env.api import ToxEnv


def _extract_version(env_conf: EnvConfigSet, python_path: str | None = None) -> dict[str, str]:
    """
    extract version dynamically and set VERSION environment variable

    Args:
        env_conf(EnvConfigSet): the core configuration object
        python_path(str | None): python path variable if available

    Returns:
        dict[str, str]: environment variables which must be inserted
    """
    import_path = env_conf["dynamic_version"]
    if not import_path:
        return {}

    if python_path is not None:
        sys.path.append(python_path)

    module_name, variable_name = import_path.rsplit(".", maxsplit=1)
    module = importlib.import_module(module_name)
    version = getattr(module, variable_name)

    # reset import paths
    sys.path.pop()

    return {"VERSION": version}
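# --- sketch (not part of toxfile.py) ------------------------------------------
# What the dynamic_version lookup above evaluates to for this repository,
# assuming the snippet is run from the project root with the src/ layout:
#
#     import importlib
#     import sys
#
#     sys.path.insert(0, "src")                      # same effect as set_env.PYTHONPATH = "src"
#     ahriman_module = importlib.import_module("ahriman")
#     print({"VERSION": ahriman_module.__version__})  # what the hook exports as VERSION
# --- end sketch ------------------------------------------------------------------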


def _wrap_commands(env_conf: EnvConfigSet, shell: str = "bash") -> None:
    """
    wrap commands into shell if there is redirect

    Args:
        env_conf(EnvConfigSet): the core configuration object
        shell(str, optional): shell command to use (Default value = "bash")
    """
    if not env_conf["handle_redirect"]:
        return

    # append shell just in case
    env_conf["allowlist_externals"].append(shell)

    for command in env_conf["commands"]:
        if len(command.args) < 3:  # command itself, redirect and output
            continue

        redirect, output = command.args[-2:]
        if redirect not in (">", "2>", "&>"):
            continue

        command.args = [
            shell,
            "-c",
            f"{Command(command.args[:-2]).shell} {redirect} {shlex.quote(output)}",
        ]
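# --- sketch (not part of toxfile.py) ------------------------------------------
# The redirect handling above, shown standalone: tox runs commands without a
# shell, so a trailing "> file" would otherwise be passed to the program as a
# literal argument. A command ending with a redirect operator and a target is
# therefore rewritten into a single "bash -c" invocation.
import shlex


def wrap_redirect(args: list[str], shell: str = "bash") -> list[str]:
    """wrap args into a shell call if they end with a redirect"""
    if len(args) >= 3 and args[-2] in (">", "2>", "&>"):
        redirect, output = args[-2:]
        joined = " ".join(shlex.quote(arg) for arg in args[:-2])
        return [shell, "-c", f"{joined} {redirect} {shlex.quote(output)}"]
    return args


print(wrap_redirect(["shtab", "--shell", "bash", ">", "completions/_ahriman"]))
# --- end sketch ------------------------------------------------------------------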


@impl
def tox_add_env_config(env_conf: EnvConfigSet, state: State) -> None:
    """
    add a command line argument. This is the first hook to be called,
    right after the logging setup and config source discovery.

    Args:
        env_conf(EnvConfigSet): the core configuration object
        state(State): the global tox state object
    """
    del state

    env_conf.add_config(
        keys=["dynamic_version"],
        of_type=str,
        default="",
        desc="import path for the version variable",
    )
    env_conf.add_config(
        keys=["handle_redirect"],
        of_type=bool,
        default=False,
        desc="wrap commands to handle redirects if any",
    )


@impl
def tox_before_run_commands(tox_env: ToxEnv) -> None:
    """
    called before the commands set is executed

    Args:
        tox_env(ToxEnv): the tox environment being executed
    """
    env_conf = tox_env.conf
    set_env = env_conf["set_env"]

    python_path = set_env.load("PYTHONPATH") if "PYTHONPATH" in set_env else None
    set_env.update(_extract_version(env_conf, python_path))

    _wrap_commands(env_conf)