mirror of https://github.com/arcan1s/ahriman.git
commit 83084a318d
parent fc29689ef9

    repository component tests
@@ -42,6 +42,4 @@ remote =
 bucket =
 
 [web]
-host =
-port =
 templates = /usr/share/ahriman
@@ -17,19 +17,19 @@ args = (sys.stderr,)
 class = logging.handlers.RotatingFileHandler
 level = DEBUG
 formatter = generic_format
-args = ('/var/log/ahriman/ahriman.log', 'a', 20971520, 20)
+args = ("/var/log/ahriman/ahriman.log", "a", 20971520, 20)
 
 [handler_build_file_handler]
 class = logging.handlers.RotatingFileHandler
 level = DEBUG
 formatter = generic_format
-args = ('/var/log/ahriman/build.log', 'a', 20971520, 20)
+args = ("/var/log/ahriman/build.log", "a", 20971520, 20)
 
 [handler_http_handler]
 class = logging.handlers.RotatingFileHandler
 level = DEBUG
 formatter = generic_format
-args = ('/var/log/ahriman/http.log', 'a', 20971520, 20)
+args = ("/var/log/ahriman/http.log", "a", 20971520, 20)
 
 [formatter_generic_format]
 format = [%(levelname)s %(asctime)s] [%(filename)s:%(lineno)d] [%(funcName)s]: %(message)s
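
Note: the args tuples above are handed straight to logging.handlers.RotatingFileHandler by fileConfig, so swapping single quotes for double quotes is purely cosmetic. Positionally the values are filename, mode, maxBytes and backupCount. A minimal sketch of the equivalent handler built directly in code (paths taken from the config above):

    from logging.handlers import RotatingFileHandler

    # equivalent of: args = ("/var/log/ahriman/ahriman.log", "a", 20971520, 20)
    handler = RotatingFileHandler(
        "/var/log/ahriman/ahriman.log",  # log file path
        mode="a",                        # append to an existing file
        maxBytes=20971520,               # rotate once the file reaches 20 MiB
        backupCount=20)                  # keep at most 20 rotated files
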
@@ -2,4 +2,4 @@
 test = pytest
 
 [tool:pytest]
-addopts = --pspec
+addopts = --cov=ahriman --pspec
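
Note: the new --cov=ahriman flag is provided by the pytest-cov plugin that this commit adds to the test dependencies in setup.py below; with it in addopts, a plain pytest run also collects coverage for the ahriman package. As an illustration only (not part of the commit), the same flags can be passed programmatically:

    import sys

    import pytest

    # equivalent of running `pytest` with addopts = --cov=ahriman --pspec
    sys.exit(pytest.main(["--cov=ahriman", "--pspec"]))
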
setup.py (3 changes)
@@ -36,6 +36,7 @@ setup(
 ],
 tests_require=[
 "pytest",
+"pytest-cov",
 "pytest-helpers-namespace",
 "pytest-mock",
 "pytest-pspec",
@@ -70,7 +71,7 @@ setup(
 
 extras_require={
 "html-templates": ["Jinja2"],
-"test": ["coverage", "pytest", "pytest-helpers-namespace", "pytest-mock", "pytest-pspec", "pytest-resource-path"],
+"test": ["pytest", "pytest-cov", "pytest-helpers-namespace", "pytest-mock", "pytest-pspec", "pytest-resource-path"],
 "web": ["Jinja2", "aiohttp", "aiohttp_jinja2", "requests"],
 },
 )
@@ -48,7 +48,7 @@ class Handler:
 cls.run(args, architecture, config)
 return True
 except Exception:
-logging.getLogger("root").exception("process exception", exc_info=True)
+logging.getLogger("root").exception("process exception")
 return False
 
 @classmethod
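
Note: the removal of exc_info=True here (and in the many similar changes below) relies on the fact that Logger.exception already logs at ERROR level with the current exception's traceback attached, so the keyword was redundant. A tiny illustration:

    import logging

    try:
        raise RuntimeError("boom")
    except Exception:
        # both calls emit the same record, traceback included;
        # exception() simply defaults exc_info to True
        logging.getLogger("root").exception("process exception")
        logging.getLogger("root").error("process exception", exc_info=True)
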
@@ -89,6 +89,7 @@ class Task:
 else:
 Task._check_output("git", "clone", remote, str(local), exception=None, logger=logger)
 # and now force reset to our branch
+Task._check_output("git", "checkout", "--force", branch, exception=None, cwd=local, logger=logger)
 Task._check_output("git", "reset", "--hard", f"origin/{branch}", exception=None, cwd=local, logger=logger)
 
 def build(self) -> List[Path]:
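
Note: the added checkout switches the working copy onto the requested branch (discarding local changes) before hard-resetting it to the remote state. A rough standalone sketch of the same sequence using plain subprocess, assuming a hypothetical fetch helper; the project itself routes these calls through Task._check_output and only clones when the local copy does not already exist (the else branch above):

    import subprocess
    from pathlib import Path

    def fetch(local: Path, remote: str, branch: str = "master") -> None:
        # clone, then force the working tree onto origin/<branch>
        subprocess.check_call(["git", "clone", remote, str(local)])
        subprocess.check_call(["git", "checkout", "--force", branch], cwd=local)
        subprocess.check_call(["git", "reset", "--hard", f"origin/{branch}"], cwd=local)
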
@@ -140,7 +140,7 @@ class Configuration(configparser.RawConfigParser):
 fileConfig(self.get("settings", "logging"))
 except PermissionError:
 console_logger()
-logging.error("could not create logfile, fallback to stderr", exc_info=True)
+logging.exception("could not create logfile, fallback to stderr")
 
 def console_logger() -> None:
 logging.basicConfig(filename=None, format=Configuration.DEFAULT_LOG_FORMAT,
@@ -64,7 +64,7 @@ class Report:
 try:
 report.generate(packages)
 except Exception:
-report.logger.exception("report generation failed", exc_info=True)
+report.logger.exception(f"report generation failed for target {provider.name}")
 raise ReportFailed()
 
 def generate(self, packages: Iterable[Package]) -> None:
@@ -56,13 +56,12 @@ class Executor(Cleaner):
 dst = self.paths.packages / src.name
 shutil.move(src, dst)
 
-for package in updates:
+for single in updates:
 try:
-build_single(package)
+build_single(single)
 except Exception:
-self.reporter.set_failed(package.base)
+self.reporter.set_failed(single.base)
-self.logger.exception(f"{package.base} ({self.architecture}) build exception", exc_info=True)
+self.logger.exception(f"{single.base} ({self.architecture}) build exception")
-continue
 self.clear_build()
 
 return self.packages_built()
@@ -73,11 +72,11 @@ class Executor(Cleaner):
 :param packages: list of package names or bases to remove
 :return: path to repository database
 """
-def remove_single(package: str, filename: Path) -> None:
+def remove_single(package: str, fn: Path) -> None:
 try:
-self.repo.remove(package, filename)
+self.repo.remove(package, fn)
 except Exception:
-self.logger.exception(f"could not remove {package}", exc_info=True)
+self.logger.exception(f"could not remove {package}")
 
 requested = set(packages)
 for local in self.packages():
@@ -142,9 +141,12 @@ class Executor(Cleaner):
 
 # we are iterating over bases, not single packages
 updates: Dict[str, Package] = {}
-for fn in packages:
+for filename in packages:
-local = Package.load(fn, self.pacman, self.aur_url)
+try:
+local = Package.load(filename, self.pacman, self.aur_url)
 updates.setdefault(local.base, local).packages.update(local.packages)
+except Exception:
+self.logger.exception(f"could not load package from {filename}")
 
 for local in updates.values():
 try:
@@ -153,7 +155,7 @@ class Executor(Cleaner):
 self.reporter.set_success(local)
 except Exception:
 self.reporter.set_failed(local.base)
-self.logger.exception(f"could not process {local.base}", exc_info=True)
+self.logger.exception(f"could not process {local.base}")
 self.clear_packages()
 
 return self.repo.repo_path
@@ -44,7 +44,7 @@ class Repository(Executor, UpdateHandler):
 local = Package.load(full_path, self.pacman, self.aur_url)
 result.setdefault(local.base, local).packages.update(local.packages)
 except Exception:
-self.logger.exception(f"could not load package from {full_path}", exc_info=True)
+self.logger.exception(f"could not load package from {full_path}")
 continue
 return list(result.values())
 
@@ -62,7 +62,7 @@ class UpdateHandler(Cleaner):
 result.append(remote)
 except Exception:
 self.reporter.set_failed(local.base)
-self.logger.exception(f"could not load remote package {local.base}", exc_info=True)
+self.logger.exception(f"could not load remote package {local.base}")
 continue
 
 return result
@@ -84,7 +84,7 @@ class UpdateHandler(Cleaner):
 else:
 self.reporter.set_pending(local.base)
 except Exception:
-self.logger.exception(f"could not add package from {fn}", exc_info=True)
+self.logger.exception(f"could not add package from {fn}")
 self.clear_manual()
 
 return result
@@ -66,7 +66,7 @@ class Uploader:
 try:
 uploader.sync(path)
 except Exception:
-uploader.logger.exception("remote sync failed", exc_info=True)
+uploader.logger.exception(f"remote sync failed for {provider.name}")
 raise SyncFailed()
 
 def sync(self, path: Path) -> None:
@@ -85,7 +85,7 @@ class Watcher:
 try:
 parse_single(item)
 except Exception:
-self.logger.exception(f"cannot parse item f{item} to package", exc_info=True)
+self.logger.exception(f"cannot parse item f{item} to package")
 
 def _cache_save(self) -> None:
 """
@@ -103,7 +103,7 @@ class Watcher:
 with self.cache_path.open("w") as cache:
 json.dump(dump, cache)
 except Exception:
-self.logger.exception("cannot dump cache", exc_info=True)
+self.logger.exception("cannot dump cache")
 
 def get(self, base: str) -> Tuple[Package, BuildStatus]:
 """
@@ -75,9 +75,9 @@ class WebClient(Client):
 response = requests.post(self._package_url(package.base), json=payload)
 response.raise_for_status()
 except requests.exceptions.HTTPError as e:
-self.logger.exception(f"could not add {package.base}: {e.response.text}", exc_info=True)
+self.logger.exception(f"could not add {package.base}: {e.response.text}")
 except Exception:
-self.logger.exception(f"could not add {package.base}", exc_info=True)
+self.logger.exception(f"could not add {package.base}")
 
 def get(self, base: Optional[str]) -> List[Tuple[Package, BuildStatus]]:
 """
@@ -95,9 +95,9 @@ class WebClient(Client):
 for package in status_json
 ]
 except requests.exceptions.HTTPError as e:
-self.logger.exception(f"could not get {base}: {e.response.text}", exc_info=True)
+self.logger.exception(f"could not get {base}: {e.response.text}")
 except Exception:
-self.logger.exception(f"could not get {base}", exc_info=True)
+self.logger.exception(f"could not get {base}")
 return []
 
 def get_self(self) -> BuildStatus:
@@ -112,9 +112,9 @@ class WebClient(Client):
 status_json = response.json()
 return BuildStatus.from_json(status_json)
 except requests.exceptions.HTTPError as e:
-self.logger.exception(f"could not get service status: {e.response.text}", exc_info=True)
+self.logger.exception(f"could not get service status: {e.response.text}")
 except Exception:
-self.logger.exception("could not get service status", exc_info=True)
+self.logger.exception("could not get service status")
 return BuildStatus()
 
 def remove(self, base: str) -> None:
@@ -126,9 +126,9 @@ class WebClient(Client):
 response = requests.delete(self._package_url(base))
 response.raise_for_status()
 except requests.exceptions.HTTPError as e:
-self.logger.exception(f"could not delete {base}: {e.response.text}", exc_info=True)
+self.logger.exception(f"could not delete {base}: {e.response.text}")
 except Exception:
-self.logger.exception(f"could not delete {base}", exc_info=True)
+self.logger.exception(f"could not delete {base}")
 
 def update(self, base: str, status: BuildStatusEnum) -> None:
 """
@@ -142,9 +142,9 @@ class WebClient(Client):
 response = requests.post(self._package_url(base), json=payload)
 response.raise_for_status()
 except requests.exceptions.HTTPError as e:
-self.logger.exception(f"could not update {base}: {e.response.text}", exc_info=True)
+self.logger.exception(f"could not update {base}: {e.response.text}")
 except Exception:
-self.logger.exception(f"could not update {base}", exc_info=True)
+self.logger.exception(f"could not update {base}")
 
 def update_self(self, status: BuildStatusEnum) -> None:
 """
@@ -157,6 +157,6 @@ class WebClient(Client):
 response = requests.post(self._ahriman_url(), json=payload)
 response.raise_for_status()
 except requests.exceptions.HTTPError as e:
-self.logger.exception(f"could not update service status: {e.response.text}", exc_info=True)
+self.logger.exception(f"could not update service status: {e.response.text}")
 except Exception:
-self.logger.exception("could not update service status", exc_info=True)
+self.logger.exception("could not update service status")
@@ -40,7 +40,7 @@ def exception_handler(logger: Logger) -> Callable[[Request, HandlerType], Awaita
 except HTTPClientError:
 raise
 except Exception:
-logger.exception(f"exception during performing request to {request.path}", exc_info=True)
+logger.exception(f"exception during performing request to {request.path}")
 raise
 
 return handle
@@ -47,7 +47,7 @@ async def on_startup(application: web.Application) -> None:
 try:
 application["watcher"].load()
 except Exception:
-application.logger.exception("could not load packages", exc_info=True)
+application.logger.exception("could not load packages")
 raise InitializeException()
 
 
@@ -31,6 +31,21 @@ def package_ahriman(package_description_ahriman: PackageDescription) -> Package:
 packages=packages)
 
 
+@pytest.fixture
+def package_python_schedule(
+package_description_python_schedule: PackageDescription,
+package_description_python2_schedule: PackageDescription) -> Package:
+packages = {
+"python-schedule": package_description_python_schedule,
+"python2-schedule": package_description_python2_schedule
+}
+return Package(
+base="python-schedule",
+version="1.0.0-2",
+aur_url="https://aur.archlinux.org",
+packages=packages)
+
+
 @pytest.fixture
 def package_description_ahriman() -> PackageDescription:
 return PackageDescription(
@@ -40,6 +55,24 @@ def package_description_ahriman() -> PackageDescription:
 installed_size=4200000)
 
 
+@pytest.fixture
+def package_description_python_schedule() -> PackageDescription:
+return PackageDescription(
+archive_size=4201,
+build_date=421,
+filename="python-schedule-1.0.0-2-any.pkg.tar.zst",
+installed_size=4200001)
+
+
+@pytest.fixture
+def package_description_python2_schedule() -> PackageDescription:
+return PackageDescription(
+archive_size=4202,
+build_date=422,
+filename="python2-schedule-1.0.0-2-any.pkg.tar.zst",
+installed_size=4200002)
+
+
 @pytest.fixture
 def repository_paths() -> RepositoryPaths:
 return RepositoryPaths(
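
Note: these fixtures compose through pytest's dependency injection; package_python_schedule (added in the hunk above) simply declares the two description fixtures as parameters and pytest resolves them by name. A minimal sketch of a test consuming the new fixture (hypothetical test, not part of the commit, but grounded in the fixture values above):

    from ahriman.models.package import Package

    def test_base_is_shared(package_python_schedule: Package) -> None:
        # both split packages are grouped under the single python-schedule base
        assert package_python_schedule.base == "python-schedule"
        assert set(package_python_schedule.packages) == {"python-schedule", "python2-schedule"}
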
@@ -21,6 +21,9 @@ def test_fetch_existing(mocker: MockerFixture) -> None:
 mock.call("git", "fetch", "origin", "master",
 exception=pytest.helpers.anyvar(int),
 cwd=local, logger=pytest.helpers.anyvar(int)),
+mock.call("git", "checkout", "--force", "master",
+exception=pytest.helpers.anyvar(int),
+cwd=local, logger=pytest.helpers.anyvar(int)),
 mock.call("git", "reset", "--hard", "origin/master",
 exception=pytest.helpers.anyvar(int),
 cwd=local, logger=pytest.helpers.anyvar(int))
@@ -40,6 +43,9 @@ def test_fetch_new(mocker: MockerFixture) -> None:
 mock.call("git", "clone", "remote", str(local),
 exception=pytest.helpers.anyvar(int),
 logger=pytest.helpers.anyvar(int)),
+mock.call("git", "checkout", "--force", "master",
+exception=pytest.helpers.anyvar(int),
+cwd=local, logger=pytest.helpers.anyvar(int)),
 mock.call("git", "reset", "--hard", "origin/master",
 exception=pytest.helpers.anyvar(int),
 cwd=local, logger=pytest.helpers.anyvar(int))
tests/ahriman/core/report/test_html.py (new file, 0 lines)
tests/ahriman/core/report/test_report.py (new file, 0 lines)
tests/ahriman/core/repository/conftest.py (new file, 49 lines)
@@ -0,0 +1,49 @@
+import pytest
+
+from pytest_mock import MockerFixture
+
+from ahriman.core.configuration import Configuration
+from ahriman.core.repository.cleaner import Cleaner
+from ahriman.core.repository.executor import Executor
+from ahriman.core.repository.properties import Properties
+from ahriman.core.repository.repository import Repository
+from ahriman.core.repository.update_handler import UpdateHandler
+
+
+@pytest.fixture
+def cleaner(configuration: Configuration, mocker: MockerFixture) -> Cleaner:
+mocker.patch("pathlib.Path.mkdir")
+return Cleaner("x86_64", configuration)
+
+
+@pytest.fixture
+def executor(configuration: Configuration, mocker: MockerFixture) -> Executor:
+mocker.patch("pathlib.Path.mkdir")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_build")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_cache")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_chroot")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_manual")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_packages")
+return Executor("x86_64", configuration)
+
+
+@pytest.fixture
+def repository(configuration: Configuration, mocker: MockerFixture) -> Repository:
+mocker.patch("pathlib.Path.mkdir")
+return Repository("x86_64", configuration)
+
+
+@pytest.fixture
+def properties(configuration: Configuration) -> Properties:
+return Properties("x86_64", configuration)
+
+
+@pytest.fixture
+def update_handler(configuration: Configuration, mocker: MockerFixture) -> UpdateHandler:
+mocker.patch("pathlib.Path.mkdir")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_build")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_cache")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_chroot")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_manual")
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.clear_packages")
+return UpdateHandler("x86_64", configuration)
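
Note: mocker.patch (from pytest-mock) replaces the named attribute with a MagicMock for the duration of the test and restores it automatically, so the executor and update_handler fixtures above hand out objects whose Cleaner.clear_* methods are already stubbed and can be asserted on. A small sketch of how a test can rely on that (hypothetical example, assuming the fixtures above):

    from ahriman.core.repository.executor import Executor

    def test_clear_is_stubbed(executor: Executor) -> None:
        # clear_build was replaced by a MagicMock in the fixture, so calling it
        # does no filesystem work and the call can be inspected afterwards
        executor.clear_build()
        executor.clear_build.assert_called_once()
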
tests/ahriman/core/repository/test_cleaner.py (new file, 68 lines)
@@ -0,0 +1,68 @@
+import shutil
+
+from pathlib import Path
+from pytest_mock import MockerFixture
+from unittest import mock
+
+from ahriman.core.repository.cleaner import Cleaner
+
+
+def _mock_clear(mocker: MockerFixture) -> None:
+mocker.patch("pathlib.Path.iterdir", return_value=[Path("a"), Path("b"), Path("c")])
+mocker.patch("shutil.rmtree")
+
+
+def _mock_clear_check() -> None:
+shutil.rmtree.assert_has_calls([
+mock.call(Path("a")),
+mock.call(Path("b")),
+mock.call(Path("c"))
+])
+
+
+def test_clear_build(cleaner: Cleaner, mocker: MockerFixture) -> None:
+"""
+must remove directories with sources
+"""
+_mock_clear(mocker)
+cleaner.clear_build()
+_mock_clear_check()
+
+
+def test_clear_cache(cleaner: Cleaner, mocker: MockerFixture) -> None:
+"""
+must remove every cached sources
+"""
+_mock_clear(mocker)
+cleaner.clear_cache()
+_mock_clear_check()
+
+
+def test_clear_chroot(cleaner: Cleaner, mocker: MockerFixture) -> None:
+"""
+must clear chroot
+"""
+_mock_clear(mocker)
+cleaner.clear_chroot()
+_mock_clear_check()
+
+
+def test_clear_manual(cleaner: Cleaner, mocker: MockerFixture) -> None:
+"""
+must clear directory with manual packages
+"""
+_mock_clear(mocker)
+cleaner.clear_manual()
+_mock_clear_check()
+
+
+def test_clear_packages(cleaner: Cleaner, mocker: MockerFixture) -> None:
+"""
+must delete built packages
+"""
+mocker.patch("ahriman.core.repository.cleaner.Cleaner.packages_built",
+return_value=[Path("a"), Path("b"), Path("c")])
+mocker.patch("pathlib.Path.unlink")
+
+cleaner.clear_packages()
+Path.unlink.assert_has_calls([mock.call(), mock.call(), mock.call()])
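
Note: because mocker.patch("shutil.rmtree") swaps the attribute on the already-imported shutil module, the code under test keeps calling plain shutil.rmtree and the helper can assert on it directly; the same applies to pathlib.Path.unlink in the last test. A minimal standalone version of that pattern (a sketch using unittest.mock.patch rather than the mocker fixture):

    import shutil
    from unittest import mock

    with mock.patch("shutil.rmtree") as rmtree_mock:
        shutil.rmtree("a")  # resolves to the mock, nothing is actually deleted
        shutil.rmtree("b")
        rmtree_mock.assert_has_calls([mock.call("a"), mock.call("b")])
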
tests/ahriman/core/repository/test_executor.py (new file, 189 lines)
@@ -0,0 +1,189 @@
+from pathlib import Path
+from unittest import mock
+
+from pytest_mock import MockerFixture
+
+from ahriman.core.repository.executor import Executor
+from ahriman.models.package import Package
+
+
+def test_process_build(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
+"""
+must run build process
+"""
+mocker.patch("ahriman.core.repository.executor.Executor.packages_built", return_value=[package_ahriman])
+mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
+mocker.patch("ahriman.core.build_tools.task.Task.init")
+move_mock = mocker.patch("shutil.move")
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.set_building")
+
+# must return list of built packages
+assert executor.process_build([package_ahriman]) == [package_ahriman]
+# must move files (once)
+move_mock.assert_called_once()
+# must update status
+watcher_client_mock.assert_called_once()
+# must clear directory
+from ahriman.core.repository.cleaner import Cleaner
+Cleaner.clear_build.assert_called_once()
+
+
+def test_process_build_failure(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
+"""
+must run correct process failed builds
+"""
+mocker.patch("ahriman.core.repository.executor.Executor.packages_built")
+mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
+mocker.patch("ahriman.core.build_tools.task.Task.init")
+mocker.patch("shutil.move", side_effect=Exception())
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.set_failed")
+
+executor.process_build([package_ahriman])
+watcher_client_mock.assert_called_once()
+
+
+def test_process_remove_base(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
+"""
+must run remove process for whole base
+"""
+mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
+repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.remove")
+
+executor.process_remove([package_ahriman.base])
+# must remove via alpm wrapper
+repo_remove_mock.assert_called_once()
+# must update status
+watcher_client_mock.assert_called_once()
+
+
+def test_process_remove_base_multiple(executor: Executor, package_python_schedule: Package,
+mocker: MockerFixture) -> None:
+"""
+must run remove process for whole base with multiple packages
+"""
+mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
+repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.remove")
+
+executor.process_remove([package_python_schedule.base])
+# must remove via alpm wrapper
+repo_remove_mock.assert_has_calls([
+mock.call(package, Path(props.filename))
+for package, props in package_python_schedule.packages.items()
+], any_order=True)
+# must update status
+watcher_client_mock.assert_called_once()
+
+
+def test_process_remove_base_single(executor: Executor, package_python_schedule: Package,
+mocker: MockerFixture) -> None:
+"""
+must run remove process for single package in base
+"""
+mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_python_schedule])
+repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.remove")
+
+executor.process_remove(["python2-schedule"])
+# must remove via alpm wrapper
+repo_remove_mock.assert_called_once()
+# must not update status
+watcher_client_mock.assert_not_called()
+
+
+def test_process_remove_nothing(executor: Executor, package_ahriman: Package, package_python_schedule: Package,
+mocker: MockerFixture) -> None:
+"""
+must not remove anything if it was not requested
+"""
+mocker.patch("ahriman.core.repository.executor.Executor.packages", return_value=[package_ahriman])
+repo_remove_mock = mocker.patch("ahriman.core.alpm.repo.Repo.remove")
+
+executor.process_remove([package_python_schedule.base])
+repo_remove_mock.assert_not_called()
+
+
+def test_process_report_auto(executor: Executor, mocker: MockerFixture) -> None:
+"""
+must process report in auto mode if no targets supplied
+"""
+config_getlist_mock = mocker.patch("ahriman.core.configuration.Configuration.getlist")
+
+executor.process_report(None)
+config_getlist_mock.assert_called_once()
+
+
+def test_process_sync_auto(executor: Executor, mocker: MockerFixture) -> None:
+"""
+must process sync in auto mode if no targets supplied
+"""
+config_getlist_mock = mocker.patch("ahriman.core.configuration.Configuration.getlist")
+
+executor.process_sync(None)
+config_getlist_mock.assert_called_once()
+
+
+def test_process_update(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
+"""
+must run update process
+"""
+mocker.patch("ahriman.models.package.Package.load", return_value=package_ahriman)
+move_mock = mocker.patch("shutil.move")
+repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
+sign_package_mock = mocker.patch("ahriman.core.sign.gpg.GPG.sign_package", side_effect=lambda fn, _: [fn])
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.set_success")
+
+# must return complete
+assert executor.process_update([Path(package.filename) for package in package_ahriman.packages.values()])
+# must move files (once)
+move_mock.assert_called_once()
+# must sign package
+sign_package_mock.assert_called_once()
+# must add package
+repo_add_mock.assert_called_once()
+# must update status
+watcher_client_mock.assert_called_once()
+# must clear directory
+from ahriman.core.repository.cleaner import Cleaner
+Cleaner.clear_packages.assert_called_once()
+
+
+def test_process_update_group(executor: Executor, package_python_schedule: Package,
+mocker: MockerFixture) -> None:
+"""
+must group single packages under one base
+"""
+mocker.patch("shutil.move")
+mocker.patch("ahriman.models.package.Package.load", return_value=package_python_schedule)
+repo_add_mock = mocker.patch("ahriman.core.alpm.repo.Repo.add")
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.set_success")
+
+executor.process_update([Path(package.filename) for package in package_python_schedule.packages.values()])
+repo_add_mock.assert_has_calls([
+mock.call(executor.paths.repository / package.filename)
+for package in package_python_schedule.packages.values()
+], any_order=True)
+watcher_client_mock.assert_called_with(package_python_schedule)
+
+
+def test_process_update_failed(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
+"""
+must process update for failed package
+"""
+mocker.patch("shutil.move", side_effect=Exception())
+mocker.patch("ahriman.models.package.Package.load", return_value=package_ahriman)
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.set_failed")
+
+executor.process_update([Path(package.filename) for package in package_ahriman.packages.values()])
+watcher_client_mock.assert_called_once()
+
+
+def test_process_update_failed_on_load(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
+"""
+must process update even with failed package load
+"""
+mocker.patch("shutil.move")
+mocker.patch("ahriman.models.package.Package.load", side_effect=Exception())
+
+assert executor.process_update([Path(package.filename) for package in package_ahriman.packages.values()])
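
Note: two unittest.mock behaviours do the heavy lifting in these tests: a callable side_effect (the sign_package lambda) computes the mock's return value from its arguments, while an exception instance as side_effect (the shutil.move failure cases) makes the mock raise. A compact illustration:

    from unittest.mock import MagicMock

    sign_package = MagicMock(side_effect=lambda fn, _: [fn])
    assert sign_package("ahriman.pkg.tar.zst", "key") == ["ahriman.pkg.tar.zst"]

    move = MagicMock(side_effect=Exception())
    try:
        move("src", "dst")
    except Exception:
        print("raised, as the failure tests expect")
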
tests/ahriman/core/repository/test_properties.py (new file, 14 lines)
@@ -0,0 +1,14 @@
+from pytest_mock import MockerFixture
+
+from ahriman.core.configuration import Configuration
+from ahriman.core.repository.properties import Properties
+
+
+def test_create_tree_on_load(configuration: Configuration, mocker: MockerFixture) -> None:
+"""
+must create tree on load
+"""
+create_tree_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.create_tree")
+Properties("x86_64", configuration)
+
+create_tree_mock.assert_called_once()
tests/ahriman/core/repository/test_repository.py (new file, 34 lines)
@@ -0,0 +1,34 @@
+from pathlib import Path
+
+from pytest_mock import MockerFixture
+
+from ahriman.core.repository.repository import Repository
+from ahriman.models.package import Package
+
+
+def test_packages(package_ahriman: Package, package_python_schedule: Package,
+repository: Repository, mocker: MockerFixture) -> None:
+"""
+must return all packages grouped by package base
+"""
+single_packages = [
+Package(base=package_python_schedule.base,
+version=package_python_schedule.version,
+aur_url=package_python_schedule.aur_url,
+packages={package: props})
+for package, props in package_python_schedule.packages.items()
+] + [package_ahriman]
+
+mocker.patch("pathlib.Path.iterdir",
+return_value=[Path("a.pkg.tar.xz"), Path("b.pkg.tar.xz"), Path("c.pkg.tar.xz")])
+mocker.patch("ahriman.models.package.Package.load", side_effect=single_packages)
+
+packages = repository.packages()
+assert len(packages) == 2
+assert {package.base for package in packages} == {package_ahriman.base, package_python_schedule.base}
+
+archives = sum([list(package.packages.keys()) for package in packages], start=[])
+assert len(archives) == 3
+expected = set(package_ahriman.packages.keys())
+expected.update(package_python_schedule.packages.keys())
+assert set(archives) == expected
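
Note: the archives line flattens the per-base package dictionaries by calling sum with a list start value, which concatenates the inner lists (also note that a list side_effect on Package.load returns the items one per call). The flattening behaviour, as a quick check:

    parts = [["ahriman"], ["python-schedule", "python2-schedule"]]
    # sum with a list start concatenates the inner lists in order
    assert sum(parts, start=[]) == ["ahriman", "python-schedule", "python2-schedule"]
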
tests/ahriman/core/repository/test_update_handler.py (new file, 124 lines)
@@ -0,0 +1,124 @@
+from pytest_mock import MockerFixture
+
+from ahriman.core.repository.update_handler import UpdateHandler
+from ahriman.models.package import Package
+
+
+def test_updates_aur(update_handler: UpdateHandler, package_ahriman: Package,
+mocker: MockerFixture) -> None:
+"""
+must provide updates with status watcher updates
+"""
+mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.packages", return_value=[package_ahriman])
+mocker.patch("ahriman.models.package.Package.is_outdated", return_value=True)
+mocker.patch("ahriman.models.package.Package.load", return_value=package_ahriman)
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.set_pending")
+
+assert update_handler.updates_aur([], False) == [package_ahriman]
+watcher_client_mock.assert_called_once()
+
+
+def test_updates_aur_failed(update_handler: UpdateHandler, package_ahriman: Package,
+mocker: MockerFixture) -> None:
+"""
+must update status watcher via client for failed load
+"""
+mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.packages", return_value=[package_ahriman])
+mocker.patch("ahriman.models.package.Package.load", side_effect=Exception())
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.set_failed")
+
+update_handler.updates_aur([], False)
+watcher_client_mock.assert_called_once()
+
+
+def test_updates_aur_filter(update_handler: UpdateHandler, package_ahriman: Package, package_python_schedule: Package,
+mocker: MockerFixture) -> None:
+"""
+must provide updates only for filtered packages
+"""
+mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.packages",
+return_value=[package_ahriman, package_python_schedule])
+mocker.patch("ahriman.models.package.Package.is_outdated", return_value=True)
+package_load_mock = mocker.patch("ahriman.models.package.Package.load", return_value=package_ahriman)
+
+assert update_handler.updates_aur([package_ahriman.base], False) == [package_ahriman]
+package_load_mock.assert_called_once()
+
+
+def test_updates_aur_ignore(update_handler: UpdateHandler, package_ahriman: Package,
+mocker: MockerFixture) -> None:
+"""
+must skip ignore packages
+"""
+mocker.patch("ahriman.core.configuration.Configuration.getlist", return_value=[package_ahriman.base])
+mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.packages", return_value=[package_ahriman])
+package_load_mock = mocker.patch("ahriman.models.package.Package.load")
+
+update_handler.updates_aur([], False)
+package_load_mock.assert_not_called()
+
+
+def test_updates_aur_ignore_vcs(update_handler: UpdateHandler, package_ahriman: Package,
+mocker: MockerFixture) -> None:
+"""
+must skip VCS packages check if requested
+"""
+mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.packages", return_value=[package_ahriman])
+mocker.patch("ahriman.models.package.Package.is_vcs", return_value=True)
+package_is_outdated_mock = mocker.patch("ahriman.models.package.Package.is_outdated")
+
+update_handler.updates_aur([], True)
+package_is_outdated_mock.assert_not_called()
+
+
+def test_updates_manual_clear(update_handler: UpdateHandler, mocker: MockerFixture) -> None:
+"""
+requesting manual updates must clear packages directory
+"""
+mocker.patch("pathlib.Path.iterdir", return_value=[])
+mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.packages")
+
+update_handler.updates_manual()
+
+from ahriman.core.repository.cleaner import Cleaner
+Cleaner.clear_manual.assert_called_once()
+
+
+def test_updates_manual_status_known(update_handler: UpdateHandler, package_ahriman: Package,
+mocker: MockerFixture) -> None:
+"""
+must create record for known package via reporter
+"""
+mocker.patch("pathlib.Path.iterdir", return_value=[package_ahriman.base])
+mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.packages", return_value=[package_ahriman])
+mocker.patch("ahriman.models.package.Package.load", return_value=package_ahriman)
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.set_pending")
+
+update_handler.updates_manual()
+watcher_client_mock.assert_called_once()
+
+
+def test_updates_manual_status_unknown(update_handler: UpdateHandler, package_ahriman: Package,
+mocker: MockerFixture) -> None:
+"""
+must create record for unknown package via reporter
+"""
+mocker.patch("pathlib.Path.iterdir", return_value=[package_ahriman.base])
+mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.packages", return_value=[])
+mocker.patch("ahriman.models.package.Package.load", return_value=package_ahriman)
+watcher_client_mock = mocker.patch("ahriman.core.watcher.client.Client.set_unknown")
+
+update_handler.updates_manual()
+watcher_client_mock.assert_called_once()
+
+
+def test_updates_manual_with_failures(update_handler: UpdateHandler, package_ahriman: Package,
+mocker: MockerFixture) -> None:
+"""
+must process through the packages with failure
+"""
+mocker.patch("pathlib.Path.iterdir", return_value=[package_ahriman.base])
+mocker.patch("ahriman.core.repository.update_handler.UpdateHandler.packages", return_value=[])
+mocker.patch("ahriman.models.package.Package.load", side_effect=Exception())
+
+assert update_handler.updates_manual() == []
@@ -10,21 +10,6 @@ def build_status_failed() -> BuildStatus:
 return BuildStatus(BuildStatusEnum.Failed, 42)
 
 
-@pytest.fixture
-def package_python_schedule(
-package_description_python_schedule: PackageDescription,
-package_description_python2_schedule: PackageDescription) -> Package:
-packages = {
-"python-schedule": package_description_python_schedule,
-"python2-schedule": package_description_python2_schedule
-}
-return Package(
-base="python-schedule",
-version="1.0.0-2",
-aur_url="https://aur.archlinux.org",
-packages=packages)
-
-
 @pytest.fixture
 def package_tpacpi_bat_git() -> Package:
 return Package(
@@ -32,21 +17,3 @@ def package_tpacpi_bat_git() -> Package:
 version="3.1.r12.g4959b52-1",
 aur_url="https://aur.archlinux.org",
 packages={"tpacpi-bat-git": PackageDescription()})
-
-
-@pytest.fixture
-def package_description_python_schedule() -> PackageDescription:
-return PackageDescription(
-archive_size=4201,
-build_date=421,
-filename="python-schedule-1.0.0-2-any.pkg.tar.zst",
-installed_size=4200001)
-
-
-@pytest.fixture
-def package_description_python2_schedule() -> PackageDescription:
-return PackageDescription(
-archive_size=4202,
-build_date=422,
-filename="python2-schedule-1.0.0-2-any.pkg.tar.zst",
-installed_size=4200002)
tests/ahriman/models/test_package_desciption.py (new file, 0 lines)