Mirror of https://github.com/arcan1s/ahriman.git, synced 2025-04-24 07:17:17 +00:00
add ability to use ahriman pacman database instead of system one (#71)
By default this feature is enabled. On the first run it copies the databases (if they exist) from the filesystem to a local cache, one per architecture, and uses this cache for all alpm operations afterwards. In order to update this cache, some commands (mainly package building) provide a `-y`/`--refresh` option with the same semantics as `pacman -Sy`. Note, however, that due to extending the `Pacman` class some methods were renamed to be more descriptive:

* `Pacman.all_packages` -> `Pacman.packages`
* `Pacman.get` -> `Pacman.package_get`

This commit also adds the multilib repository to the default docker image, which was previously missing.
This commit is contained in: parent 45fb2f3c46, commit 82b932371b
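For orientation, a minimal usage sketch of the reworked wrapper after this change; it is based only on the signatures shown in the diff below, and the values are illustrative:

# sketch only: names follow the diff below, refresh_database mirrors the -y/--refresh counter
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration

configuration = Configuration()  # any loaded configuration instance, as in the docstring examples below
pacman = Pacman("x86_64", configuration, refresh_database=1)  # 0 - no sync, 1 - pacman -Sy, 2 - pacman -Syy
known = pacman.packages()                      # was Pacman.all_packages()
package = next(pacman.package_get("pacman"))   # was Pacman.get()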
@@ -13,11 +13,14 @@ ENV AHRIMAN_REPOSITORY_ROOT="/var/lib/ahriman/ahriman"
ENV AHRIMAN_USER="ahriman"

# install environment
## update pacman.conf with multilib
RUN echo "[multilib]" >> "/etc/pacman.conf" && \
echo "Include = /etc/pacman.d/mirrorlist" >> "/etc/pacman.conf"
## install minimal required packages
RUN pacman --noconfirm -Syu binutils fakeroot git make sudo
## create build user
RUN useradd -m -d /home/build -s /usr/bin/nologin build && \
echo "build ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/build
RUN useradd -m -d "/home/build" -s "/usr/bin/nologin" build && \
echo "build ALL=(ALL) NOPASSWD: ALL" > "/etc/sudoers.d/build"
COPY "docker/install-aur-package.sh" "/usr/local/bin/install-aur-package"
## install package dependencies
## darcs is intentionally not installed, because it requires a lot of haskell packages which dramatically increase the image size
@@ -26,9 +26,11 @@ Base configuration settings.

libalpm and AUR related configuration.

* ``database`` - path to pacman local database cache, string, required.
* ``database`` - path to pacman system database cache, string, required.
* ``mirror`` - package database mirror used by pacman for synchronization, string, required. This option supports the standard pacman substitutions with ``$arch`` and ``$repo`` (see the sketch after this list). Note that the mentioned mirror should contain all repositories which are set by the ``alpm.repositories`` option.
* ``repositories`` - list of pacman repositories, space separated list of strings, required.
* ``root`` - root for alpm library, string, required.
* ``use_ahriman_cache`` - use local pacman package cache instead of the system one, boolean, required. With this option enabled you might want to refresh the database periodically (available as an additional flag for some subcommands).
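As a quick illustration of the ``mirror`` substitutions mentioned above, this is essentially what the new ``Pacman.database_init`` method in this commit does (sketch only, values are examples):

# sketch: expanding $repo and $arch in the mirror template
mirror = "https://geo.mirror.pkgbuild.com/$repo/os/$arch"
repository, architecture = "core", "x86_64"
server = mirror.replace("$repo", repository).replace("$arch", architecture)
# server == "https://geo.mirror.pkgbuild.com/core/os/x86_64"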
``auth`` group
--------------
@@ -2,6 +2,6 @@
Description=ArcH linux ReposItory MANager (%I architecture)

[Service]
ExecStart=/usr/bin/ahriman --architecture %i update
ExecStart=/usr/bin/ahriman --architecture %i repo-update --refresh
User=ahriman
Group=ahriman
@@ -5,8 +5,10 @@ database = /var/lib/ahriman/ahriman.db

[alpm]
database = /var/lib/pacman
mirror = https://geo.mirror.pkgbuild.com/$repo/os/$arch
repositories = core extra community multilib
root = /
use_ahriman_cache = yes

[auth]
target = disabled
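For reference, a short sketch of how these ``[alpm]`` options are consumed; it mirrors the ``Pacman.__init__`` changes shown later in this diff and assumes ``configuration`` is an already loaded ``Configuration`` instance:

# sketch: reading the [alpm] section above the same way the wrapper does
root = configuration.getpath("alpm", "root")
pacman_root = configuration.getpath("alpm", "database")
mirror = configuration.get("alpm", "mirror")
repositories = configuration.getlist("alpm", "repositories")
use_ahriman_cache = configuration.getboolean("alpm", "use_ahriman_cache")
# with use_ahriman_cache enabled the alpm handle points at the ahriman cache instead of pacman_root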
@@ -160,6 +160,9 @@ def _set_daemon_parser(root: SubParserAction) -> argparse.ArgumentParser:
parser.add_argument("--no-local", help="do not check local packages for updates", action="store_true")
parser.add_argument("--no-manual", help="do not include manual updates", action="store_true")
parser.add_argument("--no-vcs", help="do not check VCS packages", action="store_true")
parser.add_argument("-y", "--refresh", help="download fresh package databases from the mirror before actions, "
"-yy to force refresh even if up to date",
action="count", default=0)
parser.set_defaults(handler=handlers.Daemon, dry_run=False, exit_code=False, package=[])
return parser
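The ``action="count"`` argument above is what turns repeated ``-y`` flags into the refresh level; a small standalone sketch of the same behaviour:

# sketch: how -y/-yy map to the refresh levels checked by the tests further down
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-y", "--refresh", action="count", default=0)
assert parser.parse_args([]).refresh == 0       # no database refresh
assert parser.parse_args(["-y"]).refresh == 1   # pacman -Sy semantics
assert parser.parse_args(["-yy"]).refresh == 2  # pacman -Syy semantics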
@@ -251,6 +254,9 @@ def _set_package_add_parser(root: SubParserAction) -> argparse.ArgumentParser:
parser.add_argument("package", help="package source (base name, path to local files, remote URL)", nargs="+")
parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
parser.add_argument("-n", "--now", help="run update function after", action="store_true")
parser.add_argument("-y", "--refresh", help="download fresh package databases from the mirror before actions, "
"-yy to force refresh even if up to date",
action="count", default=0)
parser.add_argument("-s", "--source", help="explicitly specify the package source for this command",
type=PackageSource, choices=enum_values(PackageSource), default=PackageSource.Auto)
parser.add_argument("--without-dependencies", help="do not add dependencies", action="store_true")

@@ -467,6 +473,9 @@ def _set_repo_check_parser(root: SubParserAction) -> argparse.ArgumentParser:
parser.add_argument("package", help="filter check by package base", nargs="*")
parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
parser.add_argument("--no-vcs", help="do not check VCS packages", action="store_true")
parser.add_argument("-y", "--refresh", help="download fresh package databases from the mirror before actions, "
"-yy to force refresh even if up to date",
action="count", default=0)
parser.set_defaults(handler=handlers.Update, dry_run=True, no_aur=False, no_local=False, no_manual=True)
return parser

@@ -717,6 +726,9 @@ def _set_repo_update_parser(root: SubParserAction) -> argparse.ArgumentParser:
parser.add_argument("--no-local", help="do not check local packages for updates", action="store_true")
parser.add_argument("--no-manual", help="do not include manual updates", action="store_true")
parser.add_argument("--no-vcs", help="do not check VCS packages", action="store_true")
parser.add_argument("-y", "--refresh", help="download fresh package databases from the mirror before actions, "
"-yy to force refresh even if up to date",
action="count", default=0)
parser.set_defaults(handler=handlers.Update)
return parser
@@ -62,7 +62,7 @@ class Application(ApplicationPackages, ApplicationRepository):
for package, properties in base.packages.items():
known_packages.add(package)
known_packages.update(properties.provides)
known_packages.update(self.repository.pacman.all_packages())
known_packages.update(self.repository.pacman.packages())
return known_packages

def on_result(self, result: Result) -> None:
@@ -34,7 +34,8 @@ class ApplicationProperties(LazyLogging):
repository(Repository): repository instance
"""

def __init__(self, architecture: str, configuration: Configuration, no_report: bool, unsafe: bool) -> None:
def __init__(self, architecture: str, configuration: Configuration,
no_report: bool, unsafe: bool, refresh_pacman_database: int = 0) -> None:
"""
default constructor

@@ -43,8 +44,10 @@ class ApplicationProperties(LazyLogging):
configuration(Configuration): configuration instance
no_report(bool): force disable reporting
unsafe(bool): if set no user check will be performed before path creation
refresh_pacman_database(int): pacman database synchronization level, ``0`` is disabled
"""
self.configuration = configuration
self.architecture = architecture
self.database = SQLite.load(configuration)
self.repository = Repository(architecture, configuration, self.database, no_report, unsafe)
self.repository = Repository(architecture, configuration, self.database,
no_report, unsafe, refresh_pacman_database)
@@ -44,7 +44,7 @@ class Add(Handler):
no_report(bool): force disable reporting
unsafe(bool): if set no user check will be performed before path creation
"""
application = Application(architecture, configuration, no_report, unsafe)
application = Application(architecture, configuration, no_report, unsafe, args.refresh)
application.on_start()
application.add(args.package, args.source, args.without_dependencies)
if not args.now:

@@ -44,7 +44,7 @@ class Update(Handler):
no_report(bool): force disable reporting
unsafe(bool): if set no user check will be performed before path creation
"""
application = Application(architecture, configuration, no_report, unsafe)
application = Application(architecture, configuration, no_report, unsafe, args.refresh)
application.on_start()
packages = application.updates(args.package, args.no_aur, args.no_local, args.no_manual, args.no_vcs,
Update.log_fn(application, args.dry_run))
@@ -17,13 +17,18 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from pyalpm import Handle, Package, SIG_PACKAGE # type: ignore
import shutil

from pathlib import Path
from pyalpm import DB, Handle, Package, SIG_PACKAGE, error as PyalpmError # type: ignore
from typing import Generator, Set

from ahriman.core.configuration import Configuration
from ahriman.core.lazy_logging import LazyLogging
from ahriman.models.repository_paths import RepositoryPaths


class Pacman:
class Pacman(LazyLogging):
"""
alpm wrapper
@@ -31,35 +36,96 @@ class Pacman:
handle(Handle): pyalpm root ``Handle``
"""

def __init__(self, configuration: Configuration) -> None:
def __init__(self, architecture: str, configuration: Configuration, *, refresh_database: int) -> None:
"""
default constructor

Args:
architecture(str): repository architecture
configuration(Configuration): configuration instance
refresh_database(int): synchronize local cache to remote. If set to ``0``, no synchronization will be
performed, if set to ``1`` - normal synchronization, if set to ``2`` - force synchronization
"""
root = configuration.get("alpm", "root")
root = configuration.getpath("alpm", "root")
pacman_root = configuration.getpath("alpm", "database")
self.handle = Handle(root, str(pacman_root))
for repository in configuration.getlist("alpm", "repositories"):
self.handle.register_syncdb(repository, SIG_PACKAGE)
use_ahriman_cache = configuration.getboolean("alpm", "use_ahriman_cache")
mirror = configuration.get("alpm", "mirror")
paths = configuration.repository_paths
database_path = paths.pacman if use_ahriman_cache else pacman_root

def all_packages(self) -> Set[str]:
self.handle = Handle(str(root), str(database_path))
for repository in configuration.getlist("alpm", "repositories"):
database = self.database_init(repository, mirror, architecture)
self.database_copy(database, pacman_root, paths, use_ahriman_cache=use_ahriman_cache)

if use_ahriman_cache and refresh_database:
self.database_sync(refresh_database > 1)

def database_copy(self, database: DB, pacman_root: Path, paths: RepositoryPaths, *,
use_ahriman_cache: bool) -> None:
"""
get list of packages known for alpm
copy database from the operating system root to the ahriman local home

Args:
database(DB): pacman database instance to be copied
pacman_root(Path): operating system pacman's root
paths(RepositoryPaths): repository paths instance
use_ahriman_cache(bool): use local ahriman cache instead of system one
"""
def repository_database(root: Path) -> Path:
return root / "sync" / f"{database.name}.db"

if not use_ahriman_cache:
return
# copy root database if no local copy found
pacman_db_path = Path(self.handle.dbpath)
if not pacman_db_path.is_dir():
return # root directory does not exist yet
dst = repository_database(pacman_db_path)
if dst.is_file():
return # file already exists, do not copy
src = repository_database(pacman_root)
if not src.is_file():
self.logger.warning("repository %s is set to be used, however, no working copy was found", database.name)
return # database for some reason does not exist
self.logger.info("copy pacman database from operating system root to ahriman's home")
shutil.copy(src, dst)
paths.chown(dst)

def database_init(self, repository: str, mirror: str, architecture: str) -> DB:
"""
create database instance from pacman handler and set its properties

Args:
repository(str): pacman repository name (e.g. core)
mirror(str): arch linux mirror url
architecture(str): repository architecture

Returns:
Set[str]: list of package names
DB: loaded pacman database instance
"""
result: Set[str] = set()
database: DB = self.handle.register_syncdb(repository, SIG_PACKAGE)
# replace variables in mirror address
database.servers = [mirror.replace("$repo", repository).replace("$arch", architecture)]
return database

def database_sync(self, force: bool) -> None:
"""
sync local database

Args:
force(bool): force database synchronization (same as ``pacman -Syy``)
"""
self.logger.info("refresh ahriman's home pacman database (force refresh %s)", force)
transaction = self.handle.init_transaction()
for database in self.handle.get_syncdbs():
for package in database.pkgcache:
result.add(package.name) # package itself
result.update(package.provides) # provides list for meta-packages
try:
database.update(force)
except PyalpmError:
self.logger.exception("exception during update %s", database.name)
transaction.release()

return result

def get(self, package_name: str) -> Generator[Package, None, None]:
def package_get(self, package_name: str) -> Generator[Package, None, None]:
"""
retrieve list of the packages from the repository by name
@@ -74,3 +140,18 @@ class Pacman:
if package is None:
continue
yield package

def packages(self) -> Set[str]:
"""
get list of packages known for alpm

Returns:
Set[str]: list of package names
"""
result: Set[str] = set()
for database in self.handle.get_syncdbs():
for package in database.pkgcache:
result.add(package.name) # package itself
result.update(package.provides) # provides list for meta-packages

return result
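Putting the new methods together, a hedged usage sketch of the database life cycle introduced above; ``configuration`` is assumed to be an already loaded ``Configuration`` instance, and the other values come from it exactly as in ``__init__``:

# sketch: init the repository database, seed the local cache, then refresh it
pacman = Pacman("x86_64", configuration, refresh_database=0)  # same call as the test fixture below
mirror = configuration.get("alpm", "mirror")
pacman_root = configuration.getpath("alpm", "database")
paths = configuration.repository_paths
database = pacman.database_init("core", mirror, "x86_64")     # register syncdb and set its server
pacman.database_copy(database, pacman_root, paths, use_ahriman_cache=True)  # seed the cache from the system root
pacman.database_sync(force=False)                             # refresh, pacman -Sy style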
@@ -48,4 +48,4 @@ class OfficialSyncdb(Official):
Returns:
AURPackage: package which matches the package name
"""
return next(AURPackage.from_pacman(package) for package in pacman.get(package_name))
return next(AURPackage.from_pacman(package) for package in pacman.package_get(package_name))
@@ -225,14 +225,14 @@ class Configuration(configparser.RawConfigParser):

# pylint and mypy are too stupid to find these methods
# pylint: disable=missing-function-docstring,multiple-statements,unused-argument
def getlist(self, *args: Any, **kwargs: Any) -> List[str]: ...
def getlist(self, *args: Any, **kwargs: Any) -> List[str]: ... # type: ignore

def getpath(self, *args: Any, **kwargs: Any) -> Path: ...
def getpath(self, *args: Any, **kwargs: Any) -> Path: ... # type: ignore

def gettype(self, section: str, architecture: str) -> Tuple[str, str]:
"""
get type variable with fallback to old logic
Despite the fact that it has same semantics as other get* methods, but it has different argument list
get type variable with fallback to old logic. Despite the fact that it has the same semantics as other get* methods,
it has a different argument list

Args:
section(str): section name
@@ -48,7 +48,7 @@ class RepositoryProperties(LazyLogging):
"""

def __init__(self, architecture: str, configuration: Configuration, database: SQLite,
no_report: bool, unsafe: bool) -> None:
no_report: bool, unsafe: bool, refresh_pacman_database: int = 0) -> None:
"""
default constructor

@@ -58,6 +58,7 @@ class RepositoryProperties(LazyLogging):
database(SQLite): database instance
no_report(bool): force disable reporting
unsafe(bool): if set no user check will be performed before path creation
refresh_pacman_database(int): pacman database synchronization level, ``0`` is disabled
"""
self.architecture = architecture
self.configuration = configuration

@@ -73,7 +74,7 @@ class RepositoryProperties(LazyLogging):
self.logger.warning("root owner differs from the current user, skipping tree creation")

self.ignore_list = configuration.getlist("build", "ignore_packages", fallback=[])
self.pacman = Pacman(configuration)
self.pacman = Pacman(architecture, configuration, refresh_database=refresh_pacman_database)
self.sign = GPG(architecture, configuration)
self.repo = Repo(self.name, self.paths, self.sign.repository_sign_args)
self.reporter = Client() if no_report else Client.load(configuration)
@@ -67,8 +67,10 @@ class AURPackage:
>>>
>>>
>>> from ahriman.core.alpm.pacman import Pacman
>>> from ahriman.core.configuration import Configuration
>>>
>>> pacman = Pacman(configuration)
>>> configuration = Configuration()
>>> pacman = Pacman("x86_64", configuration)
>>> metadata = pacman.get("pacman")
>>> package = AURPackage.from_pacman(next(metadata)) # load package from pyalpm wrapper
"""

@@ -57,7 +57,7 @@ class PackageDescription:
>>> from ahriman.core.configuration import Configuration
>>>
>>> configuration = Configuration()
>>> pacman = Pacman(configuration)
>>> pacman = Pacman("x86_64", configuration)
>>> pyalpm_description = next(package for package in pacman.get("pacman"))
>>> description = PackageDescription.from_package(
>>> pyalpm_description, Path("/var/cache/pacman/pkg/pacman-6.0.1-4-x86_64.pkg.tar.zst"))
@@ -87,6 +87,16 @@ class RepositoryPaths:
"""
return self.root / "packages" / self.architecture

@property
def pacman(self) -> Path:
"""
get directory for pacman local package cache

Returns:
Path: full path to pacman local database cache
"""
return self.root / "pacman" / self.architecture

@property
def repository(self) -> Path:
"""

@@ -194,6 +204,7 @@ class RepositoryPaths:
self.cache,
self.chroot,
self.packages,
self.pacman / "sync", # we need sync directory in order to be able to copy databases
self.repository,
):
directory.mkdir(mode=0o755, parents=True, exist_ok=True)
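As an illustration only, assuming a dataclass-style ``RepositoryPaths(root, architecture)`` constructor (the field names ``root`` and ``architecture`` are the ones used above) and the default repository root from the Dockerfile, the new cache would live here:

# illustration: layout of the new pacman cache directory (constructor shape is an assumption)
from pathlib import Path
from ahriman.models.repository_paths import RepositoryPaths

paths = RepositoryPaths(root=Path("/var/lib/ahriman/ahriman"), architecture="x86_64")
print(paths.pacman)           # /var/lib/ahriman/ahriman/pacman/x86_64
print(paths.pacman / "sync")  # created by tree_create() so that databases can be copied into it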
@@ -21,8 +21,8 @@ from __future__ import annotations

from dataclasses import dataclass, replace
from typing import Optional, Type
from passlib.pwd import genword as generate_password # type: ignore
from passlib.handlers.sha2_crypt import sha512_crypt # type: ignore
from passlib.pwd import genword as generate_password
from passlib.handlers.sha2_crypt import sha512_crypt

from ahriman.models.user_access import UserAccess
@@ -23,6 +23,7 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
args.package = []
args.exit_code = False
args.now = False
args.refresh = 0
args.source = PackageSource.Auto
args.without_dependencies = False
return args

@@ -28,6 +28,7 @@ def _default_args(args: argparse.Namespace) -> argparse.Namespace:
args.no_local = False
args.no_manual = False
args.no_vcs = False
args.refresh = 0
return args
@@ -75,6 +75,18 @@ def test_subparsers_daemon(parser: argparse.ArgumentParser) -> None:
assert args.package == []


def test_subparsers_daemon_option_refresh(parser: argparse.ArgumentParser) -> None:
"""
daemon command must count refresh options
"""
args = parser.parse_args(["daemon"])
assert args.refresh == 0
args = parser.parse_args(["daemon", "-y"])
assert args.refresh == 1
args = parser.parse_args(["daemon", "-yy"])
assert args.refresh == 2


def test_subparsers_daemon_option_interval(parser: argparse.ArgumentParser) -> None:
"""
daemon command must convert interval option to int instance

@@ -155,6 +167,18 @@ def test_subparsers_package_add_architecture(parser: argparse.ArgumentParser) ->
assert args.architecture == ["x86_64"]


def test_subparsers_package_add_option_refresh(parser: argparse.ArgumentParser) -> None:
"""
package-add command must count refresh options
"""
args = parser.parse_args(["package-add", "ahriman"])
assert args.refresh == 0
args = parser.parse_args(["package-add", "ahriman", "-y"])
assert args.refresh == 1
args = parser.parse_args(["package-add", "ahriman", "-yy"])
assert args.refresh == 2


def test_subparsers_package_remove_architecture(parser: argparse.ArgumentParser) -> None:
"""
package-remove command must correctly parse architecture list

@@ -345,6 +369,18 @@ def test_subparsers_repo_check_architecture(parser: argparse.ArgumentParser) ->
assert args.architecture == ["x86_64"]


def test_subparsers_repo_check_option_refresh(parser: argparse.ArgumentParser) -> None:
"""
repo-check command must count refresh options
"""
args = parser.parse_args(["repo-check"])
assert args.refresh == 0
args = parser.parse_args(["repo-check", "-y"])
assert args.refresh == 1
args = parser.parse_args(["repo-check", "-yy"])
assert args.refresh == 2


def test_subparsers_repo_clean(parser: argparse.ArgumentParser) -> None:
"""
repo-clean command must imply quiet and unsafe

@@ -540,6 +576,18 @@ def test_subparsers_repo_update_architecture(parser: argparse.ArgumentParser) ->
assert args.architecture == ["x86_64"]


def test_subparsers_repo_update_option_refresh(parser: argparse.ArgumentParser) -> None:
"""
repo-update command must count refresh options
"""
args = parser.parse_args(["repo-update"])
assert args.refresh == 0
args = parser.parse_args(["repo-update", "-y"])
assert args.refresh == 1
args = parser.parse_args(["repo-update", "-yy"])
assert args.refresh == 2


def test_subparsers_shell(parser: argparse.ArgumentParser) -> None:
"""
shell command must imply lock and no-report
@@ -360,7 +360,7 @@ def pacman(configuration: Configuration) -> Pacman:
Returns:
Pacman: pacman wrapper test instance
"""
return Pacman(configuration)
return Pacman("x86_64", configuration, refresh_database=0)


@pytest.fixture

@@ -11,7 +11,7 @@ def test_package_info(official_syncdb: OfficialSyncdb, aur_package_akonadi: AURP
must return package info from the database
"""
mocker.patch("ahriman.models.aur_package.AURPackage.from_pacman", return_value=aur_package_akonadi)
get_mock = mocker.patch("ahriman.core.alpm.pacman.Pacman.get", return_value=[aur_package_akonadi])
get_mock = mocker.patch("ahriman.core.alpm.pacman.Pacman.package_get", return_value=[aur_package_akonadi])

package = official_syncdb.package_info(aur_package_akonadi.name, pacman=pacman)
get_mock.assert_called_once_with(aur_package_akonadi.name)
@@ -1,31 +1,205 @@
from pathlib import Path
from pyalpm import error as PyalpmError
from pytest_mock import MockerFixture
from tempfile import TemporaryDirectory
from unittest.mock import MagicMock

from ahriman.core.alpm.pacman import Pacman
from ahriman.core.configuration import Configuration
from ahriman.models.repository_paths import RepositoryPaths


def test_all_packages(pacman: Pacman) -> None:
def test_init_with_local_cache(configuration: Configuration, mocker: MockerFixture) -> None:
"""
must sync repositories at the start if set
"""
mocker.patch("ahriman.core.alpm.pacman.Pacman.database_copy")
sync_mock = mocker.patch("ahriman.core.alpm.pacman.Pacman.database_sync")
configuration.set_option("alpm", "use_ahriman_cache", "yes")

# pyalpm.Handle tries to reach the directory we've asked for, thus we need to patch it a bit
with TemporaryDirectory(ignore_cleanup_errors=True) as pacman_root:
mocker.patch.object(RepositoryPaths, "pacman", Path(pacman_root))
# during creation pyalpm.Handle will also create a version file which we would like to remove later
Pacman("x86_64", configuration, refresh_database=1)
sync_mock.assert_called_once_with(False)


def test_init_with_local_cache_forced(configuration: Configuration, mocker: MockerFixture) -> None:
"""
must sync repositories at the start if set with force flag
"""
mocker.patch("ahriman.core.alpm.pacman.Pacman.database_copy")
sync_mock = mocker.patch("ahriman.core.alpm.pacman.Pacman.database_sync")
configuration.set_option("alpm", "use_ahriman_cache", "yes")

# pyalpm.Handle tries to reach the directory we've asked for, thus we need to patch it a bit
with TemporaryDirectory(ignore_cleanup_errors=True) as pacman_root:
mocker.patch.object(RepositoryPaths, "pacman", Path(pacman_root))
# during creation pyalpm.Handle will also create a version file which we would like to remove later
Pacman("x86_64", configuration, refresh_database=2)
sync_mock.assert_called_once_with(True)


def test_database_copy(pacman: Pacman, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must copy database from root
"""
database = next(db for db in pacman.handle.get_syncdbs() if db.name == "core")
path = Path("randomname")
dst_path = Path("/var/lib/pacman/sync/core.db")
mocker.patch("pathlib.Path.is_dir", return_value=True)
# root database exists, local database does not
mocker.patch("pathlib.Path.is_file", autospec=True, side_effect=lambda p: True if p.is_relative_to(path) else False)
copy_mock = mocker.patch("shutil.copy")
chown_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.chown")

pacman.database_copy(database, path, repository_paths, use_ahriman_cache=True)
copy_mock.assert_called_once_with(path / "sync" / "core.db", dst_path)
chown_mock.assert_called_once_with(dst_path)


def test_database_copy_skip(pacman: Pacman, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must not copy database from root if local cache is disabled
"""
database = next(db for db in pacman.handle.get_syncdbs() if db.name == "core")
path = Path("randomname")
mocker.patch("pathlib.Path.is_dir", return_value=True)
# root database exists, local database does not
mocker.patch("pathlib.Path.is_file", autospec=True, side_effect=lambda p: True if p.is_relative_to(path) else False)
copy_mock = mocker.patch("shutil.copy")

pacman.database_copy(database, path, repository_paths, use_ahriman_cache=False)
copy_mock.assert_not_called()

def test_database_copy_no_directory(pacman: Pacman, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must not copy database if the local cache directory does not exist
"""
database = next(db for db in pacman.handle.get_syncdbs() if db.name == "core")
path = Path("randomname")
mocker.patch("pathlib.Path.is_dir", return_value=False)
# root database exists, local database does not
mocker.patch("pathlib.Path.is_file", autospec=True, side_effect=lambda p: True if p.is_relative_to(path) else False)
copy_mock = mocker.patch("shutil.copy")

pacman.database_copy(database, path, repository_paths, use_ahriman_cache=True)
copy_mock.assert_not_called()

def test_database_copy_no_root_file(pacman: Pacman, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must not copy database if no repository file exists in the filesystem
"""
database = next(db for db in pacman.handle.get_syncdbs() if db.name == "core")
path = Path("randomname")
mocker.patch("pathlib.Path.is_dir", return_value=True)
# root database does not exist, local database does not either
mocker.patch("pathlib.Path.is_file", return_value=False)
copy_mock = mocker.patch("shutil.copy")

pacman.database_copy(database, path, repository_paths, use_ahriman_cache=True)
copy_mock.assert_not_called()


def test_database_copy_database_exist(pacman: Pacman, repository_paths: RepositoryPaths, mocker: MockerFixture) -> None:
"""
must not copy database if local cache already exists
"""
database = next(db for db in pacman.handle.get_syncdbs() if db.name == "core")
mocker.patch("pathlib.Path.is_dir", return_value=True)
# root database exists, local database does too
mocker.patch("pathlib.Path.is_file", return_value=True)
copy_mock = mocker.patch("shutil.copy")

pacman.database_copy(database, Path("root"), repository_paths, use_ahriman_cache=True)
copy_mock.assert_not_called()


def test_database_init(pacman: Pacman, configuration: Configuration) -> None:
"""
must init database with settings
"""
mirror = configuration.get("alpm", "mirror")
database = pacman.database_init("test", mirror, "x86_64")
assert len(database.servers) == 1


def test_database_sync(pacman: Pacman) -> None:
"""
must sync databases
"""
handle_mock = MagicMock()
core_mock = MagicMock()
extra_mock = MagicMock()
transaction_mock = MagicMock()
handle_mock.get_syncdbs.return_value = [core_mock, extra_mock]
handle_mock.init_transaction.return_value = transaction_mock
pacman.handle = handle_mock

pacman.database_sync(False)
handle_mock.init_transaction.assert_called_once_with()
core_mock.update.assert_called_once_with(False)
extra_mock.update.assert_called_once_with(False)
transaction_mock.release.assert_called_once_with()


def test_database_sync_failed(pacman: Pacman) -> None:
"""
must sync databases even if there was an exception
"""
handle_mock = MagicMock()
core_mock = MagicMock()
core_mock.update.side_effect = PyalpmError()
extra_mock = MagicMock()
handle_mock.get_syncdbs.return_value = [core_mock, extra_mock]
pacman.handle = handle_mock

pacman.database_sync(False)
extra_mock.update.assert_called_once_with(False)


def test_database_sync_forced(pacman: Pacman) -> None:
"""
must sync databases with force flag
"""
handle_mock = MagicMock()
core_mock = MagicMock()
handle_mock.get_syncdbs.return_value = [core_mock]
pacman.handle = handle_mock

pacman.database_sync(True)
handle_mock.init_transaction.assert_called_once_with()
core_mock.update.assert_called_once_with(True)


def test_package_get(pacman: Pacman) -> None:
"""
must retrieve package
"""
assert list(pacman.package_get("pacman"))


def test_package_get_empty(pacman: Pacman) -> None:
"""
must return empty packages list without exception
"""
assert not list(pacman.package_get("some-random-name"))


def test_packages(pacman: Pacman) -> None:
"""
package list must not be empty
"""
packages = pacman.all_packages()
packages = pacman.packages()
assert packages
assert "pacman" in packages


def test_all_packages_with_provides(pacman: Pacman) -> None:
def test_packages_with_provides(pacman: Pacman) -> None:
"""
package list must contain provides packages
"""
assert "sh" in pacman.all_packages()


def test_get(pacman: Pacman) -> None:
"""
must retrieve package
"""
assert list(pacman.get("pacman"))


def test_get_empty(pacman: Pacman) -> None:
"""
must return empty packages list without exception
"""
assert not list(pacman.get("some-random-name"))
assert "sh" in pacman.packages()
@@ -154,9 +154,5 @@ def test_tree_create(repository_paths: RepositoryPaths, mocker: MockerFixture) -
chown_mock = mocker.patch("ahriman.models.repository_paths.RepositoryPaths.chown")

repository_paths.tree_create()
mkdir_mock.assert_has_calls(
[
mock.call(mode=0o755, parents=True, exist_ok=True)
for _ in paths
], any_order=True)
chown_mock.assert_has_calls([mock.call(getattr(repository_paths, path)) for path in paths], any_order=True)
mkdir_mock.assert_has_calls([mock.call(mode=0o755, parents=True, exist_ok=True) for _ in paths], any_order=True)
chown_mock.assert_has_calls([mock.call(pytest.helpers.anyvar(int)) for _ in paths], any_order=True)
@@ -5,8 +5,10 @@ database = ../../../ahriman-test.db

[alpm]
database = /var/lib/pacman
mirror = https://geo.mirror.pkgbuild.com/$repo/os/$arch
repositories = core extra community multilib
root = /
use_ahriman_cache = no

[auth]
client_id = client_id