Mirror of https://github.com/arcan1s/ahriman.git, synced 2025-07-23 02:39:57 +00:00
feat: add ability to check broken dependencies (#122)

* implement elf dynamic linking check
* load local database too in pacman wrapper
@@ -537,6 +537,9 @@ def _set_repo_check_parser(root: SubParserAction) -> argparse.ArgumentParser:
     parser.add_argument("--changes", help="calculate changes from the latest known commit if available. "
                                           "Only applicable in dry run mode",
                         action=argparse.BooleanOptionalAction, default=True)
+    parser.add_argument("--check-files", help="enable or disable checking of broken dependencies "
+                                              "(e.g. dynamically linked libraries or modules directories)",
+                        action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("-e", "--exit-code", help="return non-zero exit status if result is empty", action="store_true")
     parser.add_argument("--vcs", help="fetch actual version of VCS packages",
                         action=argparse.BooleanOptionalAction, default=True)

@@ -605,6 +608,9 @@ def _set_repo_daemon_parser(root: SubParserAction) -> argparse.ArgumentParser:
     parser.add_argument("--changes", help="calculate changes from the latest known commit if available. "
                                           "Only applicable in dry run mode",
                         action=argparse.BooleanOptionalAction, default=True)
+    parser.add_argument("--check-files", help="enable or disable checking of broken dependencies "
+                                              "(e.g. dynamically linked libraries or modules directories)",
+                        action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("--dependencies", help="process missing package dependencies",
                         action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("--dry-run", help="just perform check for updates, same as check command", action="store_true")

@@ -826,6 +832,9 @@ def _set_repo_update_parser(root: SubParserAction) -> argparse.ArgumentParser:
     parser.add_argument("--changes", help="calculate changes from the latest known commit if available. "
                                           "Only applicable in dry run mode",
                         action=argparse.BooleanOptionalAction, default=True)
+    parser.add_argument("--check-files", help="enable or disable checking of broken dependencies "
+                                              "(e.g. dynamically linked libraries or modules directories)",
+                        action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("--dependencies", help="process missing package dependencies",
                         action=argparse.BooleanOptionalAction, default=True)
     parser.add_argument("--dry-run", help="just perform check for updates, same as check command", action="store_true")
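All three parsers register the flag with argparse.BooleanOptionalAction, which generates a paired --no- variant automatically. A minimal sketch of that behavior (standard library, Python 3.9+):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--check-files", action=argparse.BooleanOptionalAction, default=True)

print(parser.parse_args([]))                    # Namespace(check_files=True)
print(parser.parse_args(["--no-check-files"]))  # Namespace(check_files=False)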
@@ -62,10 +62,13 @@ class Application(ApplicationPackages, ApplicationRepository):
         """
         known_packages: set[str] = set()
         # local set
+        # this action is not really needed in case if ``alpm.use_ahriman_cache`` set to yes, because pacman
+        # will eventually contain all the local packages
         for base in self.repository.packages():
             for package, properties in base.packages.items():
                 known_packages.add(package)
                 known_packages.update(properties.provides)
+        # known pacman databases
         known_packages.update(self.repository.pacman.packages())
         return known_packages
@@ -91,10 +91,7 @@ class ApplicationRepository(ApplicationProperties):
             packages(Iterable[str]): only sign specified packages
         """
         # copy to prebuilt directory
-        for package in self.repository.packages():
-            # no one requested this package
-            if packages and package.base not in packages:
-                continue
+        for package in self.repository.packages(packages):
             for archive in package.packages.values():
                 if archive.filepath is None:
                     self.logger.warning("filepath is empty for %s", package.base)
@@ -179,7 +176,7 @@ class ApplicationRepository(ApplicationProperties):
         return result

     def updates(self, filter_packages: Iterable[str], *,
-                aur: bool, local: bool, manual: bool, vcs: bool) -> list[Package]:
+                aur: bool, local: bool, manual: bool, vcs: bool, check_files: bool) -> list[Package]:
         """
         get list of packages to run update process

@@ -189,6 +186,7 @@ class ApplicationRepository(ApplicationProperties):
             local(bool): enable or disable checking of local packages for updates
             manual(bool): include or exclude manual updates
             vcs(bool): enable or disable checking of VCS packages
+            check_files(bool): check for broken dependencies

         Returns:
             list[Package]: list of out-of-dated packages

@@ -201,5 +199,7 @@ class ApplicationRepository(ApplicationProperties):
             updates.update({package.base: package for package in self.repository.updates_local(vcs=vcs)})
         if manual:
             updates.update({package.base: package for package in self.repository.updates_manual()})
+        if check_files:
+            updates.update({package.base: package for package in self.repository.updates_dependencies(filter_packages)})

         return [package for _, package in sorted(updates.items())]
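A hypothetical call site for the extended signature (the argument values here are illustrative, not from the commit):

packages = application.updates(
    args.package,
    aur=True, local=True, manual=True, vcs=True,
    check_files=True,  # also include packages whose recorded dependencies broke
)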
@@ -55,7 +55,7 @@ class Add(Handler):
         if not args.now:
             return

-        packages = application.updates(args.package, aur=False, local=False, manual=True, vcs=False)
+        packages = application.updates(args.package, aur=False, local=False, manual=True, vcs=False, check_files=False)
         packages = application.with_dependencies(packages, process_dependencies=args.dependencies)
         packagers = Packagers(args.username, {package.base: package.packager for package in packages})
@@ -18,10 +18,10 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 import argparse
-import pwd
+import tarfile

 from pathlib import Path
-from tarfile import TarFile
+from pwd import getpwuid

 from ahriman.application.handlers.handler import Handler
 from ahriman.core.configuration import Configuration

@@ -49,7 +49,7 @@ class Backup(Handler):
             report(bool): force enable or disable reporting
         """
         backup_paths = Backup.get_paths(configuration)
-        with TarFile(args.path, mode="w") as archive:  # well we don't actually use compression
+        with tarfile.open(args.path, mode="w") as archive:  # well we don't actually use compression
             for backup_path in backup_paths:
                 archive.add(backup_path)

@@ -77,7 +77,7 @@ class Backup(Handler):

         # gnupg home with imported keys
         uid, _ = repository_paths.root_owner
-        system_user = pwd.getpwuid(uid)
+        system_user = getpwuid(uid)
         gnupg_home = Path(system_user.pw_dir) / ".gnupg"
         if gnupg_home.is_dir():
             paths.add(gnupg_home)
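The swap from the TarFile constructor to tarfile.open is worth noting: the constructor only handles plain tar, while tarfile.open dispatches on its mode string ("w", "w:gz", "w:xz", ...) and transparently detects compression when reading. A small sketch (paths are examples):

import tarfile

with tarfile.open("/tmp/backup.tar", mode="w") as archive:  # "w:gz" would enable gzip here
    archive.add("/etc/hostname")

with tarfile.open("/tmp/backup.tar") as archive:  # mode "r" auto-detects compression
    print(archive.getnames())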
@@ -18,8 +18,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 import argparse
-
-from tarfile import TarFile
+import tarfile

 from ahriman.application.handlers.handler import Handler
 from ahriman.core.configuration import Configuration

@@ -45,5 +44,5 @@ class Restore(Handler):
             configuration(Configuration): configuration instance
             report(bool): force enable or disable reporting
         """
-        with TarFile(args.path) as archive:
-            archive.extractall(path=args.output)
+        with tarfile.open(args.path) as archive:
+            archive.extractall(path=args.output)  # nosec
@@ -48,7 +48,8 @@ class Update(Handler):
         application = Application(repository_id, configuration, report=report, refresh_pacman_database=args.refresh)
         application.on_start()

-        packages = application.updates(args.package, aur=args.aur, local=args.local, manual=args.manual, vcs=args.vcs)
+        packages = application.updates(args.package, aur=args.aur, local=args.local, manual=args.manual, vcs=args.vcs,
+                                       check_files=args.check_files)
         if args.dry_run:  # some check specific actions
             if args.changes:  # generate changes if requested
                 application.changes(packages)
@@ -18,24 +18,31 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 import shutil
+import tarfile

-from collections.abc import Callable, Generator
+from collections.abc import Generator, Iterable
 from functools import cached_property
 from pathlib import Path
-from pyalpm import DB, Handle, Package, SIG_PACKAGE, error as PyalpmError  # type: ignore[import-not-found]
+from pyalpm import DB, Handle, Package, SIG_DATABASE_OPTIONAL, SIG_PACKAGE_OPTIONAL  # type: ignore[import-not-found]
 from string import Template

+from ahriman.core.alpm.pacman_database import PacmanDatabase
 from ahriman.core.configuration import Configuration
 from ahriman.core.log import LazyLogging
 from ahriman.core.util import trim_package
 from ahriman.models.pacman_synchronization import PacmanSynchronization
 from ahriman.models.repository_id import RepositoryId
 from ahriman.models.repository_paths import RepositoryPaths


 class Pacman(LazyLogging):
     """
     alpm wrapper

     Attributes:
+        configuration(Configuration): configuration instance
         refresh_database(PacmanSynchronization): synchronize local cache to remote
+        repository_id(RepositoryId): repository unique identifier
+        repository_path(RepositoryPaths): repository paths instance
     """

     def __init__(self, repository_id: RepositoryId, configuration: Configuration, *,

@@ -48,8 +55,11 @@ class Pacman(LazyLogging):
             configuration(Configuration): configuration instance
             refresh_database(PacmanSynchronization): synchronize local cache to remote
         """
-        self.__create_handle_fn: Callable[[], Handle] = lambda: self.__create_handle(
-            repository_id, configuration, refresh_database=refresh_database)
+        self.configuration = configuration
+        self.repository_id = repository_id
+        self.repository_paths = configuration.repository_paths
+
+        self.refresh_database = refresh_database

     @cached_property
     def handle(self) -> Handle:

@@ -59,40 +69,39 @@ class Pacman(LazyLogging):
         Returns:
             Handle: generated pyalpm handle instance
         """
-        return self.__create_handle_fn()
+        return self.__create_handle(refresh_database=self.refresh_database)

-    def __create_handle(self, repository_id: RepositoryId, configuration: Configuration, *,
-                        refresh_database: PacmanSynchronization) -> Handle:
+    def __create_handle(self, *, refresh_database: PacmanSynchronization) -> Handle:
         """
         create lazy handle function

         Args:
-            repository_id(RepositoryId): repository unique identifier
-            configuration(Configuration): configuration instance
             refresh_database(PacmanSynchronization): synchronize local cache to remote

         Returns:
             Handle: fully initialized pacman handle
         """
-        root = configuration.getpath("alpm", "root")
-        pacman_root = configuration.getpath("alpm", "database")
-        use_ahriman_cache = configuration.getboolean("alpm", "use_ahriman_cache")
-        mirror = configuration.get("alpm", "mirror")
-        paths = configuration.repository_paths
-        database_path = paths.pacman if use_ahriman_cache else pacman_root
+        pacman_root = self.configuration.getpath("alpm", "database")
+        use_ahriman_cache = self.configuration.getboolean("alpm", "use_ahriman_cache")
+
+        database_path = self.repository_paths.pacman if use_ahriman_cache else pacman_root
+        root = self.configuration.getpath("alpm", "root")
         handle = Handle(str(root), str(database_path))
-        for repository in configuration.getlist("alpm", "repositories"):
-            database = self.database_init(handle, repository, mirror, repository_id.architecture)
-            self.database_copy(handle, database, pacman_root, paths, use_ahriman_cache=use_ahriman_cache)
+
+        for repository in self.configuration.getlist("alpm", "repositories"):
+            database = self.database_init(handle, repository, self.repository_id.architecture)
+            self.database_copy(handle, database, pacman_root, use_ahriman_cache=use_ahriman_cache)
+
+        # install repository database too
+        local_database = self.database_init(handle, self.repository_id.name, self.repository_id.architecture)
+        self.database_copy(handle, local_database, pacman_root, use_ahriman_cache=use_ahriman_cache)

         if use_ahriman_cache and refresh_database:
             self.database_sync(handle, force=refresh_database == PacmanSynchronization.Force)

         return handle

-    def database_copy(self, handle: Handle, database: DB, pacman_root: Path, paths: RepositoryPaths, *,
-                      use_ahriman_cache: bool) -> None:
+    def database_copy(self, handle: Handle, database: DB, pacman_root: Path, *, use_ahriman_cache: bool) -> None:
         """
         copy database from the operating system root to the ahriman local home

@@ -100,7 +109,6 @@ class Pacman(LazyLogging):
             handle(Handle): pacman handle which will be used for database copying
             database(DB): pacman database instance to be copied
             pacman_root(Path): operating system pacman root
-            paths(RepositoryPaths): repository paths instance
             use_ahriman_cache(bool): use local ahriman cache instead of system one
         """
         def repository_database(root: Path) -> Path:

@@ -122,30 +130,36 @@ class Pacman(LazyLogging):
             return  # database for some reason does not exist
         self.logger.info("copy pacman database from operating system root to ahriman's home")
         shutil.copy(src, dst)
-        paths.chown(dst)
+        self.repository_paths.chown(dst)

-    def database_init(self, handle: Handle, repository: str, mirror: str, architecture: str) -> DB:
+    def database_init(self, handle: Handle, repository: str, architecture: str) -> DB:
         """
         create database instance from pacman handler and set its properties

         Args:
             handle(Handle): pacman handle which will be used for database initializing
             repository(str): pacman repository name (e.g. core)
-            mirror(str): arch linux mirror url
             architecture(str): repository architecture

         Returns:
             DB: loaded pacman database instance
         """
         self.logger.info("loading pacman database %s", repository)
-        database: DB = handle.register_syncdb(repository, SIG_PACKAGE)
+        database: DB = handle.register_syncdb(repository, SIG_DATABASE_OPTIONAL | SIG_PACKAGE_OPTIONAL)

-        # replace variables in mirror address
-        variables = {
-            "arch": architecture,
-            "repo": repository,
-        }
-        database.servers = [Template(mirror).safe_substitute(variables)]
+        if repository != self.repository_id.name:
+            mirror = self.configuration.get("alpm", "mirror")
+            # replace variables in mirror address
+            variables = {
+                "arch": architecture,
+                "repo": repository,
+            }
+            server = Template(mirror).safe_substitute(variables)
+        else:
+            # special case, same database, use local storage instead
+            server = f"file://{self.repository_paths.repository}"
+
+        database.servers = [server]

         return database

@@ -160,13 +174,44 @@ class Pacman(LazyLogging):
         self.logger.info("refresh ahriman's home pacman database (force refresh %s)", force)
         transaction = handle.init_transaction()
         for database in handle.get_syncdbs():
-            try:
-                database.update(force)
-            except PyalpmError:
-                self.logger.exception("exception during update %s", database.name)
+            PacmanDatabase(database, self.configuration).sync(force=force)
         transaction.release()

-    def package_get(self, package_name: str) -> Generator[Package, None, None]:
+    def files(self, packages: Iterable[str] | None = None) -> dict[str, set[Path]]:
+        """
+        extract list of known packages from the databases
+
+        Args:
+            packages(Iterable[str] | None, optional): filter by package names (Default value = None)
+
+        Returns:
+            dict[str, set[Path]]: map of package name to its list of files
+        """
+        packages = packages or []
+
+        def extract(tar: tarfile.TarFile) -> Generator[tuple[str, set[Path]], None, None]:
+            for descriptor in filter(lambda info: info.path.endswith("/files"), tar.getmembers()):
+                package, *_ = str(Path(descriptor.path).parent).rsplit("-", 2)
+                if packages and package not in packages:
+                    continue  # skip unused packages
+                content = tar.extractfile(descriptor)
+                if content is None:
+                    continue
+                files = {Path(filename.decode("utf8").rstrip()) for filename in content.readlines()}
+
+                yield package, files
+
+        result: dict[str, set[Path]] = {}
+        for database in self.handle.get_syncdbs():
+            database_file = self.repository_paths.pacman / "sync" / f"{database.name}.files.tar.gz"
+            if not database_file.is_file():
+                continue  # no database file found
+            with tarfile.open(database_file, "r:gz") as archive:
+                result.update(extract(archive))
+
+        return result
+
+    def package(self, package_name: str) -> Generator[Package, None, None]:
         """
         retrieve list of the packages from the repository by name
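For reference, the files-database layout that files() walks can be explored with nothing but the standard library. This standalone sketch (a hypothetical helper, not part of the commit) mirrors the same parsing: every entry in a pacman *.files.tar.gz database is a "<name>-<version>-<release>/files" listing.

import tarfile
from pathlib import Path

def read_files_database(database_file: Path) -> dict[str, set[Path]]:
    result: dict[str, set[Path]] = {}
    with tarfile.open(database_file, "r:gz") as archive:
        for member in archive.getmembers():
            if not member.path.endswith("/files"):
                continue
            # strip "-<version>-<release>" from the directory name
            package, *_ = str(Path(member.path).parent).rsplit("-", 2)
            content = archive.extractfile(member)
            if content is None:
                continue
            # note: the listing starts with a %FILES% directive line,
            # kept here to mirror the method above
            result[package] = {Path(line.decode("utf8").rstrip()) for line in content}
    return result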
src/ahriman/core/alpm/pacman_database.py (new file, 170 lines)
@@ -0,0 +1,170 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import shutil

from email.utils import parsedate_to_datetime
from pathlib import Path
from pyalpm import DB  # type: ignore[import-not-found]
from urllib.parse import urlparse

from ahriman.core.configuration import Configuration
from ahriman.core.exceptions import PacmanError
from ahriman.core.http import SyncHttpClient


class PacmanDatabase(SyncHttpClient):
    """
    implementation for database sync, because pyalpm is not always enough

    Attributes:
        LAST_MODIFIED_HEADER(str): last modified header name
        database(DB): pyalpm database object
        repository_paths(RepositoryPaths): repository paths instance
        sync_files_database(bool): sync files database
    """

    LAST_MODIFIED_HEADER = "Last-Modified"

    def __init__(self, database: DB, configuration: Configuration) -> None:
        """
        default constructor

        Args:
            database(DB): pyalpm database object
            configuration(Configuration): configuration instance
        """
        SyncHttpClient.__init__(self)
        self.timeout = None  # reset timeout

        self.database = database
        self.repository_paths = configuration.repository_paths

        self.sync_files_database = configuration.getboolean("alpm", "sync_files_database")

    def copy(self, remote_path: Path, local_path: Path) -> None:
        """
        copy local database file

        Args:
            remote_path(Path): path to source (remote) file
            local_path(Path): path to locally stored file
        """
        shutil.copy(remote_path, local_path)

    def download(self, url: str, local_path: Path) -> None:
        """
        download remote file and store it to local path with the correct last modified headers

        Args:
            url(str): remote url to request file
            local_path(Path): path to locally stored file

        Raises:
            PacmanError: in case if no last-modified header was found
        """
        response = self.make_request("GET", url, stream=True)
        if self.LAST_MODIFIED_HEADER not in response.headers:
            raise PacmanError("No last-modified header found")

        with local_path.open("wb") as local_file:
            for chunk in response.iter_content(chunk_size=1024):
                local_file.write(chunk)

        # set correct (a,m)time for the file
        remote_changed = parsedate_to_datetime(response.headers[self.LAST_MODIFIED_HEADER]).timestamp()
        os.utime(local_path, (remote_changed, remote_changed))

    def is_outdated(self, url: str, local_path: Path) -> bool:
        """
        check if local file is outdated

        Args:
            url(str): remote url to request last modified header
            local_path(Path): path to locally stored file

        Returns:
            bool: True in case if remote file is newer than local file

        Raises:
            PacmanError: in case if no last-modified header was found
        """
        if not local_path.is_file():
            return True  # no local file found, requires to update

        response = self.make_request("HEAD", url)
        if self.LAST_MODIFIED_HEADER not in response.headers:
            raise PacmanError("No last-modified header found")

        remote_changed = parsedate_to_datetime(response.headers["Last-Modified"]).timestamp()
        local_changed = local_path.stat().st_mtime

        return remote_changed > local_changed

    def sync(self, *, force: bool) -> None:
        """
        sync packages and files databases

        Args:
            force(bool): force database synchronization (same as ``pacman -Syy``)
        """
        try:
            self.sync_packages(force=force)
            if self.sync_files_database:
                self.sync_files(force=force)
        except Exception:
            self.logger.exception("exception during update %s", self.database.name)

    def sync_files(self, *, force: bool) -> None:
        """
        sync files by using http request

        Args:
            force(bool): force database synchronization (same as ``pacman -Syy``)
        """
        server = next(iter(self.database.servers))
        filename = f"{self.database.name}.files.tar.gz"
        url = f"{server}/{filename}"

        remote_uri = urlparse(url)
        local_path = Path(self.repository_paths.pacman / "sync" / filename)

        match remote_uri.scheme:
            case "http" | "https":
                if not force and not self.is_outdated(url, local_path):
                    return

                self.download(url, local_path)

            case "file":
                # just copy file as it is relatively cheap operation, no need to check timestamps
                self.copy(Path(remote_uri.path), local_path)

            case other:
                raise PacmanError(f"Unknown or unsupported URL scheme {other}")

    def sync_packages(self, *, force: bool) -> None:
        """
        sync packages by using built-in pyalpm methods

        Args:
            force(bool): force database synchronization (same as ``pacman -Syy``)
        """
        self.database.update(force)
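The freshness logic above boils down to comparing the server's Last-Modified timestamp with the local file's mtime. A self-contained sketch of the same check with plain requests, assuming the mirror actually sets the header (URL and path are placeholders):

import requests
from email.utils import parsedate_to_datetime
from pathlib import Path

def is_outdated(url: str, local_path: Path) -> bool:
    if not local_path.is_file():
        return True  # nothing downloaded yet
    response = requests.head(url, timeout=30)
    response.raise_for_status()
    # RFC 2822 date -> unix timestamp, compared against the file mtime
    remote = parsedate_to_datetime(response.headers["Last-Modified"]).timestamp()
    return remote > local_path.stat().st_mtime

print(is_outdated("https://example.com/core.files.tar.gz", Path("/tmp/core.files.tar.gz")))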
@@ -56,6 +56,6 @@ class OfficialSyncdb(Official):
             raise UnknownPackageError(package_name)

         try:
-            return next(AURPackage.from_pacman(package) for package in pacman.package_get(package_name))
+            return next(AURPackage.from_pacman(package) for package in pacman.package(package_name))
         except StopIteration:
             raise UnknownPackageError(package_name) from None
@@ -89,6 +89,11 @@ CONFIGURATION_SCHEMA: ConfigurationSchema = {
         "path_exists": True,
         "path_type": "dir",
     },
+    "sync_files_database": {
+        "type": "boolean",
+        "coerce": "boolean",
+        "required": True,
+    },
     "use_ahriman_cache": {
         "type": "boolean",
         "coerce": "boolean",
src/ahriman/core/database/migrations/m013_dependencies.py (new file, 32 lines)
@@ -0,0 +1,32 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__all__ = ["steps"]


steps = [
    """
    create table package_dependencies (
        package_base text not null,
        repository text not null,
        dependencies json not null,
        unique (package_base, repository)
    )
    """,
]
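SQLite has no native JSON column type: under its flexible typing the "dependencies json not null" column is stored as text, so (de)serialization happens at the application boundary (ahriman's Operations layer presumably registers the converters). A hypothetical in-memory illustration:

import json
import sqlite3

connection = sqlite3.connect(":memory:")
connection.execute("""
    create table package_dependencies (
        package_base text not null,
        repository text not null,
        dependencies json not null,
        unique (package_base, repository)
    )
""")
# the dict is serialized to a JSON string before insertion
connection.execute(
    "insert into package_dependencies values (?, ?, ?)",
    ("ahriman", "aur-clone-x86_64", json.dumps({"usr/lib/libalpm.so.13": ["pacman"]})),
)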
@@ -20,6 +20,7 @@
 from ahriman.core.database.operations.auth_operations import AuthOperations
 from ahriman.core.database.operations.build_operations import BuildOperations
 from ahriman.core.database.operations.changes_operations import ChangesOperations
+from ahriman.core.database.operations.dependencies_operations import DependenciesOperations
 from ahriman.core.database.operations.logs_operations import LogsOperations
 from ahriman.core.database.operations.package_operations import PackageOperations
 from ahriman.core.database.operations.patch_operations import PatchOperations
@@ -64,7 +64,7 @@ class ChangesOperations(Operations):

     def changes_insert(self, package_base: str, changes: Changes, repository_id: RepositoryId | None = None) -> None:
         """
-        insert packages to build queue
+        insert package changes

         Args:
             package_base(str): package base to insert
src/ahriman/core/database/operations/dependencies_operations.py (new file, 124 lines)
@@ -0,0 +1,124 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from pathlib import Path
from sqlite3 import Connection

from ahriman.core.database.operations.operations import Operations
from ahriman.models.dependencies import Dependencies
from ahriman.models.repository_id import RepositoryId


class DependenciesOperations(Operations):
    """
    operations for dependencies table
    """

    def dependencies_get(self, package_base: str | None = None,
                         repository_id: RepositoryId | None = None) -> list[Dependencies]:
        """
        get dependencies for the specific package base if available

        Args:
            package_base(str | None): package base to search
            repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)

        Returns:
            list[Dependencies]: dependencies for the package base if available
        """
        repository_id = repository_id or self._repository_id

        def run(connection: Connection) -> list[Dependencies]:
            return [
                Dependencies(
                    row["package_base"],
                    {
                        Path(path): packages
                        for path, packages in row["dependencies"].items()
                    }
                )
                for row in connection.execute(
                    """
                    select package_base, dependencies from package_dependencies
                    where (:package_base is null or package_base = :package_base)
                      and repository = :repository
                    """,
                    {
                        "package_base": package_base,
                        "repository": repository_id.id,
                    }
                )
            ]

        return self.with_connection(run)

    def dependencies_insert(self, dependencies: Dependencies, repository_id: RepositoryId | None = None) -> None:
        """
        insert package dependencies

        Args:
            dependencies(Dependencies): package dependencies
            repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)
        """
        repository_id = repository_id or self._repository_id

        def run(connection: Connection) -> None:
            connection.execute(
                """
                insert into package_dependencies
                (package_base, repository, dependencies)
                values
                (:package_base, :repository, :dependencies)
                on conflict (package_base, repository) do update set
                dependencies = :dependencies
                """,
                {
                    "package_base": dependencies.package_base,
                    "repository": repository_id.id,
                    "dependencies": {
                        str(path): packages
                        for path, packages in dependencies.paths.items()
                    }
                })

        return self.with_connection(run, commit=True)

    def dependencies_remove(self, package_base: str | None, repository_id: RepositoryId | None = None) -> None:
        """
        remove packages dependencies

        Args:
            package_base(str | None): optional filter by package base
            repository_id(RepositoryId, optional): repository unique identifier override (Default value = None)
        """
        repository_id = repository_id or self._repository_id

        def run(connection: Connection) -> None:
            connection.execute(
                """
                delete from package_dependencies
                where (:package_base is null or package_base = :package_base)
                  and repository = :repository
                """,
                {
                    "package_base": package_base,
                    "repository": repository_id.id,
                })

        return self.with_connection(run, commit=True)
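The "(:package_base is null or package_base = :package_base)" predicate lets one query serve both "fetch everything" and "fetch one package": binding None disables the filter, binding a value enables it. Reusing the in-memory connection from the migration sketch earlier:

query = "select * from package_dependencies where (:base is null or package_base = :base)"

rows_all = connection.execute(query, {"base": None}).fetchall()      # every row
rows_one = connection.execute(query, {"base": "ahriman"}).fetchall() # single package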
@@ -25,12 +25,19 @@ from typing import Self

 from ahriman.core.configuration import Configuration
 from ahriman.core.database.migrations import Migrations
-from ahriman.core.database.operations import AuthOperations, BuildOperations, ChangesOperations, LogsOperations, \
-    PackageOperations, PatchOperations
+from ahriman.core.database.operations import AuthOperations, BuildOperations, ChangesOperations, \
+    DependenciesOperations, LogsOperations, PackageOperations, PatchOperations


 # pylint: disable=too-many-ancestors
-class SQLite(AuthOperations, BuildOperations, ChangesOperations, LogsOperations, PackageOperations, PatchOperations):
+class SQLite(
+        AuthOperations,
+        BuildOperations,
+        ChangesOperations,
+        DependenciesOperations,
+        LogsOperations,
+        PackageOperations,
+        PatchOperations):
     """
     wrapper for sqlite3 database

@@ -94,3 +101,21 @@ class SQLite(
         if configuration.getboolean("settings", "apply_migrations", fallback=True):
             self.with_connection(lambda connection: Migrations.migrate(connection, configuration))
         paths.chown(self.path)
+
+    def package_clear(self, package_base: str) -> None:
+        """
+        completely remove package from all tables
+
+        Args:
+            package_base(str): package base to remove
+
+        Examples:
+            This method completely removes the package from all tables and must be used, e.g. on package removal::
+
+                >>> database.package_clear("ahriman")
+        """
+        self.build_queue_clear(package_base)
+        self.patches_remove(package_base, [])
+        self.logs_remove(package_base, None)
+        self.changes_remove(package_base)
+        self.dependencies_remove(package_base)
@@ -219,6 +219,21 @@ class PackageInfoError(RuntimeError):
         RuntimeError.__init__(self, f"There are errors during reading package information: `{details}`")


+class PacmanError(RuntimeError):
+    """
+    exception in case of pacman operation errors
+    """
+
+    def __init__(self, details: Any) -> None:
+        """
+        default constructor
+
+        Args:
+            details(Any): error details
+        """
+        RuntimeError.__init__(self, f"Could not perform operation with pacman: `{details}`")
+
+
 class PathError(ValueError):
     """
     exception which will be raised on a path which does not belong to the root directory
@@ -38,7 +38,7 @@ class SyncHttpClient(LazyLogging):
     Attributes:
         auth(tuple[str, str] | None): HTTP basic auth object if set
         suppress_errors(bool): suppress logging of request errors
-        timeout(int): HTTP request timeout in seconds
+        timeout(int | None): HTTP request timeout in seconds
     """

     def __init__(self, configuration: Configuration | None = None, section: str | None = None, *,

@@ -60,7 +60,7 @@ class SyncHttpClient(LazyLogging):
         password = configuration.get(section, "password", fallback=None)
         self.auth = (username, password) if username and password else None

-        self.timeout = configuration.getint(section, "timeout", fallback=30)
+        self.timeout: int | None = configuration.getint(section, "timeout", fallback=30)
         self.suppress_errors = suppress_errors

     @cached_property

@@ -90,25 +90,27 @@ class SyncHttpClient(LazyLogging):
         result: str = exception.response.text if exception.response is not None else ""
         return result

-    def make_request(self, method: Literal["DELETE", "GET", "POST", "PUT"], url: str, *,
+    def make_request(self, method: Literal["DELETE", "GET", "HEAD", "POST", "PUT"], url: str, *,
                      headers: dict[str, str] | None = None,
                      params: list[tuple[str, str]] | None = None,
                      data: Any | None = None,
                      json: dict[str, Any] | None = None,
                      files: dict[str, MultipartType] | None = None,
+                     stream: bool | None = None,
                      session: requests.Session | None = None,
                      suppress_errors: bool | None = None) -> requests.Response:
         """
         perform request with specified parameters

         Args:
-            method(Literal["DELETE", "GET", "POST", "PUT"]): HTTP method to call
+            method(Literal["DELETE", "GET", "HEAD", "POST", "PUT"]): HTTP method to call
             url(str): remote url to call
             headers(dict[str, str] | None, optional): request headers (Default value = None)
             params(list[tuple[str, str]] | None, optional): request query parameters (Default value = None)
             data(Any | None, optional): request raw data parameters (Default value = None)
             json(dict[str, Any] | None, optional): request json parameters (Default value = None)
             files(dict[str, MultipartType] | None, optional): multipart upload (Default value = None)
+            stream(bool | None, optional): handle response as stream (Default value = None)
             session(requests.Session | None, optional): session object if any (Default value = None)
             suppress_errors(bool | None, optional): suppress logging errors (e.g. if no web server available). If none
                 set, the instance-wide value will be used (Default value = None)

@@ -124,7 +126,7 @@ class SyncHttpClient(LazyLogging):

         try:
             response = session.request(method, url, params=params, data=data, headers=headers, files=files, json=json,
-                                       auth=self.auth, timeout=self.timeout)
+                                       stream=stream, auth=self.auth, timeout=self.timeout)
             response.raise_for_status()
             return response
         except requests.HTTPError as ex:
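With stream=True, requests defers fetching the response body until iter_content is consumed, which is what keeps multi-megabyte database files out of memory. A standalone sketch of the pattern (URL and path are placeholders):

import requests

response = requests.get("https://example.com/core.files.tar.gz", stream=True, timeout=30)
response.raise_for_status()
with open("/tmp/core.files.tar.gz", "wb") as local_file:
    for chunk in response.iter_content(chunk_size=1024):  # body is fetched lazily here
        local_file.write(chunk)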
@@ -29,6 +29,7 @@ from ahriman.core.repository.package_info import PackageInfo
 from ahriman.core.util import safe_filename
 from ahriman.models.changes import Changes
 from ahriman.models.package import Package
+from ahriman.models.package_archive import PackageArchive
 from ahriman.models.package_description import PackageDescription
 from ahriman.models.packagers import Packagers
 from ahriman.models.result import Result

@@ -77,6 +78,10 @@ class Executor(PackageInfo, Cleaner):
                 last_commit_sha = build_single(single, Path(dir_name), packager.packager_id)
                 # clear changes and update commit hash
                 self.reporter.package_changes_set(single.base, Changes(last_commit_sha))
+                # update dependencies list
+                dependencies = PackageArchive(self.paths.build_directory, single).depends_on()
+                self.database.dependencies_insert(dependencies)
                 # update result set
                 result.add_updated(single)
             except Exception:
                 self.reporter.set_failed(single.base)

@@ -98,10 +103,7 @@ class Executor(PackageInfo, Cleaner):
         def remove_base(package_base: str) -> None:
             try:
                 self.paths.tree_clear(package_base)  # remove all internal files
-                self.database.build_queue_clear(package_base)
-                self.database.patches_remove(package_base, [])
-                self.database.logs_remove(package_base, None)
-                self.database.changes_remove(package_base)
+                self.database.package_clear(package_base)
                 self.reporter.package_remove(package_base)  # we only update status page in case of base removal
             except Exception:
                 self.logger.exception("could not remove base %s", package_base)
@@ -86,14 +86,21 @@ class PackageInfo(RepositoryProperties):

         return Changes(last_commit_sha, changes)

-    def packages(self) -> list[Package]:
+    def packages(self, filter_packages: Iterable[str] | None = None) -> list[Package]:
         """
         generate list of repository packages

+        Args:
+            filter_packages(Iterable[str] | None, optional): filter packages list by specified only
+
         Returns:
             list[Package]: list of packages properties
         """
-        return self.load_archives(filter(package_like, self.paths.repository.iterdir()))
+        packages = self.load_archives(filter(package_like, self.paths.repository.iterdir()))
+        if filter_packages:
+            packages = [package for package in packages if package.base in filter_packages]
+
+        return packages

     def packages_built(self) -> list[Path]:
         """
@@ -18,6 +18,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
+from collections.abc import Iterable
 from pathlib import Path

 from ahriman.core.build_tools.sources import Sources
 from ahriman.core.exceptions import UnknownPackageError

@@ -55,17 +56,13 @@ class UpdateHandler(PackageInfo, Cleaner):
                         continue
                     raise UnknownPackageError(package.base)

-        local_versions = {package.base: package.version for package in self.packages()}
-
         result: list[Package] = []
-        for local in self.packages():
-            with self.in_package_context(local.base, local_versions.get(local.base)):
+        for local in self.packages(filter_packages):
+            with self.in_package_context(local.base, local.version):
                 if not local.remote.is_remote:
                     continue  # avoid checking local packages
                 if local.base in self.ignore_list:
                     continue
-                if filter_packages and local.base not in filter_packages:
-                    continue

                 try:
                     remote = load_remote(local)

@@ -82,6 +79,47 @@ class UpdateHandler(PackageInfo, Cleaner):

         return result

+    def updates_dependencies(self, filter_packages: Iterable[str]) -> list[Package]:
+        """
+        check packages which are required to be rebuilt based on dynamic dependencies (e.g. linking, modules paths, etc.)
+
+        Args:
+            filter_packages(Iterable[str]): do not check every package just specified in the list
+
+        Returns:
+            list[Package]: list of packages for which there is breaking linking
+        """
+        def extract_files(lookup_packages: Iterable[str]) -> dict[Path, set[str]]:
+            database_files = self.pacman.files(lookup_packages)
+            files: dict[Path, set[str]] = {}
+            for package_name, package_files in database_files.items():  # invert map
+                for package_file in package_files:
+                    files.setdefault(package_file, set()).add(package_name)
+
+            return files
+
+        dependencies = {dependency.package_base: dependency for dependency in self.database.dependencies_get()}
+
+        result: list[Package] = []
+        for package in self.packages(filter_packages):
+            if package.base not in dependencies:
+                continue  # skip check if no package dependencies found
+
+            required = dependencies[package.base].paths
+            required_packages = {dep for dep_packages in required.values() for dep in dep_packages}
+            filesystem = extract_files(required_packages)
+
+            for path, packages in required.items():
+                found = filesystem.get(path, set())
+                if found.intersection(packages):
+                    continue
+
+                # there are no packages found in filesystem with the same paths
+                result.append(package)
+                break
+
+        return result
+
     def updates_local(self, *, vcs: bool) -> list[Package]:
         """
         check local packages for updates
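The core of updates_dependencies is a map inversion plus an intersection test; stripped of the repository plumbing it reduces to this (sample data is made up):

from pathlib import Path

# what the sync databases currently ship: package -> files
database_files = {"openssl": {Path("usr/lib/libssl.so.3")}, "zlib": {Path("usr/lib/libz.so.1")}}
# what was recorded at build time: required path -> packages that provided it
required = {Path("usr/lib/libssl.so.1.1"): ["openssl"]}

# invert package -> files into file -> packages
filesystem: dict[Path, set[str]] = {}
for package_name, package_files in database_files.items():
    for package_file in package_files:
        filesystem.setdefault(package_file, set()).add(package_name)

# a path is broken when none of its recorded providers still own it
broken = [path for path, packages in required.items()
          if not filesystem.get(path, set()).intersection(packages)]
print(broken)  # [PosixPath('usr/lib/libssl.so.1.1')] -> rebuild needed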
src/ahriman/models/dependencies.py (new file, 35 lines)
@@ -0,0 +1,35 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from dataclasses import dataclass, field
from pathlib import Path


@dataclass(frozen=True)
class Dependencies:
    """
    package paths dependencies

    Attributes:
        package_base(str): package base
        paths(dict[Path, list[str]]): map of the paths used by this package to set of packages in which they were found
    """

    package_base: str
    paths: dict[Path, list[str]] = field(default_factory=dict)
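A hypothetical instance, assuming the dataclass above is importable: it records that the package's binaries were linked against a library owned by pacman.

from pathlib import Path

dependencies = Dependencies(
    "ahriman",
    {Path("usr/lib/libalpm.so.13"): ["pacman"]},  # path -> packages providing it
)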
src/ahriman/models/package_archive.py (new file, 165 lines)
@@ -0,0 +1,165 @@
#
# Copyright (c) 2021-2024 ahriman team.
#
# This file is part of ahriman
# (see https://github.com/arcan1s/ahriman).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from dataclasses import dataclass
from elftools.elf.dynamic import DynamicSection
from elftools.elf.elffile import ELFFile
from pathlib import Path
from typing import IO

from ahriman.core.util import walk
from ahriman.models.dependencies import Dependencies
from ahriman.models.package import Package


@dataclass
class PackageArchive:
    """
    helper for package archives

    Attributes:
        package(Package): package descriptor
        root(Path): path to root filesystem
    """

    root: Path
    package: Package

    @staticmethod
    def dynamic_needed(binary_path: Path) -> list[str]:
        """
        extract dynamic libraries required by the specified file

        Args:
            binary_path(Path): path to library, file, etc

        Returns:
            list[str]: libraries which this file linked dynamically. Returns empty list in case if file is not
            a binary or no dynamic section has been found
        """
        with binary_path.open("rb") as binary_file:
            if not PackageArchive.is_elf(binary_file):
                return []

            elf_file = ELFFile(binary_file)  # type: ignore[no-untyped-call]
            dynamic_section = next(
                (section for section in elf_file.iter_sections()  # type: ignore[no-untyped-call]
                 if isinstance(section, DynamicSection)),
                None)
            if dynamic_section is None:
                return []

            return [
                tag.needed
                for tag in dynamic_section.iter_tags()  # type: ignore[no-untyped-call]
                if tag.entry.d_tag == "DT_NEEDED"
            ]

    @staticmethod
    def is_elf(content: IO[bytes]) -> bool:
        """
        check if the content is actually an elf file

        Args:
            content(IO[bytes]): content of the file

        Returns:
            bool: True in case if file has elf header and False otherwise
        """
        expected = b"\x7fELF"
        length = len(expected)

        magic_bytes = content.read(length)
        content.seek(0)  # reset reading position

        return magic_bytes == expected

    def depends_on(self) -> Dependencies:
        """
        extract packages and paths which are required for this package

        Returns:
            Dependencies: map of the package name to set of paths used by this package
        """
        dependencies, roots = self.depends_on_paths()

        result: dict[Path, list[str]] = {}
        for package, (directories, files) in self.installed_packages().items():
            if package in self.package.packages:
                continue  # skip package itself

            required_by = [directory for directory in directories if directory in roots]
            required_by.extend(library for library in files if library.name in dependencies)

            for path in required_by:
                result.setdefault(path, []).append(package)

        return Dependencies(self.package.base, result)

    def depends_on_paths(self) -> tuple[set[str], set[Path]]:
        """
        extract dependencies from installation

        Returns:
            tuple[set[str], set[Path]]: tuple of dynamically linked libraries and directory paths
        """
        dependencies = set()
        roots: set[Path] = set()

        package_dir = self.root / "build" / self.package.base / "pkg"
        for path in filter(lambda p: p.is_file(), walk(package_dir)):
            dependencies.update(PackageArchive.dynamic_needed(path))
            filesystem_path = Path(*path.relative_to(package_dir).parts[1:])
            roots.update(filesystem_path.parents[:-1])  # last element is always . because paths are relative

        return dependencies, roots

    def installed_packages(self) -> dict[str, tuple[list[Path], list[Path]]]:
        """
        extract list of the installed packages and their content

        Returns:
            dict[str, tuple[list[Path], list[Path]]]: map of package name to list of directories and files contained
            by this package
        """
        result = {}

        pacman_local_files = self.root / "var" / "lib" / "pacman" / "local"
        for path in filter(lambda fn: fn.name == "files", walk(pacman_local_files)):
            package, *_ = path.parent.name.rsplit("-", 2)

            directories, files = [], []
            is_files = False
            for line in path.read_text(encoding="utf8").splitlines():
                if not line:  # skip empty lines
                    continue
                if line.startswith("%") and line.endswith("%"):  # directive started
                    is_files = line == "%FILES%"
                if not is_files:  # not a files directive
                    continue

                entry = Path(line)
                if line.endswith("/"):  # simple check if it is directory
                    directories.append(entry)
                else:
                    files.append(entry)

            result[package] = directories, files

        return result
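The DT_NEEDED extraction above can be exercised standalone with pyelftools against any local binary (the path below is just an example):

from elftools.elf.dynamic import DynamicSection
from elftools.elf.elffile import ELFFile

with open("/usr/bin/ls", "rb") as binary_file:
    elf_file = ELFFile(binary_file)
    for section in elf_file.iter_sections():
        if not isinstance(section, DynamicSection):
            continue  # only the .dynamic section carries DT_NEEDED tags
        for tag in section.iter_tags():
            if tag.entry.d_tag == "DT_NEEDED":
                print(tag.needed)  # e.g. libc.so.6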
@@ -24,6 +24,7 @@ from collections.abc import Generator
 from dataclasses import dataclass, field
 from functools import cached_property
 from pathlib import Path
+from pwd import getpwuid

 from ahriman.core.exceptions import PathError
 from ahriman.core.log import LazyLogging

@@ -83,6 +84,17 @@ class RepositoryPaths(LazyLogging):
             return Path(self.repository_id.architecture)  # legacy tree suffix
         return Path(self.repository_id.name) / self.repository_id.architecture

+    @property
+    def build_directory(self) -> Path:
+        """
+        same as :attr:`chroot`, but exactly build chroot
+
+        Returns:
+            Path: path to directory in which build process is run
+        """
+        uid, _ = self.owner(self.root)
+        return self.chroot / f"{self.repository_id.name}-{self.repository_id.architecture}" / getpwuid(uid).pw_name
+
     @property
     def cache(self) -> Path:
         """