diff --git a/src/ahriman/core/repository/executor.py b/src/ahriman/core/repository/executor.py index 0ac135f4..cc3bb9d5 100644 --- a/src/ahriman/core/repository/executor.py +++ b/src/ahriman/core/repository/executor.py @@ -17,7 +17,9 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . # -from collections.abc import Iterable +import shutil + +from collections.abc import Generator, Iterable from pathlib import Path from tempfile import TemporaryDirectory @@ -25,7 +27,7 @@ from ahriman.core.build_tools.package_archive import PackageArchive from ahriman.core.build_tools.task import Task from ahriman.core.repository.cleaner import Cleaner from ahriman.core.repository.package_info import PackageInfo -from ahriman.core.utils import atomic_move, safe_filename +from ahriman.core.utils import atomic_move, filelock, package_like, safe_filename from ahriman.models.changes import Changes from ahriman.models.event import EventType from ahriman.models.package import Package @@ -39,6 +41,28 @@ class Executor(PackageInfo, Cleaner): trait for common repository update processes """ + def _archive_lookup(self, package: Package) -> Generator[Path, None, None]: + """ + check if there is a rebuilt package already + + Args: + package(Package): package to check + + Yields: + Path: list of built packages and signatures if available, empty list otherwise + """ + archive = self.paths.archive_for(package.base) + for path in filter(package_like, archive.iterdir()): + built = Package.from_archive(path, self.pacman) + # check if there is an archive with exact same version + if built.version != package.version: + continue + for single in built.packages.values(): + # we allow packages with either same architecture or any + if single.architecture not in ("any", self.architecture): + continue + yield from archive.glob(f"{single.filename}*") + def _archive_rename(self, description: PackageDescription, package_base: str) -> None: """ rename 
@contextlib.contextmanager
def filelock(path: Path) -> Generator[None, None, None]:
    """
    take an exclusive advisory lock for the file passed as argument

    A hidden sibling lock file (``.<name>``) is created next to ``path`` and an
    exclusive :func:`fcntl.flock` is held on it for the duration of the ``with``
    body; the lock file is removed afterwards

    Args:
        path(Path): path object on which lock must be performed

    Yields:
        None: control is yielded to the caller while the lock is held
    """
    lock_path = path.with_name(f".{path.name}")
    try:
        with lock_path.open("ab") as lock_file:
            fd = lock_file.fileno()
            try:
                fcntl.flock(fd, fcntl.LOCK_EX)  # lock file and wait until the lock is available
                yield
            finally:
                fcntl.flock(fd, fcntl.LOCK_UN)  # unlock file first
    finally:
        # NOTE(review): unlinking while another process may still hold the lock
        # is presumably acceptable here, because flock is bound to the open
        # descriptor, not to the directory entry — confirm against callers
        lock_path.unlink(missing_ok=True)  # remove lock file at the end