diff --git a/src/ahriman/core/repository/executor.py b/src/ahriman/core/repository/executor.py
index 0ac135f4..0b926bf5 100644
--- a/src/ahriman/core/repository/executor.py
+++ b/src/ahriman/core/repository/executor.py
@@ -17,7 +17,9 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
#
-from collections.abc import Iterable
+import shutil
+
+from collections.abc import Generator, Iterable
from pathlib import Path
from tempfile import TemporaryDirectory
@@ -25,7 +27,7 @@ from ahriman.core.build_tools.package_archive import PackageArchive
from ahriman.core.build_tools.task import Task
from ahriman.core.repository.cleaner import Cleaner
from ahriman.core.repository.package_info import PackageInfo
-from ahriman.core.utils import atomic_move, safe_filename
+from ahriman.core.utils import atomic_move, filelock, package_like, safe_filename
from ahriman.models.changes import Changes
from ahriman.models.event import EventType
from ahriman.models.package import Package
@@ -39,6 +41,36 @@ class Executor(PackageInfo, Cleaner):
trait for common repository update processes
"""
+ def _archive_lookup(self, package: Package) -> Generator[Path, None, None]:
+ """
+ check whether the package has already been built and can be reused from the archive directory
+
+ Args:
+ package(Package): package to check
+
+ Yields:
+ Path: paths to already built package archives and their signatures, if any are available
+ """
+ archive = self.paths.archive_for(package.base)
+
+ # find all packages which have the same version
+ same_version = [
+ built
+ for path in filter(package_like, archive.iterdir())
+ if (built := Package.from_archive(path, self.pacman)).version == package.version
+ ]
+ # no packages of the same version found
+ if not same_version:
+ return
+
+ packages = [single for built in same_version for single in built.packages.values()]
+ # all packages must be either "any" or the repository architecture
+ if not all(single.architecture in ("any", self.architecture) for single in packages):
+ return
+
+ for single in packages:
+ yield from archive.glob(f"{single.filename}*")
+
def _archive_rename(self, description: PackageDescription, package_base: str) -> None:
"""
rename package archive removing special symbols
@@ -74,7 +106,16 @@ class Executor(PackageInfo, Cleaner):
task = Task(package, self.configuration, self.architecture, self.paths)
patches = self.reporter.package_patches_get(package.base, None)
commit_sha = task.init(path, patches, local_version)
- built = task.build(path, PACKAGER=packager)
+
+ loaded_package = Package.from_build(path, self.architecture, None)
+ if prebuilt := list(self._archive_lookup(loaded_package)):
+ built = []
+ for artefact in prebuilt:
+ with filelock(artefact):
+ shutil.copy(artefact, path)
+ built.append(path / artefact.name)
+ else:
+ built = task.build(path, PACKAGER=packager)
package.with_packages(built, self.pacman)
for src in built:
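
Note: a condensed sketch of the reuse rule which _archive_lookup() implements, written outside the Executor class for clarity. PackageLike is a hypothetical stand-in for the entries of Package.packages; the helper below only mirrors the version and architecture checks, not the copy step itself.

    from typing import NamedTuple


    class PackageLike(NamedTuple):
        """hypothetical simplification of ahriman's package description model"""
        filename: str
        version: str
        architecture: str


    def is_reusable(candidates: list[PackageLike], version: str, repository_architecture: str) -> bool:
        # keep only archives carrying exactly the version which is about to be built
        same_version = [single for single in candidates if single.version == version]
        if not same_version:
            return False
        # reuse only if every artefact is "any" or matches the repository architecture,
        # otherwise the copied set would be incomplete and a real build is still required
        return all(single.architecture in ("any", repository_architecture) for single in same_version)

When the check passes, the hunk above copies each matching archive together with its detached signature (the trailing glob on the filename also catches the .sig companion) into the build directory under filelock() and skips Task.build() entirely.
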
diff --git a/src/ahriman/core/utils.py b/src/ahriman/core/utils.py
index 88c655b4..ac45c190 100644
--- a/src/ahriman/core/utils.py
+++ b/src/ahriman/core/utils.py
@@ -18,6 +18,7 @@
# along with this program. If not, see .
#
# pylint: disable=too-many-lines
+import contextlib
import datetime
import fcntl
import io
@@ -47,6 +48,7 @@ __all__ = [
"dataclass_view",
"enum_values",
"extract_user",
+ "filelock",
"filter_json",
"full_version",
"minmax",
@@ -83,17 +85,8 @@ def atomic_move(src: Path, dst: Path) -> None:
>>> atomic_move(src, dst)
"""
- lock_path = dst.with_name(f".{dst.name}")
- try:
- with lock_path.open("ab") as lock_file:
- fd = lock_file.fileno()
- try:
- fcntl.flock(fd, fcntl.LOCK_EX) # lock file and wait lock is until available
- shutil.move(src, dst)
- finally:
- fcntl.flock(fd, fcntl.LOCK_UN) # unlock file first
- finally:
- lock_path.unlink(missing_ok=True) # remove lock file at the end
+ with filelock(dst):
+ shutil.move(src, dst)
# pylint: disable=too-many-locals
@@ -264,6 +257,27 @@ def extract_user() -> str | None:
return os.getenv("SUDO_USER") or os.getenv("DOAS_USER") or os.getenv("USER")
+@contextlib.contextmanager
+def filelock(path: Path) -> Generator[None, None, None]:
+ """
+ context manager which locks the file passed as argument by using a hidden lock file next to it
+
+ Args:
+ path(Path): path to the file which must be locked
+ """
+ lock_path = path.with_name(f".{path.name}")
+ try:
+ with lock_path.open("ab") as lock_file:
+ fd = lock_file.fileno()
+ try:
+ fcntl.flock(fd, fcntl.LOCK_EX)  # lock the file and wait until the lock becomes available
+ yield
+ finally:
+ fcntl.flock(fd, fcntl.LOCK_UN) # unlock file first
+ finally:
+ lock_path.unlink(missing_ok=True) # remove lock file at the end
+
+
def filter_json(source: dict[str, Any], known_fields: Iterable[str]) -> dict[str, Any]:
"""
filter json object by fields used for json-to-object conversion
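
Note: a usage sketch for the extracted helper, assuming a hypothetical destination path. filelock() opens a hidden ".<name>" sibling of the target, holds an exclusive flock() on it for the duration of the block and removes it again afterwards, even when the body raises; atomic_move() above is now simply shutil.move() wrapped in the same critical section.

    import shutil
    from pathlib import Path

    from ahriman.core.utils import filelock

    source = Path("package-1.0.0-1-x86_64.pkg.tar.zst")  # hypothetical artefact
    destination = Path("/var/lib/ahriman/archive") / source.name  # hypothetical target directory

    with filelock(destination):
        # the lock file is .package-1.0.0-1-x86_64.pkg.tar.zst next to the destination;
        # any other filelock() or atomic_move() call on the same path blocks here
        shutil.copy(source, destination)
    # at this point the lock has been released and the lock file removed
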
diff --git a/tests/ahriman/core/test_utils.py b/tests/ahriman/core/test_utils.py
index 08871dee..e2b25375 100644
--- a/tests/ahriman/core/test_utils.py
+++ b/tests/ahriman/core/test_utils.py
@@ -17,6 +17,7 @@ from ahriman.core.utils import (
dataclass_view,
enum_values,
extract_user,
+ filelock,
filter_json,
full_version,
minmax,
@@ -43,47 +44,12 @@ def test_atomic_move(mocker: MockerFixture) -> None:
"""
must move file with locking
"""
- lock_mock = mocker.patch("fcntl.flock")
- open_mock = mocker.patch("pathlib.Path.open", autospec=True)
+ filelock_mock = mocker.patch("ahriman.core.utils.filelock")
move_mock = mocker.patch("shutil.move")
- unlink_mock = mocker.patch("pathlib.Path.unlink")
atomic_move(Path("source"), Path("destination"))
- open_mock.assert_called_once_with(Path(".destination"), "ab")
- lock_mock.assert_has_calls([
- MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
- MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
- ])
+ filelock_mock.assert_called_once_with(Path("destination"))
move_mock.assert_called_once_with(Path("source"), Path("destination"))
- unlink_mock.assert_called_once_with(missing_ok=True)
-
-
-def test_atomic_move_remove_lock(mocker: MockerFixture) -> None:
- """
- must remove lock file in case of exception
- """
- mocker.patch("pathlib.Path.open", side_effect=Exception)
- unlink_mock = mocker.patch("pathlib.Path.unlink")
-
- with pytest.raises(Exception):
- atomic_move(Path("source"), Path("destination"))
- unlink_mock.assert_called_once_with(missing_ok=True)
-
-
-def test_atomic_move_unlock(mocker: MockerFixture) -> None:
- """
- must unlock file in case of exception
- """
- mocker.patch("pathlib.Path.open")
- mocker.patch("shutil.move", side_effect=Exception)
- lock_mock = mocker.patch("fcntl.flock")
-
- with pytest.raises(Exception):
- atomic_move(Path("source"), Path("destination"))
- lock_mock.assert_has_calls([
- MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
- MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
- ])
def test_check_output(mocker: MockerFixture) -> None:
@@ -305,6 +271,53 @@ def test_extract_user() -> None:
assert extract_user() == "doas"
+def test_filelock(mocker: MockerFixture) -> None:
+ """
+ must perform file locking
+ """
+ lock_mock = mocker.patch("fcntl.flock")
+ open_mock = mocker.patch("pathlib.Path.open", autospec=True)
+ unlink_mock = mocker.patch("pathlib.Path.unlink")
+
+ with filelock(Path("local")):
+ pass
+ open_mock.assert_called_once_with(Path(".local"), "ab")
+ lock_mock.assert_has_calls([
+ MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
+ MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
+ ])
+ unlink_mock.assert_called_once_with(missing_ok=True)
+
+
+def test_filelock_remove_lock(mocker: MockerFixture) -> None:
+ """
+ must remove lock file in case of exception
+ """
+ mocker.patch("pathlib.Path.open", side_effect=Exception)
+ unlink_mock = mocker.patch("pathlib.Path.unlink")
+
+ with pytest.raises(Exception):
+ with filelock(Path("local")):
+ pass
+ unlink_mock.assert_called_once_with(missing_ok=True)
+
+
+def test_filelock_unlock(mocker: MockerFixture) -> None:
+ """
+ must unlock file in case of exception
+ """
+ mocker.patch("pathlib.Path.open")
+ lock_mock = mocker.patch("fcntl.flock")
+
+ with pytest.raises(Exception):
+ with filelock(Path("local")):
+ raise Exception
+ lock_mock.assert_has_calls([
+ MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
+ MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
+ ])
+
+
def test_filter_json(package_ahriman: Package) -> None:
"""
must filter fields by known list
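
Note: the simplified test_atomic_move above works because mocker.patch() returns a MagicMock, which already implements the context manager protocol, so the patched filelock() can be entered without further setup. The only subtlety is patching the name where the caller looks it up: atomic_move() lives in ahriman.core.utils, hence the "ahriman.core.utils.filelock" target. A sketch for a hypothetical caller defined elsewhere (copy_with_lock is not part of ahriman):

    import shutil
    from pathlib import Path

    from ahriman.core.utils import filelock


    def copy_with_lock(src: Path, dst: Path) -> None:
        """hypothetical caller which copies a file while holding the lock"""
        with filelock(dst):
            shutil.copy(src, dst)


    def test_copy_with_lock(mocker) -> None:
        # copy_with_lock() resolves filelock from this module's globals,
        # so the patch target is this module, not ahriman.core.utils
        filelock_mock = mocker.patch(f"{__name__}.filelock")
        copy_mock = mocker.patch("shutil.copy")

        copy_with_lock(Path("source"), Path("destination"))

        filelock_mock.assert_called_once_with(Path("destination"))
        copy_mock.assert_called_once_with(Path("source"), Path("destination"))
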