	lookup through archive packages before build
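This change lets the build step reuse packages that have already been built and stored in the repository archive: before invoking the build, the executor loads the package from the build sources, looks for archived artefacts of the same version and, provided every split package targets either "any" or the repository architecture, copies those artefacts into the build directory instead of rebuilding. The file locking that previously lived inside atomic_move() is extracted into a reusable filelock() context manager so the archive copy and the final move are serialized the same way. A minimal usage sketch of the new helper (the paths are illustrative only, not part of the commit):

    import shutil
    from pathlib import Path

    from ahriman.core.utils import filelock

    # hypothetical locations, used only to illustrate the call
    artefact = Path("/var/lib/ahriman/archive/example/example-1.0.0-1-any.pkg.tar.zst")
    build_dir = Path("/tmp/ahriman-build")

    # filelock() creates a hidden ".<name>" lock file next to the target,
    # holds an exclusive fcntl lock while the body runs, then unlocks and
    # removes the lock file
    with filelock(artefact):
        shutil.copy(artefact, build_dir)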
@@ -17,7 +17,9 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
-from collections.abc import Iterable
+import shutil
+
+from collections.abc import Generator, Iterable
 from pathlib import Path
 from tempfile import TemporaryDirectory
 
@@ -25,7 +27,7 @@ from ahriman.core.build_tools.package_archive import PackageArchive
 from ahriman.core.build_tools.task import Task
 from ahriman.core.repository.cleaner import Cleaner
 from ahriman.core.repository.package_info import PackageInfo
-from ahriman.core.utils import atomic_move, safe_filename
+from ahriman.core.utils import atomic_move, filelock, package_like, safe_filename
 from ahriman.models.changes import Changes
 from ahriman.models.event import EventType
 from ahriman.models.package import Package
@@ -39,6 +41,36 @@ class Executor(PackageInfo, Cleaner):
     trait for common repository update processes
     """
 
+    def _archive_lookup(self, package: Package) -> Generator[Path, None, None]:
+        """
+        check if there is a rebuilt package already
+
+        Args:
+            package(Package): package to check
+
+        Yields:
+            Path: paths to already built packages and their signatures, if any
+        """
+        archive = self.paths.archive_for(package.base)
+
+        # find all packages which have the same version
+        same_version = [
+            built
+            for path in filter(package_like, archive.iterdir())
+            if (built := Package.from_archive(path, self.pacman)).version == package.version
+        ]
+        # no packages of the same version found
+        if not same_version:
+            return
+
+        packages = [single for built in same_version for single in built.packages.values()]
+        # all packages must be either any or the same architecture
+        if not all(single.architecture in ("any", self.architecture) for single in packages):
+            return
+
+        for single in packages:
+            yield from archive.glob(f"{single.filename}*")
+
     def _archive_rename(self, description: PackageDescription, package_base: str) -> None:
         """
         rename package archive removing special symbols
@@ -74,7 +106,17 @@ class Executor(PackageInfo, Cleaner):
         task = Task(package, self.configuration, self.architecture, self.paths)
         patches = self.reporter.package_patches_get(package.base, None)
         commit_sha = task.init(path, patches, local_version)
-        built = task.build(path, PACKAGER=packager)
+
+        loaded_package = Package.from_build(path, self.architecture, None)
+        if prebuilt := list(self._archive_lookup(loaded_package)):
+            self.logger.info("using prebuilt packages for %s-%s", loaded_package.base, loaded_package.version)
+            built = []
+            for artefact in prebuilt:
+                with filelock(artefact):
+                    shutil.copy(artefact, path)
+                built.append(path / artefact.name)
+        else:
+            built = task.build(path, PACKAGER=packager)
 
         package.with_packages(built, self.pacman)
         for src in built:
|  | ||||
@@ -18,6 +18,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # pylint: disable=too-many-lines
+import contextlib
 import datetime
 import fcntl
 import io
@@ -47,6 +48,7 @@ __all__ = [
     "dataclass_view",
     "enum_values",
     "extract_user",
+    "filelock",
     "filter_json",
     "full_version",
     "minmax",
@@ -83,17 +85,8 @@ def atomic_move(src: Path, dst: Path) -> None:
 
             >>> atomic_move(src, dst)
     """
-    lock_path = dst.with_name(f".{dst.name}")
-    try:
-        with lock_path.open("ab") as lock_file:
-            fd = lock_file.fileno()
-            try:
-                fcntl.flock(fd, fcntl.LOCK_EX)  # lock file and wait until the lock is available
-                shutil.move(src, dst)
-            finally:
-                fcntl.flock(fd, fcntl.LOCK_UN)  # unlock file first
-    finally:
-        lock_path.unlink(missing_ok=True)  # remove lock file at the end
+    with filelock(dst):
+        shutil.move(src, dst)
 
 
 # pylint: disable=too-many-locals
@@ -264,6 +257,27 @@ def extract_user() -> str | None:
     return os.getenv("SUDO_USER") or os.getenv("DOAS_USER") or os.getenv("USER")
 
 
+@contextlib.contextmanager
+def filelock(path: Path) -> Generator[None, None, None]:
+    """
+    acquire an exclusive lock on the file passed as argument
+
+    Args:
+        path(Path): path object on which lock must be performed
+    """
+    lock_path = path.with_name(f".{path.name}")
+    try:
+        with lock_path.open("ab") as lock_file:
+            fd = lock_file.fileno()
+            try:
+                fcntl.flock(fd, fcntl.LOCK_EX)  # lock file and wait until the lock is available
+                yield
+            finally:
+                fcntl.flock(fd, fcntl.LOCK_UN)  # unlock file first
+    finally:
+        lock_path.unlink(missing_ok=True)  # remove lock file at the end
+
+
 def filter_json(source: dict[str, Any], known_fields: Iterable[str]) -> dict[str, Any]:
     """
     filter json object by fields used for json-to-object conversion
|  | ||||
@@ -79,7 +79,7 @@ def test_run_repo_specific_triggers(args: argparse.Namespace, configuration: Con
     _, repository_id = configuration.check_loaded()
 
     # remove unused sections
-    for section in ("customs3", "github:x86_64", "logs-rotation", "mirrorlist"):
+    for section in ("archive", "customs3", "github:x86_64", "logs-rotation", "mirrorlist"):
         configuration.remove_section(section)
 
     configuration.set_option("report", "target", "test")
|  | ||||
@@ -1,5 +1,6 @@
 import pytest
 
+from dataclasses import replace
 from pathlib import Path
 from pytest_mock import MockerFixture
 from typing import Any
@@ -13,6 +14,57 @@ from ahriman.models.packagers import Packagers
 from ahriman.models.user import User
 
 
+def test_archive_lookup(executor: Executor, package_ahriman: Package, package_python_schedule: Package,
+                        mocker: MockerFixture) -> None:
+    """
+    must return existing packages which match the version
+    """
+    mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
+    mocker.patch("pathlib.Path.iterdir", return_value=[
+        Path("1.pkg.tar.zst"),
+        Path("2.pkg.tar.zst"),
+        Path("3.pkg.tar.zst"),
+    ])
+    mocker.patch("ahriman.models.package.Package.from_archive", side_effect=[
+        package_ahriman,
+        package_python_schedule,
+        replace(package_ahriman, version="1"),
+    ])
+    glob_mock = mocker.patch("pathlib.Path.glob", return_value=[Path("1.pkg.tar.xz")])
+
+    assert list(executor._archive_lookup(package_ahriman)) == [Path("1.pkg.tar.xz")]
+    glob_mock.assert_called_once_with(f"{package_ahriman.packages[package_ahriman.base].filename}*")
+
+
+def test_archive_lookup_version_mismatch(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must return nothing if no packages found with the same version
+    """
+    mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
+    mocker.patch("pathlib.Path.iterdir", return_value=[
+        Path("1.pkg.tar.zst"),
+    ])
+    mocker.patch("ahriman.models.package.Package.from_archive", return_value=replace(package_ahriman, version="1"))
+
+    assert list(executor._archive_lookup(package_ahriman)) == []
+
+
+def test_archive_lookup_architecture_mismatch(executor: Executor, package_ahriman: Package,
+                                              mocker: MockerFixture) -> None:
+    """
+    must return nothing if architecture doesn't match
+    """
+    package_ahriman.packages[package_ahriman.base].architecture = "x86_64"
+    mocker.patch("ahriman.core.repository.executor.Executor.architecture", return_value="i686")
+    mocker.patch("ahriman.models.repository_paths.RepositoryPaths.preserve_owner")
+    mocker.patch("pathlib.Path.iterdir", return_value=[
+        Path("1.pkg.tar.zst"),
+    ])
+    mocker.patch("ahriman.models.package.Package.from_archive", return_value=package_ahriman)
+
+    assert list(executor._archive_lookup(package_ahriman)) == []
+
+
 def test_archive_rename(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must correctly remove package archive
@@ -45,16 +97,39 @@ def test_package_build(executor: Executor, package_ahriman: Package, mocker: Moc
     mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
     status_client_mock = mocker.patch("ahriman.core.status.Client.set_building")
     init_mock = mocker.patch("ahriman.core.build_tools.task.Task.init", return_value="sha")
+    package_mock = mocker.patch("ahriman.models.package.Package.from_build", return_value=package_ahriman)
+    lookup_mock = mocker.patch("ahriman.core.repository.executor.Executor._archive_lookup", return_value=[])
     with_packages_mock = mocker.patch("ahriman.models.package.Package.with_packages")
     rename_mock = mocker.patch("ahriman.core.repository.executor.atomic_move")
 
     assert executor._package_build(package_ahriman, Path("local"), "packager", None) == "sha"
     status_client_mock.assert_called_once_with(package_ahriman.base)
     init_mock.assert_called_once_with(pytest.helpers.anyvar(int), pytest.helpers.anyvar(int), None)
+    package_mock.assert_called_once_with(Path("local"), executor.architecture, None)
+    lookup_mock.assert_called_once_with(package_ahriman)
     with_packages_mock.assert_called_once_with([Path(package_ahriman.base)], executor.pacman)
     rename_mock.assert_called_once_with(Path(package_ahriman.base), executor.paths.packages / package_ahriman.base)
 
+
+def test_package_build_copy(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
+    """
+    must copy package from archive if there are already built ones
+    """
+    path = package_ahriman.packages[package_ahriman.base].filepath
+    mocker.patch("ahriman.core.build_tools.task.Task.build", return_value=[Path(package_ahriman.base)])
+    mocker.patch("ahriman.core.build_tools.task.Task.init")
+    mocker.patch("ahriman.models.package.Package.from_build", return_value=package_ahriman)
+    mocker.patch("ahriman.core.repository.executor.Executor._archive_lookup", return_value=[path])
+    mocker.patch("ahriman.core.repository.executor.atomic_move")
+    mocker.patch("ahriman.models.package.Package.with_packages")
+    copy_mock = mocker.patch("shutil.copy")
+    rename_mock = mocker.patch("ahriman.core.repository.executor.atomic_move")
+
+    executor._package_build(package_ahriman, Path("local"), "packager", None)
+    copy_mock.assert_called_once_with(path, Path("local"))
+    rename_mock.assert_called_once_with(Path("local") / path, executor.paths.packages / path)
+
+
 def test_package_remove(executor: Executor, package_ahriman: Package, mocker: MockerFixture) -> None:
     """
     must run remove for packages
|  | ||||
@@ -17,6 +17,7 @@ from ahriman.core.utils import (
     dataclass_view,
     enum_values,
     extract_user,
+    filelock,
     filter_json,
     full_version,
     minmax,
@@ -43,47 +44,12 @@ def test_atomic_move(mocker: MockerFixture) -> None:
     """
     must move file with locking
     """
-    lock_mock = mocker.patch("fcntl.flock")
-    open_mock = mocker.patch("pathlib.Path.open", autospec=True)
+    filelock_mock = mocker.patch("ahriman.core.utils.filelock")
     move_mock = mocker.patch("shutil.move")
-    unlink_mock = mocker.patch("pathlib.Path.unlink")
 
     atomic_move(Path("source"), Path("destination"))
-    open_mock.assert_called_once_with(Path(".destination"), "ab")
-    lock_mock.assert_has_calls([
-        MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
-        MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
-    ])
+    filelock_mock.assert_called_once_with(Path("destination"))
     move_mock.assert_called_once_with(Path("source"), Path("destination"))
-    unlink_mock.assert_called_once_with(missing_ok=True)
-
-
-def test_atomic_move_remove_lock(mocker: MockerFixture) -> None:
-    """
-    must remove lock file in case of exception
-    """
-    mocker.patch("pathlib.Path.open", side_effect=Exception)
-    unlink_mock = mocker.patch("pathlib.Path.unlink")
-
-    with pytest.raises(Exception):
-        atomic_move(Path("source"), Path("destination"))
-    unlink_mock.assert_called_once_with(missing_ok=True)
-
-
-def test_atomic_move_unlock(mocker: MockerFixture) -> None:
-    """
-    must unlock file in case of exception
-    """
-    mocker.patch("pathlib.Path.open")
-    mocker.patch("shutil.move", side_effect=Exception)
-    lock_mock = mocker.patch("fcntl.flock")
-
-    with pytest.raises(Exception):
-        atomic_move(Path("source"), Path("destination"))
-    lock_mock.assert_has_calls([
-        MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
-        MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
-    ])
 
 
 def test_check_output(mocker: MockerFixture) -> None:
@@ -305,6 +271,53 @@ def test_extract_user() -> None:
     assert extract_user() == "doas"
 
 
+def test_filelock(mocker: MockerFixture) -> None:
+    """
+    must perform file locking
+    """
+    lock_mock = mocker.patch("fcntl.flock")
+    open_mock = mocker.patch("pathlib.Path.open", autospec=True)
+    unlink_mock = mocker.patch("pathlib.Path.unlink")
+
+    with filelock(Path("local")):
+        pass
+    open_mock.assert_called_once_with(Path(".local"), "ab")
+    lock_mock.assert_has_calls([
+        MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
+        MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
+    ])
+    unlink_mock.assert_called_once_with(missing_ok=True)
+
+
+def test_filelock_remove_lock(mocker: MockerFixture) -> None:
+    """
+    must remove lock file in case of exception
+    """
+    mocker.patch("pathlib.Path.open", side_effect=Exception)
+    unlink_mock = mocker.patch("pathlib.Path.unlink")
+
+    with pytest.raises(Exception):
+        with filelock(Path("local")):
+            pass
+    unlink_mock.assert_called_once_with(missing_ok=True)
+
+
+def test_filelock_unlock(mocker: MockerFixture) -> None:
+    """
+    must unlock file in case of exception
+    """
+    mocker.patch("pathlib.Path.open")
+    lock_mock = mocker.patch("fcntl.flock")
+
+    with pytest.raises(Exception):
+        with filelock(Path("local")):
+            raise Exception
+    lock_mock.assert_has_calls([
+        MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_EX),
+        MockCall(pytest.helpers.anyvar(int), fcntl.LOCK_UN),
+    ])
+
+
 def test_filter_json(package_ahriman: Package) -> None:
     """
     must filter fields by known list
|  | ||||