completely remove makepkg calls

This commit is contained in:
Evgenii Alekseev 2024-09-11 02:57:37 +03:00
parent abc8df8ef3
commit e53f744f9a
7 changed files with 63 additions and 47 deletions

View File

@ -157,3 +157,25 @@ class Task(LazyLogging):
patch.write(sources_dir / "PKGBUILD")
return last_commit_sha
def setup(self, sources_dir: Path) -> None:
    """
    setup chroot environment without building the package itself. This function is, in particular, useful
    in case it is required to refresh pkgver to the actual value without building the package

    Args:
        sources_dir(Path): path to where sources are

    Raises:
        BuildError: if the underlying build command returns a non-zero exit code
        (raised by :func:`check_output` via the ``exception`` argument)
    """
    # devtools build command with the chroot root directory, e.g. extra-x86_64-build -r <chroot>
    command = [self.build_command, "-r", str(self.paths.chroot)]
    command.extend(self.archbuild_flags)
    # each "--" separator forwards the following flags one level down the tool chain:
    # archbuild -> makechrootpkg -> makepkg (per the attribute names)
    command.extend(["--"] + self.makechrootpkg_flags)
    # --nobuild is passed to makepkg so that sources are prepared (and pkgver refreshed
    # for VCS packages) without performing the actual build
    command.extend(["--"] + self.makepkg_flags + ["--nobuild"])
    self.logger.info("using %s for %s", command, self.package.base)
    check_output(
        *command,
        exception=BuildError.from_process(self.package.base),
        cwd=sources_dir,
        logger=self.logger,
        user=self.uid,  # run as the unprivileged repository owner, not the daemon user
    )

View File

@ -58,7 +58,7 @@ class PackageInfo(RepositoryProperties):
# force version to max of them
self.logger.warning("version of %s differs, found %s and %s",
current.base, current.version, local.version)
if current.is_outdated(local, self.paths, calculate_version=False):
if current.is_outdated(local, self.configuration, calculate_version=False):
current.version = local.version
current.packages.update(local.packages)
except Exception:

View File

@ -51,7 +51,6 @@ class RepositoryProperties(EventLogger, LazyLogging):
scan_paths(ScanPaths): scan paths for the implicit dependencies
sign(GPG): GPG wrapper instance
triggers(TriggerLoader): triggers holder
vcs_allowed_age(int): maximal age of the VCS packages before they will be checked
"""
def __init__(self, repository_id: RepositoryId, configuration: Configuration, database: SQLite, *, report: bool,
@ -68,8 +67,6 @@ class RepositoryProperties(EventLogger, LazyLogging):
self.configuration = configuration
self.database = database
self.vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0)
self.paths: RepositoryPaths = configuration.repository_paths # additional workaround for pycharm typing
self.ignore_list = configuration.getlist("build", "ignore_packages", fallback=[])

View File

@ -67,10 +67,7 @@ class UpdateHandler(PackageInfo, Cleaner):
try:
remote = load_remote(local)
if local.is_outdated(
remote, self.paths,
vcs_allowed_age=self.vcs_allowed_age,
calculate_version=vcs):
if local.is_outdated(remote, self.configuration, calculate_version=vcs):
self.reporter.set_pending(local.base)
self.event(local.base, EventType.PackageOutdated, "Remote version is newer than local")
result.append(remote)
@ -156,9 +153,7 @@ class UpdateHandler(PackageInfo, Cleaner):
if local.remote.is_remote:
continue # avoid checking AUR packages
if local.is_outdated(remote, self.paths,
vcs_allowed_age=self.vcs_allowed_age,
calculate_version=vcs):
if local.is_outdated(remote, self.configuration, calculate_version=vcs):
self.reporter.set_pending(local.base)
self.event(local.base, EventType.PackageOutdated, "Locally pulled sources are outdated")
result.append(remote)

View File

@ -26,20 +26,18 @@ from collections.abc import Callable, Generator, Iterable
from dataclasses import dataclass
from pathlib import Path
from pyalpm import vercmp # type: ignore[import-not-found]
from srcinfo.parse import parse_srcinfo # type: ignore[import-untyped]
from typing import Any, Self
from urllib.parse import urlparse
from ahriman.core.alpm.pacman import Pacman
from ahriman.core.alpm.remote import AUR, Official, OfficialSyncdb
from ahriman.core.exceptions import PackageInfoError
from ahriman.core.configuration import Configuration
from ahriman.core.log import LazyLogging
from ahriman.core.utils import check_output, dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow
from ahriman.core.utils import dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow
from ahriman.models.package_description import PackageDescription
from ahriman.models.package_source import PackageSource
from ahriman.models.pkgbuild import Pkgbuild
from ahriman.models.remote_source import RemoteSource
from ahriman.models.repository_paths import RepositoryPaths
@dataclass(kw_only=True)
@ -415,39 +413,43 @@ class Package(LazyLogging):
return sorted(set(generator()))
def actual_version(self, paths: RepositoryPaths) -> str:
def actual_version(self, configuration: Configuration) -> str:
"""
additional method to handle VCS package versions
Args:
paths(RepositoryPaths): repository paths instance
configuration(Configuration): configuration instance
Returns:
str: package version if package is not VCS and current version according to VCS otherwise
Raises:
PackageInfoError: if there are parsing errors
"""
if not self.is_vcs:
return self.version
from ahriman.core.build_tools.sources import Sources
from ahriman.core.build_tools.task import Task
Sources.load(paths.cache_for(self.base), self, [], paths)
_, repository_id = configuration.check_loaded()
paths = configuration.repository_paths
task = Task(self, configuration, repository_id.architecture, paths)
try:
# update pkgver first
check_output("makepkg", "--nodeps", "--nobuild", cwd=paths.cache_for(self.base), logger=self.logger)
# generate new .SRCINFO and put it to parser
srcinfo_source = check_output("makepkg", "--printsrcinfo",
cwd=paths.cache_for(self.base), logger=self.logger)
srcinfo, errors = parse_srcinfo(srcinfo_source)
if errors:
raise PackageInfoError(errors)
# create fresh chroot environment, fetch sources and - automagically - update PKGBUILD
task.init(paths.cache_for(self.base), [], None)
task.setup(paths.cache_for(self.base))
return full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"])
pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD")
return full_version(
pkgbuild.get_as("epoch", str, default=None),
pkgbuild.get_as("pkgver", str),
pkgbuild.get_as("pkgrel", str),
)
except Exception:
self.logger.exception("cannot determine version of VCS package, make sure that VCS tools are installed")
self.logger.exception("cannot determine version of VCS package")
finally:
# clear log files generated by devtools
for log_file in paths.cache_for(self.base).glob("*.log"):
log_file.unlink()
return self.version
@ -502,26 +504,25 @@ class Package(LazyLogging):
if package.build_date is not None
)
def is_outdated(self, remote: Package, paths: RepositoryPaths, *,
vcs_allowed_age: float | int = 0,
def is_outdated(self, remote: Package, configuration: Configuration, *,
calculate_version: bool = True) -> bool:
"""
check if package is out-of-dated
Args:
remote(Package): package properties from remote source
paths(RepositoryPaths): repository paths instance. Required for VCS packages cache
vcs_allowed_age(float | int, optional): max age of the built packages before they will be
forced to calculate actual version (Default value = 0)
configuration(Configuration): configuration instance
calculate_version(bool, optional): expand version to actual value (by calculating git versions)
(Default value = True)
Returns:
bool: ``True`` if the package is out-of-dated and ``False`` otherwise
"""
vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0)
min_vcs_build_date = utcnow().timestamp() - vcs_allowed_age
if calculate_version and not self.is_newer_than(min_vcs_build_date):
remote_version = remote.actual_version(paths)
remote_version = remote.actual_version(configuration)
else:
remote_version = remote.version

View File

@ -65,10 +65,10 @@ class Pkgbuild(Mapping[str, str | list[str]]):
fields: dict[str, PkgbuildPatch]
_ARRAY_ASSIGNMENT_REGEX = re.compile(r"^(?P<key>\w+)=$")
_STRING_ASSIGNMENT_REGEX = re.compile(r"^(?P<key>\w+)=(?P<value>.+)$")
# in addition functions can have dash to usual assignment
_FUNCTION_DECLARATION_REGEX = re.compile(r"^(?P<key>[\w-]+)$")
_ARRAY_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=$")
_STRING_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=(?P<value>.+)$")
# in addition to the usual assignment pattern, function names can also contain dashes
_FUNCTION_DECLARATION = re.compile(r"^(?P<key>[\w-]+)$")
@property
def variables(self) -> dict[str, str]:
@ -201,20 +201,20 @@ class Pkgbuild(Mapping[str, str | list[str]]):
StopIteration: if iteration reaches the end of the file
"""
# simple assignment rule
if (match := Pkgbuild._STRING_ASSIGNMENT_REGEX.match(token)) is not None:
if (match := Pkgbuild._STRING_ASSIGNMENT.match(token)) is not None:
key = match.group("key")
value = match.group("value")
return key, PkgbuildPatch(key, value)
match parser.get_token():
# array processing. Arrays will be sent as "key=", "(", values, ")"
case PkgbuildToken.ArrayStarts if (match := Pkgbuild._ARRAY_ASSIGNMENT_REGEX.match(token)) is not None:
case PkgbuildToken.ArrayStarts if (match := Pkgbuild._ARRAY_ASSIGNMENT.match(token)) is not None:
key = match.group("key")
value = Pkgbuild._parse_array(parser)
return key, PkgbuildPatch(key, value)
# functions processing. Function will be sent as "name", "()", "{", body, "}"
case PkgbuildToken.FunctionDeclaration if Pkgbuild._FUNCTION_DECLARATION_REGEX.match(token):
case PkgbuildToken.FunctionDeclaration if Pkgbuild._FUNCTION_DECLARATION.match(token):
key = f"{token}{PkgbuildToken.FunctionDeclaration}"
value = Pkgbuild._parse_function(parser)
return token, PkgbuildPatch(key, value) # this is not mistake, assign to token without ()
@ -234,7 +234,7 @@ class Pkgbuild(Mapping[str, str | list[str]]):
Args:
key(str): key name
return_type(type[T]): return type, either ``str`` or ``list[str]``
default(U): default value to return if no key found
default(U, optional): default value to return if no key found
Returns:
T | U: value associated with key or default value if no value found and fallback is provided

View File

@ -176,7 +176,8 @@ class PkgbuildPatch:
variables(dict[str, str]): map of variables available for usage
Returns:
str | list[str]: substituted value. All unknown variables will remain the same
str | list[str]: substituted value. All unknown variables will remain as links to their values.
This function doesn't support recursive substitution
"""
if isinstance(self.value, str):
return Template(self.value).safe_substitute(variables)