mirror of https://github.com/arcan1s/ahriman.git, synced 2025-04-28 01:07:18 +00:00
completely remove makepkg calls
This commit is contained in: parent abc8df8ef3, commit e53f744f9a
@@ -157,3 +157,25 @@ class Task(LazyLogging):
             patch.write(sources_dir / "PKGBUILD")

         return last_commit_sha
+
+    def setup(self, sources_dir: Path) -> None:
+        """
+        setup chroot environment without building package itself. This function, in particular, useful in case if it is
+        required to refresh pkgver to the actual value without package building
+
+        Args:
+            sources_dir(Path): path to where sources are
+        """
+        command = [self.build_command, "-r", str(self.paths.chroot)]
+        command.extend(self.archbuild_flags)
+        command.extend(["--"] + self.makechrootpkg_flags)
+        command.extend(["--"] + self.makepkg_flags + ["--nobuild"])
+        self.logger.info("using %s for %s", command, self.package.base)
+
+        check_output(
+            *command,
+            exception=BuildError.from_process(self.package.base),
+            cwd=sources_dir,
+            logger=self.logger,
+            user=self.uid,
+        )
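For orientation, here is a sketch of the command line the new setup() assembles. The build command, chroot path and the three flag lists all come from configuration; the concrete values below are purely illustrative:

build_command = "extra-x86_64-build"          # hypothetical devtools wrapper taken from configuration
chroot = "/var/lib/ahriman/chroot/aur-clone"  # hypothetical chroot path (self.paths.chroot)
archbuild_flags: list[str] = []
makechrootpkg_flags: list[str] = []
makepkg_flags: list[str] = []

command = [build_command, "-r", chroot]
command.extend(archbuild_flags)
command.extend(["--"] + makechrootpkg_flags)
command.extend(["--"] + makepkg_flags + ["--nobuild"])
print(" ".join(command))  # extra-x86_64-build -r /var/lib/ahriman/chroot/aur-clone -- -- --nobuild

Everything after the second "--" is handed through to makepkg, and "--nobuild" makes it stop after preparing sources, which is exactly what is needed to refresh pkgver without building the package.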
@@ -58,7 +58,7 @@ class PackageInfo(RepositoryProperties):
                     # force version to max of them
                     self.logger.warning("version of %s differs, found %s and %s",
                                         current.base, current.version, local.version)
-                    if current.is_outdated(local, self.paths, calculate_version=False):
+                    if current.is_outdated(local, self.configuration, calculate_version=False):
                         current.version = local.version
                     current.packages.update(local.packages)
             except Exception:
@@ -51,7 +51,6 @@ class RepositoryProperties(EventLogger, LazyLogging):
         scan_paths(ScanPaths): scan paths for the implicit dependencies
         sign(GPG): GPG wrapper instance
         triggers(TriggerLoader): triggers holder
-        vcs_allowed_age(int): maximal age of the VCS packages before they will be checked
     """

     def __init__(self, repository_id: RepositoryId, configuration: Configuration, database: SQLite, *, report: bool,
@@ -68,8 +67,6 @@ class RepositoryProperties(EventLogger, LazyLogging):
         self.configuration = configuration
         self.database = database

-        self.vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0)
-
         self.paths: RepositoryPaths = configuration.repository_paths  # additional workaround for pycharm typing

         self.ignore_list = configuration.getlist("build", "ignore_packages", fallback=[])
@@ -67,10 +67,7 @@ class UpdateHandler(PackageInfo, Cleaner):
             try:
                 remote = load_remote(local)

-                if local.is_outdated(
-                        remote, self.paths,
-                        vcs_allowed_age=self.vcs_allowed_age,
-                        calculate_version=vcs):
+                if local.is_outdated(remote, self.configuration, calculate_version=vcs):
                     self.reporter.set_pending(local.base)
                     self.event(local.base, EventType.PackageOutdated, "Remote version is newer than local")
                     result.append(remote)
@@ -156,9 +153,7 @@ class UpdateHandler(PackageInfo, Cleaner):
                 if local.remote.is_remote:
                     continue  # avoid checking AUR packages

-                if local.is_outdated(remote, self.paths,
-                                     vcs_allowed_age=self.vcs_allowed_age,
-                                     calculate_version=vcs):
+                if local.is_outdated(remote, self.configuration, calculate_version=vcs):
                     self.reporter.set_pending(local.base)
                     self.event(local.base, EventType.PackageOutdated, "Locally pulled sources are outdated")
                     result.append(remote)
@@ -26,20 +26,18 @@ from collections.abc import Callable, Generator, Iterable
 from dataclasses import dataclass
 from pathlib import Path
 from pyalpm import vercmp  # type: ignore[import-not-found]
-from srcinfo.parse import parse_srcinfo  # type: ignore[import-untyped]
 from typing import Any, Self
 from urllib.parse import urlparse

 from ahriman.core.alpm.pacman import Pacman
 from ahriman.core.alpm.remote import AUR, Official, OfficialSyncdb
-from ahriman.core.exceptions import PackageInfoError
+from ahriman.core.configuration import Configuration
 from ahriman.core.log import LazyLogging
-from ahriman.core.utils import check_output, dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow
+from ahriman.core.utils import dataclass_view, full_version, parse_version, srcinfo_property_list, utcnow
 from ahriman.models.package_description import PackageDescription
 from ahriman.models.package_source import PackageSource
 from ahriman.models.pkgbuild import Pkgbuild
 from ahriman.models.remote_source import RemoteSource
-from ahriman.models.repository_paths import RepositoryPaths


 @dataclass(kw_only=True)
@@ -415,39 +413,43 @@ class Package(LazyLogging):

         return sorted(set(generator()))

-    def actual_version(self, paths: RepositoryPaths) -> str:
+    def actual_version(self, configuration: Configuration) -> str:
         """
         additional method to handle VCS package versions

         Args:
-            paths(RepositoryPaths): repository paths instance
+            configuration(Configuration): configuration instance

         Returns:
             str: package version if package is not VCS and current version according to VCS otherwise
-
-        Raises:
-            PackageInfoError: if there are parsing errors
         """
         if not self.is_vcs:
             return self.version

-        from ahriman.core.build_tools.sources import Sources
+        from ahriman.core.build_tools.task import Task

-        Sources.load(paths.cache_for(self.base), self, [], paths)
+        _, repository_id = configuration.check_loaded()
+        paths = configuration.repository_paths
+        task = Task(self, configuration, repository_id.architecture, paths)

         try:
-            # update pkgver first
-            check_output("makepkg", "--nodeps", "--nobuild", cwd=paths.cache_for(self.base), logger=self.logger)
-            # generate new .SRCINFO and put it to parser
-            srcinfo_source = check_output("makepkg", "--printsrcinfo",
-                                          cwd=paths.cache_for(self.base), logger=self.logger)
-            srcinfo, errors = parse_srcinfo(srcinfo_source)
-            if errors:
-                raise PackageInfoError(errors)
+            # create fresh chroot environment, fetch sources and - automagically - update PKGBUILD
+            task.init(paths.cache_for(self.base), [], None)
+            task.setup(paths.cache_for(self.base))

-            return full_version(srcinfo.get("epoch"), srcinfo["pkgver"], srcinfo["pkgrel"])
+            pkgbuild = Pkgbuild.from_file(paths.cache_for(self.base) / "PKGBUILD")
+
+            return full_version(
+                pkgbuild.get_as("epoch", str, default=None),
+                pkgbuild.get_as("pkgver", str),
+                pkgbuild.get_as("pkgrel", str),
+            )
         except Exception:
-            self.logger.exception("cannot determine version of VCS package, make sure that VCS tools are installed")
+            self.logger.exception("cannot determine version of VCS package")
+        finally:
+            # clear log files generated by devtools
+            for log_file in paths.cache_for(self.base).glob("*.log"):
+                log_file.unlink()

         return self.version

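The rewritten actual_version() reads the version straight from the PKGBUILD that setup() has refreshed, instead of generating and parsing .SRCINFO. As a reminder of what full_version() is expected to produce (the outputs shown in comments follow the usual [epoch:]pkgver-pkgrel form):

from ahriman.core.utils import full_version

print(full_version(None, "1.2.3", "1"))  # 1.2.3-1
print(full_version("2", "1.2.3", "1"))   # 2:1.2.3-1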
@@ -502,26 +504,25 @@ class Package(LazyLogging):
             if package.build_date is not None
         )

-    def is_outdated(self, remote: Package, paths: RepositoryPaths, *,
-                    vcs_allowed_age: float | int = 0,
+    def is_outdated(self, remote: Package, configuration: Configuration, *,
                     calculate_version: bool = True) -> bool:
         """
         check if package is out-of-dated

         Args:
             remote(Package): package properties from remote source
-            paths(RepositoryPaths): repository paths instance. Required for VCS packages cache
-            vcs_allowed_age(float | int, optional): max age of the built packages before they will be
-                forced to calculate actual version (Default value = 0)
+            configuration(Configuration): configuration instance
             calculate_version(bool, optional): expand version to actual value (by calculating git versions)
                 (Default value = True)

         Returns:
             bool: ``True`` if the package is out-of-dated and ``False`` otherwise
         """
+        vcs_allowed_age = configuration.getint("build", "vcs_allowed_age", fallback=0)
         min_vcs_build_date = utcnow().timestamp() - vcs_allowed_age

         if calculate_version and not self.is_newer_than(min_vcs_build_date):
-            remote_version = remote.actual_version(paths)
+            remote_version = remote.actual_version(configuration)
         else:
             remote_version = remote.version

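With the threshold read at call time, callers no longer thread vcs_allowed_age through constructors; it is taken from the [build] section of the service configuration. A hypothetical configuration excerpt (the section and option names match the getint() call above, the value itself is only an example, the fallback is 0):

[build]
; trust the locally built VCS package version for a week before recalculating pkgver
vcs_allowed_age = 604800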
@@ -65,10 +65,10 @@ class Pkgbuild(Mapping[str, str | list[str]]):

     fields: dict[str, PkgbuildPatch]

-    _ARRAY_ASSIGNMENT_REGEX = re.compile(r"^(?P<key>\w+)=$")
-    _STRING_ASSIGNMENT_REGEX = re.compile(r"^(?P<key>\w+)=(?P<value>.+)$")
-    # in addition functions can have dash to usual assignment
-    _FUNCTION_DECLARATION_REGEX = re.compile(r"^(?P<key>[\w-]+)$")
+    _ARRAY_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=$")
+    _STRING_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=(?P<value>.+)$")
+    # in addition, functions can have dash to usual assignment
+    _FUNCTION_DECLARATION = re.compile(r"^(?P<key>[\w-]+)$")

     @property
     def variables(self) -> dict[str, str]:
@@ -201,20 +201,20 @@ class Pkgbuild(Mapping[str, str | list[str]]):
             StopIteration: if iteration reaches the end of the file'
         """
         # simple assignment rule
-        if (match := Pkgbuild._STRING_ASSIGNMENT_REGEX.match(token)) is not None:
+        if (match := Pkgbuild._STRING_ASSIGNMENT.match(token)) is not None:
             key = match.group("key")
             value = match.group("value")
             return key, PkgbuildPatch(key, value)

         match parser.get_token():
             # array processing. Arrays will be sent as "key=", "(", values, ")"
-            case PkgbuildToken.ArrayStarts if (match := Pkgbuild._ARRAY_ASSIGNMENT_REGEX.match(token)) is not None:
+            case PkgbuildToken.ArrayStarts if (match := Pkgbuild._ARRAY_ASSIGNMENT.match(token)) is not None:
                 key = match.group("key")
                 value = Pkgbuild._parse_array(parser)
                 return key, PkgbuildPatch(key, value)

             # functions processing. Function will be sent as "name", "()", "{", body, "}"
-            case PkgbuildToken.FunctionDeclaration if Pkgbuild._FUNCTION_DECLARATION_REGEX.match(token):
+            case PkgbuildToken.FunctionDeclaration if Pkgbuild._FUNCTION_DECLARATION.match(token):
                 key = f"{token}{PkgbuildToken.FunctionDeclaration}"
                 value = Pkgbuild._parse_function(parser)
                 return token, PkgbuildPatch(key, value)  # this is not mistake, assign to token without ()
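The rename only drops the _REGEX suffix; the patterns themselves are unchanged. A short sketch of what each of them is expected to match, following the tokenization comments in the hunk above (the sample tokens are illustrative):

import re

_STRING_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=(?P<value>.+)$")
_ARRAY_ASSIGNMENT = re.compile(r"^(?P<key>\w+)=$")
_FUNCTION_DECLARATION = re.compile(r"^(?P<key>[\w-]+)$")

assert _STRING_ASSIGNMENT.match("pkgrel=1")                 # plain key=value assignment
assert _ARRAY_ASSIGNMENT.match("depends=")                  # arrays arrive as "key=" followed by "(" and the values
assert _FUNCTION_DECLARATION.match("package_ahriman-core")  # function names may additionally contain a dash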
@@ -234,7 +234,7 @@ class Pkgbuild(Mapping[str, str | list[str]]):
         Args:
             key(str): key name
             return_type(type[T]): return type, either ``str`` or ``list[str]``
-            default(U): default value to return if no key found
+            default(U, optional): default value to return if no key found

         Returns:
             T | U: value associated with key or default value if no value found and fallback is provided
@@ -176,7 +176,8 @@ class PkgbuildPatch:
             variables(dict[str, str]): map of variables available for usage

         Returns:
-            str | list[str]: substituted value. All unknown variables will remain the same
+            str | list[str]: substituted value. All unknown variables will remain as links to their values.
+                This function doesn't support recursive substitution
         """
         if isinstance(self.value, str):
             return Template(self.value).safe_substitute(variables)
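The clarified docstring matches the behaviour of string.Template.safe_substitute(), which substitute() delegates to for string values: unknown placeholders are kept verbatim instead of raising, and no recursive expansion happens:

from string import Template

variables = {"pkgver": "1.2.3"}
print(Template("$pkgver-$pkgrel").safe_substitute(variables))  # 1.2.3-$pkgrel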