Mirror of https://github.com/arcan1s/ahriman.git
add pylint integration & fix some pylint warnings
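The pylint wiring itself is not visible in the hunks below, only the warning fixes are. As a rough sketch of what such an integration usually boils down to (the package name ahriman is from this repository; the runner itself is a hypothetical helper, e.g. for a CI step), running the linter and propagating its exit code could look like:

# Hypothetical runner, not part of this commit: lint the package and fail on warnings.
import subprocess
import sys

result = subprocess.run([sys.executable, '-m', 'pylint', 'ahriman'], check=False)
sys.exit(result.returncode)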
@@ -39,7 +39,7 @@ def _call(args: argparse.Namespace, architecture: str, config: Configuration) ->
     :return: True on success, False otherwise
     '''
     try:
-        with Lock(args.lock, architecture, args.force, args.unsafe, config):
+        with Lock(args, architecture, config):
             args.fn(args, architecture, config)
         return True
     except Exception:
@@ -75,8 +75,9 @@ def dump_config(args: argparse.Namespace, architecture: str, config: Configurati
     :param architecture: repository architecture
     :param config: configuration instance
     '''
-    result = config.dump(architecture)
-    for section, values in sorted(result.items()):
+    del args
+    config_dump = config.dump(architecture)
+    for section, values in sorted(config_dump.items()):
         print(f'[{section}]')
         for key, value in sorted(values.items()):
             print(f'{key} = {value}')
@@ -90,6 +91,7 @@ def rebuild(args: argparse.Namespace, architecture: str, config: Configuration)
     :param architecture: repository architecture
     :param config: configuration instance
     '''
+    del args
     app = Application(architecture, config)
     packages = app.repository.packages()
     app.update(packages)
@@ -151,6 +153,7 @@ def web(args: argparse.Namespace, architecture: str, config: Configuration) -> N
     :param architecture: repository architecture
     :param config: configuration instance
     '''
+    del args
     from ahriman.web.web import run_server, setup_service
     application = setup_service(architecture, config)
     run_server(application, architecture)
@@ -221,13 +224,14 @@ if __name__ == '__main__':
     web_parser = subparsers.add_parser('web', description='start web server')
     web_parser.set_defaults(fn=web, lock=None)

-    args = parser.parse_args()
-    if 'fn' not in args:
+    cmd_args = parser.parse_args()
+    if 'fn' not in cmd_args:
         parser.print_help()
-        exit(1)
+        sys.exit(1)

-    config = Configuration.from_path(args.config)
-    with Pool(len(args.architecture)) as pool:
-        result = pool.starmap(_call, [(args, architecture, config) for architecture in args.architecture])
+    configuration = Configuration.from_path(cmd_args.config)
+    with Pool(len(cmd_args.architecture)) as pool:
+        result = pool.starmap(
+            _call, [(cmd_args, architecture, configuration) for architecture in cmd_args.architecture])

     sys.exit(0 if all(result) else 1)
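The entry point keeps its fan-out pattern: every selected architecture is handled by _call in its own pool worker, and the process exits non-zero if any worker reports failure. A minimal, self-contained model of that pattern (the worker and its arguments are illustrative, not ahriman's):

from multiprocessing import Pool
import sys

def run_one(label: str, architecture: str) -> bool:
    # stand-in for _call(): return True on success, False otherwise
    return bool(label) and bool(architecture)

if __name__ == '__main__':
    architectures = ['x86_64', 'i686']
    with Pool(len(architectures)) as pool:
        result = pool.starmap(run_one, [('demo', architecture) for architecture in architectures])
    sys.exit(0 if all(result) else 1)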
@@ -136,15 +136,15 @@ class Application:
         :param no_packages: do not clear directory with built packages
         '''
         if not no_build:
-            self.repository._clear_build()
+            self.repository.clear_build()
         if not no_cache:
-            self.repository._clear_cache()
+            self.repository.clear_cache()
         if not no_chroot:
-            self.repository._clear_chroot()
+            self.repository.clear_chroot()
         if not no_manual:
-            self.repository._clear_manual()
+            self.repository.clear_manual()
         if not no_packages:
-            self.repository._clear_packages()
+            self.repository.clear_packages()

     def remove(self, names: Iterable[str]) -> None:
         '''
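Since the clear helpers are now public, Application.clear simply dispatches to them based on the no_* flags. A hedged usage sketch (module paths and the configuration file location are assumptions, not shown in this diff):

from ahriman.application.application import Application
from ahriman.core.configuration import Configuration

# Illustrative only: wipe everything except the chroot for one architecture.
config = Configuration.from_path('/etc/ahriman.ini')  # placeholder path
app = Application('x86_64', config)
app.clear(no_build=False, no_cache=False, no_chroot=True, no_manual=False, no_packages=False)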
@@ -19,6 +19,7 @@
 #
 from __future__ import annotations

+import argparse
 import os

 from types import TracebackType
@@ -40,19 +41,16 @@ class Lock:
     :ivar unsafe: skip user check
     '''

-    def __init__(self, path: Optional[str], architecture: str, force: bool, unsafe: bool,
-                 config: Configuration) -> None:
+    def __init__(self, args: argparse.Namespace, architecture: str, config: Configuration) -> None:
         '''
         default constructor
-        :param path: optional path to lock file, if empty no file lock will be used
+        :param args: command line args
         :param architecture: repository architecture
-        :param force: remove lock file on start if any
-        :param unsafe: skip user check
         :param config: configuration instance
         '''
-        self.path = f'{path}_{architecture}' if path is not None else None
-        self.force = force
-        self.unsafe = unsafe
+        self.path = f'{args.lock}_{architecture}' if args.lock is not None else None
+        self.force = args.force
+        self.unsafe = args.unsafe

         self.root = config.get('repository', 'root')
         self.reporter = Client.load(architecture, config)
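After this change Lock no longer receives path, force and unsafe individually; it reads lock, force and unsafe straight from the parsed argparse.Namespace that the entry point already holds. A small sketch of the new construction (the namespace here is built by hand purely for illustration):

import argparse

# Fields mirror the attributes the constructor reads: lock, force, unsafe.
args = argparse.Namespace(lock='/tmp/ahriman.lock', force=False, unsafe=False)

# with Lock(args, 'x86_64', config):
#     ...  # the wrapped command runs while the lock is held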
@@ -72,4 +72,3 @@ class Report:
         generate report for the specified packages
         :param packages: list of packages to generate report
         '''
-        pass
@@ -72,35 +72,35 @@ class Repository:
         self.repo = Repo(self.name, self.paths, self.sign.repository_sign_args)
         self.reporter = Client.load(architecture, config)

-    def _clear_build(self) -> None:
+    def clear_build(self) -> None:
         '''
         clear sources directory
         '''
         for package in os.listdir(self.paths.sources):
             shutil.rmtree(os.path.join(self.paths.sources, package))

-    def _clear_cache(self) -> None:
+    def clear_cache(self) -> None:
         '''
         clear cache directory
         '''
         for package in os.listdir(self.paths.cache):
             shutil.rmtree(os.path.join(self.paths.cache, package))

-    def _clear_chroot(self) -> None:
+    def clear_chroot(self) -> None:
         '''
         clear cache directory. Warning: this method is architecture independent and will clear every chroot
         '''
         for chroot in os.listdir(self.paths.chroot):
             shutil.rmtree(os.path.join(self.paths.chroot, chroot))

-    def _clear_manual(self) -> None:
+    def clear_manual(self) -> None:
         '''
         clear directory with manual package updates
         '''
         for package in os.listdir(self.paths.manual):
             shutil.rmtree(os.path.join(self.paths.manual, package))

-    def _clear_packages(self) -> None:
+    def clear_packages(self) -> None:
         '''
         clear directory with built packages (NOT repository itself)
         '''
@@ -157,7 +157,7 @@ class Repository:
                 self.reporter.set_failed(package.base)
                 self.logger.exception(f'{package.base} ({self.architecture}) build exception', exc_info=True)
                 continue
-        self._clear_build()
+        self.clear_build()

         return self.packages_built()
@@ -226,7 +226,7 @@ class Repository:
             except Exception:
                 self.reporter.set_failed(local.base)
                 self.logger.exception(f'could not process {package}', exc_info=True)
-        self._clear_packages()
+        self.clear_packages()

         return self.repo.repo_path
@@ -280,6 +280,6 @@ class Repository:
                 self.reporter.set_pending(local.base)
             except Exception:
                 self.logger.exception(f'could not add package from {fn}', exc_info=True)
-        self._clear_manual()
+        self.clear_manual()

         return result
@@ -59,6 +59,16 @@ class GPG:
             return []
         return ['--sign', '--key', self.default_key]

+    @staticmethod
+    def sign_cmd(path: str, key: str) -> List[str]:
+        '''
+        gpg command to run
+        :param path: path to file to sign
+        :param key: PGP key ID
+        :return: gpg command with all required arguments
+        '''
+        return ['gpg', '-u', key, '-b', path]
+
     def process(self, path: str, key: str) -> List[str]:
         '''
         gpg command wrapper
@@ -67,21 +77,12 @@ class GPG:
         :return: list of generated files including original file
         '''
         check_output(
-            *self.sign_cmd(path, key),
+            *GPG.sign_cmd(path, key),
             exception=BuildFailed(path),
             cwd=os.path.dirname(path),
             logger=self.logger)
         return [path, f'{path}.sig']

-    def sign_cmd(self, path: str, key: str) -> List[str]:
-        '''
-        gpg command to run
-        :param path: path to file to sign
-        :param key: PGP key ID
-        :return: gpg command with all required arguments
-        '''
-        return ['gpg', '-u', key, '-b', path]
-
     def sign_package(self, path: str, base: str) -> List[str]:
         '''
         sign package if required by configuration
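sign_cmd does not touch any instance state, so it becomes a @staticmethod and process() calls it through the class. The command list can then be inspected without constructing a GPG instance (the module path, file name and key ID below are assumptions):

from ahriman.core.sign.gpg import GPG

cmd = GPG.sign_cmd('/repo/x86_64/demo.pkg.tar.xz', 'ABCDEF01')  # placeholder file and key
assert cmd == ['gpg', '-u', 'ABCDEF01', '-b', '/repo/x86_64/demo.pkg.tar.xz']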
@@ -92,7 +92,7 @@ class Tree:
         '''
         result: List[List[Package]] = []

-        unprocessed = [leaf for leaf in self.leaves]
+        unprocessed = self.leaves[:]
         while unprocessed:
             result.append([leaf.package for leaf in unprocessed if leaf.is_root(unprocessed)])
             unprocessed = [leaf for leaf in unprocessed if not leaf.is_root(unprocessed)]
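The loop above peels the dependency tree into build levels: each pass keeps the leaves whose dependencies are already satisfied within the remaining set and drops them before the next pass. A self-contained toy model of the same idea, using plain dicts instead of the Leaf and Package classes (names are illustrative):

from typing import Dict, List, Set

def levels(depends_on: Dict[str, Set[str]]) -> List[List[str]]:
    # depends_on maps a package to the packages it still waits for within the same set
    unprocessed = set(depends_on)
    result: List[List[str]] = []
    while unprocessed:
        ready = [name for name in unprocessed if not (depends_on[name] & (unprocessed - {name}))]
        if not ready:  # cyclic dependencies, bail out instead of looping forever
            raise ValueError('dependency cycle')
        result.append(sorted(ready))
        unprocessed -= set(ready)
    return result

print(levels({'a': set(), 'b': {'a'}, 'c': {'a', 'b'}}))  # [['a'], ['b'], ['c']]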
@@ -72,4 +72,3 @@ class Uploader:
         sync data to remote server
         :param path: local path to sync
         '''
-        pass
@@ -79,16 +79,16 @@ def pretty_size(size: Optional[float], level: int = 0) -> str:
     def str_level() -> str:
         if level == 0:
             return 'B'
-        elif level == 1:
+        if level == 1:
             return 'KiB'
-        elif level == 2:
+        if level == 2:
             return 'MiB'
-        elif level == 3:
+        if level == 3:
             return 'GiB'
         raise InvalidOption(level)  # I hope it will not be more than 1024 GiB

     if size is None:
         return ''
-    elif size < 1024:
+    if size < 1024:
         return f'{round(size, 2)} {str_level()}'
     return pretty_size(size / 1024, level + 1)
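With the elif chains flattened, pretty_size still recurses: divide by 1024 and bump the unit level until the value drops below 1024, raising InvalidOption past GiB. A simplified, self-contained restatement of that logic (not an import of the real helper, and it stops at GiB instead of raising):

def pretty_size_demo(size: float, level: int = 0) -> str:
    units = ['B', 'KiB', 'MiB', 'GiB']
    if size < 1024 or level == len(units) - 1:
        return f'{round(size, 2)} {units[level]}'
    return pretty_size_demo(size / 1024, level + 1)

print(pretty_size_demo(42))               # 42 B
print(pretty_size_demo(3 * 1024 * 1024))  # 3.0 MiB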
@@ -35,14 +35,12 @@ class Client:
         :param package: package properties
         :param status: current package build status
         '''
-        pass

     def remove(self, base: str) -> None:
         '''
         remove packages from watcher
         :param base: basename to remove
         '''
-        pass

     def update(self, base: str, status: BuildStatusEnum) -> None:
         '''
@@ -50,14 +48,12 @@ class Client:
         :param base: package base to update
         :param status: current package build status
         '''
-        pass

     def update_self(self, status: BuildStatusEnum) -> None:
         '''
         update ahriman status itself
         :param status: current ahriman status
         '''
-        pass

     def set_building(self, base: str) -> None:
         '''
@@ -50,7 +50,7 @@ class Watcher:
         '''
         :return: list of packages together with their statuses
         '''
-        return [pair for pair in self.known.values()]
+        return list(self.known.values())

     def get(self, base: str) -> Tuple[Package, BuildStatus]:
         '''
@@ -113,4 +113,4 @@ class WebClient(Client):
             response = requests.post(self._ahriman_url(), json=payload)
             response.raise_for_status()
         except Exception:
-            self.logger.exception(f'could not update service status', exc_info=True)
+            self.logger.exception('could not update service status', exc_info=True)
@@ -46,11 +46,11 @@ class BuildStatusEnum(Enum):
         '''
         if self == BuildStatusEnum.Pending:
             return 'yellow'
-        elif self == BuildStatusEnum.Building:
+        if self == BuildStatusEnum.Building:
             return 'yellow'
-        elif self == BuildStatusEnum.Failed:
+        if self == BuildStatusEnum.Failed:
             return 'critical'
-        elif self == BuildStatusEnum.Success:
+        if self == BuildStatusEnum.Success:
             return 'success'
         return 'inactive'

@@ -19,9 +19,8 @@
 #
 from __future__ import annotations

-import logging
-
 import aur  # type: ignore
+import logging
 import os

 from dataclasses import dataclass
@@ -95,12 +94,12 @@ class Package:
         # update pkgver first
         check_output('makepkg', '--nodeps', '--nobuild', exception=None, cwd=clone_dir, logger=logger)
         # generate new .SRCINFO and put it to parser
-        src_info_source = check_output('makepkg', '--printsrcinfo', exception=None, cwd=clone_dir, logger=logger)
-        src_info, errors = parse_srcinfo(src_info_source)
+        srcinfo_source = check_output('makepkg', '--printsrcinfo', exception=None, cwd=clone_dir, logger=logger)
+        srcinfo, errors = parse_srcinfo(srcinfo_source)
         if errors:
             raise InvalidPackageInfo(errors)

-        return self.full_version(src_info.get('epoch'), src_info['pkgver'], src_info['pkgrel'])
+        return self.full_version(srcinfo.get('epoch'), srcinfo['pkgver'], srcinfo['pkgrel'])

     @classmethod
     def from_archive(cls: Type[Package], path: str, pacman: Pacman, aur_url: str) -> Package:
@@ -134,14 +133,14 @@ class Package:
         :param aur_url: AUR root url
         :return: package properties
         '''
-        with open(os.path.join(path, '.SRCINFO')) as fn:
-            src_info, errors = parse_srcinfo(fn.read())
+        with open(os.path.join(path, '.SRCINFO')) as srcinfo_file:
+            srcinfo, errors = parse_srcinfo(srcinfo_file.read())
         if errors:
             raise InvalidPackageInfo(errors)
-        packages = {key: PackageDescription() for key in src_info['packages'].keys()}
-        version = cls.full_version(src_info.get('epoch'), src_info['pkgver'], src_info['pkgrel'])
+        packages = {key: PackageDescription() for key in srcinfo['packages']}
+        version = cls.full_version(srcinfo.get('epoch'), srcinfo['pkgver'], srcinfo['pkgrel'])

-        return cls(src_info['pkgbase'], version, aur_url, packages)
+        return cls(srcinfo['pkgbase'], version, aur_url, packages)

     @staticmethod
     def dependencies(path: str) -> Set[str]:
@@ -150,17 +149,17 @@ class Package:
         :param path: path to package sources directory
         :return: list of package dependencies including makedepends array, but excluding packages from this base
         '''
-        with open(os.path.join(path, '.SRCINFO')) as fn:
-            src_info, errors = parse_srcinfo(fn.read())
+        with open(os.path.join(path, '.SRCINFO')) as srcinfo_file:
+            srcinfo, errors = parse_srcinfo(srcinfo_file.read())
         if errors:
             raise InvalidPackageInfo(errors)
-        makedepends = src_info.get('makedepends', [])
+        makedepends = srcinfo.get('makedepends', [])
         # sum over each package
-        depends: List[str] = src_info.get('depends', [])
-        for package in src_info['packages'].values():
+        depends: List[str] = srcinfo.get('depends', [])
+        for package in srcinfo['packages'].values():
             depends.extend(package.get('depends', []))
         # we are not interested in dependencies inside pkgbase
-        packages = set(src_info['packages'].keys())
+        packages = set(srcinfo['packages'].keys())
         return set(depends + makedepends) - packages

     @staticmethod
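The src_info to srcinfo renames only satisfy pylint's naming checks; the flow stays the same: read .SRCINFO, parse it with parse_srcinfo, then combine epoch, pkgver and pkgrel. Assuming full_version follows the usual Arch [epoch:]pkgver-pkgrel convention (the method body is not shown in these hunks), the version assembly amounts to:

from typing import Optional

def full_version_demo(epoch: Optional[str], pkgver: str, pkgrel: str) -> str:
    # [epoch:]pkgver-pkgrel, the conventional Arch Linux package version format
    prefix = f'{epoch}:' if epoch else ''
    return f'{prefix}{pkgver}-{pkgrel}'

print(full_version_demo(None, '1.2.3', '1'))  # 1.2.3-1
print(full_version_demo('1', '1.2.3', '1'))   # 1:1.2.3-1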
@@ -43,6 +43,6 @@ class SignSettings(Enum):
         '''
         if value.lower() in ('package', 'packages', 'sign-package'):
             return SignSettings.SignPackages
-        elif value.lower() in ('repository', 'sign-repository'):
+        if value.lower() in ('repository', 'sign-repository'):
             return SignSettings.SignRepository
         raise InvalidOption(value)
@@ -43,6 +43,6 @@ class UploadSettings(Enum):
         '''
         if value.lower() in ('rsync',):
             return UploadSettings.Rsync
-        elif value.lower() in ('s3',):
+        if value.lower() in ('s3',):
             return UploadSettings.S3
         raise InvalidOption(value)
@@ -51,7 +51,7 @@ class IndexView(BaseView):
         packages = [
             {
                 'base': package.base,
-                'packages': [p for p in sorted(package.packages)],
+                'packages': list(sorted(package.packages)),
                 'status': status.status.value,
                 'timestamp': pretty_datetime(status.timestamp),
                 'version': package.version,