forked from Mirror/pmbootstrap
pmb: Reformat with Ruff 0.9 (MR 2525)
See https://astral.sh/blog/ruff-v0.9.0
parent 2be49f8caf
commit 3061e702ab
48 changed files with 114 additions and 145 deletions
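
Nearly every hunk below is the same mechanical change: Ruff 0.9's formatter now joins implicitly concatenated string literals onto one line when the merged result fits within the line length limit. A minimal illustration of the before/after, taken from the first hunk below (the stdlib logging module stands in here for pmbootstrap's own logging helper):

import logging

logging.basicConfig(level=logging.INFO)

# Adjacent string literals are concatenated at parse time, so both calls log identical text.
# Before Ruff 0.9, the formatter left the split literals as-is:
logging.info("NOTE: chroot is still active (use 'pmbootstrap" " shutdown' as necessary)")
# With Ruff 0.9, the literals are merged into one string, since the line still fits:
logging.info("NOTE: chroot is still active (use 'pmbootstrap shutdown' as necessary)")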
@@ -114,9 +114,7 @@ def main() -> int:
 
 # Still active notice
 if mount.ismount(Chroot.native() / "dev"):
-logging.info(
-"NOTE: chroot is still active (use 'pmbootstrap" " shutdown' as necessary)"
-)
+logging.info("NOTE: chroot is still active (use 'pmbootstrap shutdown' as necessary)")
 logging.info("DONE!")
 
 except KeyboardInterrupt:
@@ -147,7 +145,7 @@ def main() -> int:
 
 print_log_hint()
 print()
-print("Before you report this error, ensure that pmbootstrap is " "up to date.")
+print("Before you report this error, ensure that pmbootstrap is up to date.")
 print(
 "Find the latest version here: https://gitlab.postmarketos.org/postmarketOS/pmbootstrap/-/tags"
 )
@@ -74,7 +74,7 @@ def generate(pkgname: str, fork_alpine: bool, fork_alpine_retain_branch: bool =
 
 # Confirm overwrite
 if options["confirm_overwrite"] and os.path.exists(path_target):
-logging.warning("WARNING: Target folder already exists: " f"{path_target}")
+logging.warning(f"WARNING: Target folder already exists: {path_target}")
 if not pmb.helpers.cli.confirm("Continue and overwrite?"):
 raise RuntimeError("Aborted.")
 
@@ -176,7 +176,7 @@ def get_upstream_aport(pkgname: str, arch: Arch | None = None, retain_branch: bo
 aports_upstream_path = get_context().config.work / "cache_git/aports_upstream"
 
 if retain_branch:
-logging.info("Not changing aports branch as --fork-alpine-retain-branch was " "used.")
+logging.info("Not changing aports branch as --fork-alpine-retain-branch was used.")
 else:
 # Checkout branch
 channel_cfg = pmb.config.pmaports.read_config_channel()
@@ -195,9 +195,9 @@ def get_upstream_aport(pkgname: str, arch: Arch | None = None, retain_branch: bo
 # Search package
 paths = list(aports_upstream_path.glob(f"*/{pkgname}"))
 if len(paths) > 1:
-raise RuntimeError("Package " + pkgname + " found in multiple" " aports subfolders.")
+raise RuntimeError("Package " + pkgname + " found in multiple aports subfolders.")
 elif len(paths) == 0:
-raise RuntimeError("Package " + pkgname + " not found in alpine" " aports repository.")
+raise RuntimeError("Package " + pkgname + " not found in alpine aports repository.")
 aport_path = paths[0]
 
 # Parse APKBUILD
@@ -65,7 +65,7 @@ def ask_for_chassis() -> str:
 
 def ask_for_external_storage() -> bool:
 return pmb.helpers.cli.confirm(
-"Does the device have a sdcard or" " other external storage medium?"
+"Does the device have a sdcard or other external storage medium?"
 )
 
 
@@ -80,7 +80,7 @@ def ask_for_flash_method() -> str:
 if method == "heimdall":
 heimdall_types = ["isorec", "bootimg"]
 while True:
-logging.info('Does the device use the "isolated' ' recovery" or boot.img?')
+logging.info('Does the device use the "isolated recovery" or boot.img?')
 logging.info(
 "<https://wiki.postmarketos.org/wiki"
 "/Deviceinfo_flash_methods#Isorec_or_bootimg"
@@ -23,7 +23,7 @@ def generate(pkgname: str) -> None:
 upstream = pkgrepo_default_path() / "main/gcc6"
 based_on = "main/gcc6 (from postmarketOS)"
 else:
-raise ValueError(f"Invalid prefix '{prefix}', expected gcc, gcc4 or" " gcc6.")
+raise ValueError(f"Invalid prefix '{prefix}', expected gcc, gcc4 or gcc6.")
 pmb.helpers.run.user(["cp", "-r", upstream, context.config.work / "aportgen"])
 
 # Rewrite APKBUILD
@@ -48,7 +48,7 @@ def generate(pkgname: str) -> None:
 
 # Latest gcc only, not gcc4 and gcc6
 if prefix == "gcc":
-fields["subpackages"] = f"g++-{arch}:gpp" f" libstdc++-dev-{arch}:libcxx_dev"
+fields["subpackages"] = f"g++-{arch}:gpp libstdc++-dev-{arch}:libcxx_dev"
 
 below_header = (
 "CTARGET_ARCH="
@@ -89,7 +89,7 @@ def get_depends(context: Context, apkbuild: dict[str, Any]) -> list[str]:
 # Don't recurse forever when a package depends on itself (#948)
 for pkgname in [apkbuild["pkgname"], *apkbuild["subpackages"].keys()]:
 if pkgname in ret:
-logging.verbose(apkbuild["pkgname"] + ": ignoring dependency on" " itself: " + pkgname)
+logging.verbose(apkbuild["pkgname"] + ": ignoring dependency on itself: " + pkgname)
 ret.remove(pkgname)
 
 # FIXME: is this needed? is this sensible?
@@ -273,7 +273,7 @@ def run_abuild(
 if strict or "pmb:strict" in apkbuild["options"]:
 if not strict:
 logging.debug(
-apkbuild["pkgname"] + ": 'pmb:strict' found in" " options, building in strict mode"
+apkbuild["pkgname"] + ": 'pmb:strict' found in options, building in strict mode"
 )
 cmd += ["-r"] # install depends with abuild
 else:
@@ -228,7 +228,7 @@ def package_kernel(args: PmbArgs) -> None:
 """Frontend for 'pmbootstrap build --envkernel': creates a package from envkernel output."""
 pkgname = args.packages[0]
 if len(args.packages) > 1 or not pkgname.startswith("linux-"):
-raise RuntimeError("--envkernel needs exactly one linux-* package as " "argument.")
+raise RuntimeError("--envkernel needs exactly one linux-* package as argument.")
 
 aport = pmb.helpers.pmaports.find(pkgname)
 context = get_context()
@@ -218,7 +218,7 @@ def configure_ccache(chroot: Chroot = Chroot.native(), verify: bool = False) ->
 return
 if verify:
 raise RuntimeError(
-f"Failed to configure ccache: {path}\nTry to" " delete the file (or zap the chroot)."
+f"Failed to configure ccache: {path}\nTry to delete the file (or zap the chroot)."
 )
 
 # Set the size and verify
@@ -54,7 +54,7 @@ def check_min_version(chroot: Chroot = Chroot.native()) -> None:
 version_installed = installed_pkgs["apk-tools"].version
 pmb.helpers.apk.check_outdated(
 version_installed,
-"Delete your http cache and zap all chroots, then try again:" " 'pmbootstrap zap -hc'",
+"Delete your http cache and zap all chroots, then try again: 'pmbootstrap zap -hc'",
 )
 
 
@@ -206,7 +206,7 @@ def install(packages: list[str], chroot: Chroot, build: bool = True, quiet: bool
 context = get_context()
 
 if not packages:
-logging.verbose("pmb.chroot.apk.install called with empty packages list," " ignoring")
+logging.verbose("pmb.chroot.apk.install called with empty packages list, ignoring")
 return
 
 # Initialize chroot
@@ -99,9 +99,7 @@ def warn_if_chroots_outdated() -> None:
 else:
 msg += "some of your chroots are"
 logging.warning(
-f"WARNING: {msg} older than"
-f" {days_warn} days. Consider running"
-" 'pmbootstrap zap'."
+f"WARNING: {msg} older than {days_warn} days. Consider running 'pmbootstrap zap'."
 )
 
 
@@ -51,7 +51,7 @@ def extract(flavor: str | None, chroot: Chroot, extra: bool = False) -> Path:
 outside = chroot / inside
 if outside.exists():
 if not pmb.helpers.cli.confirm(
-f"Extraction folder {outside}" " already exists." " Do you want to overwrite it?"
+f"Extraction folder {outside} already exists. Do you want to overwrite it?"
 ):
 raise RuntimeError("Aborted!")
 pmb.chroot.root(["rm", "-r", inside], chroot)
@@ -59,7 +59,7 @@ def extract(flavor: str | None, chroot: Chroot, extra: bool = False) -> Path:
 # Extraction script (because passing a file to stdin is not allowed
 # in pmbootstrap's chroot/shell functions for security reasons)
 with (chroot / "tmp/_extract.sh").open("w") as handle:
-handle.write("#!/bin/sh\n" f"cd {inside} && cpio -i < _initfs\n")
+handle.write(f"#!/bin/sh\ncd {inside} && cpio -i < _initfs\n")
 
 # Extract
 commands = [
@@ -41,7 +41,7 @@ def ls(suffix: Chroot) -> None:
 def add(hook: str, suffix: Chroot) -> None:
 if hook not in list_aports():
 raise RuntimeError(
-"Invalid hook name!" " Run 'pmbootstrap initfs hook_ls'" " to get a list of all hooks."
+"Invalid hook name! Run 'pmbootstrap initfs hook_ls' to get a list of all hooks."
 )
 prefix = pmb.config.initfs_hook_prefix
 pmb.chroot.apk.install([f"{prefix}{hook}"], suffix)
@@ -145,7 +145,7 @@ def zap_pkgs_local_mismatch(confirm: bool = True, dry: bool = False) -> None:
 apk_path_short = f"{arch}/{pkgname}-{version}.apk"
 apk_path = f"{get_context().config.work}/packages/{channel}/{apk_path_short}"
 if not os.path.exists(apk_path):
-logging.info("WARNING: Package mentioned in index not" f" found: {apk_path_short}")
+logging.info(f"WARNING: Package mentioned in index not found: {apk_path_short}")
 continue
 
 if origin is None:
@@ -154,7 +154,7 @@ def zap_pkgs_local_mismatch(confirm: bool = True, dry: bool = False) -> None:
 # Aport path
 aport_path = pmb.helpers.pmaports.find_optional(origin)
 if not aport_path:
-logging.info(f"% rm {apk_path_short}" f" ({origin} aport not found)")
+logging.info(f"% rm {apk_path_short} ({origin} aport not found)")
 if not dry:
 pmb.helpers.run.root(["rm", apk_path])
 reindex = True
@@ -164,7 +164,7 @@ def zap_pkgs_local_mismatch(confirm: bool = True, dry: bool = False) -> None:
 apkbuild = pmb.parse.apkbuild(aport_path)
 version_aport = f"{apkbuild['pkgver']}-r{apkbuild['pkgrel']}"
 if version != version_aport:
-logging.info(f"% rm {apk_path_short}" f" ({origin} aport: {version_aport})")
+logging.info(f"% rm {apk_path_short} ({origin} aport: {version_aport})")
 if not dry:
 pmb.helpers.run.root(["rm", apk_path])
 reindex = True
@@ -178,7 +178,7 @@ def zap_pkgs_online_mismatch(confirm: bool = True, dry: bool = False) -> None:
 paths = list(get_context().config.work.glob("cache_apk_*"))
 if not len(paths):
 return
-if confirm and not pmb.helpers.cli.confirm("Remove outdated" " binary packages?"):
+if confirm and not pmb.helpers.cli.confirm("Remove outdated binary packages?"):
 return
 
 # Iterate over existing apk caches
@@ -174,7 +174,7 @@ def run_scripts(topdir: Path, scripts: dict[str, CiScriptDescriptor]) -> None:
 where = "native"
 
 script_path = f".ci/{script_name}.sh"
-logging.info(f"*** ({step}/{steps}) RUNNING CI SCRIPT: {script_path}" f" [{where}] ***")
+logging.info(f"*** ({step}/{steps}) RUNNING CI SCRIPT: {script_path} [{where}] ***")
 
 if "native" in script["options"]:
 rc = pmb.helpers.run.user([script_path], topdir, output="tui")
@@ -34,12 +34,12 @@ class RepoBootstrap(commands.Command):
 
 if not cfg:
 raise ValueError(
-"pmaports.cfg of current branch does not have any" " sections starting with 'repo:'"
+"pmaports.cfg of current branch does not have any sections starting with 'repo:'"
 )
 
 logging.info(f"Valid repositories: {', '.join(cfg.keys())}")
 raise ValueError(
-f"Couldn't find section 'repo:{self.repo}' in pmaports.cfg of" " current branch"
+f"Couldn't find section 'repo:{self.repo}' in pmaports.cfg of current branch"
 )
 
 def __init__(self, arch: Arch | None, repository: str):
@@ -143,8 +143,7 @@ class RepoBootstrap(commands.Command):
 )
 if self.arch.cpu_emulation_required():
 msg += (
-" or remove the path manually (to keep cross compilers if"
-" you just built them)"
+" or remove the path manually (to keep cross compilers if you just built them)"
 )
 
 raise RuntimeError(f"{msg}!")
@@ -612,7 +612,7 @@ aportgen_mirror_alpine = "http://dl-4.alpinelinux.org/alpine/"
 newapkbuild_arguments_strings = [
 ["-n", "pkgname", "set package name (only use with SRCURL)"],
 ["-d", "pkgdesc", "set package description"],
-["-l", "license", "set package license identifier from" " <https://spdx.org/licenses/>"],
+["-l", "license", "set package license identifier from <https://spdx.org/licenses/>"],
 ["-u", "url", "set package URL"],
 ]
 newapkbuild_arguments_switches_pkgtypes = [
@@ -237,7 +237,7 @@ def ask_for_ui_extras(config: Config, ui: str) -> bool:
 if extra is None:
 return False
 
-logging.info("This user interface has an extra package:" f" {extra['pkgdesc']}")
+logging.info(f"This user interface has an extra package: {extra['pkgdesc']}")
 
 return pmb.helpers.cli.confirm("Enable this package?", default=config.ui_extras)
 
@@ -274,7 +274,7 @@ def ask_for_keymaps(config: Config, deviceinfo: Deviceinfo) -> str:
 if not deviceinfo.keymaps or deviceinfo.keymaps.strip() == "":
 return ""
 options = deviceinfo.keymaps.split(" ")
-logging.info(f"Available keymaps for device ({len(options)}): " f"{', '.join(options)}")
+logging.info(f"Available keymaps for device ({len(options)}): {', '.join(options)}")
 if config.keymap == "":
 config.keymap = options[0]
 
@@ -340,7 +340,7 @@ def ask_for_provider_select(apkbuild: dict[str, Any], providers_cfg: dict[str, s
 # Display as default provider
 styles = pmb.config.styles
 logging.info(
-f"* {short}: {pkg['pkgdesc']} " f"{styles['BOLD']}(default){styles['END']}"
+f"* {short}: {pkg['pkgdesc']} {styles['BOLD']}(default){styles['END']}"
 )
 has_default = True
 else:
@@ -575,7 +575,7 @@ def ask_for_additional_options(config: Config) -> None:
 
 # Mirrors
 # prompt for mirror change
-logging.info("Selected mirror:" f" {context.config.mirrors['pmaports']}")
+logging.info(f"Selected mirror: {context.config.mirrors['pmaports']}")
 if pmb.helpers.cli.confirm("Change mirror?", default=False):
 mirror = ask_for_mirror()
 config.mirrors["pmaports"] = mirror
@@ -20,9 +20,7 @@ import pmb.parse.version
 
 
 def clone() -> None:
-logging.info(
-"Setting up the native chroot and cloning the package build" " recipes (pmaports)..."
-)
+logging.info("Setting up the native chroot and cloning the package build recipes (pmaports)...")
 
 # Set up the native chroot and clone pmaports
 pmb.helpers.git.clone("pmaports")
@@ -54,7 +52,7 @@ def check_version_pmbootstrap(min_ver: str) -> None:
 
 # Show versions
 logging.info(
-f"NOTE: you are using pmbootstrap version {real}, but" f" version {min_ver} is required."
+f"NOTE: you are using pmbootstrap version {real}, but version {min_ver} is required."
 )
 
 # Error for git clone
@@ -116,7 +114,7 @@ def read_config(aports: Path | None = None) -> configparser.SectionProxy:
 # Require the config
 path_cfg = aports / "pmaports.cfg"
 if not os.path.exists(path_cfg):
-raise RuntimeError("Invalid pmaports repository, could not find the" f" config: {path_cfg}")
+raise RuntimeError(f"Invalid pmaports repository, could not find the config: {path_cfg}")
 
 # Load the config
 cfg = configparser.ConfigParser()
@@ -219,7 +217,7 @@ def switch_to_channel_branch(channel_new: str) -> bool:
 f"Currently checked out branch '{branch_current}' of"
 f" pmaports.git is on channel '{channel_current}'."
 )
-logging.info(f"Switching to branch '{branch_new}' on channel" f" '{channel_new}'...")
+logging.info(f"Switching to branch '{branch_new}' on channel '{channel_new}'...")
 
 # Make sure we don't have mounts related to the old channel
 pmb.chroot.shutdown()
@@ -93,7 +93,7 @@ def chroot_check_channel(chroot: Chroot) -> bool:
 " To do this automatically, run 'pmbootstrap config"
 " auto_zap_misconfigured_chroots yes'."
 )
-msg_unknown = "Could not figure out on which release channel the" f" '{chroot}' chroot is."
+msg_unknown = f"Could not figure out on which release channel the '{chroot}' chroot is."
 if not os.path.exists(path):
 raise RuntimeError(f"{msg_unknown} {msg_again}")
 
@@ -115,7 +115,7 @@ def chroot_check_channel(chroot: Chroot) -> bool:
 if config.auto_zap_misconfigured_chroots.noisy():
 logging.info(msg)
 logging.info(
-"Automatically zapping since" " auto_zap_misconfigured_chroots is enabled."
+"Automatically zapping since auto_zap_misconfigured_chroots is enabled."
 )
 logging.info(
 "NOTE: You can silence this message with 'pmbootstrap
@@ -56,7 +56,7 @@ class Chroot:
 
 # A native suffix must not have a name.
 if self.__type == ChrootType.NATIVE and self.__name != "":
-raise ValueError(f"The native suffix can't have a name but got: " f"'{self.__name}'")
+raise ValueError(f"The native suffix can't have a name but got: '{self.__name}'")
 
 if self.__type == ChrootType.IMAGE and not Path(self.__name).exists():
 raise ValueError(f"Image file '{self.__name}' does not exist")
@@ -98,7 +98,7 @@ class Chroot:
 if arch is not None:
 return arch
 
-raise ValueError(f"Invalid chroot suffix: {self}" " (wrong device chosen in 'init' step?)")
+raise ValueError(f"Invalid chroot suffix: {self} (wrong device chosen in 'init' step?)")
 
 def __eq__(self, other: object) -> bool:
 if isinstance(other, str):
@@ -56,7 +56,7 @@ def odin(device: str, flavor: str, folder: Path) -> None:
 odin_device_tar = f"{device}.tar"
 odin_device_tar_md5 = f"{device}.tar.md5"
 
-handle.write("#!/bin/sh\n" f"cd {temp_folder}\n")
+handle.write(f"#!/bin/sh\ncd {temp_folder}\n")
 if method == "heimdall-isorec":
 handle.write(
 # Kernel: copy and append md5
@@ -96,7 +96,7 @@ def odin(device: str, flavor: str, folder: Path) -> None:
 pmb.chroot.root(
 [
 "mv",
-f"/mnt/rootfs_{device}{temp_folder}" f"/{odin_device_tar_md5}",
+f"/mnt/rootfs_{device}{temp_folder}/{odin_device_tar_md5}",
 "/home/pmos/rootfs/",
 ]
 ),
@@ -29,9 +29,7 @@ def symlinks(flavor: str, folder: Path) -> None:
 
 # File descriptions
 info = {
-f"boot.img{suffix}": (
-"Fastboot compatible boot.img file," " contains initramfs and kernel"
-),
+f"boot.img{suffix}": ("Fastboot compatible boot.img file, contains initramfs and kernel"),
 "dtbo.img": "Fastboot compatible dtbo image",
 f"initramfs{suffix}": "Initramfs",
 f"initramfs{suffix}-extra": "Extra initramfs files in /boot",
@@ -39,8 +39,8 @@ def kernel(
 else:
 logging.info("(native) flash kernel " + flavor)
 pmb.flasher.run(deviceinfo, method, "flash_kernel", flavor)
-logging.info("You will get an IP automatically assigned to your " "USB interface shortly.")
-logging.info("Then you can connect to your device using ssh after pmOS has" " booted:")
+logging.info("You will get an IP automatically assigned to your USB interface shortly.")
+logging.info("Then you can connect to your device using ssh after pmOS has booted:")
 logging.info(f"ssh {get_context().config.user}@{pmb.config.default_ip}")
 logging.info(
 "NOTE: If you enabled full disk encryption, you should make"
@@ -64,7 +64,7 @@ def rootfs(deviceinfo: Deviceinfo, method: str) -> None:
 img_path = Chroot.native() / "home/pmos/rootfs" / f"{deviceinfo.codename}{suffix}"
 if not img_path.exists():
 raise RuntimeError(
-"The rootfs has not been generated yet, please run" " 'pmbootstrap install' first."
+"The rootfs has not been generated yet, please run 'pmbootstrap install' first."
 )
 
 # Do not flash if using fastboot & image is too large
@@ -72,7 +72,7 @@ def rootfs(deviceinfo: Deviceinfo, method: str) -> None:
 img_size = img_path.stat().st_size / 1024**2
 max_size = int(deviceinfo.flash_fastboot_max_size)
 if img_size > max_size:
-raise RuntimeError("The rootfs is too large for fastboot to" " flash.")
+raise RuntimeError("The rootfs is too large for fastboot to flash.")
 
 # Run the flasher
 logging.info("(native) flash rootfs image")
@@ -78,12 +78,12 @@ def run(
 
 if no_reboot and ("flash" not in action or method != "heimdall-bootimg"):
 raise RuntimeError(
-"The '--no-reboot' option is only" " supported when flashing with heimall-bootimg."
+"The '--no-reboot' option is only supported when flashing with heimall-bootimg."
 )
 
 if resume and ("flash" not in action or method != "heimdall-bootimg"):
 raise RuntimeError(
-"The '--resume' option is only" " supported when flashing with heimall-bootimg."
+"The '--resume' option is only supported when flashing with heimall-bootimg."
 )
 
 # Run the commands of each action
@@ -94,8 +94,7 @@ def variables(
 "$RECOVERY_ZIP": f"/mnt/{Chroot.buildroot(deviceinfo.arch)}"
 "/var/lib/postmarketos-android-recovery-installer"
 f"/pmos-{device}.zip",
-"$UUU_SCRIPT": f"/mnt/{Chroot.rootfs(deviceinfo.codename)}"
-"/usr/share/uuu/flash_script.lst",
+"$UUU_SCRIPT": f"/mnt/{Chroot.rootfs(deviceinfo.codename)}/usr/share/uuu/flash_script.lst",
 "$NO_REBOOT": _no_reboot,
 "$RESUME": _resume,
 }
@@ -73,7 +73,7 @@ def update_repository_list(
 if path.exists():
 pmb.helpers.run.root(["rm", path])
 for line in lines_new:
-pmb.helpers.run.root(["sh", "-c", "echo " f"{shlex.quote(line)} >> {path}"])
+pmb.helpers.run.root(["sh", "-c", f"echo {shlex.quote(line)} >> {path}"])
 update_repository_list(
 root,
 user_repository=user_repository,
@@ -49,8 +49,7 @@ def init_req_headers() -> None:
 req_headers_github["Authorization"] = f"token {token}"
 else:
 logging.info(
-"NOTE: Consider using a GITHUB_TOKEN environment variable"
-" to increase your rate limit"
+"NOTE: Consider using a GITHUB_TOKEN environment variable to increase your rate limit"
 )
 
 
@@ -90,7 +89,7 @@ def get_package_version_info_gitlab(
 
 # Get the commits for the repository
 commits = pmb.helpers.http.retrieve_json(
-f"{gitlab_host}/api/v4/projects/{repo_name_safe}/repository" f"/commits{ref_arg}",
+f"{gitlab_host}/api/v4/projects/{repo_name_safe}/repository/commits{ref_arg}",
 headers=req_headers,
 )
 latest_commit = commits[0]
@@ -117,7 +116,7 @@ def upgrade_git_package(args: PmbArgs, pkgname: str, package: Apkbuild) -> None:
 source = source[-1]
 else:
 raise RuntimeError(
-"Unhandled number of source elements. Please open" f" a bug report: {source}"
+f"Unhandled number of source elements. Please open a bug report: {source}"
 )
 
 verinfo = None
@@ -190,7 +189,7 @@ def upgrade_stable_package(args: PmbArgs, pkgname: str, package: Apkbuild) -> No
 # Looking up if there's a custom mapping from postmarketOS package name
 # to Anitya project name.
 mappings = pmb.helpers.http.retrieve_json(
-f"{ANITYA_API_BASE}/packages/?distribution=postmarketOS" f"&name={pkgname}",
+f"{ANITYA_API_BASE}/packages/?distribution=postmarketOS&name={pkgname}",
 headers=req_headers,
 )
 if mappings["total_items"] < 1:
@@ -204,7 +203,7 @@ def upgrade_stable_package(args: PmbArgs, pkgname: str, package: Apkbuild) -> No
 project_name = mappings["items"][0]["project"]
 ecosystem = mappings["items"][0]["ecosystem"]
 projects = pmb.helpers.http.retrieve_json(
-f"{ANITYA_API_BASE}/projects/?name={project_name}&" f"ecosystem={ecosystem}",
+f"{ANITYA_API_BASE}/projects/?name={project_name}&ecosystem={ecosystem}",
 headers=req_headers,
 )
 
@@ -245,7 +244,7 @@ def upgrade_stable_package(args: PmbArgs, pkgname: str, package: Apkbuild) -> No
 pkgrel_new = 0
 
 if not pmb.parse.version.validate(pkgver_new):
-logging.warning(f"{pkgname}: would upgrade to invalid pkgver:" f" {pkgver_new}, ignoring")
+logging.warning(f"{pkgname}: would upgrade to invalid pkgver: {pkgver_new}, ignoring")
 return
 
 logging.info(f"{pkgname}: upgrading pmaport")
@@ -174,7 +174,7 @@ def chroot(args: PmbArgs) -> None:
 and chroot != Chroot.native()
 and chroot.type not in [ChrootType.BUILDROOT, ChrootType.IMAGE]
 ):
-raise RuntimeError("--user is only supported for native or" " buildroot_* chroots.")
+raise RuntimeError("--user is only supported for native or buildroot_* chroots.")
 if args.xauth and chroot != Chroot.native():
 raise RuntimeError("--xauth is only supported for native chroot.")
 
@@ -304,19 +304,17 @@ def install(args: PmbArgs) -> None:
 device = config.device
 deviceinfo = pmb.parse.deviceinfo(device)
 if args.no_fde:
-logging.warning("WARNING: --no-fde is deprecated," " as it is now the default.")
+logging.warning("WARNING: --no-fde is deprecated, as it is now the default.")
 if args.rsync and args.full_disk_encryption:
-raise ValueError("Installation using rsync is not compatible with full" " disk encryption.")
+raise ValueError("Installation using rsync is not compatible with full disk encryption.")
 if args.rsync and not args.disk:
 raise ValueError("Installation using rsync only works with --disk.")
 
 if args.rsync and args.filesystem == "btrfs":
-raise ValueError(
-"Installation using rsync" " is not currently supported on btrfs filesystem."
-)
+raise ValueError("Installation using rsync is not currently supported on btrfs filesystem.")
 
 pmb.helpers.pmaports.require_bootstrap(
-deviceinfo.arch, f"do 'pmbootstrap install' for {deviceinfo.arch}" " (deviceinfo_arch)"
+deviceinfo.arch, f"do 'pmbootstrap install' for {deviceinfo.arch} (deviceinfo_arch)"
 )
 
 # On-device installer checks
@@ -335,20 +333,20 @@ def install(args: PmbArgs) -> None:
 " --android-recovery-zip (patches welcome)"
 )
 if args.no_image:
-raise ValueError("--on-device-installer cannot be combined with" " --no-image")
+raise ValueError("--on-device-installer cannot be combined with --no-image")
 if args.rsync:
-raise ValueError("--on-device-installer cannot be combined with" " --rsync")
+raise ValueError("--on-device-installer cannot be combined with --rsync")
 if args.filesystem:
-raise ValueError("--on-device-installer cannot be combined with" " --filesystem")
+raise ValueError("--on-device-installer cannot be combined with --filesystem")
 
 if deviceinfo.cgpt_kpart:
-raise ValueError("--on-device-installer cannot be used with" " ChromeOS devices")
+raise ValueError("--on-device-installer cannot be used with ChromeOS devices")
 else:
 if args.ondev_cp:
 raise ValueError("--cp can only be combined with --ondev")
 if args.ondev_no_rootfs:
 raise ValueError(
-"--no-rootfs can only be combined with --ondev." " Do you mean --no-image?"
+"--no-rootfs can only be combined with --ondev. Do you mean --no-image?"
 )
 if args.ondev_no_rootfs:
 _install_ondev_verify_no_rootfs(device, args.ondev_cp)
@@ -377,7 +375,7 @@ def install(args: PmbArgs) -> None:
 # Android recovery zip related
 if args.android_recovery_zip and args.filesystem:
 raise ValueError(
-"--android-recovery-zip cannot be combined with" " --filesystem (patches welcome)"
+"--android-recovery-zip cannot be combined with --filesystem (patches welcome)"
 )
 if args.android_recovery_zip and args.full_disk_encryption:
 logging.info(
@@ -445,7 +443,7 @@ def newapkbuild(args: PmbArgs) -> None:
 # Sanity check: -n is only allowed with SRCURL
 if args.pkgname and not is_url:
 raise RuntimeError(
-"You can only specify a pkgname (-n) when using" " SRCURL as last parameter."
+"You can only specify a pkgname (-n) when using SRCURL as last parameter."
 )
 
 # Passthrough: Strings (e.g. -d "my description")
@@ -588,7 +586,7 @@ def ci(args: PmbArgs) -> None:
 scripts_selected = {}
 if args.scripts:
 if args.all:
-raise RuntimeError("Combining --all with script names doesn't" " make sense")
+raise RuntimeError("Combining --all with script names doesn't make sense")
 for script in args.scripts:
 if script not in scripts_available:
 logging.error(
@@ -128,8 +128,7 @@ def get_upstream_remote(aports: Path) -> str:
 return line.split("\t", 1)[0]
 
 raise RuntimeError(
-f"{name_repo}: could not find remote name for any URL '{urls}' in git"
-f" repository: {aports}"
+f"{name_repo}: could not find remote name for any URL '{urls}' in git repository: {aports}"
 )
 
 
@@ -220,9 +219,7 @@ def parse_channels_cfg(aports: Path) -> dict:
 try:
 cfg.read_string(stdout)
 except configparser.MissingSectionHeaderError:
-logging.info(
-"NOTE: fix this by fetching your pmaports.git, e.g." " with 'pmbootstrap pull'"
-)
+logging.info("NOTE: fix this by fetching your pmaports.git, e.g. with 'pmbootstrap pull'")
 raise RuntimeError(
 "Failed to read channels.cfg from"
 f" '{remote}/master' branch of your local"
@@ -80,7 +80,7 @@ def download(
 
 # Offline and not cached
 if context.offline:
-raise RuntimeError("File not found in cache and offline flag is" f" enabled: {url}")
+raise RuntimeError(f"File not found in cache and offline flag is enabled: {url}")
 
 # Download the file
 logging.log(loglevel, "Download " + url)
@@ -43,7 +43,7 @@ def check_grsec() -> None:
 return
 
 raise RuntimeError(
-"You're running a kernel based on the grsec" " patchset. This is not supported."
+"You're running a kernel based on the grsec patchset. This is not supported."
 )
 
 
@@ -203,7 +203,7 @@ def validate_hostname(hostname: str) -> bool:
 
 # Check that doesn't begin or end with a minus sign or period
 if re.search(r"^-|^\.|-$|\.$", hostname):
-logging.fatal("ERROR: Hostname must not begin or end with a minus" " sign or period")
+logging.fatal("ERROR: Hostname must not begin or end with a minus sign or period")
 return False
 
 return True
@@ -109,7 +109,7 @@ def get(
 for depend in ret.depends:
 depend_data = get(depend, arch, must_exist=False, try_other_arches=try_other_arches)
 if not depend_data:
-logging.warning(f"WARNING: {pkgname}: failed to resolve" f" dependency '{depend}'")
+logging.warning(f"WARNING: {pkgname}: failed to resolve dependency '{depend}'")
 # Can't replace potential subpkgname
 if depend not in depends_new:
 depends_new += [depend]
@@ -428,9 +428,7 @@ def require_bootstrap_error(repo: str, arch: Arch, trigger_str: str) -> None:
 f"ERROR: Trying to {trigger_str} with {repo} enabled, but the"
 f" {repo} repo needs to be bootstrapped first."
 )
-raise RuntimeError(
-f"Run 'pmbootstrap repo_bootstrap {repo} --arch={arch}'" " and then try again."
-)
+raise RuntimeError(f"Run 'pmbootstrap repo_bootstrap {repo} --arch={arch}' and then try again.")
 
 
 def require_bootstrap(arch: Arch, trigger_str: str) -> None:
@@ -268,7 +268,7 @@ def foreground_pipe(
 logging.info(
 "Process did not write any output for " + str(timeout) + " seconds. Killing it."
 )
-logging.info("NOTE: The timeout can be increased with" " 'pmbootstrap -t'.")
+logging.info("NOTE: The timeout can be increased with 'pmbootstrap -t'.")
 kill_command(process.pid, sudo)
 continue
 
@@ -292,7 +292,7 @@ def foreground_tui(
 This is the only way text-based user interfaces (ncurses programs like
 vim, nano or the kernel's menuconfig) work properly.
 """
-logging.debug("*** output passed to pmbootstrap stdout, not to this log" " ***")
+logging.debug("*** output passed to pmbootstrap stdout, not to this log ***")
 process = subprocess.Popen(cmd, cwd=working_dir)
 return process.wait()
 
@@ -310,8 +310,7 @@ def check_return_code(code: int, log_message: str) -> None:
 logging.debug("^" * 70)
 log_file = get_context().log
 logging.info(
-"NOTE: The failed command's output is above the ^^^ line"
-f" in the log file: {log_file}"
+f"NOTE: The failed command's output is above the ^^^ line in the log file: {log_file}"
 )
 raise RuntimeError(f"Command failed (exit code {code}): " + log_message)
 
@@ -347,7 +346,7 @@ def add_proxy_env_vars(env: Env) -> None:
 "FTP_PROXY",
 "HTTPS_PROXY",
 "HTTP_PROXY",
-"HTTP_PROXY_AUTH" "ftp_proxy",
+"HTTP_PROXY_AUTHftp_proxy",
 "http_proxy",
 "https_proxy",
 ]
@@ -48,16 +48,16 @@ def mount_disk(path: Path) -> None:
 raise RuntimeError(f"The disk block device does not exist: {path}")
 for path_mount in path.parent.glob(f"{path.name}*"):
 if pmb.helpers.mount.ismount(path_mount):
-raise RuntimeError(f"{path_mount} is mounted! Will not attempt to" " format this!")
+raise RuntimeError(f"{path_mount} is mounted! Will not attempt to format this!")
 logging.info(f"(native) mount /dev/install (host: {path})")
 pmb.helpers.mount.bind_file(path, Chroot.native() / "dev/install")
 if previous_install(path):
 if not pmb.helpers.cli.confirm(
-"WARNING: This device has a" " previous installation of pmOS." " CONTINUE?"
+"WARNING: This device has a previous installation of pmOS. CONTINUE?"
 ):
 raise RuntimeError("Aborted.")
 else:
-if not pmb.helpers.cli.confirm(f"EVERYTHING ON {path} WILL BE" " ERASED! CONTINUE?"):
+if not pmb.helpers.cli.confirm(f"EVERYTHING ON {path} WILL BE ERASED! CONTINUE?"):
 raise RuntimeError("Aborted.")
 
 
@@ -95,8 +95,7 @@ def create_and_mount_image(
 free = round((disk_data.f_bsize * disk_data.f_bavail) / (1024**2))
 if size_mb > free:
 raise RuntimeError(
-"Not enough free space to create rootfs image! "
-f"(free: {free}M, required: {size_mb}M)"
+f"Not enough free space to create rootfs image! (free: {free}M, required: {size_mb}M)"
 )
 
 # Create empty image files
@@ -108,7 +107,7 @@ def create_and_mount_image(
 if split:
 images = {img_path_boot: size_mb_boot, img_path_root: size_mb_root}
 for img_path, image_size_mb in images.items():
-logging.info(f"(native) create {img_path.name} " f"({image_size_mb})")
+logging.info(f"(native) create {img_path.name} ({image_size_mb})")
 pmb.chroot.root(["truncate", "-s", image_size_mb, img_path])
 
 # Mount to /dev/install
@@ -25,7 +25,7 @@ def format_and_mount_boot(args: PmbArgs, device: str, boot_label: str) -> None:
 mountpoint = "/mnt/install/boot"
 filesystem = pmb.parse.deviceinfo().boot_filesystem or "ext2"
 install_fsprogs(filesystem)
-logging.info(f"(native) format {device} (boot, {filesystem}), mount to" f" {mountpoint}")
+logging.info(f"(native) format {device} (boot, {filesystem}), mount to {mountpoint}")
 if filesystem == "fat16":
 pmb.chroot.root(["mkfs.fat", "-F", "16", "-n", boot_label, device])
 elif filesystem == "fat32":
@@ -46,7 +46,7 @@ def format_luks_root(args: PmbArgs, device: str) -> None:
 """
 mountpoint = "/dev/mapper/pm_crypt"
 
-logging.info(f"(native) format {device} (root, luks), mount to" f" {mountpoint}")
+logging.info(f"(native) format {device} (root, luks), mount to {mountpoint}")
 logging.info(" *** TYPE IN THE FULL DISK ENCRYPTION PASSWORD (TWICE!) ***")
 
 # Avoid cryptsetup warning about missing locking directory
@@ -32,7 +32,7 @@ def mount(img_path: Path, _sector_size: int | None = None) -> Path:
 for i in range(0, 5):
 # Retry
 if i > 0:
-logging.debug("loop module might not be initialized yet, retry in" " one second...")
+logging.debug("loop module might not be initialized yet, retry in one second...")
 time.sleep(1)
 
 # Mount and return on success
@@ -40,9 +40,7 @@ def partitions_mount(device: str, layout: PartitionLayout, disk: Path | None) ->
 if os.path.exists(f"{partition_prefix}1"):
 found = True
 break
-logging.debug(
-f"NOTE: ({i + 1}/{tries}) failed to find the install " "partition. Retrying..."
-)
+logging.debug(f"NOTE: ({i + 1}/{tries}) failed to find the install partition. Retrying...")
 time.sleep(0.1)
 
 if not found:
@@ -21,7 +21,7 @@ def get_groups(config: Config) -> list[str]:
 apkbuild = pmb.helpers.pmaports.get(meta)
 groups = apkbuild["_pmb_groups"]
 if groups:
-logging.debug(f"{meta}: install _pmb_groups:" f" {', '.join(groups)}")
+logging.debug(f"{meta}: install _pmb_groups: {', '.join(groups)}")
 ret += groups
 
 # UI-extras subpackage
@@ -29,7 +29,7 @@ def get_groups(config: Config) -> list[str]:
 if config.ui_extras and meta_extras in apkbuild["subpackages"]:
 groups = apkbuild["subpackages"][meta_extras]["_pmb_groups"]
 if groups:
-logging.debug(f"{meta_extras}: install _pmb_groups:" f" {', '.join(groups)}")
+logging.debug(f"{meta_extras}: install _pmb_groups: {', '.join(groups)}")
 ret += groups
 
 return ret
@@ -28,10 +28,10 @@ def start_nbd_server(device: str, replace: bool, ip: str = "172.16.42.2", port:
 rootfs_path2 = Path("/home/pmos/rootfs") / f"{device}.img"
 if not (chroot / rootfs_path2).exists():
 raise RuntimeError(
-"The rootfs has not been generated yet, please " "run 'pmbootstrap install' first."
+"The rootfs has not been generated yet, please run 'pmbootstrap install' first."
 )
 if replace and not pmb.helpers.cli.confirm(
-f"Are you sure you want to " f"replace the rootfs for " f"{device}?"
+f"Are you sure you want to replace the rootfs for {device}?"
 ):
 return
 pmb.chroot.root(["cp", rootfs_path2, rootfs_path])
@@ -195,7 +195,7 @@ def parse_next_attribute(
 i += 1
 
 raise RuntimeError(
-f"Can't find closing quote sign ({end_char}) for" f" attribute '{attribute}' in: {path}"
+f"Can't find closing quote sign ({end_char}) for attribute '{attribute}' in: {path}"
 )
 
 
@@ -363,7 +363,7 @@ def apkbuild(path: Path, check_pkgver: bool = True, check_pkgname: bool = True)
 logging.info(f"Folder: '{os.path.dirname(path)}'")
 logging.info(f"Pkgname: '{ret['pkgname']}'")
 raise RuntimeError(
-"The pkgname must be equal to the name of" " the folder that contains the APKBUILD!"
+"The pkgname must be equal to the name of the folder that contains the APKBUILD!"
 )
 
 # Sanity check: pkgver
@@ -373,7 +373,7 @@ def apkbuild(path: Path, check_pkgver: bool = True, check_pkgname: bool = True)
 "NOTE: Valid pkgvers are described here: "
 "https://wiki.alpinelinux.org/wiki/APKBUILD_Reference#pkgver"
 )
-raise RuntimeError(f"Invalid pkgver '{ret['pkgver']}' in" f" APKBUILD: {path}")
+raise RuntimeError(f"Invalid pkgver '{ret['pkgver']}' in APKBUILD: {path}")
 
 # Fill cache
 return ret
@@ -70,7 +70,7 @@ def parse_next_block(path: Path, lines: list[str]) -> ApkindexBlock | None:
 if required_found != len(required_apkindex_keys):
 for key in required_apkindex_keys:
 if key not in ret:
-raise RuntimeError(f"Missing required key '{key}' in block " f"{ret}, file: {path}")
+raise RuntimeError(f"Missing required key '{key}' in block {ret}, file: {path}")
 raise RuntimeError(
 f"Expected {len(required_apkindex_keys)} required keys,"
 f" but found {required_found} in block: {ret}, file: {path}"
@@ -274,7 +274,7 @@ def parse(
 
 # Skip virtual packages
 if block.timestamp is None:
-logging.verbose(f"Skipped virtual package {block} in" f" file: {path}")
+logging.verbose(f"Skipped virtual package {block} in file: {path}")
 continue
 
 # Add the next package and all aliases
@@ -480,5 +480,5 @@ def package(
 
 # No provider
 if must_exist:
-raise RuntimeError("Package '" + package + "' not found in any" " APKINDEX.")
+raise RuntimeError("Package '" + package + "' not found in any APKINDEX.")
 return None
@@ -72,7 +72,7 @@ def type_ondev_cp(val: str) -> list[str]:
 ret = val.split(":")
 
 if len(ret) != 2:
-raise argparse.ArgumentTypeError("does not have HOST_SRC:CHROOT_DEST" f" format: {val}")
+raise argparse.ArgumentTypeError(f"does not have HOST_SRC:CHROOT_DEST format: {val}")
 host_src = ret[0]
 if not os.path.exists(host_src):
 raise argparse.ArgumentTypeError(f"HOST_SRC not found: {host_src}")
@@ -81,7 +81,7 @@ def type_ondev_cp(val: str) -> list[str]:
 
 chroot_dest = ret[1]
 if not chroot_dest.startswith("/"):
-raise argparse.ArgumentTypeError("CHROOT_DEST must start with '/':" f" {chroot_dest}")
+raise argparse.ArgumentTypeError(f"CHROOT_DEST must start with '/': {chroot_dest}")
 return ret
 
 
@@ -151,8 +151,7 @@ def arguments_install(subparser: argparse._SubParsersAction) -> None:
 )
 group.add_argument(
 "--android-recovery-zip",
-help="generate TWRP flashable zip (recommended read:"
-" https://postmarketos.org/recoveryzip)",
+help="generate TWRP flashable zip (recommended read: https://postmarketos.org/recoveryzip)",
 action="store_true",
 dest="android_recovery_zip",
 )
@@ -356,8 +355,7 @@ def arguments_flasher(subparser: argparse._SubParsersAction) -> argparse.Argumen
 flash_kernel.add_argument(
 "--partition",
 default=None,
-help="partition to flash the kernel to (defaults"
-" to deviceinfo_flash_*_partition_kernel)",
+help="partition to flash the kernel to (defaults to deviceinfo_flash_*_partition_kernel)",
 )
 
 # Flash lk2nd
@@ -529,9 +527,7 @@ def arguments_qemu(subparser: argparse._SubParsersAction) -> argparse.ArgumentPa
 dest="qemu_gl",
 default=True,
 action="store_false",
-help="Avoid using GL for"
-" accelerating graphics in QEMU (use software"
-" rasterizer, slow!)",
+help="Avoid using GL for accelerating graphics in QEMU (use software rasterizer, slow!)",
 )
 ret.add_argument(
 "--video",
@@ -569,9 +565,7 @@ def arguments_pkgrel_bump(subparser: argparse._SubParsersAction) -> argparse.Arg
 ret.add_argument(
 "--dry",
 action="store_true",
-help="instead of modifying"
-" APKBUILDs, exit with >0 when a package would have been"
-" bumped",
+help="instead of modifying APKBUILDs, exit with >0 when a package would have been bumped",
 )
 
 # Mutually exclusive: "--auto" or package names
@@ -1058,7 +1052,7 @@ def get_parser() -> argparse.ArgumentParser:
 "-a",
 "--all",
 action=toggle_other_boolean_flags(*zap_all_delete_args),
-help="delete everything, equivalent to: " f"--{' --'.join(zap_all_delete_args_print)}",
+help=f"delete everything, equivalent to: --{' --'.join(zap_all_delete_args_print)}",
 )
 
 # Action: stats
@@ -102,8 +102,7 @@ def bootimg(path: Path) -> Bootimg:
 if "android bootimg" not in file_output.lower():
 if get_context().force:
 logging.warning(
-"WARNING: boot.img file seems to be invalid, but"
-" proceeding anyway (-f specified)"
+"WARNING: boot.img file seems to be invalid, but proceeding anyway (-f specified)"
 )
 else:
 logging.info(
@@ -31,7 +31,7 @@ def package_provider(
 
 # 2. Provider with the same package name
 if pkgname in providers:
-logging.verbose(f"{pkgname}: choosing package of the same name as " "provider")
+logging.verbose(f"{pkgname}: choosing package of the same name as provider")
 return providers[pkgname]
 
 # 3. Pick a package that will be installed anyway
@@ -212,9 +212,9 @@ class Deviceinfo:
 " let us know in the postmarketOS issues!)"
 )
 if "nonfree" in info:
-raise RuntimeError("deviceinfo_nonfree is unused. " f"Please delete it in: {path}")
+raise RuntimeError(f"deviceinfo_nonfree is unused. Please delete it in: {path}")
 if "dev_keyboard" in info:
-raise RuntimeError("deviceinfo_dev_keyboard is unused. " f"Please delete it in: {path}")
+raise RuntimeError(f"deviceinfo_dev_keyboard is unused. Please delete it in: {path}")
 if "date" in info:
 raise RuntimeError(
 "deviceinfo_date was replaced by deviceinfo_year. "
@@ -224,7 +224,7 @@ class Deviceinfo:
 # "codename" is required
 codename = os.path.basename(os.path.dirname(path))[7:]
 if "codename" not in info or info["codename"] != codename:
-raise RuntimeError(f"Please add 'deviceinfo_codename=\"{codename}\"' " f"to: {path}")
+raise RuntimeError(f"Please add 'deviceinfo_codename=\"{codename}\"' to: {path}")
 
 # "chassis" is required
 chassis_types = pmb.config.deviceinfo_chassis_types
@@ -345,7 +345,5 @@ def check_file(
 """
 arch = extract_arch(config_path)
 version = extract_version(config_path)
-logging.debug(
-f"Check kconfig: parsed arch={arch}, version={version} from " f"file: {config_path}"
-)
+logging.debug(f"Check kconfig: parsed arch={arch}, version={version} from file: {config_path}")
 return check_config(config_path, arch, version, components_list, details=details)