Use simplified Python 3.8 syntax (MR 2327)

This commit was generated with:

    ruff check --fix --extend-select=UP .
Authored by Hugo Osvaldo Barrera on 2024-06-23 14:38:19 +02:00, committed by Oliver Smith
parent fa2a7c502d
commit f3f392ef66
GPG key ID: 5AE7F5513E0885CB
13 changed files with 51 additions and 60 deletions
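For orientation before the diff: the UP (pyupgrade) rules selected above rewrite a few pre-3.8 idioms into their shorter modern forms. The three patterns touched in this commit are str.format() calls, redundant "r" modes in open(), and @lru_cache() with empty parentheses. A minimal standalone sketch of these rewrites follows; the names and values are invented for illustration and are not taken from pmbootstrap:

    from functools import lru_cache

    pkgname = "hello-world"  # placeholder value, for illustration only

    # str.format() becomes an f-string:
    assert "{}: upgrading pmaport".format(pkgname) == f"{pkgname}: upgrading pmaport"

    # open(path, "r") drops the mode argument, because "r" is the default:
    with open(__file__) as handle:
        handle.readline()

    # @lru_cache() loses the empty parentheses (supported since Python 3.8):
    @lru_cache
    def answer() -> int:
        return 42

    assert answer() == 42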


@@ -417,9 +417,9 @@ def ask_for_device(context: Context):
codenames = []
if new_vendor:
logging.info(
"The specified vendor ({}) could not be found in"
f"The specified vendor ({vendor}) could not be found in"
" existing ports, do you want to start a new"
" port?".format(vendor)
" port?"
)
if not pmb.helpers.cli.confirm(default=True):
continue
@@ -451,7 +451,7 @@ def ask_for_device(context: Context):
continue
# New port creation confirmed
logging.info("Generating new aports for: {}...".format(device))
logging.info(f"Generating new aports for: {device}...")
pmb.aportgen.generate(f"device-{device}", False)
pmb.aportgen.generate(f"linux-{device}", False)
elif any("archived" == x for x in device_path.parts):
@@ -551,7 +551,7 @@ def ask_for_mirror():
json_path = pmb.helpers.http.download(
"https://postmarketos.org/mirrors.json", "pmos_mirrors", cache=False
)
with open(json_path, "rt") as handle:
with open(json_path) as handle:
s = handle.read()
logging.info("List of available mirrors:")
@@ -635,7 +635,7 @@ def ask_build_pkgs_on_install(default: bool) -> bool:
def get_locales():
ret = []
list_path = f"{pmb.config.pmb_src}/pmb/data/locales"
with open(list_path, "r") as handle:
with open(list_path) as handle:
for line in handle:
ret += [line.rstrip()]
return ret


@@ -6,7 +6,7 @@ from functools import lru_cache
from typing import Optional
@lru_cache()
@lru_cache
def which_sudo() -> Optional[str]:
"""Return a command required to run commands as root, if any.


@@ -49,7 +49,7 @@ def init_req_headers() -> None:
def get_package_version_info_github(repo_name: str, ref: Optional[str]):
logging.debug("Trying GitHub repository: {}".format(repo_name))
logging.debug(f"Trying GitHub repository: {repo_name}")
# Get the URL argument to request a special ref, if needed
ref_arg = ""
@@ -71,7 +71,7 @@ def get_package_version_info_gitlab(gitlab_host: str, repo_name: str, ref: Optional[str]):
def get_package_version_info_gitlab(gitlab_host: str, repo_name: str, ref: Optional[str]):
logging.debug("Trying GitLab repository: {}".format(repo_name))
logging.debug(f"Trying GitLab repository: {repo_name}")
repo_name_safe = urllib.parse.quote(repo_name, safe="")
@@ -125,7 +125,7 @@ def upgrade_git_package(args: PmbArgs, pkgname: str, package) -> None:
if verinfo is None:
# ignore for now
logging.warning("{}: source not handled: {}".format(pkgname, source))
logging.warning(f"{pkgname}: source not handled: {source}")
return
# Get the new commit sha
@@ -151,10 +151,10 @@ def upgrade_git_package(args: PmbArgs, pkgname: str, package) -> None:
pkgrel_new = 0
if sha == sha_new:
logging.info("{}: up-to-date".format(pkgname))
logging.info(f"{pkgname}: up-to-date")
return
logging.info("{}: upgrading pmaport".format(pkgname))
logging.info(f"{pkgname}: upgrading pmaport")
if args.dry:
logging.info(f" Would change _commit from {sha} to {sha_new}")
logging.info(f" Would change pkgver from {pkgver} to {pkgver_new}")
@@ -216,14 +216,14 @@ def upgrade_stable_package(args: PmbArgs, pkgname: str, package) -> None:
# Check that we got a version number
if len(project["stable_versions"]) < 1:
logging.warning("{}: got no version number, ignoring".format(pkgname))
logging.warning(f"{pkgname}: got no version number, ignoring")
return
version = project["stable_versions"][0]
# Compare the pmaports version with the project version
if package["pkgver"] == version:
logging.info("{}: up-to-date".format(pkgname))
logging.info(f"{pkgname}: up-to-date")
return
if package["pkgver"] == "9999":
@@ -240,7 +240,7 @@ def upgrade_stable_package(args: PmbArgs, pkgname: str, package) -> None:
logging.warning(f"{pkgname}: would upgrade to invalid pkgver:" f" {pkgver_new}, ignoring")
return
logging.info("{}: upgrading pmaport".format(pkgname))
logging.info(f"{pkgname}: upgrading pmaport")
if args.dry:
logging.info(f" Would change pkgver from {pkgver} to {pkgver_new}")
logging.info(f" Would change pkgrel from {pkgrel} to {pkgrel_new}")


@@ -36,11 +36,11 @@ def replace_apkbuild(args: PmbArgs, pkgname, key, new, in_quotes=False):
# Prepare old/new strings
if in_quotes:
line_old = '{}="{}"'.format(key, old)
line_new = '{}="{}"'.format(key, new)
line_old = f'{key}="{old}"'
line_new = f'{key}="{new}"'
else:
line_old = "{}={}".format(key, old)
line_new = "{}={}".format(key, new)
line_old = f"{key}={old}"
line_new = f"{key}={new}"
# Replace
replace(path, "\n" + line_old + "\n", "\n" + line_new + "\n")
@@ -50,9 +50,9 @@ def replace_apkbuild(args: PmbArgs, pkgname, key, new, in_quotes=False):
apkbuild = pmb.parse.apkbuild(path)
if apkbuild[key] != str(new):
raise RuntimeError(
"Failed to set '{}' for pmaport '{}'. Make sure"
" that there's a line with exactly the string '{}'"
" and nothing else in: {}".format(key, pkgname, line_old, path)
f"Failed to set '{key}' for pmaport '{pkgname}'. Make sure"
f" that there's a line with exactly the string '{line_old}'"
f" and nothing else in: {path}"
)


@@ -107,9 +107,7 @@ def get_upstream_remote(aports: Path):
if any(u in line for u in urls):
return line.split("\t", 1)[0]
raise RuntimeError(
"{}: could not find remote name for any URL '{}' in git" " repository: {}".format(
name_repo, urls, aports
)
f"{name_repo}: could not find remote name for any URL '{urls}' in git" f" repository: {aports}"
)
@@ -198,7 +196,7 @@ def pull(repo_name: str):
# Skip if not on official branch
branch = rev_parse(repo, extra_args=["--abbrev-ref"])
msg_start = "{} (branch: {}):".format(repo_name, branch)
msg_start = f"{repo_name} (branch: {branch}):"
if not branch_looks_official(repo, branch):
if repo.parts[-1] == "pmaports":
official_looking_branches = "master, v24.06, …"
@@ -220,9 +218,7 @@ def pull(repo_name: str):
remote_ref = rev_parse(repo, branch + "@{u}", ["--abbrev-ref"])
if remote_ref != branch_upstream:
logging.warning(
"{} is tracking unexpected remote branch '{}' instead" " of '{}'".format(
msg_start, remote_ref, branch_upstream
)
f"{msg_start} is tracking unexpected remote branch '{remote_ref}' instead" f" of '{branch_upstream}'"
)
return -3
@@ -239,9 +235,8 @@ def pull(repo_name: str):
# Skip if we can't fast-forward
if not can_fast_forward(repo, branch_upstream):
logging.warning(
"{} can't fast-forward to {}, looks like you changed"
f"{msg_start} can't fast-forward to {branch_upstream}, looks like you changed"
" the git history of your local branch. Skipping pull!"
"".format(msg_start, branch_upstream)
)
return -4


@@ -14,7 +14,7 @@ def ismount(folder: Path):
Workaround for: https://bugs.python.org/issue29707
"""
folder = folder.resolve()
with open("/proc/mounts", "r") as handle:
with open("/proc/mounts") as handle:
for line in handle:
words = line.split()
if len(words) >= 2 and Path(words[1]) == folder:


@@ -84,7 +84,7 @@ def migrate_work_folder(args: PmbArgs):
current = 0
path = context.config.work / "version"
if os.path.exists(path):
with open(path, "r") as f:
with open(path) as f:
current = int(f.read().rstrip())
# Compare version, print warning or do nothing
@@ -168,7 +168,7 @@ def migrate_work_folder(args: PmbArgs):
# Require git, set cache_git ownership
pmb.config.init.require_programs()
if os.path.exists(path):
uid_gid = "{}:{}".format(os.getuid(), os.getgid())
uid_gid = f"{os.getuid()}:{os.getgid()}"
pmb.helpers.run.root(["chown", "-R", uid_gid, path])
else:
os.makedirs(path, 0o700, True)


@@ -73,14 +73,14 @@ def auto_apkindex_package(args: PmbArgs, arch, aport, apk, dry=False):
compare = pmb.parse.version.compare(version_aport, version_apk)
if compare == -1:
logging.warning(
"{}: skipping, because the aport version {} is lower"
" than the binary version {}".format(pkgname, version_aport, version_apk)
f"{pkgname}: skipping, because the aport version {version_aport} is lower"
f" than the binary version {version_apk}"
)
return
if compare == 1:
logging.verbose(
"{}: skipping, because the aport version {} is higher"
" than the binary version {}".format(pkgname, version_aport, version_apk)
f"{pkgname}: skipping, because the aport version {version_aport} is higher"
f" than the binary version {version_apk}"
)
return
@@ -123,7 +123,7 @@ def auto(args: PmbArgs, dry=False):
continue
aport_path = pmb.helpers.pmaports.find_optional(origin)
if not aport_path:
logging.warning("{}: origin '{}' aport not found".format(pkgname, origin))
logging.warning(f"{pkgname}: origin '{origin}' aport not found")
continue
aport = pmb.parse.apkbuild(aport_path)
if auto_apkindex_package(args, arch, aport, apk, dry):


@@ -132,7 +132,7 @@ def generate(arch, overview, pkgname=None, built=False):
"""
# Log message
packages_str = pkgname if pkgname else "all packages"
logging.info("Calculate packages that need to be built ({}, {})" "".format(packages_str, arch))
logging.info(f"Calculate packages that need to be built ({packages_str}, {arch})")
# Order relevant packages
ret = get_relevant_packages(arch, pkgname, built)


@@ -302,7 +302,7 @@ def copy_ssh_keys(config: Config):
return
keys = []
for key in glob.glob(os.path.expanduser(config.ssh_key_glob)):
with open(key, "r") as infile:
with open(key) as infile:
keys += infile.readlines()
if not len(keys):
@@ -607,9 +607,7 @@ def embed_firmware(args: PmbArgs, suffix: Chroot):
step = int(pmb.parse.deviceinfo().sd_embed_firmware_step_size)
except ValueError:
raise RuntimeError(
"Value for " "deviceinfo_sd_embed_firmware_step_size " "is not valid: {}".format(
step
)
"Value for " "deviceinfo_sd_embed_firmware_step_size " f"is not valid: {step}"
)
device_rootfs = mount_device_rootfs(suffix)
@@ -619,9 +617,7 @@ def embed_firmware(args: PmbArgs, suffix: Chroot):
for binary, offset in binary_list:
binary_file = os.path.join("/usr/share", binary)
logging.info(
"Embed firmware {} in the SD card image at offset {} with" " step size {}".format(
binary, offset, step
)
f"Embed firmware {binary} in the SD card image at offset {offset} with" f" step size {step}"
)
filename = os.path.join(device_rootfs, binary_file.lstrip("/"))
pmb.chroot.root(
@@ -666,8 +662,8 @@ def sanity_check_disk(args: PmbArgs):
device_name = os.path.basename(device)
if not os.path.exists(device):
raise RuntimeError(f"{device} doesn't exist, is the disk plugged?")
if os.path.isdir("/sys/class/block/{}".format(device_name)):
with open("/sys/class/block/{}/ro".format(device_name), "r") as handle:
if os.path.isdir(f"/sys/class/block/{device_name}"):
with open(f"/sys/class/block/{device_name}/ro") as handle:
ro = handle.read()
if ro == "1\n":
raise RuntimeError(f"{device} is read-only, maybe a locked SD card?")
@@ -686,7 +682,7 @@ def sanity_check_disk_size(args: PmbArgs):
# Size is in 512-byte blocks
size = int(raw.strip())
human = "{:.2f} GiB".format(size / 2 / 1024 / 1024)
human = f"{size / 2 / 1024 / 1024:.2f} GiB"
# Warn if the size is larger than 100GiB
if not args.assume_yes and size > (100 * 2 * 1024 * 1024):


@@ -12,7 +12,7 @@ def binfmt_info(arch_qemu):
full = {}
info = pmb.config.pmb_src / "pmb/data/qemu-user-binfmt.txt"
logging.verbose(f"parsing: {info}")
with open(info, "r") as handle:
with open(info) as handle:
for line in handle:
if line.startswith("#") or "=" not in line:
continue


@@ -128,28 +128,28 @@ def bootimg(path: Path):
# Get base, offsets, pagesize, cmdline and qcdt info
# This file does not exist for example for qcdt images
if os.path.isfile(f"{bootimg_path}-header_version"):
with open(f"{bootimg_path}-header_version", "r") as f:
with open(f"{bootimg_path}-header_version") as f:
header_version = int(f.read().replace("\n", ""))
output["header_version"] = str(header_version)
if header_version >= 3:
output["pagesize"] = "4096"
else:
with open(f"{bootimg_path}-base", "r") as f:
with open(f"{bootimg_path}-base") as f:
output["base"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-kernel_offset", "r") as f:
with open(f"{bootimg_path}-kernel_offset") as f:
output["kernel_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-ramdisk_offset", "r") as f:
with open(f"{bootimg_path}-ramdisk_offset") as f:
output["ramdisk_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-second_offset", "r") as f:
with open(f"{bootimg_path}-second_offset") as f:
output["second_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-tags_offset", "r") as f:
with open(f"{bootimg_path}-tags_offset") as f:
output["tags_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-pagesize", "r") as f:
with open(f"{bootimg_path}-pagesize") as f:
output["pagesize"] = f.read().replace("\n", "")
if header_version == 2:
with open(f"{bootimg_path}-dtb_offset", "r") as f:
with open(f"{bootimg_path}-dtb_offset") as f:
output["dtb_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
if get_mtk_label(f"{bootimg_path}-kernel") is not None:
@@ -168,7 +168,7 @@ def bootimg(path: Path):
output["dtb_second"] = "true" if is_dtb(f"{bootimg_path}-second") else "false"
with open(f"{bootimg_path}-cmdline", "r") as f:
with open(f"{bootimg_path}-cmdline") as f:
output["cmdline"] = f.read().replace("\n", "")
# Cleanup


@@ -18,7 +18,7 @@ def arm_big_little_first_group_ncpus() -> Optional[int]:
counter = 0
part = None
with open("/proc/cpuinfo", "r") as cpuinfo:
with open("/proc/cpuinfo") as cpuinfo:
for line in cpuinfo:
match = pattern.match(line)
if match: