mirror of https://gitlab.postmarketos.org/postmarketOS/pmbootstrap.git synced 2025-07-13 03:19:47 +03:00
Hugo Osvaldo Barrera 2024-06-23 14:20:56 +02:00 committed by Oliver Smith
parent 5a8e2c6cad
commit e421bb2d41
GPG key ID: 5AE7F5513E0885CB (no known key found for this signature in database)
109 changed files with 4044 additions and 2984 deletions


@ -9,54 +9,58 @@ import datetime
from typing import Any, Dict
sys.path.insert(0, os.path.abspath('..')) # Allow modules to be found
sys.path.insert(0, os.path.abspath("..")) # Allow modules to be found
from pmb import __version__
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
project = 'pmbootstrap'
copyright = str(datetime.date.today().year) + ', postmarketOS developers'
author = 'postmarketOS developers'
project = "pmbootstrap"
copyright = str(datetime.date.today().year) + ", postmarketOS developers"
author = "postmarketOS developers"
release = __version__
version = '.'.join(release.split('.')[:3])
version = ".".join(release.split(".")[:3])
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.doctest', 'sphinxcontrib.autoprogram']
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.doctest",
"sphinxcontrib.autoprogram",
]
templates_path = ['_templates']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
html_theme = 'sphinx_rtd_theme'
html_favicon = 'https://wiki.postmarketos.org/favicon.ico'
html_theme = "sphinx_rtd_theme"
html_favicon = "https://wiki.postmarketos.org/favicon.ico"
html_theme_options: Dict[str, Any] = {'style_nav_header_background': '008b69',}
html_theme_options: Dict[str, Any] = {
"style_nav_header_background": "008b69",
}
# Output file base name for HTML help builder.
htmlhelp_basename = 'pmboostrapdoc'
htmlhelp_basename = "pmboostrapdoc"
html_theme_options = {
'display_version': True,
'style_external_links': True,
"display_version": True,
"style_external_links": True,
}
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pmbootstrap', 'pmbootstrap Documentation',
['postmarketOS Developers'], 1)
]
man_pages = [("index", "pmbootstrap", "pmbootstrap Documentation", ["postmarketOS Developers"], 1)]
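A quick way to exercise the configuration above after the reformat is to build the docs from Python; a minimal sketch, assuming this conf.py lives in a "docs" source directory (directory names are not taken from this commit):

# Minimal sketch (not part of the diff): build the HTML docs against the
# conf.py above. The "docs" source directory and output path are assumptions.
from sphinx.cmd.build import build_main

raise SystemExit(build_main(["-b", "html", "docs", "docs/_build/html"]))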


@ -27,8 +27,7 @@ __version__ = "3.0.0_alpha"
version = sys.version_info
if version < (3, 9):
print("You need at least Python 3.9 to run pmbootstrap")
print("(You are running it with Python " + str(version.major) +
"." + str(version.minor) + ")")
print("(You are running it with Python " + str(version.major) + "." + str(version.minor) + ")")
sys.exit()
@ -38,8 +37,10 @@ def print_log_hint() -> None:
# Hints about the log file (print to stdout only)
log_hint = "Run 'pmbootstrap log' for details."
if not os.path.exists(log):
log_hint += (" Alternatively you can use '--details-to-stdout' to get more"
" output, e.g. 'pmbootstrap --details-to-stdout init'.")
log_hint += (
" Alternatively you can use '--details-to-stdout' to get more"
" output, e.g. 'pmbootstrap --details-to-stdout init'."
)
print()
print(log_hint)
@ -67,11 +68,13 @@ def main() -> int:
if args.action == "init":
return config_init.frontend(args)
elif not os.path.exists(args.config):
raise RuntimeError("Please specify a config file, or run"
" 'pmbootstrap init' to generate one.")
raise RuntimeError(
"Please specify a config file, or run" " 'pmbootstrap init' to generate one."
)
elif not os.path.exists(context.config.work):
raise RuntimeError("Work path not found, please run 'pmbootstrap"
" init' to create it.")
raise RuntimeError(
"Work path not found, please run 'pmbootstrap" " init' to create it."
)
# Migrate work folder if necessary
if args.action not in ["shutdown", "zap", "log"]:
@ -85,8 +88,9 @@ def main() -> int:
# Still active notice
if mount.ismount(Chroot.native() / "dev"):
logging.info("NOTE: chroot is still active (use 'pmbootstrap"
" shutdown' as necessary)")
logging.info(
"NOTE: chroot is still active (use 'pmbootstrap" " shutdown' as necessary)"
)
logging.info("DONE!")
except KeyboardInterrupt:
@ -106,6 +110,7 @@ def main() -> int:
# Dump log to stdout when args (and therefore logging) init failed
if "args" not in locals():
import logging as pylogging
pylogging.getLogger().setLevel(logging.DEBUG)
logging.info("ERROR: " + str(e))
@ -114,10 +119,8 @@ def main() -> int:
print_log_hint()
print()
print("Before you report this error, ensure that pmbootstrap is "
"up to date.")
print("Find the latest version here:"
" https://gitlab.com/postmarketOS/pmbootstrap/-/tags")
print("Before you report this error, ensure that pmbootstrap is " "up to date.")
print("Find the latest version here:" " https://gitlab.com/postmarketOS/pmbootstrap/-/tags")
print(f"Your version: {__version__}")
return 1


@ -47,18 +47,21 @@ def properties(pkgname):
for prefix in options["prefixes"]:
if pkgname.startswith(prefix):
return (prefix, folder, options)
logging.info("NOTE: aportgen is for generating postmarketOS specific"
logging.info(
"NOTE: aportgen is for generating postmarketOS specific"
" aports, such as the cross-compiler related packages"
" or the linux kernel fork packages.")
logging.info("NOTE: If you wanted to package new software in general, try"
" 'pmbootstrap newapkbuild' to generate a template.")
" or the linux kernel fork packages."
)
logging.info(
"NOTE: If you wanted to package new software in general, try"
" 'pmbootstrap newapkbuild' to generate a template."
)
raise ValueError("No generator available for " + pkgname + "!")
def generate(pkgname: str, fork_alpine: bool):
if fork_alpine:
prefix, folder, options = (pkgname, "temp",
{"confirm_overwrite": True})
prefix, folder, options = (pkgname, "temp", {"confirm_overwrite": True})
else:
prefix, folder, options = properties(pkgname)
config = get_context().config
@ -66,8 +69,7 @@ def generate(pkgname: str, fork_alpine: bool):
# Confirm overwrite
if options["confirm_overwrite"] and os.path.exists(path_target):
logging.warning("WARNING: Target folder already exists: "
f"{path_target}")
logging.warning("WARNING: Target folder already exists: " f"{path_target}")
if not pmb.helpers.cli.confirm("Continue and overwrite?"):
raise RuntimeError("Aborted.")
@ -77,10 +79,10 @@ def generate(pkgname: str, fork_alpine: bool):
pmb.helpers.run.user(["rm", "-r", aportgen])
if fork_alpine:
upstream = pmb.aportgen.core.get_upstream_aport(pkgname)
pmb.helpers.run.user(["cp", "-r", upstream,
aportgen])
pmb.aportgen.core.rewrite(pkgname, replace_simple={
"# Contributor:*": None, "# Maintainer:*": None})
pmb.helpers.run.user(["cp", "-r", upstream, aportgen])
pmb.aportgen.core.rewrite(
pkgname, replace_simple={"# Contributor:*": None, "# Maintainer:*": None}
)
else:
# Run pmb.aportgen.PREFIX.generate()
# FIXME: this is really bad and hacky let's not do this please
@ -89,7 +91,6 @@ def generate(pkgname: str, fork_alpine: bool):
# Move to the aports folder
if os.path.exists(path_target):
pmb.helpers.run.user(["rm", "-r", path_target])
pmb.helpers.run.user(
["mv", aportgen, path_target])
pmb.helpers.run.user(["mv", aportgen, path_target])
logging.info(f"*** pmaport generated: {path_target}")


@ -25,8 +25,7 @@ def generate(pkgname: str):
tempdir = Path("/tmp/aportgen")
aportgen = context.config.work / "aportgen"
pmb.chroot.root(["rm", "-rf", tempdir])
pmb.helpers.run.user(["mkdir", "-p", aportgen,
Chroot.native() / tempdir])
pmb.helpers.run.user(["mkdir", "-p", aportgen, Chroot.native() / tempdir])
# Write the APKBUILD
channel_cfg = pmb.config.pmaports.read_config_channel()


@ -49,9 +49,16 @@ def format_function(name, body, remove_indent=4):
return name + "() {\n" + ret + "}\n"
def rewrite(pkgname, path_original="", fields={}, replace_pkgname=None,
replace_functions={}, replace_simple={}, below_header="",
remove_indent=4):
def rewrite(
pkgname,
path_original="",
fields={},
replace_pkgname=None,
replace_functions={},
replace_simple={},
below_header="",
remove_indent=4,
):
"""
Append a header to $WORK/aportgen/APKBUILD, delete maintainer/contributor
lines (so they won't be bugged with issues regarding our generated aports),
@ -99,8 +106,7 @@ def rewrite(pkgname, path_original="", fields={}, replace_pkgname=None,
skip_in_func = False
for line in handle.readlines():
# Skip maintainer/contributor
if line.startswith("# Maintainer") or line.startswith(
"# Contributor"):
if line.startswith("# Maintainer") or line.startswith("# Contributor"):
continue
# Replace functions
@ -113,8 +119,7 @@ def rewrite(pkgname, path_original="", fields={}, replace_pkgname=None,
if line.startswith(func + "() {"):
skip_in_func = True
if body:
lines_new += format_function(
func, body, remove_indent=remove_indent)
lines_new += format_function(func, body, remove_indent=remove_indent)
break
if skip_in_func:
continue
@ -171,30 +176,28 @@ def get_upstream_aport(pkgname: str, arch=None):
args = pmb.helpers.args.please_i_really_need_args()
if getattr(args, "fork_alpine_retain_branch", False):
logging.info("Not changing aports branch as --fork-alpine-retain-branch was "
"used.")
logging.info("Not changing aports branch as --fork-alpine-retain-branch was " "used.")
else:
# Checkout branch
channel_cfg = pmb.config.pmaports.read_config_channel()
branch = channel_cfg["branch_aports"]
logging.info(f"Checkout aports.git branch: {branch}")
if pmb.helpers.run.user(["git", "checkout", branch],
aports_upstream_path, check=False):
if pmb.helpers.run.user(["git", "checkout", branch], aports_upstream_path, check=False):
logging.info("NOTE: run 'pmbootstrap pull' and try again")
logging.info("NOTE: if it still fails, your aports.git was cloned with"
logging.info(
"NOTE: if it still fails, your aports.git was cloned with"
" an older version of pmbootstrap, as shallow clone."
" Unshallow it, or remove it and let pmbootstrap clone it"
f" again: {aports_upstream_path}")
f" again: {aports_upstream_path}"
)
raise RuntimeError("Branch checkout failed.")
# Search package
paths = list(aports_upstream_path.glob(f"*/{pkgname}"))
if len(paths) > 1:
raise RuntimeError("Package " + pkgname + " found in multiple"
" aports subfolders.")
raise RuntimeError("Package " + pkgname + " found in multiple" " aports subfolders.")
elif len(paths) == 0:
raise RuntimeError("Package " + pkgname + " not found in alpine"
" aports repository.")
raise RuntimeError("Package " + pkgname + " not found in alpine" " aports repository.")
aport_path = paths[0]
# Parse APKBUILD
@ -213,17 +216,23 @@ def get_upstream_aport(pkgname: str, arch=None):
# APKBUILD > binary: this is fine
if compare == 1:
logging.info(f"NOTE: {pkgname} {arch} binary package has a lower"
logging.info(
f"NOTE: {pkgname} {arch} binary package has a lower"
f" version {package['version']} than the APKBUILD"
f" {apkbuild_version}")
f" {apkbuild_version}"
)
return aport_path
# APKBUILD < binary: aports.git is outdated
if compare == -1:
logging.warning("WARNING: Package '" + pkgname + "' has a lower version in"
" local checkout of Alpine's aports (" + apkbuild_version +
") compared to Alpine's binary package (" +
package["version"] + ")!")
logging.warning(
"WARNING: Package '" + pkgname + "' has a lower version in"
" local checkout of Alpine's aports ("
+ apkbuild_version
+ ") compared to Alpine's binary package ("
+ package["version"]
+ ")!"
)
logging.info("NOTE: You can update your local checkout with: 'pmbootstrap pull'")
return aport_path
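A small check of the version ordering these hunks rely on: as used above, pmb.parse.version.compare(a, b) follows apk's ordering and returns 1 when a is newer, -1 when it is older. The example versions are arbitrary.

# Not part of the diff; illustrates the comparison semantics used above.
import pmb.parse.version

assert pmb.parse.version.compare("1.1-r0", "1.0-r0") == 1
assert pmb.parse.version.compare("1.0-r0", "1.0-r1") == -1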


@ -17,14 +17,17 @@ def ask_for_architecture():
if "armhf" in architectures:
architectures.remove("armhf")
while True:
ret = pmb.helpers.cli.ask("Device architecture", architectures,
"aarch64", complete=architectures)
ret = pmb.helpers.cli.ask(
"Device architecture", architectures, "aarch64", complete=architectures
)
if ret in architectures:
return Arch.from_str(ret)
logging.fatal("ERROR: Invalid architecture specified. If you want to"
logging.fatal(
"ERROR: Invalid architecture specified. If you want to"
" add a new architecture, edit"
" build_device_architectures in"
" pmb/config/__init__.py.")
" pmb/config/__init__.py."
)
def ask_for_manufacturer():
@ -37,8 +40,7 @@ def ask_for_name(manufacturer):
ret = pmb.helpers.cli.ask("Name", None, None, False)
# Always add the manufacturer
if not ret.startswith(manufacturer) and \
not ret.startswith("Google"):
if not ret.startswith(manufacturer) and not ret.startswith("Google"):
ret = manufacturer + " " + ret
return ret
@ -46,8 +48,7 @@ def ask_for_name(manufacturer):
def ask_for_year():
# Regex from https://stackoverflow.com/a/12240826
logging.info("In what year was the device released (e.g. 2012)?")
return pmb.helpers.cli.ask("Year", None, None, False,
validation_regex=r'^[1-9]\d{3,}$')
return pmb.helpers.cli.ask("Year", None, None, False, validation_regex=r"^[1-9]\d{3,}$")
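A quick check of the year validation pattern used above (not part of the diff):

# The regex accepts a year of four or more digits that does not start with 0.
import re

year_re = r"^[1-9]\d{3,}$"
assert re.match(year_re, "2012")
assert not re.match(year_re, "095")
assert not re.match(year_re, "99")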
def ask_for_chassis():
@ -55,58 +56,60 @@ def ask_for_chassis():
logging.info("What type of device is it?")
logging.info("Valid types are: " + ", ".join(types))
return pmb.helpers.cli.ask("Chassis", None, None, True,
validation_regex='|'.join(types),
complete=types)
return pmb.helpers.cli.ask(
"Chassis", None, None, True, validation_regex="|".join(types), complete=types
)
def ask_for_keyboard() -> bool:
return pmb.helpers.cli.confirm("Does the device have a hardware"
" keyboard?")
return pmb.helpers.cli.confirm("Does the device have a hardware" " keyboard?")
def ask_for_external_storage() -> bool:
return pmb.helpers.cli.confirm("Does the device have a sdcard or"
" other external storage medium?")
return pmb.helpers.cli.confirm(
"Does the device have a sdcard or" " other external storage medium?"
)
def ask_for_flash_method():
while True:
logging.info("Which flash method does the device support?")
method = pmb.helpers.cli.ask("Flash method",
pmb.config.flash_methods,
"none",
complete=pmb.config.flash_methods)
method = pmb.helpers.cli.ask(
"Flash method", pmb.config.flash_methods, "none", complete=pmb.config.flash_methods
)
if method in pmb.config.flash_methods:
if method == "heimdall":
heimdall_types = ["isorec", "bootimg"]
while True:
logging.info("Does the device use the \"isolated"
" recovery\" or boot.img?")
logging.info("<https://wiki.postmarketos.org/wiki"
logging.info('Does the device use the "isolated' ' recovery" or boot.img?')
logging.info(
"<https://wiki.postmarketos.org/wiki"
"/Deviceinfo_flash_methods#Isorec_or_bootimg"
".3F>")
heimdall_type = pmb.helpers.cli.ask("Type",
heimdall_types,
heimdall_types[0])
".3F>"
)
heimdall_type = pmb.helpers.cli.ask("Type", heimdall_types, heimdall_types[0])
if heimdall_type in heimdall_types:
method += "-" + heimdall_type
break
logging.fatal("ERROR: Invalid type specified.")
return method
logging.fatal("ERROR: Invalid flash method specified. If you want to"
logging.fatal(
"ERROR: Invalid flash method specified. If you want to"
" add a new flash method, edit flash_methods in"
" pmb/config/__init__.py.")
" pmb/config/__init__.py."
)
def ask_for_bootimg():
logging.info("You can analyze a known working boot.img file to"
logging.info(
"You can analyze a known working boot.img file to"
" automatically fill out the flasher information for your"
" deviceinfo file. Either specify the path to an image or"
" press return to skip this step (you can do it later with"
" 'pmbootstrap bootimg_analyze').")
" 'pmbootstrap bootimg_analyze')."
)
while True:
response = pmb.helpers.cli.ask("Path", None, "", False)
@ -121,7 +124,8 @@ def ask_for_bootimg():
def generate_deviceinfo_fastboot_content(bootimg=None):
if bootimg is None:
bootimg = {"cmdline": "",
bootimg = {
"cmdline": "",
"qcdt": "false",
"dtb_second": "false",
"base": "",
@ -131,7 +135,8 @@ def generate_deviceinfo_fastboot_content(bootimg=None):
"tags_offset": "",
"pagesize": "2048",
"mtk_label_kernel": "",
"mtk_label_ramdisk": ""}
"mtk_label_ramdisk": "",
}
content = f"""\
deviceinfo_kernel_cmdline="{bootimg["cmdline"]}"
@ -178,9 +183,18 @@ def generate_deviceinfo_fastboot_content(bootimg=None):
return content
def generate_deviceinfo(pkgname: str, name: str, manufacturer: str, year: str, arch: Arch,
chassis: str, has_keyboard: bool, has_external_storage: bool,
flash_method: str, bootimg=None):
def generate_deviceinfo(
pkgname: str,
name: str,
manufacturer: str,
year: str,
arch: Arch,
chassis: str,
has_keyboard: bool,
has_external_storage: bool,
flash_method: str,
bootimg=None,
):
codename = "-".join(pkgname.split("-")[1:])
external_storage = "true" if has_external_storage else "false"
# Note: New variables must be added to pmb/config/__init__.py as well
@ -270,8 +284,7 @@ def generate_modules_initfs() -> None:
def generate_apkbuild(pkgname: str, name: str, arch: Arch, flash_method: str):
# Dependencies
depends = ["postmarketos-base",
"linux-" + "-".join(pkgname.split("-")[1:])]
depends = ["postmarketos-base", "linux-" + "-".join(pkgname.split("-")[1:])]
if flash_method in ["fastboot", "heimdall-bootimg"]:
depends.append("mkbootimg")
if flash_method == "0xffff":
@ -332,8 +345,17 @@ def generate(pkgname: str):
if flash_method in ["fastboot", "heimdall-bootimg"]:
bootimg = ask_for_bootimg()
generate_deviceinfo(pkgname, name, manufacturer, year, arch,
chassis, has_keyboard, has_external_storage,
flash_method, bootimg)
generate_deviceinfo(
pkgname,
name,
manufacturer,
year,
arch,
chassis,
has_keyboard,
has_external_storage,
flash_method,
bootimg,
)
generate_modules_initfs()
generate_apkbuild(pkgname, name, arch, flash_method)


@ -22,8 +22,7 @@ def generate(pkgname: str):
upstream = pkgrepo_default_path() / "main/gcc6"
based_on = "main/gcc6 (from postmarketOS)"
else:
raise ValueError(f"Invalid prefix '{prefix}', expected gcc, gcc4 or"
" gcc6.")
raise ValueError(f"Invalid prefix '{prefix}', expected gcc, gcc4 or" " gcc6.")
pmb.helpers.run.user(["cp", "-r", upstream, context.config.work / "aportgen"])
# Rewrite APKBUILD
@ -37,11 +36,9 @@ def generate(pkgname: str):
"makedepends_host": "linux-headers gmp-dev mpfr-dev mpc1-dev isl-dev"
f" zlib-dev musl-dev-{arch} binutils-{arch}",
"subpackages": "",
# gcc6: options is already there, so we need to replace it and not only
# set it below the header like done below.
"options": "!strip",
"LIBGOMP": "false",
"LIBGCC": "false",
"LIBATOMIC": "false",
@ -50,10 +47,12 @@ def generate(pkgname: str):
# Latest gcc only, not gcc4 and gcc6
if prefix == "gcc":
fields["subpackages"] = f"g++-{arch}:gpp" \
f" libstdc++-dev-{arch}:libcxx_dev"
fields["subpackages"] = f"g++-{arch}:gpp" f" libstdc++-dev-{arch}:libcxx_dev"
below_header = "CTARGET_ARCH=" + arch + """
below_header = (
"CTARGET_ARCH="
+ arch
+ """
CTARGET="$(arch_to_hostspec ${CTARGET_ARCH})"
LANG_D=false
LANG_OBJC=false
@ -74,22 +73,21 @@ def generate(pkgname: str):
_cross_configure="--disable-bootstrap --with-sysroot=/usr/$CTARGET"
"""
)
replace_simple = {
# Do not package libstdc++, do not add "g++-$ARCH" here (already
# did that explicitly in the subpackages variable above, so
# pmbootstrap picks it up properly).
'*subpackages="$subpackages libstdc++:libcxx:*': None,
# We set the cross_configure variable at the beginning, so it does not
# use CBUILDROOT as sysroot. In the original APKBUILD this is a local
# variable, but we make it a global one.
'*_cross_configure=*': None,
"*_cross_configure=*": None,
# Do not build foreign arch libgcc, we use the one from Alpine (#2168)
'_libgcc=true*': '_libgcc=false',
"_libgcc=true*": "_libgcc=false",
}
pmb.aportgen.core.rewrite(pkgname, based_on, fields,
replace_simple=replace_simple,
below_header=below_header)
pmb.aportgen.core.rewrite(
pkgname, based_on, fields, replace_simple=replace_simple, below_header=below_header
)


@ -26,8 +26,7 @@ def generate(pkgname):
tempdir = Path("/tmp/aportgen")
aportgen = context.config.work / "aportgen"
pmb.chroot.root(["rm", "-rf", tempdir])
pmb.helpers.run.user(["mkdir", "-p", aportgen,
Chroot.native() / tempdir])
pmb.helpers.run.user(["mkdir", "-p", aportgen, Chroot.native() / tempdir])
# Write the APKBUILD
channel_cfg = pmb.config.pmaports.read_config_channel()


@ -12,8 +12,16 @@ def generate_apkbuild(pkgname: str, deviceinfo: Deviceinfo, patches: List[str]):
device = "-".join(pkgname.split("-")[1:])
carch = deviceinfo.arch.kernel()
makedepends = ["bash", "bc", "bison", "devicepkg-dev", "findutils", "flex",
"openssl-dev", "perl"]
makedepends = [
"bash",
"bc",
"bison",
"devicepkg-dev",
"findutils",
"flex",
"openssl-dev",
"perl",
]
build = """
unset LDFLAGS
@ -34,8 +42,7 @@ def generate_apkbuild(pkgname: str, deviceinfo: Deviceinfo, patches: List[str]):
build += """\n
# Master DTB (deviceinfo_bootimg_qcdt)"""
vendors = ["spreadtrum", "exynos", "other"]
soc_vendor = pmb.helpers.cli.ask("SoC vendor", vendors,
vendors[-1], complete=vendors)
soc_vendor = pmb.helpers.cli.ask("SoC vendor", vendors, vendors[-1], complete=vendors)
if soc_vendor == "spreadtrum":
makedepends.append("dtbtool-sprd")
build += """
@ -125,8 +132,8 @@ def generate(pkgname: str):
"kernel-use-the-gnu89-standard-explicitly.patch",
]
for patch in patches:
pmb.helpers.run.user(["ln", "-s",
"../../.shared-patches/linux/" + patch,
(work / "aportgen" / patch)])
pmb.helpers.run.user(
["ln", "-s", "../../.shared-patches/linux/" + patch, (work / "aportgen" / patch)]
)
generate_apkbuild(pkgname, deviceinfo, patches)


@ -25,8 +25,7 @@ def generate(pkgname):
tempdir = Path("/tmp/aportgen")
aportgen = work / "aportgen"
pmb.chroot.root(["rm", "-rf", tempdir])
pmb.helpers.run.user(["mkdir", "-p", aportgen,
Chroot.native() / tempdir])
pmb.helpers.run.user(["mkdir", "-p", aportgen, Chroot.native() / tempdir])
# Write the APKBUILD
channel_cfg = pmb.config.pmaports.read_config_channel()


@ -4,8 +4,13 @@ from pmb.build.init import init, init_abuild_minimal, init_compiler
from pmb.build.envkernel import package_kernel
from pmb.build.kconfig import menuconfig
from pmb.build.newapkbuild import newapkbuild
from pmb.build.other import copy_to_buildpath, get_status, \
index_repo
from pmb.build.other import copy_to_buildpath, get_status, index_repo
from .backend import mount_pmaports
from pmb.build._package import BootstrapStage, packages, \
output_path, BuildQueueItem, get_apkbuild, get_depends
from pmb.build._package import (
BootstrapStage,
packages,
output_path,
BuildQueueItem,
get_apkbuild,
get_depends,
)


@ -47,18 +47,21 @@ def check_build_for_arch(pkgname: str, arch: Arch):
if binary:
pmaport = pmb.helpers.pmaports.get(pkgname)
pmaport_version = pmaport["pkgver"] + "-r" + pmaport["pkgrel"]
logging.debug(pkgname + ": found pmaport (" + pmaport_version + ") and"
logging.debug(
pkgname + ": found pmaport (" + pmaport_version + ") and"
" binary package (" + binary["version"] + ", from"
" postmarketOS or Alpine), but pmaport can't be built"
f" for {arch} -> using binary package")
f" for {arch} -> using binary package"
)
return False
# No binary package exists and can't build it
logging.info("NOTE: You can edit the 'arch=' line inside the APKBUILD")
if context.command == "build":
logging.info("NOTE: Alternatively, use --arch to build for another"
" architecture ('pmbootstrap build --arch=armhf " +
pkgname + "')")
logging.info(
"NOTE: Alternatively, use --arch to build for another"
" architecture ('pmbootstrap build --arch=armhf " + pkgname + "')"
)
raise RuntimeError(f"Can't build '{pkgname}' for architecture {arch}")
@ -80,11 +83,9 @@ def get_depends(context: Context, apkbuild):
ret = sorted(set(ret))
# Don't recurse forever when a package depends on itself (#948)
for pkgname in ([apkbuild["pkgname"]] +
list(apkbuild["subpackages"].keys())):
for pkgname in [apkbuild["pkgname"]] + list(apkbuild["subpackages"].keys()):
if pkgname in ret:
logging.verbose(apkbuild["pkgname"] + ": ignoring dependency on"
" itself: " + pkgname)
logging.verbose(apkbuild["pkgname"] + ": ignoring dependency on" " itself: " + pkgname)
ret.remove(pkgname)
# FIXME: is this needed? is this sensible?
@ -123,7 +124,7 @@ def output_path(arch: Arch, pkgname: str, pkgver: str, pkgrel: str) -> Path:
def finish(apkbuild, channel, arch, output: Path, chroot: Chroot, strict=False):
"""Various finishing tasks that need to be done after a build."""
# Verify output file
out_dir = (get_context().config.work / "packages" / channel)
out_dir = get_context().config.work / "packages" / channel
if not (out_dir / output).exists():
raise RuntimeError(f"Package not found after build: {(out_dir / output)}")
@ -135,16 +136,22 @@ def finish(apkbuild, channel, arch, output: Path, chroot: Chroot, strict=False):
# Uninstall build dependencies (strict mode)
if strict or "pmb:strict" in apkbuild["options"]:
logging.info(f"({chroot}) uninstall build dependencies")
pmb.chroot.user(["abuild", "undeps"], chroot, Path("/home/pmos/build"),
env={"SUDO_APK": "abuild-apk --no-progress"})
pmb.chroot.user(
["abuild", "undeps"],
chroot,
Path("/home/pmos/build"),
env={"SUDO_APK": "abuild-apk --no-progress"},
)
# If the build depends contain postmarketos-keys or postmarketos-base,
# abuild will have removed the postmarketOS repository key (pma#1230)
pmb.chroot.init_keys()
logging.info(f"@YELLOW@=>@END@ @BLUE@{channel}/{apkbuild['pkgname']}@END@: Done!")
_package_cache: Dict[str, List[str]] = {}
def is_cached_or_cache(arch: Arch, pkgname: str) -> bool:
"""Check if a package is in the built packages cache, if not
then mark it as built. We must mark as built before building
@ -194,15 +201,22 @@ class BuildQueueItem(TypedDict):
# arch is set if we should build for a specific arch
def process_package(context: Context, queue_build: Callable, pkgname: str,
arch: Optional[Arch], fallback_arch: Arch, force: bool) -> List[str]:
def process_package(
context: Context,
queue_build: Callable,
pkgname: str,
arch: Optional[Arch],
fallback_arch: Arch,
force: bool,
) -> List[str]:
# Only build when APKBUILD exists
base_aports, base_apkbuild = get_apkbuild(pkgname)
if not base_apkbuild:
if pmb.parse.apkindex.providers(pkgname, fallback_arch, False):
return []
raise RuntimeError(f"{pkgname}: Could not find aport, and"
" could not find this package in any APKINDEX!")
raise RuntimeError(
f"{pkgname}: Could not find aport, and" " could not find this package in any APKINDEX!"
)
if arch is None:
arch = pmb.build.autodetect.arch(base_apkbuild)
@ -252,29 +266,36 @@ def process_package(context: Context, queue_build: Callable, pkgname: str,
cross = pmb.build.autodetect.crosscompile(apkbuild, arch)
_dep_arch = Arch.native() if cross == "native" else arch
if not pmb.parse.apkindex.package(dep, _dep_arch, False):
raise RuntimeError("Missing binary package for dependency '" +
dep + "' of '" + parent + "', but"
raise RuntimeError(
"Missing binary package for dependency '" + dep + "' of '" + parent + "', but"
" pmbootstrap won't build any depends since"
" it was started with --no-depends.")
" it was started with --no-depends."
)
bstatus = pmb.build.get_status(arch, apkbuild)
if bstatus.necessary():
if context.no_depends:
raise RuntimeError(f"Binary package for dependency '{dep}'"
raise RuntimeError(
f"Binary package for dependency '{dep}'"
f" of '{parent}' is outdated, but"
f" pmbootstrap won't build any depends"
f" since it was started with --no-depends.")
f" since it was started with --no-depends."
)
deps = get_depends(context, apkbuild)
# Preserve the old behaviour where we don't build second order dependencies by default
# unless they are NEW packages, in which case we
if base_build_status.necessary() or bstatus == BuildStatus.NEW:
logging.debug(f"BUILDQUEUE: queue {dep} (dependency of {parent}) for build, reason: {bstatus}")
logging.debug(
f"BUILDQUEUE: queue {dep} (dependency of {parent}) for build, reason: {bstatus}"
)
queue_build(aports, apkbuild, deps, cross)
else:
logging.info(f"@YELLOW@SKIP:@END@ NOT building {arch}/{dep}: it is a"
logging.info(
f"@YELLOW@SKIP:@END@ NOT building {arch}/{dep}: it is a"
f" dependency of {pkgname} which isn't marked for build."
f" Call with --force or consider building {dep} manually")
f" Call with --force or consider building {dep} manually"
)
logging.verbose(f"{arch}/{dep}: Inserting {len(deps)} dependencies")
depends = deps + depends
@ -283,8 +304,16 @@ def process_package(context: Context, queue_build: Callable, pkgname: str,
return depends
def packages(context: Context, pkgnames: List[str], arch: Optional[Arch]=None, force=False, strict=False,
src=None, bootstrap_stage=BootstrapStage.NONE, log_callback: Optional[Callable]=None) -> List[str]:
def packages(
context: Context,
pkgnames: List[str],
arch: Optional[Arch] = None,
force=False,
strict=False,
src=None,
bootstrap_stage=BootstrapStage.NONE,
log_callback: Optional[Callable] = None,
) -> List[str]:
"""
Build a package and its dependencies with Alpine Linux' abuild.
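Given the signature above, a hedged usage sketch; the package name is a hypothetical example, and Arch/get_context are assumed to be the helpers already imported in this module, as the surrounding hunks show:

# Usage sketch (not part of the diff). Returns the list of packages that
# were actually built; force=True rebuilds even if a binary package exists.
built = packages(
    get_context(),
    ["hello-world"],
    arch=Arch.native(),
    force=True,
)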
@ -307,7 +336,9 @@ def packages(context: Context, pkgnames: List[str], arch: Optional[Arch]=None, f
# Add a package to the build queue, fetch it's dependency, and
# add record build helpers to installed (e.g. sccache)
def queue_build(aports: Path, apkbuild: Dict[str, Any], depends: List[str], cross: Optional[str] = None) -> List[str]:
def queue_build(
aports: Path, apkbuild: Dict[str, Any], depends: List[str], cross: Optional[str] = None
) -> List[str]:
# Skip if already queued
name = apkbuild["pkgname"]
if any(item["name"] == name for item in build_queue):
@ -316,18 +347,21 @@ def packages(context: Context, pkgnames: List[str], arch: Optional[Arch]=None, f
pkg_arch = pmb.build.autodetect.arch(apkbuild) if arch is None else arch
chroot = pmb.build.autodetect.chroot(apkbuild, pkg_arch)
cross = cross or pmb.build.autodetect.crosscompile(apkbuild, pkg_arch)
build_queue.append({
build_queue.append(
{
"name": name,
"arch": pkg_arch,
"aports": aports.name, # the pmaports source repo (e.g. "systemd")
"apkbuild": apkbuild,
"output_path": output_path(pkg_arch, apkbuild["pkgname"],
apkbuild["pkgver"], apkbuild["pkgrel"]),
"output_path": output_path(
pkg_arch, apkbuild["pkgname"], apkbuild["pkgver"], apkbuild["pkgrel"]
),
"channel": pmb.config.pmaports.read_config(aports)["channel"],
"depends": depends,
"chroot": chroot,
"cross": cross
})
"cross": cross,
}
)
# If we just queued a package that was request to be built explicitly then
# record it, since we return which packages we actually built
@ -357,7 +391,9 @@ def packages(context: Context, pkgnames: List[str], arch: Optional[Arch]=None, f
# dependencies that need building as well as the package itself
all_dependencies: List[str] = []
for pkgname in pkgnames:
all_dependencies += process_package(context, queue_build, pkgname, arch, fallback_arch, force)
all_dependencies += process_package(
context, queue_build, pkgname, arch, fallback_arch, force
)
if not len(build_queue):
return []
@ -380,7 +416,9 @@ def packages(context: Context, pkgnames: List[str], arch: Optional[Arch]=None, f
channel = pkg["channel"]
output = pkg["output_path"]
if not log_callback:
logging.info(f"@YELLOW@=>@END@ @BLUE@{channel}/{pkg['name']}@END@: Installing dependencies")
logging.info(
f"@YELLOW@=>@END@ @BLUE@{channel}/{pkg['name']}@END@: Installing dependencies"
)
else:
log_callback(pkg)
@ -408,8 +446,18 @@ def packages(context: Context, pkgnames: List[str], arch: Optional[Arch]=None, f
# Build and finish up
logging.info(f"@YELLOW@=>@END@ @BLUE@{channel}/{pkg['name']}@END@: Building package")
try:
run_abuild(context, pkg["apkbuild"], channel, pkg_arch, strict, force, cross,
chroot, src, bootstrap_stage)
run_abuild(
context,
pkg["apkbuild"],
channel,
pkg_arch,
strict,
force,
cross,
chroot,
src,
bootstrap_stage,
)
except RuntimeError:
raise BuildFailedError(f"Couldn't build {output}!")
finish(pkg["apkbuild"], channel, pkg_arch, output, chroot, strict)


@ -1,4 +1,3 @@
import enum
from pathlib import Path
from typing import Dict
@ -17,6 +16,7 @@ class BootstrapStage(enum.IntEnum):
Pass a BOOTSTRAP= environment variable with the given value to abuild. See
bootstrap_1 etc. at https://postmarketos.org/pmaports.cfg for details.
"""
NONE = 0
# We don't need explicit representations of the other numbers.
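A small sketch of how the enum is meant to be consumed, per the docstring above; how the value is stringified for BOOTSTRAP= is an assumption:

# Sketch (not part of the diff): export the stage as the BOOTSTRAP= variable
# described in the docstring. The exact string conversion is an assumption.
stage = BootstrapStage.NONE
env = {}
if stage != BootstrapStage.NONE:
    env["BOOTSTRAP"] = str(int(stage))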
@ -45,11 +45,16 @@ def override_source(apkbuild, pkgver, src, chroot: Chroot=Chroot.native()):
pkgname = apkbuild["pkgname"]
# Appended content
append = """
append = (
"""
# ** Overrides below appended by pmbootstrap for --src **
pkgver=\"""" + pkgver + """\"
pkgdesc=\"""" + pkgdesc + """\"
pkgver=\""""
+ pkgver
+ """\"
pkgdesc=\""""
+ pkgdesc
+ """\"
_pmb_src_copy="/tmp/pmbootstrap-local-source-copy"
# Empty $source avoids patching in prepare()
@ -59,17 +64,25 @@ def override_source(apkbuild, pkgver, src, chroot: Chroot=Chroot.native()):
fetch() {
# Update source copy
msg "Copying source from host system: """ + src + """\"
local exclude_from=\"""" + mount_path + """/.gitignore\"
msg "Copying source from host system: """
+ src
+ """\"
local exclude_from=\""""
+ mount_path
+ """/.gitignore\"
local rsync_args=""
if [ -f "$exclude_from" ]; then
rsync_args="--exclude-from=\"$exclude_from\""
fi
if ! [ \"""" + pkgname + """\" = "$(cat /tmp/src-pkgname)" ]; then
if ! [ \""""
+ pkgname
+ """\" = "$(cat /tmp/src-pkgname)" ]; then
rsync_args="--delete $rsync_args"
fi
rsync -a --exclude=".git/" $rsync_args --ignore-errors --force \\
\"""" + mount_path + """\" "$_pmb_src_copy" || true
\""""
+ mount_path
+ """\" "$_pmb_src_copy" || true
# Link local source files (e.g. kernel config)
mkdir "$srcdir"
@ -78,13 +91,16 @@ def override_source(apkbuild, pkgver, src, chroot: Chroot=Chroot.native()):
is_remote "$s" || ln -sf "$startdir/$s" "$srcdir/"
done
echo \"""" + pkgname + """\" > /tmp/src-pkgname
echo \""""
+ pkgname
+ """\" > /tmp/src-pkgname
}
unpack() {
ln -sv "$_pmb_src_copy" "$builddir"
}
"""
)
# Write and log append file
with open(append_path_outside, "w", encoding="utf-8") as handle:
@ -94,14 +110,11 @@ def override_source(apkbuild, pkgver, src, chroot: Chroot=Chroot.native()):
# Append it to the APKBUILD
apkbuild_path = "/home/pmos/build/APKBUILD"
shell_cmd = ("cat " + apkbuild_path + " " + append_path + " > " +
append_path + "_")
shell_cmd = "cat " + apkbuild_path + " " + append_path + " > " + append_path + "_"
pmb.chroot.user(["sh", "-c", shell_cmd], chroot)
pmb.chroot.user(["mv", append_path + "_", apkbuild_path], chroot)
def mount_pmaports(chroot: Chroot = Chroot.native()) -> Dict[str, Path]:
"""
Mount pmaports.git in chroot.
@ -119,7 +132,6 @@ def mount_pmaports(chroot: Chroot=Chroot.native()) -> Dict[str, Path]:
return dest_paths
def link_to_git_dir(chroot: Chroot):
"""Make ``/home/pmos/build/.git`` point to the .git dir from pmaports.git, with a
symlink so abuild does not fail (#1841).
@ -143,9 +155,7 @@ def link_to_git_dir(chroot: Chroot):
# Create .git symlink
pmb.chroot.user(["mkdir", "-p", "/home/pmos/build"], chroot)
pmb.chroot.user(["ln", "-sf", dest_paths["pmaports"] / ".git",
"/home/pmos/build/.git"], chroot)
pmb.chroot.user(["ln", "-sf", dest_paths["pmaports"] / ".git", "/home/pmos/build/.git"], chroot)
def handle_csum_failure(apkbuild, chroot: Chroot):
@ -155,18 +165,32 @@ def handle_csum_failure(apkbuild, chroot: Chroot):
reason = csum_fail_path.open().read().strip()
if reason == "local":
logging.info("WARNING: Some checksums didn't match, run"
f" 'pmbootstrap checksum {apkbuild['pkgname']}' to fix them.")
logging.info(
"WARNING: Some checksums didn't match, run"
f" 'pmbootstrap checksum {apkbuild['pkgname']}' to fix them."
)
else:
logging.error(f"ERROR: Remote checksum mismatch for {apkbuild['pkgname']}")
logging.error("NOTE: If you just modified this package:")
logging.error(f" * run 'pmbootstrap checksum {apkbuild['pkgname']}' to update the checksums.")
logging.error(
f" * run 'pmbootstrap checksum {apkbuild['pkgname']}' to update the checksums."
)
logging.error("If you didn't modify it, try building again to re-download the sources.")
raise RuntimeError(f"Remote checksum mismatch for {apkbuild['pkgname']}")
def run_abuild(context: Context, apkbuild, channel, arch: Arch, strict=False, force=False, cross=None,
suffix: Chroot=Chroot.native(), src=None, bootstrap_stage=BootstrapStage.NONE):
def run_abuild(
context: Context,
apkbuild,
channel,
arch: Arch,
strict=False,
force=False,
cross=None,
suffix: Chroot = Chroot.native(),
src=None,
bootstrap_stage=BootstrapStage.NONE,
):
"""
Set up all environment variables and construct the abuild command (all
depending on the cross-compiler method and target architecture), copy
@ -182,30 +206,40 @@ def run_abuild(context: Context, apkbuild, channel, arch: Arch, strict=False, fo
"""
# Sanity check
if cross == "native" and "!tracedeps" not in apkbuild["options"]:
logging.info("WARNING: Option !tracedeps is not set, but we're"
logging.info(
"WARNING: Option !tracedeps is not set, but we're"
" cross-compiling in the native chroot. This will"
" probably fail!")
" probably fail!"
)
pkgdir = context.config.work / "packages" / channel
if not pkgdir.exists():
pmb.helpers.run.root(["mkdir", "-p", pkgdir])
pmb.helpers.run.root(["chown", "-R", f"{pmb.config.chroot_uid_user}:{pmb.config.chroot_uid_user}",
pkgdir.parent])
pmb.helpers.run.root(
[
"chown",
"-R",
f"{pmb.config.chroot_uid_user}:{pmb.config.chroot_uid_user}",
pkgdir.parent,
]
)
pmb.chroot.rootm([["mkdir", "-p", "/home/pmos/packages"],
pmb.chroot.rootm(
[
["mkdir", "-p", "/home/pmos/packages"],
["rm", "-f", "/home/pmos/packages/pmos"],
["ln", "-sf", f"/mnt/pmbootstrap/packages/{channel}",
"/home/pmos/packages/pmos"]], suffix)
["ln", "-sf", f"/mnt/pmbootstrap/packages/{channel}", "/home/pmos/packages/pmos"],
],
suffix,
)
# Environment variables
env = {"CARCH": arch,
"SUDO_APK": "abuild-apk --no-progress"}
env = {"CARCH": arch, "SUDO_APK": "abuild-apk --no-progress"}
if cross == "native":
hostspec = arch.alpine_triple()
env["CROSS_COMPILE"] = hostspec + "-"
env["CC"] = hostspec + "-gcc"
if cross == "crossdirect":
env["PATH"] = ":".join([f"/native/usr/lib/crossdirect/{arch}",
pmb.config.chroot_path])
env["PATH"] = ":".join([f"/native/usr/lib/crossdirect/{arch}", pmb.config.chroot_path])
if not context.ccache:
env["CCACHE_DISABLE"] = "1"
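For illustration, the environment assembled above for a hypothetical aarch64 package built with cross="native" and ccache disabled; the triple is an assumption about what arch.alpine_triple() returns:

# Illustrative result (not part of the diff); all values are examples.
env = {
    "CARCH": "aarch64",
    "SUDO_APK": "abuild-apk --no-progress",
    "CROSS_COMPILE": "aarch64-alpine-linux-musl-",
    "CC": "aarch64-alpine-linux-musl-gcc",
    "CCACHE_DISABLE": "1",
}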
@ -232,8 +266,9 @@ def run_abuild(context: Context, apkbuild, channel, arch: Arch, strict=False, fo
cmd = ["abuild", "-D", "postmarketOS"]
if strict or "pmb:strict" in apkbuild["options"]:
if not strict:
logging.debug(apkbuild["pkgname"] + ": 'pmb:strict' found in"
" options, building in strict mode")
logging.debug(
apkbuild["pkgname"] + ": 'pmb:strict' found in" " options, building in strict mode"
)
cmd += ["-r"] # install depends with abuild
else:
cmd += ["-d"] # do not install depends with abuild
@ -256,4 +291,3 @@ def run_abuild(context: Context, apkbuild, channel, arch: Arch, strict=False, fo
pmb.chroot.user(cmd, suffix, Path("/home/pmos/build"), env=env)
finally:
handle_csum_failure(apkbuild, suffix)


@ -16,8 +16,7 @@ def update(pkgname):
pmb.build.init_abuild_minimal()
pmb.build.copy_to_buildpath(pkgname, no_override=True)
logging.info("(native) generate checksums for " + pkgname)
pmb.chroot.user(["abuild", "checksum"],
working_dir=Path("/home/pmos/build"))
pmb.chroot.user(["abuild", "checksum"], working_dir=Path("/home/pmos/build"))
# Copy modified APKBUILD back
source = Chroot.native() / "home/pmos/build/APKBUILD"
@ -33,5 +32,4 @@ def verify(pkgname):
# Fetch and verify sources, "fetch" alone does not verify them:
# https://github.com/alpinelinux/abuild/pull/86
pmb.chroot.user(["abuild", "fetch", "verify"],
working_dir=Path("/home/pmos/build"))
pmb.chroot.user(["abuild", "fetch", "verify"], working_dir=Path("/home/pmos/build"))


@ -33,14 +33,14 @@ def match_kbuild_out(word):
empty string when a separate build output directory isn't used.
None, when no output directory is found.
"""
prefix = "^\\\"?\\$({?builddir}?|{?srcdir}?)\\\"?/"
prefix = '^\\"?\\$({?builddir}?|{?srcdir}?)\\"?/'
kbuild_out = "(.*\\/)*"
postfix = "(arch\\/.*\\/boot.*)\\\"?$"
postfix = '(arch\\/.*\\/boot.*)\\"?$'
match = re.match(prefix + kbuild_out + postfix, word)
if match is None:
postfix = "(include\\/config\\/kernel\\.release)\\\"?$"
postfix = '(include\\/config\\/kernel\\.release)\\"?$'
match = re.match(prefix + kbuild_out + postfix, word)
if match is None:
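A quick check of the patterns above (not part of the diff); the input is a hypothetical $builddir path with ".output/" as the separate kbuild output directory:

# Same patterns as in match_kbuild_out above; the word is a made-up example.
import re

prefix = '^\\"?\\$({?builddir}?|{?srcdir}?)\\"?/'
kbuild_out = "(.*\\/)*"
postfix = '(arch\\/.*\\/boot.*)\\"?$'

match = re.match(prefix + kbuild_out + postfix, '"$builddir"/.output/arch/arm64/boot/Image')
assert match is not None and match.group(2) == ".output/"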
@ -83,15 +83,19 @@ def find_kbuild_output_dir(function_body):
it = iter(guesses)
first = next(it, None)
if first is None:
raise RuntimeError("Couldn't find a kbuild out directory. Is your "
raise RuntimeError(
"Couldn't find a kbuild out directory. Is your "
"APKBUILD messed up? If not, then consider "
"adjusting the patterns in pmb/build/envkernel.py "
"to work with your APKBUILD, or submit an issue.")
"to work with your APKBUILD, or submit an issue."
)
if all(first == rest for rest in it):
return first
raise RuntimeError("Multiple kbuild out directories found. Can you modify "
raise RuntimeError(
"Multiple kbuild out directories found. Can you modify "
"your APKBUILD so it only has one output path? If you "
"can't resolve it, please open an issue.")
"can't resolve it, please open an issue."
)
def modify_apkbuild(pkgname: str, aport: Path):
@ -103,15 +107,15 @@ def modify_apkbuild(pkgname: str, aport: Path):
pmb.helpers.run.user(["rm", "-r", work / "aportgen"])
pmb.helpers.run.user(["mkdir", work / "aportgen"])
pmb.helpers.run.user(["cp", "-r", apkbuild_path,
work / "aportgen"])
pmb.helpers.run.user(["cp", "-r", apkbuild_path, work / "aportgen"])
pkgver = pmb.build._package.get_pkgver(apkbuild["pkgver"],
original_source=False)
fields = {"pkgver": pkgver,
pkgver = pmb.build._package.get_pkgver(apkbuild["pkgver"], original_source=False)
fields = {
"pkgver": pkgver,
"pkgrel": "0",
"subpackages": "",
"builddir": "/home/pmos/build/src"}
"builddir": "/home/pmos/build/src",
}
pmb.aportgen.core.rewrite(pkgname, apkbuild_path, fields=fields)
@ -137,12 +141,14 @@ def run_abuild(context: Context, pkgname: str, arch: Arch, apkbuild_path: Path,
pmb.helpers.mount.bind(Path("."), chroot / "mnt/linux")
if not os.path.exists(chroot / kbuild_out_source):
raise RuntimeError("No '.output' dir found in your kernel source dir. "
raise RuntimeError(
"No '.output' dir found in your kernel source dir. "
"Compile the " + context.config.device + " kernel first and "
"then try again. See https://postmarketos.org/envkernel"
"for details. If building on your host and only using "
"--envkernel for packaging, make sure you have O=.output "
"as an argument to make.")
"as an argument to make."
)
# Create working directory for abuild
pmb.build.copy_to_buildpath(pkgname)
@ -154,32 +160,43 @@ def run_abuild(context: Context, pkgname: str, arch: Arch, apkbuild_path: Path,
pkgdir = context.config.work / "packages" / channel
if not pkgdir.exists():
pmb.helpers.run.root(["mkdir", "-p", pkgdir])
pmb.helpers.run.root(["chown", "-R", f"{pmb.config.chroot_uid_user}:{pmb.config.chroot_uid_user}",
pkgdir.parent])
pmb.helpers.run.root(
[
"chown",
"-R",
f"{pmb.config.chroot_uid_user}:{pmb.config.chroot_uid_user}",
pkgdir.parent,
]
)
pmb.chroot.rootm([["mkdir", "-p", "/home/pmos/packages"],
pmb.chroot.rootm(
[
["mkdir", "-p", "/home/pmos/packages"],
["rm", "-f", "/home/pmos/packages/pmos"],
["ln", "-sf", f"/mnt/pmbootstrap/packages/{channel}",
"/home/pmos/packages/pmos"]], chroot)
["ln", "-sf", f"/mnt/pmbootstrap/packages/{channel}", "/home/pmos/packages/pmos"],
],
chroot,
)
# Create symlink from abuild working directory to envkernel build directory
if kbuild_out != "":
if os.path.islink(chroot / "mnt/linux" / kbuild_out) and \
os.path.lexists(chroot / "mnt/linux" / kbuild_out):
if os.path.islink(chroot / "mnt/linux" / kbuild_out) and os.path.lexists(
chroot / "mnt/linux" / kbuild_out
):
pmb.chroot.root(["rm", "/mnt/linux" / kbuild_out])
pmb.chroot.root(["ln", "-s", "/mnt/linux",
build_path / "src"])
pmb.chroot.root(["ln", "-s", kbuild_out_source,
build_path / "src" / kbuild_out])
pmb.chroot.root(["ln", "-s", "/mnt/linux", build_path / "src"])
pmb.chroot.root(["ln", "-s", kbuild_out_source, build_path / "src" / kbuild_out])
cmd: List[PathString] = ["cp", apkbuild_path, chroot / build_path / "APKBUILD"]
pmb.helpers.run.root(cmd)
# Create the apk package
env = {"CARCH": str(arch),
env = {
"CARCH": str(arch),
"CHOST": str(arch),
"CBUILD": Arch.native(),
"SUDO_APK": "abuild-apk --no-progress"}
"SUDO_APK": "abuild-apk --no-progress",
}
cmd = ["abuild", "rootpkg"]
pmb.chroot.user(cmd, working_dir=build_path, env=env)
@ -188,8 +205,9 @@ def run_abuild(context: Context, pkgname: str, arch: Arch, apkbuild_path: Path,
# Clean up symlinks
if kbuild_out != "":
if os.path.islink(chroot / "mnt/linux" / kbuild_out) and \
os.path.lexists(chroot / "mnt/linux" / kbuild_out):
if os.path.islink(chroot / "mnt/linux" / kbuild_out) and os.path.lexists(
chroot / "mnt/linux" / kbuild_out
):
pmb.chroot.root(["rm", "/mnt/linux" / kbuild_out])
pmb.chroot.root(["rm", build_path / "src"])
@ -198,8 +216,7 @@ def package_kernel(args: PmbArgs):
"""Frontend for 'pmbootstrap build --envkernel': creates a package from envkernel output."""
pkgname = args.packages[0]
if len(args.packages) > 1 or not pkgname.startswith("linux-"):
raise RuntimeError("--envkernel needs exactly one linux-* package as "
"argument.")
raise RuntimeError("--envkernel needs exactly one linux-* package as " "argument.")
aport = pmb.helpers.pmaports.find(pkgname)
context = get_context()
@ -223,8 +240,9 @@ def package_kernel(args: PmbArgs):
depends.append(f"binutils-{arch}")
pmb.chroot.apk.install(depends, chroot)
output = pmb.build.output_path(arch, apkbuild["pkgname"], apkbuild["pkgver"],
apkbuild["pkgrel"])
output = pmb.build.output_path(
arch, apkbuild["pkgname"], apkbuild["pkgver"], apkbuild["pkgrel"]
)
message = f"({chroot}) build {output}"
logging.info(message)


@ -28,10 +28,8 @@ def init_abuild_minimal(chroot: Chroot=Chroot.native(), additional_pkgs: List[st
pmb.chroot.apk.install(["abuild"] + additional_pkgs, chroot, build=False)
# Fix permissions
pmb.chroot.root(["chown", "root:abuild",
"/var/cache/distfiles"], chroot)
pmb.chroot.root(["chmod", "g+w",
"/var/cache/distfiles"], chroot)
pmb.chroot.root(["chown", "root:abuild", "/var/cache/distfiles"], chroot)
pmb.chroot.root(["chmod", "g+w", "/var/cache/distfiles"], chroot)
# Add user to group abuild
pmb.chroot.root(["adduser", "pmos", "abuild"], chroot)
@ -55,8 +53,9 @@ def init(chroot: Chroot=Chroot.native()) -> bool:
# Generate package signing keys
if not os.path.exists(get_context().config.work / "config_abuild/abuild.conf"):
logging.info(f"({chroot}) generate abuild keys")
pmb.chroot.user(["abuild-keygen", "-n", "-q", "-a"],
chroot, env={"PACKAGER": "pmos <pmos@local>"})
pmb.chroot.user(
["abuild-keygen", "-n", "-q", "-a"], chroot, env={"PACKAGER": "pmos <pmos@local>"}
)
# Copy package signing key to /etc/apk/keys
for key in (chroot / "mnt/pmbootstrap/abuild-config").glob("*.pub"):
@ -66,8 +65,7 @@ def init(chroot: Chroot=Chroot.native()) -> bool:
apk_arch = chroot.arch
# Add apk wrapper that runs native apk and lies about arch
if apk_arch.cpu_emulation_required() and \
not (chroot / "usr/local/bin/abuild-apk").exists():
if apk_arch.cpu_emulation_required() and not (chroot / "usr/local/bin/abuild-apk").exists():
with (chroot / "tmp/apk_wrapper.sh").open("w") as handle:
content = f"""
#!/bin/sh
@ -94,20 +92,18 @@ def init(chroot: Chroot=Chroot.native()) -> bool:
for i in range(len(lines)):
lines[i] = lines[i][16:]
handle.write("\n".join(lines))
pmb.chroot.root(["cp", "/tmp/apk_wrapper.sh",
"/usr/local/bin/abuild-apk"], chroot)
pmb.chroot.root(["cp", "/tmp/apk_wrapper.sh", "/usr/local/bin/abuild-apk"], chroot)
pmb.chroot.root(["chmod", "+x", "/usr/local/bin/abuild-apk"], chroot)
# abuild.conf: Don't clean the build folder after building, so we can
# inspect it afterwards for debugging
pmb.chroot.root(["sed", "-i", "-e", "s/^CLEANUP=.*/CLEANUP=''/",
"/etc/abuild.conf"], chroot)
pmb.chroot.root(["sed", "-i", "-e", "s/^CLEANUP=.*/CLEANUP=''/", "/etc/abuild.conf"], chroot)
# abuild.conf: Don't clean up installed packages in strict mode, so
# abuild exits directly when pressing ^C in pmbootstrap.
pmb.chroot.root(["sed", "-i", "-e",
"s/^ERROR_CLEANUP=.*/ERROR_CLEANUP=''/",
"/etc/abuild.conf"], chroot)
pmb.chroot.root(
["sed", "-i", "-e", "s/^ERROR_CLEANUP=.*/ERROR_CLEANUP=''/", "/etc/abuild.conf"], chroot
)
pathlib.Path(marker).touch()
return True


@ -34,14 +34,18 @@ def get_arch(apkbuild):
# Disabled package (arch="")
if not apkbuild["arch"]:
raise RuntimeError(f"'{pkgname}' is disabled (arch=\"\"). Please use"
" '--arch' to specify the desired architecture.")
raise RuntimeError(
f"'{pkgname}' is disabled (arch=\"\"). Please use"
" '--arch' to specify the desired architecture."
)
# Multiple architectures
if len(apkbuild["arch"]) > 1:
raise RuntimeError(f"'{pkgname}' supports multiple architectures"
raise RuntimeError(
f"'{pkgname}' supports multiple architectures"
f" ({', '.join(apkbuild['arch'])}). Please use"
" '--arch' to specify the desired architecture.")
" '--arch' to specify the desired architecture."
)
return apkbuild["arch"][0]
@ -57,18 +61,22 @@ def get_outputdir(pkgname: str, apkbuild: Dict[str, Any]) -> Path:
chroot = Chroot.native()
if os.path.exists(chroot / ret / ".config"):
logging.warning("*****")
logging.warning("NOTE: The code in this linux APKBUILD is pretty old."
logging.warning(
"NOTE: The code in this linux APKBUILD is pretty old."
" Consider making a backup and migrating to a modern"
" version with: pmbootstrap aportgen " + pkgname)
" version with: pmbootstrap aportgen " + pkgname
)
logging.warning("*****")
return ret
# New style ($builddir)
cmd = "srcdir=/home/pmos/build/src source APKBUILD; echo $builddir"
ret = Path(pmb.chroot.user(["sh", "-c", cmd],
chroot, Path("/home/pmos/build"),
output_return=True).rstrip())
ret = Path(
pmb.chroot.user(
["sh", "-c", cmd], chroot, Path("/home/pmos/build"), output_return=True
).rstrip()
)
if (chroot / ret / ".config").exists():
return ret
# Some Mediatek kernels use a 'kernel' subdirectory
@ -80,9 +88,11 @@ def get_outputdir(pkgname: str, apkbuild: Dict[str, Any]) -> Path:
return ret / apkbuild["_outdir"]
# Not found
raise RuntimeError("Could not find the kernel config. Consider making a"
raise RuntimeError(
"Could not find the kernel config. Consider making a"
" backup of your APKBUILD and recreating it from the"
" template with: pmbootstrap aportgen " + pkgname)
" template with: pmbootstrap aportgen " + pkgname
)
def extract_and_patch_sources(pkgname: str, arch):
@ -90,8 +100,12 @@ def extract_and_patch_sources(pkgname: str, arch):
logging.info("(native) extract kernel source")
pmb.chroot.user(["abuild", "unpack"], working_dir=Path("/home/pmos/build"))
logging.info("(native) apply patches")
pmb.chroot.user(["abuild", "prepare"], working_dir=Path("/home/pmos/build"),
output="interactive", env={"CARCH": arch})
pmb.chroot.user(
["abuild", "prepare"],
working_dir=Path("/home/pmos/build"),
output="interactive",
env={"CARCH": arch},
)
def menuconfig(args: PmbArgs, pkgname: str, use_oldconfig):
@ -143,16 +157,17 @@ def menuconfig(args: PmbArgs, pkgname: str, use_oldconfig):
# Run make menuconfig
outputdir = get_outputdir(pkgname, apkbuild)
logging.info("(native) make " + kopt)
env = {"ARCH": arch.kernel(),
env = {
"ARCH": arch.kernel(),
"DISPLAY": os.environ.get("DISPLAY"),
"XAUTHORITY": "/home/pmos/.Xauthority"}
"XAUTHORITY": "/home/pmos/.Xauthority",
}
if cross:
env["CROSS_COMPILE"] = f"{hostspec}-"
env["CC"] = f"{hostspec}-gcc"
if color:
env["MENUCONFIG_COLOR"] = color
pmb.chroot.user(["make", kopt], Chroot.native(),
outputdir, output="tui", env=env)
pmb.chroot.user(["make", kopt], Chroot.native(), outputdir, output="tui", env=env)
# Find the updated config
source = Chroot.native() / outputdir / ".config"

View file

@ -47,8 +47,7 @@ def copy_to_buildpath(package, chroot: Chroot=Chroot.native(), no_override: bool
if not no_override:
abuild_overrides(build / "APKBUILD")
pmb.chroot.root(["chown", "-R", "pmos:pmos",
"/home/pmos/build"], chroot)
pmb.chroot.root(["chown", "-R", "pmos:pmos", "/home/pmos/build"], chroot)
def abuild_overrides(apkbuild: Path):
@ -78,6 +77,7 @@ class BuildStatus(enum.Enum):
def necessary(self):
return self in [BuildStatus.OUTDATED, BuildStatus.NEW]
def get_status(arch, apkbuild, indexes=None) -> BuildStatus:
"""Check if the package has already been built.
@ -93,31 +93,36 @@ def get_status(arch, apkbuild, indexes=None) -> BuildStatus:
msg = "Build is necessary for package '" + package + "': "
# Get version from APKINDEX
index_data = pmb.parse.apkindex.package(package, arch, False,
indexes)
index_data = pmb.parse.apkindex.package(package, arch, False, indexes)
if not index_data:
logging.debug(msg + "No binary package available")
return BuildStatus.NEW
# Can't build pmaport for arch: use Alpine's package (#1897)
if arch and not pmb.helpers.pmaports.check_arches(apkbuild["arch"], arch):
logging.verbose(f"{package}: build is not necessary, because pmaport"
logging.verbose(
f"{package}: build is not necessary, because pmaport"
" can't be built for {arch}. Using Alpine's binary"
" package.")
" package."
)
return BuildStatus.CANT_BUILD
# a) Binary repo has a newer version
version_binary = index_data["version"]
if pmb.parse.version.compare(version_binary, version_pmaports) == 1:
logging.warning(f"WARNING: about to install {package} {version_binary}"
logging.warning(
f"WARNING: about to install {package} {version_binary}"
f" (local pmaports: {version_pmaports}, consider"
" 'pmbootstrap pull')")
" 'pmbootstrap pull')"
)
return BuildStatus.UNNECESSARY
# b) Local pmaports has a newer version
if version_pmaports != version_binary:
logging.debug(f"{msg}binary package out of date (binary: "
f"{version_binary}, local pmaports: {version_pmaports})")
logging.debug(
f"{msg}binary package out of date (binary: "
f"{version_binary}, local pmaports: {version_pmaports})"
)
return BuildStatus.OUTDATED
# Local pmaports and binary repo have the same version
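The return paths above reduce to: no binary package -> NEW, pmaport not buildable for the arch -> CANT_BUILD, binary repo newer -> UNNECESSARY, otherwise differing versions -> OUTDATED. A tiny check of how that feeds the rebuild decision (not part of the diff):

# Only NEW and OUTDATED make necessary() return True, so only they trigger
# a rebuild in the callers shown earlier in this commit.
assert BuildStatus.NEW.necessary()
assert BuildStatus.OUTDATED.necessary()
assert not BuildStatus.UNNECESSARY.necessary()
assert not BuildStatus.CANT_BUILD.necessary()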
@ -136,7 +141,7 @@ def index_repo(arch=None):
paths: List[Path] = []
for channel in pmb.config.pmaports.all_channels():
pkgdir: Path = (get_context().config.work / "packages" / channel)
pkgdir: Path = get_context().config.work / "packages" / channel
if arch:
paths.append(pkgdir / arch)
else:
@ -150,11 +155,15 @@ def index_repo(arch=None):
description = str(datetime.datetime.now())
commands = [
# Wrap the index command with sh so we can use '*.apk'
["sh", "-c", "apk -q index --output APKINDEX.tar.gz_"
[
"sh",
"-c",
"apk -q index --output APKINDEX.tar.gz_"
" --description " + shlex.quote(description) + ""
" --rewrite-arch " + shlex.quote(path_arch) + " *.apk"],
" --rewrite-arch " + shlex.quote(path_arch) + " *.apk",
],
["abuild-sign", "APKINDEX.tar.gz_"],
["mv", "APKINDEX.tar.gz_", "APKINDEX.tar.gz"]
["mv", "APKINDEX.tar.gz_", "APKINDEX.tar.gz"],
]
pmb.chroot.userm(commands, working_dir=path_repo_chroot)
else:
@ -176,13 +185,15 @@ def configure_abuild(chroot: Chroot, verify=False):
continue
if line != (prefix + jobs + "\n"):
if verify:
raise RuntimeError(f"Failed to configure abuild: {path}"
raise RuntimeError(
f"Failed to configure abuild: {path}"
"\nTry to delete the file"
"(or zap the chroot).")
pmb.chroot.root(["sed", "-i", "-e",
f"s/^{prefix}.*/{prefix}{jobs}/",
"/etc/abuild.conf"],
chroot)
"(or zap the chroot)."
)
pmb.chroot.root(
["sed", "-i", "-e", f"s/^{prefix}.*/{prefix}{jobs}/", "/etc/abuild.conf"],
chroot,
)
configure_abuild(chroot, True)
return
pmb.chroot.root(["sed", "-i", f"$ a\\{prefix}{jobs}", "/etc/abuild.conf"], chroot)
@ -203,10 +214,10 @@ def configure_ccache(chroot: Chroot=Chroot.native(), verify=False):
if line == ("max_size = " + config.ccache_size + "\n"):
return
if verify:
raise RuntimeError(f"Failed to configure ccache: {path}\nTry to"
" delete the file (or zap the chroot).")
raise RuntimeError(
f"Failed to configure ccache: {path}\nTry to" " delete the file (or zap the chroot)."
)
# Set the size and verify
pmb.chroot.user(["ccache", "--max-size", config.ccache_size],
chroot)
pmb.chroot.user(["ccache", "--max-size", config.ccache_size], chroot)
configure_ccache(chroot, True)


@ -27,8 +27,9 @@ from pmb.types import PathString
@Cache("chroot", "user_repository", mirrors_exclude=[])
def update_repository_list(chroot: Chroot, user_repository=False, mirrors_exclude: List[str]=[],
check=False):
def update_repository_list(
chroot: Chroot, user_repository=False, mirrors_exclude: List[str] = [], check=False
):
"""
Update /etc/apk/repositories, if it is outdated (when the user changed the
--mirror-alpine or --mirror-pmOS parameters).
@ -52,7 +53,9 @@ def update_repository_list(chroot: Chroot, user_repository=False, mirrors_exclud
pmb.helpers.run.root(["mkdir", "-p", path.parent])
# Up to date: Save cache, return
lines_new = pmb.helpers.repo.urls(user_repository=user_repository, mirrors_exclude=mirrors_exclude)
lines_new = pmb.helpers.repo.urls(
user_repository=user_repository, mirrors_exclude=mirrors_exclude
)
if lines_old == lines_new:
return
@ -65,10 +68,10 @@ def update_repository_list(chroot: Chroot, user_repository=False, mirrors_exclud
if path.exists():
pmb.helpers.run.root(["rm", path])
for line in lines_new:
pmb.helpers.run.root(["sh", "-c", "echo "
f"{shlex.quote(line)} >> {path}"])
update_repository_list(chroot, user_repository=user_repository,
mirrors_exclude=mirrors_exclude, check=True)
pmb.helpers.run.root(["sh", "-c", "echo " f"{shlex.quote(line)} >> {path}"])
update_repository_list(
chroot, user_repository=user_repository, mirrors_exclude=mirrors_exclude, check=True
)
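The file written here is a plain list of repository locations, one per line; a rough illustration of what lines_new could contain (the local repository path and mirror URLs are examples, not necessarily what pmb.helpers.repo.urls() returns on a given setup):

lines_new = [
    "/mnt/pmbootstrap/packages",                            # local user repository (example)
    "https://mirror.postmarketos.org/postmarketos/master",  # postmarketOS binary repo (example)
    "https://dl-cdn.alpinelinux.org/alpine/edge/main",      # Alpine mirrors (example)
    "https://dl-cdn.alpinelinux.org/alpine/edge/community",
]
# Each entry is then appended to /etc/apk/repositories via: sh -c "echo <line> >> <path>"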
@Cache("chroot")
@ -80,16 +83,18 @@ def check_min_version(chroot: Chroot=Chroot.native()):
# Skip if apk is not installed yet
if not (chroot / "sbin/apk").exists():
logging.debug(f"NOTE: Skipped apk version check for chroot '{chroot}'"
", because it is not installed yet!")
logging.debug(
f"NOTE: Skipped apk version check for chroot '{chroot}'"
", because it is not installed yet!"
)
return
# Compare
version_installed = installed(chroot)["apk-tools"]["version"]
pmb.helpers.apk.check_outdated(
version_installed,
"Delete your http cache and zap all chroots, then try again:"
" 'pmbootstrap zap -hc'")
"Delete your http cache and zap all chroots, then try again:" " 'pmbootstrap zap -hc'",
)
def packages_split_to_add_del(packages):
@ -148,11 +153,12 @@ def packages_get_locally_built_apks(packages, arch: Arch) -> List[Path]:
break
# Record all the packages we have visited so far
walked |= set([data_repo['pkgname'], package])
walked |= set([data_repo["pkgname"], package])
# Add all dependencies to the list of packages to check, excluding
# meta-deps like cmd:* and so:* as well as conflicts (!).
packages |= set(filter(lambda x: ":" not in x and "!" not in x,
data_repo["depends"])) - walked
packages |= (
set(filter(lambda x: ":" not in x and "!" not in x, data_repo["depends"])) - walked
)
return local
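The filter in this loop drops apk meta-dependencies (cmd:*, so:*) and conflict markers (!) so that only real package names are queued for the local-apk lookup; a minimal example of what it keeps:

depends = ["so:libc.musl-aarch64.so.1", "cmd:sh", "!old-conflicting-pkg", "busybox"]
real_packages = set(filter(lambda x: ":" not in x and "!" not in x, depends))
assert real_packages == {"busybox"}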
@ -185,8 +191,10 @@ def install_run_apk(to_add: List[str], to_add_local: List[Path], to_del: List[st
# Use a virtual package to mark only the explicitly requested packages as
# explicitly installed, not the ones in to_add_local
if to_add_local:
commands += [["add", "-u", "--virtual", ".pmbootstrap"] + local_add,
["del", ".pmbootstrap"]]
commands += [
["add", "-u", "--virtual", ".pmbootstrap"] + local_add,
["del", ".pmbootstrap"],
]
if to_del:
commands += [["del"] + to_del]
@ -214,12 +222,20 @@ def install_run_apk(to_add: List[str], to_add_local: List[Path], to_del: List[st
traceback.print_stack(file=logging.logfd)
pmb.chroot.init(chroot)
for (i, command) in enumerate(commands):
for i, command in enumerate(commands):
# --no-interactive is a parameter to `add`, so it must be appended or apk
# gets confused
command += ["--no-interactive"]
command = ["--root", chroot.path, "--arch", arch, "--cache-dir", apk_cache,
"--repository", user_repo] + command
command = [
"--root",
chroot.path,
"--arch",
arch,
"--cache-dir",
apk_cache,
"--repository",
user_repo,
] + command
# Ignore missing repos before initial build (bpo#137)
if os.getenv("PMB_APK_FORCE_MISSING_REPOSITORIES") == "1":
@ -253,8 +269,7 @@ def install(packages, chroot: Chroot, build=True):
context = get_context()
if not packages:
logging.verbose("pmb.chroot.apk.install called with empty packages list,"
" ignoring")
logging.verbose("pmb.chroot.apk.install called with empty packages list," " ignoring")
return
# Initialize chroot

View file

@ -35,10 +35,12 @@ def read_signature_info(tar):
sigfilename = filename
break
if not sigfilename:
raise RuntimeError("Could not find signature filename in apk."
raise RuntimeError(
"Could not find signature filename in apk."
" This means that your apk file is damaged."
" Delete it and try again."
" If the problem persists, fill out a bug report.")
" If the problem persists, fill out a bug report."
)
sigkey = sigfilename[len(prefix) :]
logging.debug(f"sigfilename: {sigfilename}")
logging.debug(f"sigkey: {sigkey}")
@ -57,14 +59,8 @@ def extract_temp(tar, sigfilename):
Extract apk.static and signature as temporary files.
"""
ret = {
"apk": {
"filename": "sbin/apk.static",
"temp_path": None
},
"sig": {
"filename": sigfilename,
"temp_path": None
}
"apk": {"filename": "sbin/apk.static", "temp_path": None},
"sig": {"filename": sigfilename, "temp_path": None},
}
for ftype in ret.keys():
member = tar.getmember(ret[ftype]["filename"])
@ -88,17 +84,27 @@ def verify_signature(files, sigkey_path):
"""
logging.debug(f"Verify apk.static signature with {sigkey_path}")
try:
pmb.helpers.run.user(["openssl", "dgst", "-sha1", "-verify",
sigkey_path, "-signature", files[
"sig"]["temp_path"],
files["apk"]["temp_path"]])
pmb.helpers.run.user(
[
"openssl",
"dgst",
"-sha1",
"-verify",
sigkey_path,
"-signature",
files["sig"]["temp_path"],
files["apk"]["temp_path"],
]
)
except BaseException:
os.unlink(files["sig"]["temp_path"])
os.unlink(files["apk"]["temp_path"])
raise RuntimeError("Failed to validate signature of apk.static."
raise RuntimeError(
"Failed to validate signature of apk.static."
" Either openssl is not installed, or the"
" download failed. Run 'pmbootstrap zap -hc' to"
" delete the download and try again.")
" delete the download and try again."
)
def extract(version, apk_path):
@ -118,19 +124,23 @@ def extract(version, apk_path):
temp_path = files["apk"]["temp_path"]
# Verify the version that the extracted binary reports
logging.debug("Verify the version reported by the apk.static binary"
f" (must match the package version {version})")
logging.debug(
"Verify the version reported by the apk.static binary"
f" (must match the package version {version})"
)
os.chmod(temp_path, os.stat(temp_path).st_mode | stat.S_IEXEC)
version_bin = pmb.helpers.run.user_output([temp_path, "--version"])
version_bin = version_bin.split(" ")[1].split(",")[0]
if not version.startswith(f"{version_bin}-r"):
os.unlink(temp_path)
raise RuntimeError(f"Downloaded apk-tools-static-{version}.apk,"
raise RuntimeError(
f"Downloaded apk-tools-static-{version}.apk,"
" but the apk binary inside that package reports"
f" to be version: {version_bin}!"
" Looks like a downgrade attack"
" from a malicious server! Switch the server (-m)"
" and try again.")
" and try again."
)
# Move it to the right path
target_path = get_context().config.work / "apk.static"
@ -153,13 +163,11 @@ def init():
"""
# Get and parse the APKINDEX
apkindex = pmb.helpers.repo.alpine_apkindex_path("main")
index_data = pmb.parse.apkindex.package("apk-tools-static",
indexes=[apkindex])
index_data = pmb.parse.apkindex.package("apk-tools-static", indexes=[apkindex])
version = index_data["version"]
# Verify the apk-tools-static version
pmb.helpers.apk.check_outdated(
version, "Run 'pmbootstrap update', then try again.")
pmb.helpers.apk.check_outdated(version, "Run 'pmbootstrap update', then try again.")
# Download, extract, verify apk-tools-static
apk_name = f"apk-tools-static-{version}.apk"

View file

@ -48,8 +48,7 @@ def register(arch: Arch):
mask = info["mask"]
interpreter = "/usr/bin/qemu-" + arch_qemu + "-static"
flags = "C"
code = ":".join(["", name, type, offset, magic, mask, interpreter,
flags])
code = ":".join(["", name, type, offset, magic, mask, interpreter, flags])
# Register in binfmt_misc
logging.info("Register qemu binfmt (" + arch_qemu + ")")

View file

@ -21,11 +21,13 @@ from pmb.core.context import get_context
cache_chroot_is_outdated: List[str] = []
class UsrMerge(enum.Enum):
"""
Merge /usr while initializing chroot.
https://systemd.io/THE_CASE_FOR_THE_USR_MERGE/
"""
AUTO = 0
ON = 1
OFF = 2
@ -78,8 +80,7 @@ def init_keys():
def init_usr_merge(chroot: Chroot):
logging.info(f"({chroot}) merge /usr")
script = f"{pmb.config.pmb_src}/pmb/data/merge-usr.sh"
pmb.helpers.run.root(["sh", "-e", script, "CALLED_FROM_PMB",
chroot.path])
pmb.helpers.run.root(["sh", "-e", script, "CALLED_FROM_PMB", chroot.path])
def warn_if_chroot_is_outdated(chroot: Chroot):
@ -91,9 +92,11 @@ def warn_if_chroot_is_outdated(chroot: Chroot):
if pmb.config.workdir.chroots_outdated(chroot):
days_warn = int(pmb.config.chroot_outdated / 3600 / 24)
logging.warning(f"WARNING: Your {chroot} chroot is older than"
logging.warning(
f"WARNING: Your {chroot} chroot is older than"
f" {days_warn} days. Consider running"
" 'pmbootstrap zap'.")
" 'pmbootstrap zap'."
)
cache_chroot_is_outdated += [str(chroot)]
@ -137,8 +140,7 @@ def init(chroot: Chroot, usr_merge=UsrMerge.AUTO):
# Initialize cache
apk_cache = config.work / f"cache_apk_{arch}"
pmb.helpers.run.root(["ln", "-s", "-f", "/var/cache/apk",
chroot / "etc/apk/cache"])
pmb.helpers.run.root(["ln", "-s", "-f", "/var/cache/apk", chroot / "etc/apk/cache"])
# Initialize /etc/apk/keys/, resolv.conf, repositories
init_keys()
@ -155,10 +157,9 @@ def init(chroot: Chroot, usr_merge=UsrMerge.AUTO):
# way to install/run it.
if chroot.type == ChrootType.NATIVE:
pkgs += ["apk-tools-static"]
pmb.chroot.apk_static.run(["--root", chroot.path,
"--cache-dir", apk_cache,
"--initdb", "--arch", arch,
"add"] + pkgs)
pmb.chroot.apk_static.run(
["--root", chroot.path, "--cache-dir", apk_cache, "--initdb", "--arch", arch, "add"] + pkgs
)
# Merge /usr
if usr_merge is UsrMerge.AUTO and pmb.config.is_systemd_selected(config):
@ -168,9 +169,7 @@ def init(chroot: Chroot, usr_merge=UsrMerge.AUTO):
# Building chroots: create "pmos" user, add symlinks to /home/pmos
if not chroot.type == ChrootType.ROOTFS:
pmb.chroot.root(["adduser", "-D", "pmos", "-u",
pmb.config.chroot_uid_user],
chroot)
pmb.chroot.root(["adduser", "-D", "pmos", "-u", pmb.config.chroot_uid_user], chroot)
# Create the links (with subfolders if necessary)
for target, link_name in pmb.config.chroot_home_symlinks.items():

View file

@ -23,12 +23,10 @@ def build(flavor, chroot: Chroot):
if pmaports_cfg.get("supported_mkinitfs_without_flavors", False):
pmb.chroot.root(["mkinitfs"], chroot)
else:
release_file = (chroot / "usr/share/kernel" / flavor / "kernel.release")
release_file = chroot / "usr/share/kernel" / flavor / "kernel.release"
with release_file.open() as handle:
release = handle.read().rstrip()
pmb.chroot.root(["mkinitfs", "-o",
f"/boot/initramfs-{flavor}", release],
chroot)
pmb.chroot.root(["mkinitfs", "-o", f"/boot/initramfs-{flavor}", release], chroot)
def extract(flavor, chroot: Chroot, extra=False):
@ -50,26 +48,25 @@ def extract(flavor, chroot: Chroot, extra=False):
outside = chroot / inside
if outside.exists():
if not pmb.helpers.cli.confirm(f"Extraction folder {outside}"
" already exists."
" Do you want to overwrite it?"):
if not pmb.helpers.cli.confirm(
f"Extraction folder {outside}" " already exists." " Do you want to overwrite it?"
):
raise RuntimeError("Aborted!")
pmb.chroot.root(["rm", "-r", inside], chroot)
# Extraction script (because passing a file to stdin is not allowed
# in pmbootstrap's chroot/shell functions for security reasons)
with (chroot / "tmp/_extract.sh").open("w") as handle:
handle.write(
"#!/bin/sh\n"
f"cd {inside} && cpio -i < _initfs\n")
handle.write("#!/bin/sh\n" f"cd {inside} && cpio -i < _initfs\n")
# Extract
commands = [["mkdir", "-p", inside],
commands = [
["mkdir", "-p", inside],
["cp", initfs_file, f"{inside}/_initfs.gz"],
["gzip", "-d", f"{inside}/_initfs.gz"],
["cat", "/tmp/_extract.sh"], # for the log
["sh", "/tmp/_extract.sh"],
["rm", "/tmp/_extract.sh", f"{inside}/_initfs"]
["rm", "/tmp/_extract.sh", f"{inside}/_initfs"],
]
for command in commands:
pmb.chroot.root(command, chroot)

View file

@ -40,9 +40,9 @@ def ls(suffix: Chroot):
def add(hook, suffix: Chroot):
if hook not in list_aports():
raise RuntimeError("Invalid hook name!"
" Run 'pmbootstrap initfs hook_ls'"
" to get a list of all hooks.")
raise RuntimeError(
"Invalid hook name!" " Run 'pmbootstrap initfs hook_ls'" " to get a list of all hooks."
)
prefix = pmb.config.initfs_hook_prefix
pmb.chroot.apk.install([f"{prefix}{hook}"], suffix)

View file

@ -27,7 +27,9 @@ def mount_chroot_image(chroot: Chroot):
chroot_native = Chroot.native()
pmb.chroot.init(chroot_native)
loopdev = pmb.install.losetup.mount(Path("/") / Path(chroot.name).relative_to(chroot_native.path))
loopdev = pmb.install.losetup.mount(
Path("/") / Path(chroot.name).relative_to(chroot_native.path)
)
pmb.helpers.mount.bind_file(loopdev, chroot_native / "dev/install")
# Set up device mapper bits
pmb.chroot.root(["kpartx", "-u", "/dev/install"], chroot_native)
@ -40,6 +42,7 @@ def mount_chroot_image(chroot: Chroot):
logging.info(f"({chroot}) mounted {chroot.name}")
def create_device_nodes(chroot: Chroot):
"""
Create device nodes for null, zero, full, random, urandom in the chroot.
@ -49,13 +52,17 @@ def create_device_nodes(chroot: Chroot):
for dev in pmb.config.chroot_device_nodes:
path = chroot / "dev" / str(dev[4])
if not path.exists():
pmb.helpers.run.root(["mknod",
"-m", str(dev[0]), # permissions
pmb.helpers.run.root(
[
"mknod",
"-m",
str(dev[0]), # permissions
path, # name
str(dev[1]), # type
str(dev[2]), # major
str(dev[3]), # minor
])
]
)
# Verify major and minor numbers of created nodes
for dev in pmb.config.chroot_device_nodes:
@ -68,7 +75,7 @@ def create_device_nodes(chroot: Chroot):
# Verify /dev/zero reading and writing
path = chroot / "dev/zero"
with open(path, "r+b", 0) as handle:
assert handle.write(bytes([0xff])), f"Write failed for {path}"
assert handle.write(bytes([0xFF])), f"Write failed for {path}"
assert handle.read(1) == bytes([0x00]), f"Read failed for {path}"
# On failure: Show filesystem-related error
@ -90,15 +97,13 @@ def mount_dev_tmpfs(chroot: Chroot=Chroot.native()):
# Create the $chroot/dev folder and mount tmpfs there
pmb.helpers.run.root(["mkdir", "-p", dev])
pmb.helpers.run.root(["mount", "-t", "tmpfs",
"-o", "size=1M,noexec,dev",
"tmpfs", dev])
pmb.helpers.run.root(["mount", "-t", "tmpfs", "-o", "size=1M,noexec,dev", "tmpfs", dev])
# Create pts, shm folders and device nodes
pmb.helpers.run.root(["mkdir", "-p", dev / "pts", dev / "shm"])
pmb.helpers.run.root(["mount", "-t", "tmpfs",
"-o", "nodev,nosuid,noexec",
"tmpfs", dev / "shm"])
pmb.helpers.run.root(
["mount", "-t", "tmpfs", "-o", "nodev,nosuid,noexec", "tmpfs", dev / "shm"]
)
create_device_nodes(chroot)
# Setup /dev/fd as a symlink
@ -128,7 +133,6 @@ def mount(chroot: Chroot):
if not pmb.helpers.mount.ismount(target_outer):
pmb.helpers.mount.bind(source, target_outer)
# Set up binfmt
if not arch.cpu_emulation_required():
return
@ -137,9 +141,11 @@ def mount(chroot: Chroot):
# mount --bind the qemu-user binary
pmb.chroot.binfmt.register(arch)
pmb.helpers.mount.bind_file(Chroot.native() / f"usr/bin/qemu-{arch_qemu}",
pmb.helpers.mount.bind_file(
Chroot.native() / f"usr/bin/qemu-{arch_qemu}",
chroot / f"usr/bin/qemu-{arch_qemu}-static",
create_folders=True)
create_folders=True,
)
def mount_native_into_foreign(chroot: Chroot):
@ -148,12 +154,12 @@ def mount_native_into_foreign(chroot: Chroot):
pmb.helpers.mount.bind(source, target)
musl = next(source.glob("lib/ld-musl-*.so.1")).name
musl_link = (chroot / "lib" / musl)
musl_link = chroot / "lib" / musl
if not musl_link.is_symlink():
pmb.helpers.run.root(["ln", "-s", "/native/lib/" + musl,
musl_link])
pmb.helpers.run.root(["ln", "-s", "/native/lib/" + musl, musl_link])
# pmb.helpers.run.root(["ln", "-sf", "/native/usr/bin/pigz", "/usr/local/bin/pigz"])
def remove_mnt_pmbootstrap(chroot: Chroot):
"""Safely remove /mnt/pmbootstrap directories from the chroot, without
running rm -r as root and potentially removing data inside the

View file

@ -26,8 +26,7 @@ def kernel_flavor_installed(chroot: Chroot, autoinstall=True):
if not chroot.is_mounted():
pmb.chroot.init(chroot)
config = get_context().config
packages = ([f"device-{config.device}"] +
pmb.install.get_kernel_package(config))
packages = [f"device-{config.device}"] + pmb.install.get_kernel_package(config)
pmb.chroot.apk.install(packages, chroot)
glob_result = list((chroot / "usr/share/kernel").glob("*"))
@ -59,19 +58,22 @@ def copy_xauthority(args: PmbArgs):
# Check $DISPLAY
logging.info("(native) copy host Xauthority")
if not os.environ.get("DISPLAY"):
raise RuntimeError("Your $DISPLAY variable is not set. If you have an"
raise RuntimeError(
"Your $DISPLAY variable is not set. If you have an"
" X11 server running as your current user, try"
" 'export DISPLAY=:0' and run your last"
" pmbootstrap command again.")
" pmbootstrap command again."
)
# Check $XAUTHORITY
original = os.environ.get("XAUTHORITY")
if not original:
original = os.path.join(os.environ['HOME'], '.Xauthority')
original = os.path.join(os.environ["HOME"], ".Xauthority")
if not os.path.exists(original):
raise RuntimeError("Could not find your Xauthority file, try to export"
" your $XAUTHORITY correctly. Looked here: " +
original)
raise RuntimeError(
"Could not find your Xauthority file, try to export"
" your $XAUTHORITY correctly. Looked here: " + original
)
# Copy to chroot and chown
copy = Chroot.native() / "home/pmos/.Xauthority"

View file

@ -22,16 +22,26 @@ def executables_absolute_path():
for binary in ["sh", "chroot"]:
path = shutil.which(binary, path=pmb.config.chroot_host_path)
if not path:
raise RuntimeError(f"Could not find the '{binary}'"
raise RuntimeError(
f"Could not find the '{binary}'"
" executable. Make sure that it is in"
" your current user's PATH.")
" your current user's PATH."
)
ret[binary] = path
return ret
def rootm(cmds: Sequence[Sequence[PathString]], chroot: Chroot=Chroot.native(), working_dir: PurePath=PurePath("/"), output="log",
output_return=False, check=None, env={},
disable_timeout=False, add_proxy_env_vars=True):
def rootm(
cmds: Sequence[Sequence[PathString]],
chroot: Chroot = Chroot.native(),
working_dir: PurePath = PurePath("/"),
output="log",
output_return=False,
check=None,
env={},
disable_timeout=False,
add_proxy_env_vars=True,
):
"""
Run a list of commands inside a chroot as root.
@ -59,14 +69,16 @@ def rootm(cmds: Sequence[Sequence[PathString]], chroot: Chroot=Chroot.native(),
msg += "; ".join([" ".join(cmd_str) for cmd_str in cmd_strs])
# Merge env with defaults into env_all
env_all: Env = {"CHARSET": "UTF-8",
env_all: Env = {
"CHARSET": "UTF-8",
"HISTFILE": "~/.ash_history",
"HOME": "/root",
"LANG": "UTF-8",
"PATH": pmb.config.chroot_path,
"PYTHONUNBUFFERED": "1",
"SHELL": "/bin/ash",
"TERM": "xterm"}
"TERM": "xterm",
}
for key, value in env.items():
env_all[key] = value
if add_proxy_env_vars:
@ -77,26 +89,60 @@ def rootm(cmds: Sequence[Sequence[PathString]], chroot: Chroot=Chroot.native(),
# cmd_chroot: ["/sbin/chroot", "/..._native", "/bin/sh", "-c", "echo test"]
# cmd_sudo: ["sudo", "env", "-i", "sh", "-c", "PATH=... /sbin/chroot ..."]
executables = executables_absolute_path()
cmd_chroot = [executables["chroot"], chroot.path, "/bin/sh", "-c",
pmb.helpers.run_core.flat_cmd(cmd_strs, Path(working_dir))]
cmd_sudo = pmb.config.sudo([
"env", "-i", executables["sh"], "-c",
pmb.helpers.run_core.flat_cmd([cmd_chroot], env=env_all)]
cmd_chroot = [
executables["chroot"],
chroot.path,
"/bin/sh",
"-c",
pmb.helpers.run_core.flat_cmd(cmd_strs, Path(working_dir)),
]
cmd_sudo = pmb.config.sudo(
[
"env",
"-i",
executables["sh"],
"-c",
pmb.helpers.run_core.flat_cmd([cmd_chroot], env=env_all),
]
)
return pmb.helpers.run_core.core(
msg, cmd_sudo, None, output, output_return, check, True, disable_timeout
)
return pmb.helpers.run_core.core(msg, cmd_sudo, None, output,
output_return, check, True,
disable_timeout)
def root(cmds: Sequence[PathString], chroot: Chroot=Chroot.native(), working_dir: PurePath=PurePath("/"), output="log",
output_return=False, check=None, env={},
disable_timeout=False, add_proxy_env_vars=True):
return rootm([cmds], chroot, working_dir, output, output_return, check, env,
disable_timeout, add_proxy_env_vars)
def root(
cmds: Sequence[PathString],
chroot: Chroot = Chroot.native(),
working_dir: PurePath = PurePath("/"),
output="log",
output_return=False,
check=None,
env={},
disable_timeout=False,
add_proxy_env_vars=True,
):
return rootm(
[cmds],
chroot,
working_dir,
output,
output_return,
check,
env,
disable_timeout,
add_proxy_env_vars,
)
def userm(cmds: Sequence[Sequence[PathString]], chroot: Chroot=Chroot.native(), working_dir: Path = Path("/"), output="log",
output_return=False, check=None, env={}):
def userm(
cmds: Sequence[Sequence[PathString]],
chroot: Chroot = Chroot.native(),
working_dir: Path = Path("/"),
output="log",
output_return=False,
check=None,
env={},
):
"""
Run a command inside a chroot as "user". We always use the BusyBox
implementation of 'su', because other implementations may override the PATH
@ -116,13 +162,20 @@ def userm(cmds: Sequence[Sequence[PathString]], chroot: Chroot=Chroot.native(),
flat_cmd = pmb.helpers.run_core.flat_cmd(cmds, env=env)
cmd = ["busybox", "su", "pmos", "-c", flat_cmd]
return pmb.chroot.root(cmd, chroot, working_dir, output,
output_return, check, {},
add_proxy_env_vars=False)
return pmb.chroot.root(
cmd, chroot, working_dir, output, output_return, check, {}, add_proxy_env_vars=False
)
def user(cmd: Sequence[PathString], chroot: Chroot=Chroot.native(), working_dir: Path = Path("/"), output="log",
output_return=False, check=None, env={}):
def user(
cmd: Sequence[PathString],
chroot: Chroot = Chroot.native(),
working_dir: Path = Path("/"),
output="log",
output_return=False,
check=None,
env={},
):
userm([cmd], chroot, working_dir, output, output_return, check, env)
@ -133,7 +186,7 @@ def exists(username, chroot: Chroot=Chroot.native()):
:param username: User name
:returns: bool
"""
output = pmb.chroot.root(["getent", "passwd", username],
chroot, output_return=True, check=False)
output = pmb.chroot.root(
["getent", "passwd", username], chroot, output_return=True, check=False
)
return len(output) > 0

View file

@ -40,12 +40,13 @@ def shutdown_cryptsetup_device(name: str):
if not (Chroot.native() / "dev/mapper" / name).exists():
return
pmb.chroot.apk.install(["cryptsetup"], Chroot.native())
status = pmb.chroot.root(["cryptsetup", "status", name],
output_return=True, check=False)
status = pmb.chroot.root(["cryptsetup", "status", name], output_return=True, check=False)
if not status:
logging.warning("WARNING: Failed to run cryptsetup to get the status"
logging.warning(
"WARNING: Failed to run cryptsetup to get the status"
" for " + name + ", assuming it is not mounted"
" (shutdown fails later if it is)!")
" (shutdown fails later if it is)!"
)
return
if status.startswith("/dev/mapper/" + name + " is active."):

View file

@ -6,6 +6,7 @@ from pmb.core.context import get_context
from .apk import packages_get_locally_built_apks
import pmb.config.pmaports
@pytest.fixture
def apk_mocks(monkeypatch):
def _pmaports_config(_aports=None):
@ -69,7 +70,6 @@ def test_get_local_apks(pmb_args, apk_mocks):
assert len(local) == 1
assert local[0].parts[-2:] == apk_file.parts[-2:]
create_apk("package2", arch)
create_apk("package3", arch)
create_apk("package4", arch)
@ -78,4 +78,3 @@ def test_get_local_apks(pmb_args, apk_mocks):
local = packages_get_locally_built_apks(["package3"], arch)
print(local)
assert len(local) == 4

View file

@ -36,9 +36,17 @@ def del_chroot(path: Path, confirm=True, dry=False):
pmb.helpers.run.root(["rm", "-rf", path])
def zap(confirm=True, dry=False, pkgs_local=False, http=False,
pkgs_local_mismatch=False, pkgs_online_mismatch=False, distfiles=False,
rust=False, netboot=False):
def zap(
confirm=True,
dry=False,
pkgs_local=False,
http=False,
pkgs_local_mismatch=False,
pkgs_online_mismatch=False,
distfiles=False,
rust=False,
netboot=False,
):
"""
Shutdown everything inside the chroots (e.g. adb), umount
everything and then safely remove folders from the work-directory.
@ -90,8 +98,7 @@ def zap(confirm=True, dry=False, pkgs_local=False, http=False,
pattern = os.path.realpath(f"{get_context().config.work}/{pattern}")
matches = glob.glob(pattern)
for match in matches:
if (not confirm or
pmb.helpers.cli.confirm(f"Remove {match}?")):
if not confirm or pmb.helpers.cli.confirm(f"Remove {match}?"):
logging.info(f"% rm -rf {match}")
if not dry:
pmb.helpers.run.root(["rm", "-rf", match])
@ -114,13 +121,17 @@ def zap_pkgs_local_mismatch(confirm=True, dry=False):
if not os.path.exists(f"{get_context().config.work}/packages/{channel}"):
return
question = "Remove binary packages that are newer than the corresponding" \
question = (
"Remove binary packages that are newer than the corresponding"
f" pmaports (channel '{channel}')?"
)
if confirm and not pmb.helpers.cli.confirm(question):
return
reindex = False
for apkindex_path in (get_context().config.work / "packages" / channel).glob("*/APKINDEX.tar.gz"):
for apkindex_path in (get_context().config.work / "packages" / channel).glob(
"*/APKINDEX.tar.gz"
):
# Delete packages without same version in aports
blocks = pmb.parse.apkindex.parse_blocks(apkindex_path)
for block in blocks:
@ -133,15 +144,13 @@ def zap_pkgs_local_mismatch(confirm=True, dry=False):
apk_path_short = f"{arch}/{pkgname}-{version}.apk"
apk_path = f"{get_context().config.work}/packages/{channel}/{apk_path_short}"
if not os.path.exists(apk_path):
logging.info("WARNING: Package mentioned in index not"
f" found: {apk_path_short}")
logging.info("WARNING: Package mentioned in index not" f" found: {apk_path_short}")
continue
# Aport path
aport_path = pmb.helpers.pmaports.find_optional(origin)
if not aport_path:
logging.info(f"% rm {apk_path_short}"
f" ({origin} aport not found)")
logging.info(f"% rm {apk_path_short}" f" ({origin} aport not found)")
if not dry:
pmb.helpers.run.root(["rm", apk_path])
reindex = True
@ -151,8 +160,7 @@ def zap_pkgs_local_mismatch(confirm=True, dry=False):
apkbuild = pmb.parse.apkbuild(aport_path)
version_aport = f"{apkbuild['pkgver']}-r{apkbuild['pkgrel']}"
if version != version_aport:
logging.info(f"% rm {apk_path_short}"
f" ({origin} aport: {version_aport})")
logging.info(f"% rm {apk_path_short}" f" ({origin} aport: {version_aport})")
if not dry:
pmb.helpers.run.root(["rm", apk_path])
reindex = True
@ -166,8 +174,7 @@ def zap_pkgs_online_mismatch(confirm=True, dry=False):
paths = list(get_context().config.work.glob("cache_apk_*"))
if not len(paths):
return
if (confirm and not pmb.helpers.cli.confirm("Remove outdated"
" binary packages?")):
if confirm and not pmb.helpers.cli.confirm("Remove outdated" " binary packages?"):
return
# Iterate over existing apk caches

View file

@ -48,12 +48,13 @@ def get_ci_scripts(topdir):
for option in options:
if option not in pmb.config.ci_valid_options:
raise RuntimeError(f"{script}: unsupported option '{option}'."
" Typo in script or pmbootstrap too old?")
raise RuntimeError(
f"{script}: unsupported option '{option}'."
" Typo in script or pmbootstrap too old?"
)
short_name = os.path.basename(script).split(".", -1)[0]
ret[short_name] = {"description": description,
"options": options}
ret[short_name] = {"description": description, "options": options}
return ret
@ -104,8 +105,7 @@ def ask_which_scripts_to_run(scripts_available):
logging.info(f"* {script_name}: {script['description']}{extra}")
choices += [script_name]
selection = pmb.helpers.cli.ask("Which script?", None, "all",
complete=choices)
selection = pmb.helpers.cli.ask("Which script?", None, "all", complete=choices)
if selection == "all":
return scripts_available
@ -131,14 +131,12 @@ def copy_git_repo_to_chroot(topdir):
handle.write(file)
handle.write("\n")
pmb.helpers.run.user(["tar", "-cf", tarball_path, "-T",
f"{tarball_path}.files"], topdir)
pmb.helpers.run.user(["tar", "-cf", tarball_path, "-T", f"{tarball_path}.files"], topdir)
ci_dir = Path("/home/pmos/ci")
pmb.chroot.user(["rm", "-rf", ci_dir])
pmb.chroot.user(["mkdir", ci_dir])
pmb.chroot.user(["tar", "-xf", "/tmp/git.tar.gz"],
working_dir=ci_dir)
pmb.chroot.user(["tar", "-xf", "/tmp/git.tar.gz"], working_dir=ci_dir)
def run_scripts(topdir, scripts):
@ -164,12 +162,10 @@ def run_scripts(topdir, scripts):
where = "native"
script_path = f".ci/{script_name}.sh"
logging.info(f"*** ({step}/{steps}) RUNNING CI SCRIPT: {script_path}"
f" [{where}] ***")
logging.info(f"*** ({step}/{steps}) RUNNING CI SCRIPT: {script_path}" f" [{where}] ***")
if "native" in script["options"]:
rc = pmb.helpers.run.user([script_path], topdir,
output="tui")
rc = pmb.helpers.run.user([script_path], topdir, output="tui")
continue
else:
# Run inside pmbootstrap chroot
@ -178,9 +174,9 @@ def run_scripts(topdir, scripts):
repo_copied = True
env = {"TESTUSER": "pmos"}
rc = pmb.chroot.root([script_path], check=False, env=env,
working_dir=Path("/home/pmos/ci"),
output="tui")
rc = pmb.chroot.root(
[script_path], check=False, env=env, working_dir=Path("/home/pmos/ci"), output="tui"
)
if rc:
logging.error(f"ERROR: CI script failed: {script_name}")
exit(1)

View file

@ -52,6 +52,7 @@ unmigrated_commands = [
"bootimg_analyze",
]
def run_command(args: PmbArgs):
# Handle deprecated command format
if args.action in unmigrated_commands:

View file

@ -1,7 +1,8 @@
# Copyright 2024 Caleb Connolly
# SPDX-License-Identifier: GPL-3.0-or-later
class Command():
class Command:
"""Base class for pmbootstrap commands."""
def run(self):

View file

@ -5,10 +5,10 @@ from __future__ import annotations
from pmb import commands
import pmb.build.other
class Index(commands.Command):
def __init__(self):
pass
def run(self):
pmb.build.other.index_repo()

View file

@ -9,6 +9,7 @@ from pmb.helpers import run
from pmb.core.context import get_context
import pmb.config
class Log(commands.Command):
clear_log: bool
lines: int
@ -38,4 +39,3 @@ class Log(commands.Command):
cmd += [context.log]
run.user(cmd, output="tui")

View file

@ -17,6 +17,7 @@ from pmb.core.context import get_context
from pmb import commands
class RepoBootstrap(commands.Command):
arch: Arch
repo: str
@ -33,13 +34,14 @@ class RepoBootstrap(commands.Command):
return
if not cfg:
raise ValueError("pmaports.cfg of current branch does not have any"
" sections starting with 'repo:'")
raise ValueError(
"pmaports.cfg of current branch does not have any" " sections starting with 'repo:'"
)
logging.info(f"Valid repositories: {', '.join(cfg.keys())}")
raise ValueError(f"Couldn't find section 'repo:{self.repo}' in pmaports.cfg of"
" current branch")
raise ValueError(
f"Couldn't find section 'repo:{self.repo}' in pmaports.cfg of" " current branch"
)
def __init__(self, arch: Optional[Arch], repository: str):
context = get_context()
@ -56,7 +58,6 @@ class RepoBootstrap(commands.Command):
self.check_repo_arg()
def get_packages(self, bootstrap_line):
ret = []
for word in bootstrap_line.split(" "):
@ -65,7 +66,6 @@ class RepoBootstrap(commands.Command):
ret += [word]
return ret
def set_progress_total(self, steps):
self.progress_total = 0
@ -80,14 +80,12 @@ class RepoBootstrap(commands.Command):
if self.arch.cpu_emulation_required():
self.progress_total += len(steps)
def log_progress(self, msg):
percent = int(100 * self.progress_done / self.progress_total)
logging.info(f"*** {percent}% [{self.progress_step}] {msg} ***")
self.progress_done += 1
def run_steps(self, steps):
chroot: Chroot
if self.arch.cpu_emulation_required():
@ -116,17 +114,23 @@ class RepoBootstrap(commands.Command):
pmb.chroot.init(chroot, usr_merge)
bootstrap_stage = int(step.split("bootstrap_", 1)[1])
def log_wrapper(pkg: BuildQueueItem):
self.log_progress(f"building {pkg['name']}")
packages = self.get_packages(bootstrap_line)
pmb.build.packages(self.context, packages, self.arch, force=True,
strict=True, bootstrap_stage=bootstrap_stage,
log_callback=log_wrapper)
pmb.build.packages(
self.context,
packages,
self.arch,
force=True,
strict=True,
bootstrap_stage=bootstrap_stage,
log_callback=log_wrapper,
)
self.log_progress("bootstrap complete!")
def check_existing_pkgs(self):
channel = pmb.config.pmaports.read_config()["channel"]
path = self.context.config.work / "packages" / channel / self.arch
@ -134,15 +138,18 @@ class RepoBootstrap(commands.Command):
if glob.glob(f"{path}/*"):
logging.info(f"Packages path: {path}")
msg = f"Found previously built packages for {channel}/{self.arch}, run" \
msg = (
f"Found previously built packages for {channel}/{self.arch}, run"
" 'pmbootstrap zap -p' first"
)
if self.arch.cpu_emulation_required():
msg += " or remove the path manually (to keep cross compilers if" \
msg += (
" or remove the path manually (to keep cross compilers if"
" you just built them)"
)
raise RuntimeError(f"{msg}!")
def get_steps(self):
cfg = pmb.config.pmaports.read_config_repos()
prev_step = 0
@ -153,15 +160,16 @@ class RepoBootstrap(commands.Command):
continue
step = int(key.split("bootstrap_", 1)[1])
assert step == prev_step + 1, (f"{key}: wrong order of steps, expected"
f" bootstrap_{prev_step + 1} (previous: bootstrap_{prev_step})")
assert step == prev_step + 1, (
f"{key}: wrong order of steps, expected"
f" bootstrap_{prev_step + 1} (previous: bootstrap_{prev_step})"
)
prev_step = step
ret[key] = packages
return ret
def run(self): # noqa: F821
self.check_existing_pkgs()

View file

@ -5,10 +5,10 @@ from __future__ import annotations
from pmb import commands
import pmb.chroot
class Shutdown(commands.Command):
def __init__(self):
pass
def run(self):
pmb.chroot.shutdown()

View file

@ -11,6 +11,7 @@ import time
"""Various internal test commands for performance testing and debugging."""
def apkindex_parse_all():
indexes = pmb.helpers.repo.apkindex_files(Arch.native())
@ -31,4 +32,3 @@ class Test(commands.Command):
def run(self):
if self.action == "apkindex_parse_all":
apkindex_parse_all()

View file

@ -20,14 +20,15 @@ from pmb.config.other import is_systemd_selected
# Exported variables (internal configuration)
#
pmb_src: Path = Path(Path(__file__) / "../../..").resolve()
apk_keys_path: Path = (pmb_src / "pmb/data/keys")
apk_keys_path: Path = pmb_src / "pmb/data/keys"
# apk-tools minimum version
# https://pkgs.alpinelinux.org/packages?name=apk-tools&branch=edge
# Update this frequently to prevent a MITM attack with an outdated version
# (which may contain a vulnerable apk/openssl, and allows an attacker to
# exploit the system!)
apk_tools_min_version = {"edge": "2.14.4-r0",
apk_tools_min_version = {
"edge": "2.14.4-r0",
"v3.20": "2.14.4-r0",
"v3.19": "2.14.4-r0",
"v3.18": "2.14.4-r0",
@ -36,7 +37,8 @@ apk_tools_min_version = {"edge": "2.14.4-r0",
"v3.15": "2.12.7-r3",
"v3.14": "2.12.7-r0",
"v3.13": "2.12.7-r0",
"v3.12": "2.10.8-r1"}
"v3.12": "2.10.8-r1",
}
# postmarketOS aports compatibility (checked against "version" in pmaports.cfg)
pmaports_min_version = "7"
@ -74,8 +76,9 @@ def sudo(cmd: Sequence[PathString]) -> Sequence[PathString]:
defaults: Dict[str, PathString] = {
"cipher": "aes-xts-plain64",
"config": Path((os.environ.get('XDG_CONFIG_HOME') or
os.path.expanduser("~/.config")) + "/pmbootstrap.cfg"),
"config": Path(
(os.environ.get("XDG_CONFIG_HOME") or os.path.expanduser("~/.config")) + "/pmbootstrap.cfg"
),
# A higher value is typically desired, but this can lead to VERY long open
# times on slower devices due to host systems being MUCH faster than the
# target device (see issue #429).
@ -84,19 +87,17 @@ defaults: Dict[str, PathString] = {
# Whether we're connected to a TTY (which allows things like e.g. printing
# progress bars)
is_interactive = sys.stdout.isatty() and \
sys.stderr.isatty() and \
sys.stdin.isatty()
is_interactive = sys.stdout.isatty() and sys.stderr.isatty() and sys.stdin.isatty()
# ANSI escape codes to highlight stdout
styles = {
"BLUE": '\033[94m',
"BOLD": '\033[1m',
"GREEN": '\033[92m',
"RED": '\033[91m',
"YELLOW": '\033[93m',
"END": '\033[0m'
"BLUE": "\033[94m",
"BOLD": "\033[1m",
"GREEN": "\033[92m",
"RED": "\033[91m",
"YELLOW": "\033[93m",
"END": "\033[0m",
}
if "NO_COLOR" in os.environ:
@ -104,16 +105,17 @@ if "NO_COLOR" in os.environ:
styles[style] = ""
# Supported filesystems and their fstools packages
filesystems = {"btrfs": "btrfs-progs",
filesystems = {
"btrfs": "btrfs-progs",
"ext2": "e2fsprogs",
"ext4": "e2fsprogs",
"f2fs": "f2fs-tools",
"fat16": "dosfstools",
"fat32": "dosfstools"}
"fat32": "dosfstools",
}
# Legacy channels and their new names (pmb#2015)
pmaports_channels_legacy = {"stable": "v20.05",
"stable-next": "v21.03"}
pmaports_channels_legacy = {"stable": "v20.05", "stable-next": "v21.03"}
#
# CHROOT
#
@ -124,14 +126,16 @@ pmaports_channels_legacy = {"stable": "v20.05",
chroot_uid_user = "12345"
# The PATH variable used inside all chroots
chroot_path = ":".join([
chroot_path = ":".join(
[
"/usr/lib/ccache/bin",
"/usr/local/sbin",
"/usr/local/bin",
"/usr/sbin:/usr/bin",
"/sbin",
"/bin"
])
"/bin",
]
)
# The PATH variable used on the host, to find the "chroot" and "sh"
# executables. As pmbootstrap runs as user, not as root, the location
@ -269,11 +273,7 @@ kconfig_options = {
"TZDEV": False,
}
},
"<5.2.0": {
"armhf armv7 x86": {
"LBDAF": True
}
}
"<5.2.0": {"armhf armv7 x86": {"LBDAF": True}},
}
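Each entry in these kconfig tables maps a kernel version constraint to per-architecture option requirements; reading the "<5.2.0" rule out loud works roughly like this (sketch only, not pmbootstrap's actual kconfig check code):

rule_version = "<5.2.0"
rule = {"armhf armv7 x86": {"LBDAF": True}}
for arches, options in rule.items():
    for arch in arches.split(" "):
        for option, expected in options.items():
            state = "enabled" if expected else "disabled"
            print(f"kernels {rule_version} on {arch}: CONFIG_{option} must be {state}")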
# Necessary waydroid kernel config options (android app support)
@ -319,7 +319,7 @@ kconfig_options_waydroid = {
"all": {
"ASHMEM": True,
}
}
},
}
# Necessary iwd kernel config options (inet wireless daemon)
@ -453,7 +453,7 @@ kconfig_options_containers = {
"x86 x86_64": { # only for x86, x86_64 (and sparc64, ia64)
"HUGETLB_PAGE": True,
"CGROUP_HUGETLB": True, # Optional section
}
},
},
">=3.6 <6.1_rc1": { # option has been dropped
"all": {
@ -641,18 +641,15 @@ apkbuild_package_attributes = {
"provider_priority": {"int": True},
"install": {"array": True},
"triggers": {"array": True},
# Packages can specify soft dependencies in "_pmb_recommends" to be
# explicitly installed by default, and not implicitly as a hard dependency
# of the package ("depends"). This makes these apps uninstallable, without
# removing the meta-package. (#1933). To disable this feature, use:
# "pmbootstrap install --no-recommends".
"_pmb_recommends": {"array": True},
# UI meta-packages can specify groups to which the user must be added
# to access specific hardware such as LED indicators.
"_pmb_groups": {"array": True},
# postmarketos-base, UI and device packages can use _pmb_select to provide
# additional configuration options in "pmbootstrap init" that allow
# selecting alternative providers for a virtual APK package.
@ -662,7 +659,6 @@ apkbuild_package_attributes = {
# Variables in APKBUILD files that get parsed
apkbuild_attributes = {
**apkbuild_package_attributes,
"arch": {"array": True},
"depends_dev": {"array": True},
"makedepends": {"array": True},
@ -675,35 +671,28 @@ apkbuild_attributes = {
"sha512sums": {},
"subpackages": {},
"url": {},
# cross-compilers
"makedepends_build": {"array": True},
"makedepends_host": {"array": True},
# kernels
"_flavor": {},
"_device": {},
"_kernver": {},
"_outdir": {},
"_config": {},
# linux-edge
"_depends_dev": {"array": True},
# mesa
"_llvmver": {},
# Overridden packages
"_pkgver": {},
"_pkgname": {},
# git commit
"_commit": {},
"source": {"array": True},
# gcc
"_pkgbase": {},
"_pkgsnap": {}
"_pkgsnap": {},
}
# Reference: https://postmarketos.org/apkbuild-options
@ -736,7 +725,7 @@ deviceinfo_chassis_types = [
"handset",
"watch",
"embedded",
"vm"
"vm",
]
#
@ -800,24 +789,26 @@ flashers: Dict[str, Dict[str, bool | List[str] | Dict[str, List[List[str]]]]] =
"depends": [], # pmaports.cfg: supported_fastboot_depends
"actions": {
"list_devices": [["fastboot", "devices", "-l"]],
"flash_rootfs": [["fastboot", "flash", "$PARTITION_ROOTFS",
"$IMAGE"]],
"flash_kernel": [["fastboot", "flash", "$PARTITION_KERNEL",
"$BOOT/boot.img$FLAVOR"]],
"flash_rootfs": [["fastboot", "flash", "$PARTITION_ROOTFS", "$IMAGE"]],
"flash_kernel": [["fastboot", "flash", "$PARTITION_KERNEL", "$BOOT/boot.img$FLAVOR"]],
"flash_vbmeta": [
# Generate vbmeta image with "disable verification" flag
["avbtool", "make_vbmeta_image", "--flags", "2",
"--padding_size", "$FLASH_PAGESIZE",
"--output", "/vbmeta.img"],
["fastboot", "flash", "$PARTITION_VBMETA", "/vbmeta.img"],
["rm", "-f", "/vbmeta.img"]
[
"avbtool",
"make_vbmeta_image",
"--flags",
"2",
"--padding_size",
"$FLASH_PAGESIZE",
"--output",
"/vbmeta.img",
],
"flash_dtbo": [["fastboot", "flash", "$PARTITION_DTBO",
"$BOOT/dtbo.img"]],
"boot": [["fastboot", "--cmdline", "$KERNEL_CMDLINE",
"boot", "$BOOT/boot.img$FLAVOR"]],
"flash_lk2nd": [["fastboot", "flash", "$PARTITION_KERNEL",
"$BOOT/lk2nd.img"]]
["fastboot", "flash", "$PARTITION_VBMETA", "/vbmeta.img"],
["rm", "-f", "/vbmeta.img"],
],
"flash_dtbo": [["fastboot", "flash", "$PARTITION_DTBO", "$BOOT/dtbo.img"]],
"boot": [["fastboot", "--cmdline", "$KERNEL_CMDLINE", "boot", "$BOOT/boot.img$FLAVOR"]],
"flash_lk2nd": [["fastboot", "flash", "$PARTITION_KERNEL", "$BOOT/lk2nd.img"]],
},
},
# Some devices provide Fastboot but using Android boot images is not
@ -831,10 +822,8 @@ flashers: Dict[str, Dict[str, bool | List[str] | Dict[str, List[List[str]]]]] =
"depends": ["android-tools"],
"actions": {
"list_devices": [["fastboot", "devices", "-l"]],
"flash_rootfs": [["fastboot", "flash", "$PARTITION_ROOTFS",
"$IMAGE_SPLIT_ROOT"]],
"flash_kernel": [["fastboot", "flash", "$PARTITION_KERNEL",
"$IMAGE_SPLIT_BOOT"]],
"flash_rootfs": [["fastboot", "flash", "$PARTITION_ROOTFS", "$IMAGE_SPLIT_ROOT"]],
"flash_kernel": [["fastboot", "flash", "$PARTITION_KERNEL", "$IMAGE_SPLIT_BOOT"]],
# TODO: Add support for boot
},
},
@ -849,11 +838,17 @@ flashers: Dict[str, Dict[str, bool | List[str] | Dict[str, List[List[str]]]]] =
"list_devices": [["heimdall", "detect"]],
"flash_rootfs": [
["heimdall_wait_for_device.sh"],
["heimdall", "flash", "--$PARTITION_ROOTFS", "$IMAGE"]],
"flash_kernel": [["heimdall_flash_kernel.sh",
"$BOOT/initramfs$FLAVOR", "$PARTITION_INITFS",
["heimdall", "flash", "--$PARTITION_ROOTFS", "$IMAGE"],
],
"flash_kernel": [
[
"heimdall_flash_kernel.sh",
"$BOOT/initramfs$FLAVOR",
"$PARTITION_INITFS",
"$BOOT/vmlinuz$FLAVOR$DTB",
"$PARTITION_KERNEL"]]
"$PARTITION_KERNEL",
]
],
},
},
# Some Samsung devices need a 'boot.img' file, just like the one generated
@ -864,34 +859,63 @@ flashers: Dict[str, Dict[str, bool | List[str] | Dict[str, List[List[str]]]]] =
"list_devices": [["heimdall", "detect"]],
"flash_rootfs": [
["heimdall_wait_for_device.sh"],
["heimdall", "flash", "--$PARTITION_ROOTFS", "$IMAGE",
"$NO_REBOOT", "$RESUME"]],
["heimdall", "flash", "--$PARTITION_ROOTFS", "$IMAGE", "$NO_REBOOT", "$RESUME"],
],
"flash_kernel": [
["heimdall_wait_for_device.sh"],
["heimdall", "flash", "--$PARTITION_KERNEL",
"$BOOT/boot.img$FLAVOR", "$NO_REBOOT", "$RESUME"]],
[
"heimdall",
"flash",
"--$PARTITION_KERNEL",
"$BOOT/boot.img$FLAVOR",
"$NO_REBOOT",
"$RESUME",
],
],
"flash_vbmeta": [
["avbtool", "make_vbmeta_image", "--flags", "2",
"--padding_size", "$FLASH_PAGESIZE",
"--output", "/vbmeta.img"],
["heimdall", "flash", "--$PARTITION_VBMETA", "/vbmeta.img",
"$NO_REBOOT", "$RESUME"],
["rm", "-f", "/vbmeta.img"]],
[
"avbtool",
"make_vbmeta_image",
"--flags",
"2",
"--padding_size",
"$FLASH_PAGESIZE",
"--output",
"/vbmeta.img",
],
[
"heimdall",
"flash",
"--$PARTITION_VBMETA",
"/vbmeta.img",
"$NO_REBOOT",
"$RESUME",
],
["rm", "-f", "/vbmeta.img"],
],
"flash_lk2nd": [
["heimdall_wait_for_device.sh"],
["heimdall", "flash", "--$PARTITION_KERNEL", "$BOOT/lk2nd.img",
"$NO_REBOOT", "$RESUME"]]
[
"heimdall",
"flash",
"--$PARTITION_KERNEL",
"$BOOT/lk2nd.img",
"$NO_REBOOT",
"$RESUME",
],
],
},
},
"adb": {
"depends": ["android-tools"],
"actions": {
"list_devices": [["adb", "-P", "5038", "devices"]],
"sideload": [["echo", "< wait for any device >"],
"sideload": [
["echo", "< wait for any device >"],
["adb", "-P", "5038", "wait-for-usb-sideload"],
["adb", "-P", "5038", "sideload",
"$RECOVERY_ZIP"]],
}
["adb", "-P", "5038", "sideload", "$RECOVERY_ZIP"],
],
},
},
"uuu": {
"depends": ["nxp-mfgtools-uuu"],
@ -910,46 +934,51 @@ flashers: Dict[str, Dict[str, bool | List[str] | Dict[str, List[List[str]]]]] =
"actions": {
"list_devices": [["rkdeveloptool", "list"]],
"flash_rootfs": [
["rkdeveloptool", "write-partition", "$PARTITION_ROOTFS",
"$IMAGE_SPLIT_ROOT"]
["rkdeveloptool", "write-partition", "$PARTITION_ROOTFS", "$IMAGE_SPLIT_ROOT"]
],
"flash_kernel": [
["rkdeveloptool", "write-partition", "$PARTITION_KERNEL",
"$IMAGE_SPLIT_BOOT"]
["rkdeveloptool", "write-partition", "$PARTITION_KERNEL", "$IMAGE_SPLIT_BOOT"]
],
},
},
"mtkclient": {
"depends": ["mtkclient"],
"actions": {
"flash_rootfs": [["mtk", "w", "$PARTITION_ROOTFS",
"$IMAGE"]],
"flash_kernel": [["mtk", "w", "$PARTITION_KERNEL",
"$BOOT/boot.img$FLAVOR"]],
"flash_rootfs": [["mtk", "w", "$PARTITION_ROOTFS", "$IMAGE"]],
"flash_kernel": [["mtk", "w", "$PARTITION_KERNEL", "$BOOT/boot.img$FLAVOR"]],
"flash_vbmeta": [
# Generate vbmeta image with "disable verification" flag
["avbtool", "make_vbmeta_image", "--flags", "2",
"--padding_size", "$FLASH_PAGESIZE",
"--output", "/vbmeta.img"],
["mtk", "w", "$PARTITION_VBMETA", "/vbmeta.img"],
["rm", "-f", "/vbmeta.img"]
[
"avbtool",
"make_vbmeta_image",
"--flags",
"2",
"--padding_size",
"$FLASH_PAGESIZE",
"--output",
"/vbmeta.img",
],
"flash_dtbo": [["mtk", "w", "$PARTITION_DTBO",
"$BOOT/dtbo.img"]],
"flash_lk2nd": [["mtk", "w", "$PARTITION_KERNEL",
"$BOOT/lk2nd.img"]]
}
}
["mtk", "w", "$PARTITION_VBMETA", "/vbmeta.img"],
["rm", "-f", "/vbmeta.img"],
],
"flash_dtbo": [["mtk", "w", "$PARTITION_DTBO", "$BOOT/dtbo.img"]],
"flash_lk2nd": [["mtk", "w", "$PARTITION_KERNEL", "$BOOT/lk2nd.img"]],
},
},
}
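Every flasher action above is a list of argv lists containing $PLACEHOLDERS that get substituted with device-specific values before the command runs; a minimal sketch of such an expansion (illustrative only, not the actual substitution code):

import re

def expand(cmd, variables):
    # Replace $NAME tokens anywhere inside each argument (simplified)
    return [re.sub(r"\$([A-Z_]+)", lambda m: variables.get(m.group(1), ""), word)
            for word in cmd]

cmd = ["fastboot", "flash", "$PARTITION_KERNEL", "$BOOT/boot.img$FLAVOR"]
print(expand(cmd, {"PARTITION_KERNEL": "boot", "BOOT": "/mnt/boot", "FLAVOR": ""}))
# ['fastboot', 'flash', 'boot', '/mnt/boot/boot.img']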
#
# GIT
#
git_repos = {
"aports_upstream": ["https://gitlab.alpinelinux.org/alpine/aports.git",
"git@gitlab.alpinelinux.org:alpine/aports.git"],
"pmaports": ["https://gitlab.com/postmarketOS/pmaports.git",
"git@gitlab.com:postmarketos/pmaports.git"],
"aports_upstream": [
"https://gitlab.alpinelinux.org/alpine/aports.git",
"git@gitlab.alpinelinux.org:alpine/aports.git",
],
"pmaports": [
"https://gitlab.com/postmarketOS/pmaports.git",
"git@gitlab.com:postmarketos/pmaports.git",
],
}
#
@ -963,7 +992,7 @@ aportgen: Dict[str, AportGenEntry] = {
"device/testing": {
"prefixes": ["device", "linux"],
"confirm_overwrite": True,
}
},
}
# Use a deterministic mirror URL instead of CDN for aportgen. Otherwise we may
@ -982,8 +1011,7 @@ aportgen_mirror_alpine = "http://dl-4.alpinelinux.org/alpine/"
newapkbuild_arguments_strings = [
["-n", "pkgname", "set package name (only use with SRCURL)"],
["-d", "pkgdesc", "set package description"],
["-l", "license", "set package license identifier from"
" <https://spdx.org/licenses/>"],
["-l", "license", "set package license identifier from" " <https://spdx.org/licenses/>"],
["-u", "url", "set package URL"],
]
newapkbuild_arguments_switches_pkgtypes = [
@ -1005,8 +1033,16 @@ newapkbuild_arguments_switches_other = [
#
# Patterns of package names to ignore for automatic pmaport upgrading
# ("pmbootstrap aportupgrade --all")
upgrade_ignore = ["device-*", "firmware-*", "linux-*", "postmarketos-*",
"*-aarch64", "*-armhf", "*-armv7", "*-riscv64"]
upgrade_ignore = [
"device-*",
"firmware-*",
"linux-*",
"postmarketos-*",
"*-aarch64",
"*-armhf",
"*-armv7",
"*-riscv64",
]
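These are shell-style wildcard patterns: a package is skipped by the automatic aport upgrade whenever any of them matches its name, e.g. (sketch using fnmatch, not necessarily the exact matching code):

import fnmatch

patterns = ["device-*", "firmware-*", "linux-*", "postmarketos-*",
            "*-aarch64", "*-armhf", "*-armv7", "*-riscv64"]

def is_ignored(pkgname: str) -> bool:
    return any(fnmatch.fnmatch(pkgname, pattern) for pattern in patterns)

assert is_ignored("linux-postmarketos-qcom-sdm845")
assert not is_ignored("hello-world")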
#
# SIDELOAD

View file

@ -45,7 +45,9 @@ def load(path: Path) -> Config:
setattr(config, key, Path(cfg["pmbootstrap"][key]))
# Yeah this really sucks and there isn't a better way to do it without external
# libraries
elif isinstance(getattr(Config, key), List) and isinstance(getattr(Config, key)[0], PosixPath):
elif isinstance(getattr(Config, key), List) and isinstance(
getattr(Config, key)[0], PosixPath
):
value = cfg["pmbootstrap"][key]
if not value:
setattr(config, key, value)
@ -95,7 +97,9 @@ def serialize(config: Config, skip_defaults=True) -> configparser.ConfigParser:
# Convert strings to paths
elif type(getattr(Config, key)) == PosixPath:
cfg["pmbootstrap"][key] = str(getattr(config, key))
elif isinstance(getattr(Config, key), List) and isinstance(getattr(Config, key)[0], PosixPath):
elif isinstance(getattr(Config, key), List) and isinstance(
getattr(Config, key)[0], PosixPath
):
cfg["pmbootstrap"][key] = ",".join(os.fspath(p) for p in getattr(config, key))
elif isinstance(getattr(Config, key), bool):
cfg["pmbootstrap"][key] = str(getattr(config, key))
@ -104,6 +108,7 @@ def serialize(config: Config, skip_defaults=True) -> configparser.ConfigParser:
return cfg
# FIXME: we should have distinct Config and ConfigFile types
def save(output: Path, config: Config):
"""Save the config object to the specified path.

View file

@ -38,9 +38,11 @@ def require_programs():
if not shutil.which(program):
missing.append(program)
if missing:
raise RuntimeError("Can't find all programs required to run"
raise RuntimeError(
"Can't find all programs required to run"
" pmbootstrap. Please install first:"
f" {', '.join(missing)}")
f" {', '.join(missing)}"
)
def ask_for_username(args: PmbArgs, default_user: str):
@ -49,12 +51,13 @@ def ask_for_username(args: PmbArgs, default_user: str):
:returns: the username
"""
while True:
ret = pmb.helpers.cli.ask("Username", None, default_user, False,
"[a-z_][a-z0-9_-]*")
ret = pmb.helpers.cli.ask("Username", None, default_user, False, "[a-z_][a-z0-9_-]*")
if ret == "root":
logging.fatal("ERROR: don't put \"root\" here. This is about"
logging.fatal(
'ERROR: don\'t put "root" here. This is about'
" creating an additional non-root user. Don't worry,"
" the root user will also be created ;)")
" the root user will also be created ;)"
)
continue
return ret
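The validation_regex passed to the prompt above describes a conventional lowercase Unix username; assuming the answer has to match the pattern in full, it accepts and rejects names like this (sketch):

import re

pattern = re.compile(r"[a-z_][a-z0-9_-]*")
assert pattern.fullmatch("pmos")
assert pattern.fullmatch("user_1")
assert not pattern.fullmatch("1user")  # must not start with a digit
assert not pattern.fullmatch("User")   # uppercase is rejected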
@ -67,22 +70,26 @@ def ask_for_work_path(args: PmbArgs):
* exists: is False when the folder did not exist before we tested whether we can create it
"""
logging.info("Location of the 'work' path. Multiple chroots"
logging.info(
"Location of the 'work' path. Multiple chroots"
" (native, device arch, device rootfs) will be created"
" in there.")
" in there."
)
while True:
try:
work = os.path.expanduser(pmb.helpers.cli.ask(
"Work path", None, get_context().config.work, False))
work = os.path.expanduser(
pmb.helpers.cli.ask("Work path", None, get_context().config.work, False)
)
work = os.path.realpath(work)
exists = os.path.exists(work)
# Work must not be inside the pmbootstrap path
if (work == pmb.config.pmb_src or
work.startswith(f"{pmb.config.pmb_src}/")):
logging.fatal("ERROR: The work path must not be inside the"
if work == pmb.config.pmb_src or work.startswith(f"{pmb.config.pmb_src}/"):
logging.fatal(
"ERROR: The work path must not be inside the"
" pmbootstrap path. Please specify another"
" location.")
" location."
)
continue
# Create the folder with a version file
@ -102,8 +109,9 @@ def ask_for_work_path(args: PmbArgs):
os.makedirs(f"{work}/cache_git", 0o700, True)
return (work, exists)
except OSError:
logging.fatal("ERROR: Could not create this folder, or write"
" inside it! Please try again.")
logging.fatal(
"ERROR: Could not create this folder, or write" " inside it! Please try again."
)
def ask_for_channel(config: Config):
@ -136,12 +144,12 @@ def ask_for_channel(config: Config):
# Ask until user gives valid channel
while True:
ret = pmb.helpers.cli.ask("Channel", None, default,
complete=choices)
ret = pmb.helpers.cli.ask("Channel", None, default, complete=choices)
if ret in choices:
return ret
logging.fatal("ERROR: Invalid channel specified, please type in one"
" from the list above.")
logging.fatal(
"ERROR: Invalid channel specified, please type in one" " from the list above."
)
def ask_for_ui(deviceinfo):
@ -151,9 +159,7 @@ def ask_for_ui(deviceinfo):
if not device_is_accelerated:
for i in reversed(range(len(ui_list))):
pkgname = f"postmarketos-ui-{ui_list[i][0]}"
apkbuild = pmb.helpers.pmaports.get(pkgname,
subpackages=False,
must_exist=False)
apkbuild = pmb.helpers.pmaports.get(pkgname, subpackages=False, must_exist=False)
if apkbuild and "pmb:gpu-accel" in apkbuild["options"]:
ui_list.pop(i)
hidden_ui_count += 1
@ -169,21 +175,26 @@ def ask_for_ui(deviceinfo):
logging.info(f"* {ui[0]}: {ui[1]}")
ui_completion_list.append(ui[0])
if hidden_ui_count > 0:
logging.info(f"NOTE: {hidden_ui_count} UIs are hidden because"
" \"deviceinfo_gpu_accelerated\" is not set (see"
" https://postmarketos.org/deviceinfo).")
logging.info(
f"NOTE: {hidden_ui_count} UIs are hidden because"
' "deviceinfo_gpu_accelerated" is not set (see'
" https://postmarketos.org/deviceinfo)."
)
while True:
ret = pmb.helpers.cli.ask("User interface", None, default, True,
complete=ui_completion_list)
ret = pmb.helpers.cli.ask(
"User interface", None, default, True, complete=ui_completion_list
)
if ret in dict(ui_list).keys():
return ret
logging.fatal("ERROR: Invalid user interface specified, please type in"
" one from the list above.")
logging.fatal(
"ERROR: Invalid user interface specified, please type in" " one from the list above."
)
def ask_for_ui_extras(config: Config, ui):
apkbuild = pmb.helpers.pmaports.get(f"postmarketos-ui-{ui}",
subpackages=False, must_exist=False)
apkbuild = pmb.helpers.pmaports.get(
f"postmarketos-ui-{ui}", subpackages=False, must_exist=False
)
if not apkbuild:
return False
@ -191,11 +202,9 @@ def ask_for_ui_extras(config: Config, ui):
if extra is None:
return False
logging.info("This user interface has an extra package:"
f" {extra['pkgdesc']}")
logging.info("This user interface has an extra package:" f" {extra['pkgdesc']}")
return pmb.helpers.cli.confirm("Enable this package?",
default=config.ui_extras)
return pmb.helpers.cli.confirm("Enable this package?", default=config.ui_extras)
def ask_for_systemd(config: Config, ui):
@ -203,40 +212,42 @@ def ask_for_systemd(config: Config, ui):
return config.systemd
if pmb.helpers.ui.check_option(ui, "pmb:systemd-never"):
logging.info("Based on your UI selection, OpenRC will be used as init"
" system. This UI does not support systemd.")
logging.info(
"Based on your UI selection, OpenRC will be used as init"
" system. This UI does not support systemd."
)
return config.systemd
default_is_systemd = pmb.helpers.ui.check_option(ui, "pmb:systemd")
not_str = " " if default_is_systemd else " not "
logging.info("Based on your UI selection, 'default' will result"
f" in{not_str}installing systemd.")
logging.info(
"Based on your UI selection, 'default' will result" f" in{not_str}installing systemd."
)
choices = SystemdConfig.choices()
answer = pmb.helpers.cli.ask("Install systemd?",
answer = pmb.helpers.cli.ask(
"Install systemd?",
choices,
config.systemd,
validation_regex=f"^({'|'.join(choices)})$",
complete=choices)
complete=choices,
)
return answer
def ask_for_keymaps(args: PmbArgs, deviceinfo: Deviceinfo):
if not deviceinfo.keymaps or deviceinfo.keymaps.strip() == "":
return ""
options = deviceinfo.keymaps.split(' ')
logging.info(f"Available keymaps for device ({len(options)}): "
f"{', '.join(options)}")
options = deviceinfo.keymaps.split(" ")
logging.info(f"Available keymaps for device ({len(options)}): " f"{', '.join(options)}")
if args.keymap == "":
args.keymap = options[0]
while True:
ret = pmb.helpers.cli.ask("Keymap", None, args.keymap,
True, complete=options)
ret = pmb.helpers.cli.ask("Keymap", None, args.keymap, True, complete=options)
if ret in options:
return ret
logging.fatal("ERROR: Invalid keymap specified, please type in"
" one from the list above.")
logging.fatal("ERROR: Invalid keymap specified, please type in" " one from the list above.")
def ask_for_timezone():
@ -256,11 +267,9 @@ def ask_for_timezone():
pass
if tz:
logging.info(f"Your host timezone: {tz}")
if pmb.helpers.cli.confirm("Use this timezone instead of GMT?",
default="y"):
if pmb.helpers.cli.confirm("Use this timezone instead of GMT?", default="y"):
return tz
logging.info("WARNING: Unable to determine timezone configuration on host,"
" using GMT.")
logging.info("WARNING: Unable to determine timezone configuration on host," " using GMT.")
return "GMT"
@ -277,12 +286,12 @@ def ask_for_provider_select(apkbuild, providers_cfg):
has_default = False
providers_short = {}
last_selected = providers_cfg.get(select, 'default')
last_selected = providers_cfg.get(select, "default")
for pkgname, pkg in providers:
# Strip provider prefix if possible
short = pkgname
if short.startswith(f'{select}-'):
if short.startswith(f"{select}-"):
short = short[len(f"{select}-") :]
# Allow selecting the package using both short and long name
@ -292,20 +301,22 @@ def ask_for_provider_select(apkbuild, providers_cfg):
if pkgname == last_selected:
last_selected = short
if not has_default and pkg.get('provider_priority', 0) != 0:
if not has_default and pkg.get("provider_priority", 0) != 0:
# Display as default provider
styles = pmb.config.styles
logging.info(f"* {short}: {pkg['pkgdesc']} "
f"{styles['BOLD']}(default){styles['END']}")
logging.info(
f"* {short}: {pkg['pkgdesc']} " f"{styles['BOLD']}(default){styles['END']}"
)
has_default = True
else:
logging.info(f"* {short}: {pkg['pkgdesc']}")
while True:
ret = pmb.helpers.cli.ask("Provider", None, last_selected, True,
complete=providers_short.keys())
ret = pmb.helpers.cli.ask(
"Provider", None, last_selected, True, complete=providers_short.keys()
)
if has_default and ret == 'default':
if has_default and ret == "default":
# Selecting default means to not select any provider explicitly
# In other words, apk chooses it automatically based on
# "provider_priority"
@ -315,8 +326,9 @@ def ask_for_provider_select(apkbuild, providers_cfg):
if ret in providers_short:
providers_cfg[select] = providers_short[ret]
break
logging.fatal("ERROR: Invalid provider specified, please type in"
" one from the list above.")
logging.fatal(
"ERROR: Invalid provider specified, please type in" " one from the list above."
)
def ask_for_provider_select_pkg(pkgname, providers_cfg):
@ -327,8 +339,7 @@ def ask_for_provider_select_pkg(pkgname, providers_cfg):
:param providers_cfg: the configuration section with previously selected
providers. Updated with new providers after selection
"""
apkbuild = pmb.helpers.pmaports.get(pkgname,
subpackages=False, must_exist=False)
apkbuild = pmb.helpers.pmaports.get(pkgname, subpackages=False, must_exist=False)
if not apkbuild:
return
@ -358,24 +369,23 @@ def ask_for_device_kernel(config: Config, device: str):
# Ask for kernel (extra message when downstream and upstream are available)
logging.info("Which kernel do you want to use with your device?")
if "downstream" in kernels:
logging.info("Downstream kernels are typically the outdated Android"
" kernel forks.")
logging.info("Downstream kernels are typically the outdated Android" " kernel forks.")
if "downstream" in kernels and len(kernels) > 1:
logging.info("Upstream kernels (mainline, stable, ...) get security"
logging.info(
"Upstream kernels (mainline, stable, ...) get security"
" updates, but may have less working features than"
" downstream kernels.")
" downstream kernels."
)
# List kernels
logging.info(f"Available kernels ({len(kernels)}):")
for type in sorted(kernels.keys()):
logging.info(f"* {type}: {kernels[type]}")
while True:
ret = pmb.helpers.cli.ask("Kernel", None, default, True,
complete=kernels)
ret = pmb.helpers.cli.ask("Kernel", None, default, True, complete=kernels)
if ret in kernels.keys():
return ret
logging.fatal("ERROR: Invalid kernel specified, please type in one"
" from the list above.")
logging.fatal("ERROR: Invalid kernel specified, please type in one" " from the list above.")
return ret
@ -389,8 +399,9 @@ def ask_for_device(context: Context):
* kernel: type of kernel (downstream, etc)
"""
vendors = sorted(pmb.helpers.devices.list_vendors())
logging.info("Choose your target device vendor (either an "
"existing one, or a new one for porting).")
logging.info(
"Choose your target device vendor (either an " "existing one, or a new one for porting)."
)
logging.info(f"Available vendors ({len(vendors)}): {', '.join(vendors)}")
current_vendor = None
@ -400,42 +411,41 @@ def ask_for_device(context: Context):
current_codename = context.config.device.split("-", 1)[1]
while True:
vendor = pmb.helpers.cli.ask("Vendor", None, current_vendor,
False, r"[a-z0-9]+", vendors)
vendor = pmb.helpers.cli.ask("Vendor", None, current_vendor, False, r"[a-z0-9]+", vendors)
new_vendor = vendor not in vendors
codenames = []
if new_vendor:
logging.info("The specified vendor ({}) could not be found in"
logging.info(
"The specified vendor ({}) could not be found in"
" existing ports, do you want to start a new"
" port?".format(vendor))
" port?".format(vendor)
)
if not pmb.helpers.cli.confirm(default=True):
continue
else:
# Archived devices can be selected, but are not displayed
devices = sorted(pmb.helpers.devices.list_codenames(
vendor, archived=False))
devices = sorted(pmb.helpers.devices.list_codenames(vendor, archived=False))
# Remove "vendor-" prefixes from device list
codenames = [x.split('-', 1)[1] for x in devices]
logging.info(f"Available codenames ({len(codenames)}): " +
", ".join(codenames))
codenames = [x.split("-", 1)[1] for x in devices]
logging.info(f"Available codenames ({len(codenames)}): " + ", ".join(codenames))
if current_vendor != vendor:
current_codename = ''
codename = pmb.helpers.cli.ask("Device codename", None,
current_codename, False, r"[a-z0-9]+",
codenames)
current_codename = ""
codename = pmb.helpers.cli.ask(
"Device codename", None, current_codename, False, r"[a-z0-9]+", codenames
)
device = f"{vendor}-{codename}"
device_path = pmb.helpers.devices.find_path(device, 'deviceinfo')
device_path = pmb.helpers.devices.find_path(device, "deviceinfo")
if device_path is None:
if device == context.device:
raise RuntimeError(
"This device does not exist anymore, check"
" <https://postmarketos.org/renamed>"
" to see if it was renamed")
logging.info("You are about to do"
f" a new device port for '{device}'.")
" to see if it was renamed"
)
logging.info("You are about to do" f" a new device port for '{device}'.")
if not pmb.helpers.cli.confirm(default=True):
current_vendor = vendor
continue
@ -459,70 +469,77 @@ def ask_for_device(context: Context):
def ask_for_additional_options(config):
context = pmb.core.context.get_context()
# Allow to skip additional options
logging.info("Additional options:"
logging.info(
"Additional options:"
f" extra free space: {config.extra_space} MB,"
f" boot partition size: {config.boot_size} MB,"
f" parallel jobs: {config.jobs},"
f" ccache per arch: {config.ccache_size},"
f" sudo timer: {context.sudo_timer},"
f" mirror: {config.mirrors["pmaports"]}")
f" mirror: {config.mirrors["pmaports"]}"
)
if not pmb.helpers.cli.confirm("Change them?",
default=False):
if not pmb.helpers.cli.confirm("Change them?", default=False):
return
# Extra space
logging.info("Set extra free space to 0, unless you ran into a 'No space"
logging.info(
"Set extra free space to 0, unless you ran into a 'No space"
" left on device' error. In that case, the size of the"
" rootfs could not be calculated properly on your machine,"
" and we need to add extra free space to make the image big"
" enough to fit the rootfs (pmbootstrap#1904)."
" How much extra free space do you want to add to the image"
" (in MB)?")
answer = pmb.helpers.cli.ask("Extra space size", None,
config.extra_space, validation_regex="^[0-9]+$")
" (in MB)?"
)
answer = pmb.helpers.cli.ask(
"Extra space size", None, config.extra_space, validation_regex="^[0-9]+$"
)
config.extra_space = answer
# Boot size
logging.info("What should be the boot partition size (in MB)?")
answer = pmb.helpers.cli.ask("Boot size", None, config.boot_size,
validation_regex="^[1-9][0-9]*$")
answer = pmb.helpers.cli.ask(
"Boot size", None, config.boot_size, validation_regex="^[1-9][0-9]*$"
)
config.boot_size = int(answer)
# Parallel job count
logging.info("How many jobs should run parallel on this machine, when"
" compiling?")
answer = pmb.helpers.cli.ask("Jobs", None, config.jobs,
validation_regex="^[1-9][0-9]*$")
logging.info("How many jobs should run parallel on this machine, when" " compiling?")
answer = pmb.helpers.cli.ask("Jobs", None, config.jobs, validation_regex="^[1-9][0-9]*$")
config.jobs = int(answer)
# Ccache size
logging.info("We use ccache to speed up building the same code multiple"
logging.info(
"We use ccache to speed up building the same code multiple"
" times. How much space should the ccache folder take up per"
" architecture? After init is through, you can check the"
" current usage with 'pmbootstrap stats'. Answer with 0 for"
" infinite.")
" infinite."
)
regex = "0|[0-9]+(k|M|G|T|Ki|Mi|Gi|Ti)"
answer = pmb.helpers.cli.ask("Ccache size", None, config.ccache_size,
lowercase_answer=False,
validation_regex=regex)
answer = pmb.helpers.cli.ask(
"Ccache size", None, config.ccache_size, lowercase_answer=False, validation_regex=regex
)
config.ccache_size = answer
# Sudo timer
logging.info("pmbootstrap does everything in Alpine Linux chroots, so"
logging.info(
"pmbootstrap does everything in Alpine Linux chroots, so"
" your host system does not get modified. In order to"
" work with these chroots, pmbootstrap calls 'sudo'"
" internally. For long running operations, it is possible"
" that you'll have to authorize sudo more than once.")
answer = pmb.helpers.cli.confirm("Enable background timer to prevent"
" repeated sudo authorization?",
default=context.sudo_timer)
" that you'll have to authorize sudo more than once."
)
answer = pmb.helpers.cli.confirm(
"Enable background timer to prevent" " repeated sudo authorization?",
default=context.sudo_timer,
)
config.sudo_timer = str(answer)
# Mirrors
# prompt for mirror change
logging.info("Selected mirror:"
f" {','.join(context.config.mirrors_postmarketos)}")
logging.info("Selected mirror:" f" {','.join(context.config.mirrors_postmarketos)}")
if pmb.helpers.cli.confirm("Change mirror?", default=False):
mirror = ask_for_mirror()
config.mirrors["pmaports"] = mirror
@ -532,8 +549,8 @@ def ask_for_mirror():
regex = "^[1-9][0-9]*$" # single non-zero number only
json_path = pmb.helpers.http.download(
"https://postmarketos.org/mirrors.json", "pmos_mirrors",
cache=False)
"https://postmarketos.org/mirrors.json", "pmos_mirrors", cache=False
)
with open(json_path, "rt") as handle:
s = handle.read()
@ -572,9 +589,9 @@ def ask_for_mirror():
mirror = ""
# require one valid mirror index selected by user
while len(mirror) == 0:
answer = pmb.helpers.cli.ask("Select a mirror", None,
",".join(mirror_indexes),
validation_regex=regex)
answer = pmb.helpers.cli.ask(
"Select a mirror", None, ",".join(mirror_indexes), validation_regex=regex
)
i = int(answer)
if i < 1 or i > len(urls):
logging.info("You must select one valid mirror!")
@ -585,8 +602,9 @@ def ask_for_mirror():
def ask_for_hostname(default: Optional[str], device):
while True:
ret = pmb.helpers.cli.ask("Device hostname (short form, e.g. 'foo')",
None, (default or device), True)
ret = pmb.helpers.cli.ask(
"Device hostname (short form, e.g. 'foo')", None, (default or device), True
)
if not pmb.helpers.other.validate_hostname(ret):
continue
# Don't store device name in user's config (gets replaced in install)
@ -598,18 +616,20 @@ def ask_for_hostname(default: Optional[str], device):
def ask_for_ssh_keys(default: bool) -> bool:
if not len(glob.glob(os.path.expanduser("~/.ssh/id_*.pub"))):
return False
return pmb.helpers.cli.confirm("Would you like to copy your SSH public"
" keys to the device?",
default=default)
return pmb.helpers.cli.confirm(
"Would you like to copy your SSH public" " keys to the device?", default=default
)
def ask_build_pkgs_on_install(default: bool) -> bool:
logging.info("After pmaports are changed, the binary packages may be"
logging.info(
"After pmaports are changed, the binary packages may be"
" outdated. If you want to install postmarketOS without"
" changes, reply 'n' for a faster installation.")
return pmb.helpers.cli.confirm("Build outdated packages during"
" 'pmbootstrap install'?",
default=default)
" changes, reply 'n' for a faster installation."
)
return pmb.helpers.cli.confirm(
"Build outdated packages during" " 'pmbootstrap install'?", default=default
)
def get_locales():
@ -623,20 +643,23 @@ def get_locales():
def ask_for_locale(current_locale: str):
locales = get_locales()
logging.info("Choose your preferred locale, like e.g. en_US. Only UTF-8"
logging.info(
"Choose your preferred locale, like e.g. en_US. Only UTF-8"
" is supported, it gets appended automatically. Use"
" tab-completion if needed.")
" tab-completion if needed."
)
while True:
ret = pmb.helpers.cli.ask("Locale",
ret = pmb.helpers.cli.ask(
"Locale",
choices=None,
default=current_locale.replace(".UTF-8", ""),
lowercase_answer=False,
complete=locales)
complete=locales,
)
ret = ret.replace(".UTF-8", "")
if ret not in locales:
logging.info("WARNING: this locale is not in the list of known"
" valid locales.")
logging.info("WARNING: this locale is not in the list of known" " valid locales.")
if pmb.helpers.cli.ask() != "y":
# Ask again
continue
@ -670,8 +693,7 @@ def frontend(args: PmbArgs):
# Copy the git hooks if master was checked out. (Don't symlink them and
# only do it on master, so the git hooks don't change unexpectedly when
# having a random branch checked out.)
branch_current = pmb.helpers.git.rev_parse(pkgrepo_default_path(),
extra_args=["--abbrev-ref"])
branch_current = pmb.helpers.git.rev_parse(pkgrepo_default_path(), extra_args=["--abbrev-ref"])
if branch_current == "master":
logging.info("NOTE: pmaports is on master branch, copying git hooks.")
pmb.config.pmaports.install_githooks()
@ -682,7 +704,7 @@ def frontend(args: PmbArgs):
config.kernel = kernel
deviceinfo = pmb.parse.deviceinfo(device)
apkbuild_path = pmb.helpers.devices.find_path(device, 'APKBUILD')
apkbuild_path = pmb.helpers.devices.find_path(device, "APKBUILD")
if apkbuild_path:
apkbuild = pmb.parse.apkbuild(apkbuild_path)
ask_for_provider_select(apkbuild, config.providers)
@ -703,17 +725,18 @@ def frontend(args: PmbArgs):
# systemd
config.systemd = ask_for_systemd(config, ui)
ask_for_provider_select_pkg(f"postmarketos-ui-{ui}",
config.providers)
ask_for_provider_select_pkg(f"postmarketos-ui-{ui}", config.providers)
ask_for_additional_options(config)
# Extra packages to be installed to rootfs
logging.info("Additional packages that will be installed to rootfs."
logging.info(
"Additional packages that will be installed to rootfs."
" Specify them in a comma separated list (e.g.: vim,file)"
" or \"none\"")
extra = pmb.helpers.cli.ask("Extra packages", None,
config.extra_packages,
validation_regex=r"^([-.+\w]+)(,[-.+\w]+)*$")
' or "none"'
)
extra = pmb.helpers.cli.ask(
"Extra packages", None, config.extra_packages, validation_regex=r"^([-.+\w]+)(,[-.+\w]+)*$"
)
config.extra_packages = extra
# Configure timezone info
@ -738,11 +761,12 @@ def frontend(args: PmbArgs):
pmb.config.save(args.config, config)
# Zap existing chroots
if (work_exists and device_exists and
len(list(Chroot.iter_patterns())) and
pmb.helpers.cli.confirm(
"Zap existing chroots to apply configuration?",
default=True)):
if (
work_exists
and device_exists
and len(list(Chroot.iter_patterns()))
and pmb.helpers.cli.confirm("Zap existing chroots to apply configuration?", default=True)
):
setattr(args, "deviceinfo", deviceinfo)
# Do not zap any existing packages or cache_http directories
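All of the interactive questions in this file funnel through pmb.helpers.cli.ask(). As a minimal sketch of how such a constrained prompt is assembled, reusing the keyword names that appear in the calls above (the exact signature is assumed from those calls and from the test mock further down in this diff):

import pmb.helpers.cli

# Sketch only: mirrors the systemd question above. Parameter names are
# taken from this diff; the real signature may carry more arguments.
choices = ["default", "always", "never"]
answer = pmb.helpers.cli.ask(
    "Install systemd?",
    choices,
    "default",                                    # preselected answer
    validation_regex=f"^({'|'.join(choices)})$",  # reject anything else
    complete=choices,                             # tab completion candidates
)

The validation_regex built from the choices list is what rejects free-form input, while complete= only feeds tab completion.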


@ -16,8 +16,9 @@ import pmb.parse.version
def clone():
logging.info("Setting up the native chroot and cloning the package build"
" recipes (pmaports)...")
logging.info(
"Setting up the native chroot and cloning the package build" " recipes (pmaports)..."
)
# Set up the native chroot and clone pmaports
pmb.helpers.git.clone("pmaports")
@ -30,8 +31,14 @@ def check_version_pmaports(real):
return
# Outated error
logging.info("NOTE: your pmaports folder has version " + real + ", but" +
" version " + min + " is required.")
logging.info(
"NOTE: your pmaports folder has version "
+ real
+ ", but"
+ " version "
+ min
+ " is required."
)
raise RuntimeError("Run 'pmbootstrap pull' to update your pmaports.")
@ -42,21 +49,26 @@ def check_version_pmbootstrap(min_ver):
return
# Show versions
logging.info(f"NOTE: you are using pmbootstrap version {real}, but"
f" version {min_ver} is required.")
logging.info(
f"NOTE: you are using pmbootstrap version {real}, but" f" version {min_ver} is required."
)
# Error for git clone
pmb_src = pmb.config.pmb_src
if os.path.exists(pmb_src / ".git"):
raise RuntimeError("Please update your local pmbootstrap repository."
f" Usually with: 'git -C \"{pmb_src}\" pull'")
raise RuntimeError(
"Please update your local pmbootstrap repository."
f" Usually with: 'git -C \"{pmb_src}\" pull'"
)
# Error for package manager installation
raise RuntimeError("Please update your pmbootstrap version (with your"
raise RuntimeError(
"Please update your pmbootstrap version (with your"
" distribution's package manager, or with pip, "
" depending on how you have installed it). If that is"
" not possible, consider cloning the latest version"
" of pmbootstrap from git.")
" of pmbootstrap from git."
)
@Cache()
@ -100,8 +112,7 @@ def read_config(aports: Optional[Path] = None):
# Require the config
path_cfg = aports / "pmaports.cfg"
if not os.path.exists(path_cfg):
raise RuntimeError("Invalid pmaports repository, could not find the"
f" config: {path_cfg}")
raise RuntimeError("Invalid pmaports repository, could not find the" f" config: {path_cfg}")
# Load the config
cfg = configparser.ConfigParser()
@ -150,19 +161,24 @@ def read_config_channel():
return channels_cfg["channels"][channel]
# Channel not in channels.cfg, try to be helpful
branch = pmb.helpers.git.rev_parse(aports,
extra_args=["--abbrev-ref"])
branch = pmb.helpers.git.rev_parse(aports, extra_args=["--abbrev-ref"])
remote = pmb.helpers.git.get_upstream_remote(aports)
logging.info("NOTE: fix the error by rebasing or cherry picking relevant"
logging.info(
"NOTE: fix the error by rebasing or cherry picking relevant"
" commits from this branch onto a branch that is on a"
" supported channel: master, v24.06, …")
logging.info("NOTE: as workaround, you may pass --config-channels with a"
" supported channel: master, v24.06, …"
)
logging.info(
"NOTE: as workaround, you may pass --config-channels with a"
" custom channels.cfg. Reference:"
" https://postmarketos.org/channels.cfg")
raise RuntimeError(f"Current branch '{branch}' of pmaports.git is on"
" https://postmarketos.org/channels.cfg"
)
raise RuntimeError(
f"Current branch '{branch}' of pmaports.git is on"
f" channel '{channel}', but this channel was not"
f" found in channels.cfg (of {remote}/master"
" branch). Looks like a very old branch.")
" branch). Looks like a very old branch."
)
def init():
@ -187,23 +203,24 @@ def switch_to_channel_branch(channel_new):
# List current and new branches/channels
channels_cfg = pmb.helpers.git.parse_channels_cfg(aports)
branch_new = channels_cfg["channels"][channel_new]["branch_pmaports"]
branch_current = pmb.helpers.git.rev_parse(aports,
extra_args=["--abbrev-ref"])
logging.info(f"Currently checked out branch '{branch_current}' of"
f" pmaports.git is on channel '{channel_current}'.")
logging.info(f"Switching to branch '{branch_new}' on channel"
f" '{channel_new}'...")
branch_current = pmb.helpers.git.rev_parse(aports, extra_args=["--abbrev-ref"])
logging.info(
f"Currently checked out branch '{branch_current}' of"
f" pmaports.git is on channel '{channel_current}'."
)
logging.info(f"Switching to branch '{branch_new}' on channel" f" '{channel_new}'...")
# Make sure we don't have mounts related to the old channel
pmb.chroot.shutdown()
# Attempt to switch branch (git gives a nice error message, mentioning
# which files need to be committed/stashed, so just pass it through)
if pmb.helpers.run.user(["git", "checkout", branch_new],
aports, "interactive", check=False):
raise RuntimeError("Failed to switch branch. Go to your pmaports and"
if pmb.helpers.run.user(["git", "checkout", branch_new], aports, "interactive", check=False):
raise RuntimeError(
"Failed to switch branch. Go to your pmaports and"
" fix what git complained about, then try again: "
f"{aports}")
f"{aports}"
)
# Verify pmaports.cfg on new branch
read_config()


@ -16,20 +16,24 @@ def which_sudo() -> Optional[str]:
if os.getuid() == 0:
return None
supported_sudos = ['doas', 'sudo']
supported_sudos = ["doas", "sudo"]
user_set_sudo = os.getenv("PMB_SUDO")
if user_set_sudo is not None:
if shutil.which(user_set_sudo) is None:
raise RuntimeError("PMB_SUDO environmental variable is set to"
raise RuntimeError(
"PMB_SUDO environmental variable is set to"
f" {user_set_sudo} but pmbootstrap cannot find"
" this command on your system.")
" this command on your system."
)
return user_set_sudo
for sudo in supported_sudos:
if shutil.which(sudo) is not None:
return sudo
raise RuntimeError("Can't find sudo or doas required to run pmbootstrap."
raise RuntimeError(
"Can't find sudo or doas required to run pmbootstrap."
" Please install sudo, doas, or specify your own sudo"
" with the PMB_SUDO environmental variable.")
" with the PMB_SUDO environmental variable."
)


@ -6,6 +6,7 @@ from pmb.core.config import SystemdConfig
"""Test the config file serialization and deserialization."""
def test_load(config_file):
config = pmb.config.load(config_file)
assert config.build_default_device_arch
@ -75,6 +76,7 @@ def test_migrate_2_to_3(config_file_2_3_x, tmp_path, monkeypatch):
tmp_path = tmp_path / "pmbootstrap-new.cfg"
did_migrate = False
def mock_save(path, config):
nonlocal did_migrate
did_migrate = True
@ -92,4 +94,5 @@ def test_migrate_2_to_3(config_file_2_3_x, tmp_path, monkeypatch):
# Check that save was called (which happens on a config migration)
assert did_migrate
# FIXME: add save tests and better type checks


@ -4,6 +4,7 @@
for example the init dates of the chroots. This is not saved in
pmbootstrap.cfg, because pmbootstrap.cfg is not tied to a specific work
dir."""
import configparser
import os
import time
@ -75,11 +76,12 @@ def chroot_check_channel(chroot: Chroot) -> bool:
the user has auto_zap_misconfigured_chroots enabled), False otherwise."""
config = get_context().config
path = config.work / "workdir.cfg"
msg_again = "Run 'pmbootstrap zap' to delete your chroots and try again." \
" To do this automatically, run 'pmbootstrap config" \
msg_again = (
"Run 'pmbootstrap zap' to delete your chroots and try again."
" To do this automatically, run 'pmbootstrap config"
" auto_zap_misconfigured_chroots yes'."
msg_unknown = ("Could not figure out on which release channel the"
f" '{chroot}' chroot is.")
)
msg_unknown = "Could not figure out on which release channel the" f" '{chroot}' chroot is."
if not os.path.exists(path):
raise RuntimeError(f"{msg_unknown} {msg_again}")
@ -91,17 +93,22 @@ def chroot_check_channel(chroot: Chroot) -> bool:
channel = pmb.config.pmaports.read_config()["channel"]
channel_cfg = cfg[key][str(chroot)]
msg = f"Chroot '{chroot}' is for the '{channel_cfg}' channel," \
msg = (
f"Chroot '{chroot}' is for the '{channel_cfg}' channel,"
f" but you are on the '{channel}' channel."
)
if channel != channel_cfg:
if config.auto_zap_misconfigured_chroots.enabled():
if config.auto_zap_misconfigured_chroots.noisy():
logging.info(msg)
logging.info("Automatically zapping since"
" auto_zap_misconfigured_chroots is enabled.")
logging.info("NOTE: You can silence this message with 'pmbootstrap"
" config auto_zap_misconfigured_chroots silently'")
logging.info(
"Automatically zapping since" " auto_zap_misconfigured_chroots is enabled."
)
logging.info(
"NOTE: You can silence this message with 'pmbootstrap"
" config auto_zap_misconfigured_chroots silently'"
)
else:
logging.debug(f"{msg} Zapping chroot.")
return True
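chroot_check_channel() above comes down to reading the per-chroot channel recorded in workdir.cfg with configparser and comparing it to the channel reported by pmaports. A standalone sketch of that pattern; the section name "chroot-channels" is a placeholder and not taken from pmbootstrap:

import configparser
from typing import Optional

def stored_channel(path: str, chroot: str) -> Optional[str]:
    """Return the channel recorded for a chroot, or None if unknown."""
    cfg = configparser.ConfigParser()
    cfg.read(path)
    section = "chroot-channels"  # placeholder name, not from pmbootstrap
    if section not in cfg or chroot not in cfg[section]:
        return None
    return cfg[section][chroot]

# Unknown -> ask the user to zap; mismatch -> zap (automatically when
# auto_zap_misconfigured_chroots is enabled).
recorded = stored_channel("workdir.cfg", "native")
needs_zap = recorded is not None and recorded != "edge"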


@ -10,6 +10,7 @@ from pmb.helpers.args import init as init_args
_testdir = Path(__file__).parent / "data/tests"
@pytest.fixture
def config_file(tmp_path_factory, request):
"""Fixture to create a temporary pmbootstrap.cfg file."""
@ -23,8 +24,7 @@ def config_file(tmp_path_factory, request):
workdir = tmp_path / "work"
workdir.mkdir()
configs = {"default": f"aports = {workdir / 'cache_git' / 'pmaports'}",
"no-repos": "aports = "}
configs = {"default": f"aports = {workdir / 'cache_git' / 'pmaports'}", "no-repos": "aports = "}
file = _testdir / "pmbootstrap.cfg"
print(f"CONFIG: {out_file}")
@ -43,8 +43,7 @@ def device_package(config_file):
pkgdir.mkdir()
for file in ["APKBUILD", "deviceinfo"]:
shutil.copy(_testdir / f"{file}.{MOCK_DEVICE}",
pkgdir / file)
shutil.copy(_testdir / f"{file}.{MOCK_DEVICE}", pkgdir / file)
return pkgdir
@ -52,7 +51,8 @@ def device_package(config_file):
@pytest.fixture
def mock_devices_find_path(device_package, monkeypatch):
"""Fixture to mock pmb.helpers.devices.find_path()"""
def mock_find_path(device, file=''):
def mock_find_path(device, file=""):
print(f"mock_find_path({device}, {file})")
out = device_package / file
if not out.exists():
@ -67,6 +67,7 @@ def mock_devices_find_path(device_package, monkeypatch):
def logfile(tmp_path_factory):
"""Setup logging for all tests."""
from pmb.helpers import logging
tmp_path = tmp_path_factory.getbasetemp()
logfile = tmp_path / "log_testsuite.txt"
logging.init(logfile, verbose=True)
@ -79,8 +80,14 @@ def setup_mock_ask(monkeypatch):
"""Common setup to mock cli.ask() to avoid reading from stdin"""
import pmb.helpers.cli
def mock_ask(question="Continue?", choices=["y", "n"], default="n",
lowercase_answer=True, validation_regex=None, complete=None):
def mock_ask(
question="Continue?",
choices=["y", "n"],
default="n",
lowercase_answer=True,
validation_regex=None,
complete=None,
):
return default
monkeypatch.setattr(pmb.helpers.cli, "ask", mock_ask)
@ -129,10 +136,12 @@ def pmb_args(config_file, mock_context, logfile):
# Sanity check
assert ".pytest_tmp" in get_context().config.work.parts
@pytest.fixture
def foreign_arch():
"""Fixture to return the foreign arch."""
from pmb.core.arch import Arch
if os.uname().machine == "x86_64":
return Arch.aarch64
@ -155,5 +164,4 @@ def pmaports(pmb_args, monkeypatch):
pmb.helpers.git.clone("pmaports")
assert pmb.helpers.run.user(["git", "checkout", "master"],
working_dir=config.aports[0]) == 0
assert pmb.helpers.run.user(["git", "checkout", "master"], working_dir=config.aports[0]) == 0


@ -12,6 +12,7 @@ _cached_native_arch: "Arch"
class Arch(enum.Enum):
"""Supported architectures according to the Alpine
APKBUILD format."""
x86 = "x86"
x86_64 = "x86_64"
armhf = "armhf"
@ -33,20 +34,19 @@ class Arch(enum.Enum):
ppc64 = "ppc64"
riscv32 = "riscv32"
def __str__(self) -> str:
return self.value
@staticmethod
def from_str(arch: str) -> "Arch":
try:
return Arch(arch)
except ValueError:
raise ValueError(f"Invalid architecture: '{arch}',"
raise ValueError(
f"Invalid architecture: '{arch}',"
" expected something like:"
f" {', '.join([str(a) for a in Arch.supported()])}")
f" {', '.join([str(a) for a in Arch.supported()])}"
)
@staticmethod
def from_machine_type(machine_type: str) -> "Arch":
@ -60,17 +60,14 @@ class Arch(enum.Enum):
}
return mapping[machine_type]
@staticmethod
def native() -> "Arch":
global _cached_native_arch
return _cached_native_arch
def is_native(self):
return self == Arch.native()
@staticmethod
def supported() -> Set["Arch"]:
"""Officially supported host/target architectures for postmarketOS. Only
@ -78,7 +75,8 @@ class Arch(enum.Enum):
we need to generate the "musl-$ARCH" and "gcc-$ARCH" packages (use
"pmbootstrap aportgen musl-armhf" etc.)."""
# FIXME: cache?
return set([
return set(
[
Arch.armhf,
Arch.armv7,
Arch.aarch64,
@ -86,8 +84,8 @@ class Arch(enum.Enum):
Arch.x86,
Arch.riscv64,
Arch.native(),
])
]
)
def kernel(self):
mapping = {
@ -112,7 +110,6 @@ class Arch(enum.Enum):
}
return mapping.get(self, self.value)
def alpine_triple(self):
"""Get the cross compiler triple for this architecture on Alpine."""
mapping = {
@ -140,9 +137,7 @@ class Arch(enum.Enum):
if self in mapping:
return mapping[self]
raise ValueError(f"Can not map Alpine architecture '{self}'"
" to the right hostspec value")
raise ValueError(f"Can not map Alpine architecture '{self}'" " to the right hostspec value")
def cpu_emulation_required(self):
# Obvious case: host arch is target arch
@ -162,7 +157,6 @@ class Arch(enum.Enum):
# No match: then it's required
return True
# Magic to let us use an arch as a Path element
def __truediv__(self, other: object) -> Path:
if isinstance(other, PosixPath) or isinstance(other, PurePosixPath):
@ -181,7 +175,6 @@ class Arch(enum.Enum):
return NotImplemented
def __rtruediv__(self, other: object) -> Path:
if isinstance(other, PosixPath) or isinstance(other, PurePosixPath):
# Important to produce a new Path object here, otherwise we
@ -192,4 +185,5 @@ class Arch(enum.Enum):
return NotImplemented
_cached_native_arch = Arch.from_machine_type(platform.machine())
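The __truediv__/__rtruediv__ pair kept above is what lets an Arch value act as a path component, which the tests in this diff exercise as Arch.aarch64 / "beep" and Path("boop") / Arch.aarch64. The same idiom on a self-contained toy enum, purely for illustration:

import enum
from pathlib import Path

class Suffix(enum.Enum):
    # Toy enum, not part of pmbootstrap.
    AARCH64 = "aarch64"

    def __truediv__(self, other):
        if isinstance(other, (str, Path)):
            # Suffix.AARCH64 / "beep" -> Path("aarch64/beep")
            return Path(self.value) / other
        return NotImplemented

    def __rtruediv__(self, other):
        if isinstance(other, (str, Path)):
            # "boop" / Suffix.AARCH64 -> Path("boop/aarch64")
            return Path(other) / self.value
        return NotImplemented

assert (Suffix.AARCH64 / "beep").name == "beep"
assert Path("boop") / Suffix.AARCH64 == Path("boop/aarch64")

Returning NotImplemented for unsupported operands, as the real class does, is what lets Python fall back to the reflected operation on the enum when Path is the left operand.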


@ -9,6 +9,7 @@ import pmb.config
from pmb.core.arch import Arch
from .context import get_context
class ChrootType(enum.Enum):
ROOTFS = "rootfs"
BUILDROOT = "buildroot"
@ -19,6 +20,7 @@ class ChrootType(enum.Enum):
def __str__(self) -> str:
return self.name
class Chroot:
__type: ChrootType
__name: str
@ -63,38 +65,31 @@ class Chroot:
# A native suffix must not have a name.
if self.__type == ChrootType.NATIVE and self.__name != "":
raise ValueError(f"The native suffix can't have a name but got: "
f"'{self.__name}'")
raise ValueError(f"The native suffix can't have a name but got: " f"'{self.__name}'")
if self.__type == ChrootType.IMAGE and not Path(self.__name).exists():
raise ValueError(f"Image file '{self.__name}' does not exist")
def __str__(self) -> str:
if len(self.__name) > 0 and self.type != ChrootType.IMAGE:
return f"{self.__type.value}_{self.__name}"
else:
return self.__type.value
@property
def dirname(self) -> str:
return f"chroot_{self}"
@property
def path(self) -> Path:
return Path(get_context().config.work, self.dirname)
def exists(self) -> bool:
return (self / "bin/sh").is_symlink()
def is_mounted(self) -> bool:
return self.exists() and pmb.helpers.mount.ismount(self.path / "etc/apk/keys")
@property
def arch(self) -> Arch:
if self.type == ChrootType.NATIVE:
@ -108,8 +103,7 @@ class Chroot:
if arch is not None:
return arch
raise ValueError(f"Invalid chroot suffix: {self}"
" (wrong device chosen in 'init' step?)")
raise ValueError(f"Invalid chroot suffix: {self}" " (wrong device chosen in 'init' step?)")
def __eq__(self, other: object) -> bool:
if isinstance(other, str):
@ -123,7 +117,6 @@ class Chroot:
return self.type == other.type and self.name == other.name
def __truediv__(self, other: object) -> Path:
if isinstance(other, PosixPath) or isinstance(other, PurePosixPath):
# Convert the other path to a relative path
@ -136,7 +129,6 @@ class Chroot:
return NotImplemented
def __rtruediv__(self, other: object) -> Path:
if isinstance(other, PosixPath) or isinstance(other, PurePosixPath):
# Important to produce a new Path object here, otherwise we
@ -149,32 +141,26 @@ class Chroot:
return NotImplemented
@property
def type(self) -> ChrootType:
return self.__type
@property
def name(self) -> str:
return self.__name
@staticmethod
def native() -> Chroot:
return Chroot(ChrootType.NATIVE)
@staticmethod
def buildroot(arch: Arch) -> Chroot:
return Chroot(ChrootType.BUILDROOT, arch)
@staticmethod
def rootfs(device: str) -> Chroot:
return Chroot(ChrootType.ROOTFS, device)
@staticmethod
def from_str(s: str) -> Chroot:
"""
@ -203,7 +189,6 @@ class Chroot:
else:
yield f"chroot_{stype.value}_*"
@staticmethod
def glob() -> Generator[Path, None, None]:
"""


@ -1,4 +1,3 @@
from copy import deepcopy
import enum
import multiprocessing
@ -6,6 +5,7 @@ from typing import Any, List, Dict, TypedDict
from pathlib import Path
import os
class Mirrors(TypedDict):
alpine: str
pmaports: str
@ -17,7 +17,6 @@ class SystemdConfig(enum.Enum):
ALWAYS = "always"
NEVER = "never"
def __str__(self) -> str:
return self.value
@ -41,9 +40,10 @@ class AutoZapConfig(enum.Enum):
return self == AutoZapConfig.YES
class Config():
aports: List[Path] = [Path(os.path.expanduser("~") +
"/.local/var/pmbootstrap/cache_git/pmaports")]
class Config:
aports: List[Path] = [
Path(os.path.expanduser("~") + "/.local/var/pmbootstrap/cache_git/pmaports")
]
boot_size: int = 256
build_default_device_arch: bool = False
build_pkgs_on_install: bool = True
@ -60,7 +60,7 @@ class Config():
mirrors: Mirrors = {
"alpine": "http://dl-cdn.alpinelinux.org/alpine/",
"pmaports": "http://mirror.postmarketos.org/postmarketos/",
"systemd": "http://mirror.postmarketos.org/postmarketos/staging/systemd/"
"systemd": "http://mirror.postmarketos.org/postmarketos/staging/systemd/",
}
qemu_redir_stdio: bool = False
ssh_key_glob: str = "~/.ssh/id_*.pub"
@ -77,13 +77,11 @@ class Config():
providers: Dict[str, str] = {}
def __init__(self):
# Make sure we aren't modifying the class defaults
for key in Config.__annotations__.keys():
setattr(self, key, deepcopy(Config.get_default(key)))
@staticmethod
def keys() -> List[str]:
keys = list(Config.__annotations__.keys())
@ -91,7 +89,6 @@ class Config():
keys += [f"mirrors.{k}" for k in Mirrors.__annotations__.keys()]
return sorted(keys)
@staticmethod
def get_default(dotted_key: str) -> Any:
"""Get the default value for a config option, supporting
@ -104,7 +101,6 @@ class Config():
else:
raise ValueError(f"Invalid dotted key: {dotted_key}")
def __setattr__(self, key: str, value: Any):
"""Allow for setattr() to be used with a dotted key
to set nested dictionaries (e.g. "mirrors.alpine")."""
@ -126,7 +122,6 @@ class Config():
else:
raise ValueError(f"Invalid dotted key: {key}")
def __getattribute__(self, key: str) -> Any:
"""Allow for getattr() to be used with a dotted key
to get nested dictionaries (e.g. "mirrors.alpine")."""
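The __setattr__/__getattribute__ overrides above exist so that a dotted key such as "mirrors.alpine" reaches into the nested Mirrors dictionary transparently. A minimal sketch of that dotted-key resolution on a plain dict, for illustration only and not the actual Config implementation:

from typing import Any, Dict

def get_dotted(cfg: Dict[str, Any], dotted_key: str) -> Any:
    # Walk one nesting level per dot.
    obj: Any = cfg
    for part in dotted_key.split("."):
        obj = obj[part]
    return obj

def set_dotted(cfg: Dict[str, Any], dotted_key: str, value: Any) -> None:
    *parents, leaf = dotted_key.split(".")
    obj: Any = cfg
    for part in parents:
        obj = obj[part]
    obj[leaf] = value

config = {"mirrors": {"alpine": "http://dl-cdn.alpinelinux.org/alpine/"}}
set_dotted(config, "mirrors.alpine", "https://example.org/alpine/")  # made-up URL
assert get_dotted(config, "mirrors.alpine") == "https://example.org/alpine/"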


@ -7,7 +7,7 @@ from pathlib import Path
from .config import Config
class Context():
class Context:
details_to_stdout: bool = False
quiet: bool = False
command_timeout: float = 900
@ -46,6 +46,7 @@ class Context():
__context: Context
# mypy: disable-error-code="return-value"
def get_context(allow_failure: bool = False) -> Context:
"""Get immutable global runtime context."""
@ -59,6 +60,7 @@ def get_context(allow_failure: bool=False) -> Context:
raise RuntimeError("Context not loaded yet")
return __context
def set_context(context: Context):
"""Set global runtime context."""
global __context
@ -67,5 +69,3 @@ def set_context(context: Context):
raise RuntimeError("Context already loaded")
__context = context


@ -8,11 +8,13 @@ from typing import List
from pmb.core.chroot import ChrootType
from pmb.types import PathString
class CrossToolTarget(enum.Enum):
BUILDROOT = 0
ROOTFS = 1
class CrossTool():
class CrossTool:
__target: CrossToolTarget
__package: str
__paths: List[Path]
@ -36,7 +38,11 @@ class CrossTool():
def should_install(self, target: ChrootType) -> bool:
if target == ChrootType.BUILDROOT and self.__target == CrossToolTarget.BUILDROOT:
return True
if target == ChrootType.ROOTFS or target == ChrootType.INSTALLER and self.__target == CrossToolTarget.ROOTFS:
if (
target == ChrootType.ROOTFS
or target == ChrootType.INSTALLER
and self.__target == CrossToolTarget.ROOTFS
):
return True
return False
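A side note for reading should_install() above: `and` binds tighter than `or` in Python, so the reformatted multi-line condition still groups as `target == ROOTFS or (target == INSTALLER and self.__target == CrossToolTarget.ROOTFS)`, the same grouping as the original one-liner. A two-line demonstration:

# "and" binds tighter than "or": A or B and C means A or (B and C).
A, B, C = True, True, False
assert (A or B and C) == (A or (B and C))   # the grouping used above
assert (A or B and C) != ((A or B) and C)   # not this grouping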


@ -11,8 +11,7 @@ from pmb.meta import Cache
@Cache(skip_extras=False)
def pkgrepo_paths(skip_extras=False) -> List[Path]:
config = get_context().config
paths = list(map(lambda x: Path(x),
config.aports))
paths = list(map(lambda x: Path(x), config.aports))
if not paths:
raise RuntimeError("No package repositories specified?")
@ -28,15 +27,18 @@ def pkgrepo_paths(skip_extras = False) -> List[Path]:
return out_paths
def pkgrepo_default_path() -> Path:
return pkgrepo_paths(skip_extras=True)[0]
def pkgrepo_names(skip_exras=False) -> List[str]:
"""
Return a list of all the package repository names.
"""
return [aports.name for aports in pkgrepo_paths(skip_exras)]
def pkgrepo_path(name: str) -> Path:
"""
Return the absolute path to the package repository with the given name.
@ -46,6 +48,7 @@ def pkgrepo_path(name: str) -> Path:
return aports
raise RuntimeError(f"aports '{name}' not found")
def pkgrepo_name_from_subdir(subdir: Path) -> str:
"""
Return the name of the package repository for the given directory.
@ -56,6 +59,7 @@ def pkgrepo_name_from_subdir(subdir: Path) -> str:
return aports.name
raise RuntimeError(f"aports subdir '{subdir}' not found")
def pkgrepo_glob_one(path: str) -> Optional[Path]:
"""
Search for the file denoted by path in all aports repositories.
@ -102,8 +106,10 @@ def pkgrepo_iter_package_dirs(skip_extra_repos=False) -> Generator[Path, None, N
continue
pkg = os.path.basename(pdir)
if pkg in seen[repo.name]:
raise RuntimeError(f"Package {pkg} found in multiple aports "
"subfolders. Please put it only in one folder.")
raise RuntimeError(
f"Package {pkg} found in multiple aports "
"subfolders. Please put it only in one folder."
)
if pkg in [x for li in seen.values() for x in li]:
continue
seen[repo.name].append(pkg)

View file

@ -5,6 +5,7 @@ import pytest
from .arch import Arch
def test_valid_arches():
# Silly test
assert Arch.native().is_native()
@ -58,6 +59,7 @@ def test_valid_arches():
assert (Arch.aarch64 / "beep").name == "beep"
assert Path("boop") / Arch.aarch64 == Path("boop/aarch64")
def test_invalid_arches():
excinfo: Any
with pytest.raises(ValueError) as excinfo:


@ -4,6 +4,7 @@ from .arch import Arch
from .context import get_context
from .chroot import Chroot, ChrootType
def test_valid_chroots(pmb_args, mock_devices_find_path):
"""Test that Chroot objects work as expected"""
@ -59,8 +60,12 @@ def test_invalid_chroots(pmb_args):
@pytest.mark.xfail
def test_untested_chroots():
# IMAGE type is untested, name should be a valid path in this case
tested_chroot_types = [ChrootType.ROOTFS, ChrootType.BUILDROOT, ChrootType.NATIVE,
ChrootType.INSTALLER]
tested_chroot_types = [
ChrootType.ROOTFS,
ChrootType.BUILDROOT,
ChrootType.NATIVE,
ChrootType.INSTALLER,
]
for ct in ChrootType:
if ct not in tested_chroot_types:
raise ValueError(f"ChrootType {ct} is untested!")


@ -13,6 +13,7 @@ def test_pkgrepo_paths_no_repos(pmb_args):
paths = pkgrepo_paths()
print(paths)
def test_pkgrepo_pmaports(pmaports, monkeypatch):
"""Test pkgrepo_paths() with pmaports repository and systemd extra repo"""
@ -29,8 +30,12 @@ def test_pkgrepo_pmaports(pmaports, monkeypatch):
assert default_path.name == "pmaports"
# Test extra-repos
assert pmb.helpers.run.user(["git", "checkout", "master_staging_systemd"],
working_dir=default_path) == 0
assert (
pmb.helpers.run.user(
["git", "checkout", "master_staging_systemd"], working_dir=default_path
)
== 0
)
paths = pkgrepo_paths()
assert len(paths) == 2


@ -21,8 +21,10 @@ def frontend(args: PmbArgs): # FIXME: ARGS_REFACTOR
chroot = Chroot.native()
rootfs_dir = chroot / "home/pmos/rootfs" / context.device
if not rootfs_dir.glob("*.img"):
logging.info("NOTE: To export the rootfs image, run 'pmbootstrap"
" install' first (without the 'disk' parameter).")
logging.info(
"NOTE: To export the rootfs image, run 'pmbootstrap"
" install' first (without the 'disk' parameter)."
)
# Rebuild the initramfs, just to make sure (see #69)
flavor = pmb.helpers.frontend._parse_flavor(context.device, args.autoinstall)


@ -31,10 +31,12 @@ def odin(context: Context, flavor, folder: Path):
# Validate method
method = deviceinfo.flash_method or ""
if not method.startswith("heimdall-"):
raise RuntimeError("An odin flashable tar is not supported"
raise RuntimeError(
"An odin flashable tar is not supported"
f" for the flash method '{method}' specified"
" in the current configuration."
" Only 'heimdall' methods are supported.")
" Only 'heimdall' methods are supported."
)
# Partitions
partition_kernel = deviceinfo.flash_heimdall_partition_kernel or "KERNEL"
@ -55,9 +57,7 @@ def odin(context: Context, flavor, folder: Path):
odin_device_tar = f"{context.device}.tar"
odin_device_tar_md5 = f"{context.device}.tar.md5"
handle.write(
"#!/bin/sh\n"
f"cd {temp_folder}\n")
handle.write("#!/bin/sh\n" f"cd {temp_folder}\n")
if method == "heimdall-isorec":
handle.write(
# Kernel: copy and append md5
@ -66,33 +66,43 @@ def odin(context: Context, flavor, folder: Path):
# Initramfs: recompress with lzop, append md5
f"gunzip -c /boot/initramfs{suffix_flavor}"
f" | lzop > {odin_initfs_md5}\n"
f"md5sum -t {odin_initfs_md5} >> {odin_initfs_md5}\n")
f"md5sum -t {odin_initfs_md5} >> {odin_initfs_md5}\n"
)
elif method == "heimdall-bootimg":
handle.write(
# boot.img: copy and append md5
f"cp /boot/boot.img{suffix_flavor} {odin_kernel_md5}\n"
f"md5sum -t {odin_kernel_md5} >> {odin_kernel_md5}\n")
f"md5sum -t {odin_kernel_md5} >> {odin_kernel_md5}\n"
)
handle.write(
# Create tar, remove included files and append md5
f"tar -c -f {odin_device_tar} *.bin.md5\n"
"rm *.bin.md5\n"
f"md5sum -t {odin_device_tar} >> {odin_device_tar}\n"
f"mv {odin_device_tar} {odin_device_tar_md5}\n")
f"mv {odin_device_tar} {odin_device_tar_md5}\n"
)
commands = [["mkdir", "-p", temp_folder],
commands = [
["mkdir", "-p", temp_folder],
["cat", "/tmp/_odin.sh"], # for the log
["sh", "/tmp/_odin.sh"],
["rm", "/tmp/_odin.sh"]
["rm", "/tmp/_odin.sh"],
]
for command in commands:
pmb.chroot.root(command, suffix)
# Move Odin flashable tar to native chroot and cleanup temp folder
pmb.chroot.user(["mkdir", "-p", "/home/pmos/rootfs"])
pmb.chroot.root(["mv", f"/mnt/rootfs_{context.device}{temp_folder}"
f"/{odin_device_tar_md5}", "/home/pmos/rootfs/"]),
pmb.chroot.root(["chown", "pmos:pmos",
f"/home/pmos/rootfs/{odin_device_tar_md5}"])
(
pmb.chroot.root(
[
"mv",
f"/mnt/rootfs_{context.device}{temp_folder}" f"/{odin_device_tar_md5}",
"/home/pmos/rootfs/",
]
),
)
pmb.chroot.root(["chown", "pmos:pmos", f"/home/pmos/rootfs/{odin_device_tar_md5}"])
pmb.chroot.root(["rmdir", temp_folder], suffix)
# Create the symlink


@ -30,8 +30,9 @@ def symlinks(flavor, folder: Path):
# File descriptions
info = {
f"boot.img{suffix}": ("Fastboot compatible boot.img file,"
" contains initramfs and kernel"),
f"boot.img{suffix}": (
"Fastboot compatible boot.img file," " contains initramfs and kernel"
),
"dtbo.img": "Fastboot compatible dtbo image",
f"initramfs{suffix}": "Initramfs",
f"initramfs{suffix}-extra": "Extra initramfs files in /boot",
@ -58,9 +59,10 @@ def symlinks(flavor, folder: Path):
chroot_native / "home/pmos/rootfs" / f"{context.device}.img",
chroot_native / "home/pmos/rootfs" / f"{context.device}-boot.img",
chroot_native / "home/pmos/rootfs" / f"{context.device}-root.img",
chroot_buildroot / "var/libpostmarketos-android-recovery-installer" /
f"pmos-{context.device}.zip",
path_boot / "lk2nd.img"
chroot_buildroot
/ "var/libpostmarketos-android-recovery-installer"
/ f"pmos-{context.device}.zip",
path_boot / "lk2nd.img",
]
files += list(path_boot.glob(f"initramfs{suffix}*"))


@ -33,14 +33,14 @@ def kernel(deviceinfo: Deviceinfo, method: str, boot: bool = False, autoinstall:
else:
logging.info("(native) flash kernel " + flavor)
pmb.flasher.run(deviceinfo, method, "flash_kernel", flavor)
logging.info("You will get an IP automatically assigned to your "
"USB interface shortly.")
logging.info("Then you can connect to your device using ssh after pmOS has"
" booted:")
logging.info("You will get an IP automatically assigned to your " "USB interface shortly.")
logging.info("Then you can connect to your device using ssh after pmOS has" " booted:")
logging.info(f"ssh {get_context().config.user}@{pmb.config.default_ip}")
logging.info("NOTE: If you enabled full disk encryption, you should make"
logging.info(
"NOTE: If you enabled full disk encryption, you should make"
" sure that Unl0kr has been properly configured for your"
" device")
" device"
)
def list_flavors(device: str):
@ -57,17 +57,16 @@ def rootfs(deviceinfo: Deviceinfo, method: str):
img_path = Chroot.native() / "home/pmos/rootfs" / f"{deviceinfo.codename}{suffix}"
if not img_path.exists():
raise RuntimeError("The rootfs has not been generated yet, please run"
" 'pmbootstrap install' first.")
raise RuntimeError(
"The rootfs has not been generated yet, please run" " 'pmbootstrap install' first."
)
# Do not flash if using fastboot & image is too large
if method.startswith("fastboot") \
and deviceinfo.flash_fastboot_max_size:
if method.startswith("fastboot") and deviceinfo.flash_fastboot_max_size:
img_size = img_path.stat().st_size / 1024**2
max_size = int(deviceinfo.flash_fastboot_max_size)
if img_size > max_size:
raise RuntimeError("The rootfs is too large for fastboot to"
" flash.")
raise RuntimeError("The rootfs is too large for fastboot to" " flash.")
# Run the flasher
logging.info("(native) flash rootfs image")
@ -85,15 +84,20 @@ def sideload(deviceinfo: Deviceinfo, method: str):
# Mount the buildroot
chroot = Chroot.buildroot(deviceinfo.arch)
mountpoint = "/mnt/" / chroot
pmb.helpers.mount.bind(chroot.path,
Chroot.native().path / mountpoint)
pmb.helpers.mount.bind(chroot.path, Chroot.native().path / mountpoint)
# Missing recovery zip error
if not (Chroot.native() / mountpoint / "var/lib/postmarketos-android-recovery-installer"
/ f"pmos-{deviceinfo.codename}.zip").exists():
raise RuntimeError("The recovery zip has not been generated yet,"
if not (
Chroot.native()
/ mountpoint
/ "var/lib/postmarketos-android-recovery-installer"
/ f"pmos-{deviceinfo.codename}.zip"
).exists():
raise RuntimeError(
"The recovery zip has not been generated yet,"
" please run 'pmbootstrap install' with the"
" '--android-recovery-zip' parameter first!")
" '--android-recovery-zip' parameter first!"
)
pmb.flasher.run(deviceinfo, method, "sideload")
@ -105,13 +109,16 @@ def flash_lk2nd(deviceinfo: Deviceinfo, method: str):
# manually since supporting the codepath with heimdall requires more effort.
pmb.flasher.init(deviceinfo.codename, method)
logging.info("(native) checking current fastboot product")
output = pmb.chroot.root(["fastboot", "getvar", "product"],
output="interactive", output_return=True)
output = pmb.chroot.root(
["fastboot", "getvar", "product"], output="interactive", output_return=True
)
# Variable "product" is e.g. "LK2ND_MSM8974" or "lk2nd-msm8226" depending
# on the lk2nd version.
if "lk2nd" in output.lower():
raise RuntimeError("You are currently running lk2nd. Please reboot into the regular"
" bootloader mode to re-flash lk2nd.")
raise RuntimeError(
"You are currently running lk2nd. Please reboot into the regular"
" bootloader mode to re-flash lk2nd."
)
# Get the lk2nd package (which is a dependency of the device package)
device_pkg = f"device-{deviceinfo.codename}"
@ -141,8 +148,7 @@ def frontend(args: PmbArgs):
deviceinfo = pmb.parse.deviceinfo()
method = args.flash_method or deviceinfo.flash_method
if method == "none" and action in ["boot", "flash_kernel", "flash_rootfs",
"flash_lk2nd"]:
if method == "none" and action in ["boot", "flash_kernel", "flash_rootfs", "flash_lk2nd"]:
logging.info("This device doesn't support any flash method.")
return


@ -11,29 +11,30 @@ from pmb.core import Chroot, ChrootType
def install_depends(method: str) -> None:
if method not in pmb.config.flashers:
raise RuntimeError(f"Flash method {method} is not supported by the"
raise RuntimeError(
f"Flash method {method} is not supported by the"
" current configuration. However, adding a new"
" flash method is not that hard, when the flashing"
" application already exists.\n"
"Make sure, it is packaged for Alpine Linux, or"
" package it yourself, and then add it to"
" pmb/config/__init__.py.")
" pmb/config/__init__.py."
)
depends = pmb.config.flashers[method]["depends"]
# Depends for some flash methods may be different for various pmaports
# branches, so read them from pmaports.cfg.
if method == "fastboot":
pmaports_cfg = pmb.config.pmaports.read_config()
depends = pmaports_cfg.get("supported_fastboot_depends",
"android-tools,avbtool").split(",")
depends = pmaports_cfg.get("supported_fastboot_depends", "android-tools,avbtool").split(",")
elif method == "heimdall-bootimg":
pmaports_cfg = pmb.config.pmaports.read_config()
depends = pmaports_cfg.get("supported_heimdall_depends",
"heimdall,avbtool").split(",")
depends = pmaports_cfg.get("supported_heimdall_depends", "heimdall,avbtool").split(",")
elif method == "mtkclient":
pmaports_cfg = pmb.config.pmaports.read_config()
depends = pmaports_cfg.get("supported_mtkclient_depends",
"mtkclient,android-tools").split(",")
depends = pmaports_cfg.get("supported_mtkclient_depends", "mtkclient,android-tools").split(
","
)
pmb.chroot.apk.install(depends, Chroot.native())


@ -12,9 +12,16 @@ def check_partition_blacklist(deviceinfo: Deviceinfo, key, value):
name = deviceinfo.name
if value in (deviceinfo.partition_blacklist or "").split(","):
raise RuntimeError("'" + value + "'" + " partition is blacklisted " +
"from being flashed! See the " + name + " device " +
"wiki page for more information.")
raise RuntimeError(
"'"
+ value
+ "'"
+ " partition is blacklisted "
+ "from being flashed! See the "
+ name
+ " device "
+ "wiki page for more information."
)
def run(deviceinfo: Deviceinfo, method: str, action: str, flavor=None):
@ -25,12 +32,14 @@ def run(deviceinfo: Deviceinfo, method: str, action: str, flavor=None):
if not isinstance(cfg["actions"], dict):
raise TypeError(f"Flashers misconfigured! {method} key 'actions' should be a dictionary")
if action not in cfg["actions"]:
raise RuntimeError("action " + action + " is not"
raise RuntimeError(
"action " + action + " is not"
" configured for method " + method + "!"
" You can use the '--method' option to specify a"
" different flash method. See also:"
" <https://wiki.postmarketos.org/wiki/"
"Deviceinfo_flash_methods>")
"Deviceinfo_flash_methods>"
)
# Variable setup
# FIXME: handle argparsing and pass in only the args we need.
@ -39,30 +48,36 @@ def run(deviceinfo: Deviceinfo, method: str, action: str, flavor=None):
# vbmeta flasher requires vbmeta partition to be explicitly specified
if action == "flash_vbmeta" and not fvars["$PARTITION_VBMETA"]:
raise RuntimeError("Your device does not have 'vbmeta' partition"
raise RuntimeError(
"Your device does not have 'vbmeta' partition"
" specified; set"
" 'deviceinfo_flash_fastboot_partition_vbmeta'"
" or 'deviceinfo_flash_heimdall_partition_vbmeta'"
" in deviceinfo file. See also:"
" <https://wiki.postmarketos.org/wiki/"
"Deviceinfo_reference>")
"Deviceinfo_reference>"
)
# dtbo flasher requires dtbo partition to be explicitly specified
if action == "flash_dtbo" and not fvars["$PARTITION_DTBO"]:
raise RuntimeError("Your device does not have 'dtbo' partition"
raise RuntimeError(
"Your device does not have 'dtbo' partition"
" specified; set"
" 'deviceinfo_flash_fastboot_partition_dtbo'"
" in deviceinfo file. See also:"
" <https://wiki.postmarketos.org/wiki/"
"Deviceinfo_reference>")
"Deviceinfo_reference>"
)
if args.no_reboot and ("flash" not in action or method != "heimdall-bootimg"):
raise RuntimeError("The '--no-reboot' option is only"
" supported when flashing with heimall-bootimg.")
raise RuntimeError(
"The '--no-reboot' option is only" " supported when flashing with heimall-bootimg."
)
if args.resume and ("flash" not in action or method != "heimdall-bootimg"):
raise RuntimeError("The '--resume' option is only"
" supported when flashing with heimall-bootimg.")
raise RuntimeError(
"The '--resume' option is only" " supported when flashing with heimall-bootimg."
)
# Run the commands of each action
for command in cfg["actions"][action]:
@ -71,15 +86,17 @@ def run(deviceinfo: Deviceinfo, method: str, action: str, flavor=None):
for i in range(len(command)):
if key in command[i]:
if value is None:
raise RuntimeError(f"Variable {key} found in action"
raise RuntimeError(
f"Variable {key} found in action"
f" {action} for method {method},"
" but the value for this variable"
" is None! Is that missing in your"
" deviceinfo?")
" deviceinfo?"
)
check_partition_blacklist(deviceinfo, key, value)
command[i] = command[i].replace(key, value)
# Remove empty strings
command = [x for x in command if x != '']
command = [x for x in command if x != ""]
# Run the action
pmb.chroot.root(command, output="interactive")


@ -24,40 +24,36 @@ def variables(args: PmbArgs, flavor: str, method: str):
_partition_rootfs: Optional[str]
if method.startswith("fastboot"):
_partition_kernel = deviceinfo.flash_fastboot_partition_kernel\
or "boot"
_partition_rootfs = deviceinfo.flash_fastboot_partition_rootfs\
or deviceinfo.flash_fastboot_partition_system or "userdata"
_partition_vbmeta = deviceinfo.flash_fastboot_partition_vbmeta\
or None
_partition_dtbo = deviceinfo.flash_fastboot_partition_dtbo\
or None
_partition_kernel = deviceinfo.flash_fastboot_partition_kernel or "boot"
_partition_rootfs = (
deviceinfo.flash_fastboot_partition_rootfs
or deviceinfo.flash_fastboot_partition_system
or "userdata"
)
_partition_vbmeta = deviceinfo.flash_fastboot_partition_vbmeta or None
_partition_dtbo = deviceinfo.flash_fastboot_partition_dtbo or None
# Require that the partitions are specified in deviceinfo for now
elif method.startswith("rkdeveloptool"):
_partition_kernel = deviceinfo.flash_rk_partition_kernel\
or None
_partition_rootfs = deviceinfo.flash_rk_partition_rootfs\
or deviceinfo.flash_rk_partition_system or None
_partition_kernel = deviceinfo.flash_rk_partition_kernel or None
_partition_rootfs = (
deviceinfo.flash_rk_partition_rootfs or deviceinfo.flash_rk_partition_system or None
)
_partition_vbmeta = None
_partition_dtbo = None
elif method.startswith("mtkclient"):
_partition_kernel = deviceinfo.flash_mtkclient_partition_kernel\
or "boot"
_partition_rootfs = deviceinfo.flash_mtkclient_partition_rootfs\
or "userdata"
_partition_vbmeta = deviceinfo.flash_mtkclient_partition_vbmeta\
or None
_partition_dtbo = deviceinfo.flash_mtkclient_partition_dtbo\
or None
_partition_kernel = deviceinfo.flash_mtkclient_partition_kernel or "boot"
_partition_rootfs = deviceinfo.flash_mtkclient_partition_rootfs or "userdata"
_partition_vbmeta = deviceinfo.flash_mtkclient_partition_vbmeta or None
_partition_dtbo = deviceinfo.flash_mtkclient_partition_dtbo or None
else:
_partition_kernel = deviceinfo.flash_heimdall_partition_kernel\
or "KERNEL"
_partition_rootfs = deviceinfo.flash_heimdall_partition_rootfs\
or deviceinfo.flash_heimdall_partition_system or "SYSTEM"
_partition_vbmeta = deviceinfo.flash_heimdall_partition_vbmeta\
or None
_partition_dtbo = deviceinfo.flash_heimdall_partition_dtbo\
or None
_partition_kernel = deviceinfo.flash_heimdall_partition_kernel or "KERNEL"
_partition_rootfs = (
deviceinfo.flash_heimdall_partition_rootfs
or deviceinfo.flash_heimdall_partition_system
or "SYSTEM"
)
_partition_vbmeta = deviceinfo.flash_heimdall_partition_vbmeta or None
_partition_dtbo = deviceinfo.flash_heimdall_partition_dtbo or None
if "partition" in args and args.partition:
# Only one operation is done at same time so it doesn't matter
@ -72,11 +68,11 @@ def variables(args: PmbArgs, flavor: str, method: str):
_dtb = "-dtb"
_no_reboot = ""
if getattr(args, 'no_reboot', False):
if getattr(args, "no_reboot", False):
_no_reboot = "--no-reboot"
_resume = ""
if getattr(args,'resume', False):
if getattr(args, "resume", False):
_resume = "--resume"
fvars = {
@ -98,7 +94,7 @@ def variables(args: PmbArgs, flavor: str, method: str):
"$UUU_SCRIPT": f"/mnt/{Chroot.rootfs(deviceinfo.codename)}"
"/usr/share/uuu/flash_script.lst",
"$NO_REBOOT": _no_reboot,
"$RESUME": _resume
"$RESUME": _resume,
}
# Backwards compatibility with old mkinitfs (pma#660)
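The partition variables above rely on Python's `or` chaining for fallbacks: the first truthy deviceinfo value wins, and an empty or missing entry falls through to the hard-coded default. A short illustration with made-up deviceinfo values:

# Illustrates the "first truthy value wins" fallback used for the
# flash partitions above; these deviceinfo values are made up.
flash_fastboot_partition_rootfs = ""        # unset in deviceinfo
flash_fastboot_partition_system = "system"  # legacy name is set

partition_rootfs = (
    flash_fastboot_partition_rootfs
    or flash_fastboot_partition_system
    or "userdata"
)
assert partition_rootfs == "system"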


@ -13,6 +13,7 @@ import pmb.helpers.run_core
import pmb.parse.version
from pmb.core.context import get_context
def _prepare_fifo():
"""Prepare the progress fifo for reading / writing.
@ -54,7 +55,7 @@ def _compute_progress(line):
"""
if not line:
return 1
cur_tot = line.rstrip().split('/')
cur_tot = line.rstrip().split("/")
if len(cur_tot) != 2:
return 0
cur = float(cur_tot[0])
@ -80,17 +81,14 @@ def apk_with_progress(command: Sequence[PathString]):
_command.append(os.fspath(c))
command_with_progress = _create_command_with_progress(_command, fifo)
log_msg = " ".join(_command)
with pmb.helpers.run.root(['cat', fifo],
output="pipe") as p_cat:
with pmb.helpers.run.root(command_with_progress,
output="background") as p_apk:
with pmb.helpers.run.root(["cat", fifo], output="pipe") as p_cat:
with pmb.helpers.run.root(command_with_progress, output="background") as p_apk:
while p_apk.poll() is None:
line = p_cat.stdout.readline().decode('utf-8')
line = p_cat.stdout.readline().decode("utf-8")
progress = _compute_progress(line)
pmb.helpers.cli.progress_print(progress)
pmb.helpers.cli.progress_flush()
pmb.helpers.run_core.check_return_code(p_apk.returncode,
log_msg)
pmb.helpers.run_core.check_return_code(p_apk.returncode, log_msg)
def check_outdated(version_installed, action_msg):
@ -111,6 +109,8 @@ def check_outdated(version_installed, action_msg):
if pmb.parse.version.compare(version_installed, version_min) >= 0:
return
raise RuntimeError("Found an outdated version of the 'apk' package"
raise RuntimeError(
"Found an outdated version of the 'apk' package"
f" manager ({version_installed}, expected at least:"
f" {version_min}). {action_msg}")
f" {version_min}). {action_msg}"
)
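The progress fifo that apk_with_progress() reads carries lines of the form "current/total"; _compute_progress() above maps an empty line to 1 (done) and a malformed line to 0. A reimplementation of just that parsing step; the final division is assumed, since that part of the function is not visible in this diff:

def compute_progress(line: str) -> float:
    # Empty-line and malformed-line handling taken from the hunk above;
    # the division at the end is an assumption.
    if not line:
        return 1  # apk closed the fifo: treat as done
    cur_tot = line.rstrip().split("/")
    if len(cur_tot) != 2:
        return 0
    cur, total = float(cur_tot[0]), float(cur_tot[1])
    return cur / total if total else 0

assert compute_progress("3/10") == 0.3
assert compute_progress("") == 1
assert compute_progress("garbage") == 0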


@ -34,17 +34,18 @@ def init_req_headers() -> None:
if req_headers and req_headers_github:
return
# Generic request headers
req_headers = {
'User-Agent': f'pmbootstrap/{pmb.__version__} aportupgrade'}
req_headers = {"User-Agent": f"pmbootstrap/{pmb.__version__} aportupgrade"}
# Request headers specific to GitHub
req_headers_github = dict(req_headers)
if os.getenv("GITHUB_TOKEN") is not None:
token = os.getenv("GITHUB_TOKEN")
req_headers_github['Authorization'] = f'token {token}'
req_headers_github["Authorization"] = f"token {token}"
else:
logging.info("NOTE: Consider using a GITHUB_TOKEN environment variable"
" to increase your rate limit")
logging.info(
"NOTE: Consider using a GITHUB_TOKEN environment variable"
" to increase your rate limit"
)
def get_package_version_info_github(repo_name: str, ref: Optional[str]):
@ -57,8 +58,8 @@ def get_package_version_info_github(repo_name: str, ref: Optional[str]):
# Get the commits for the repository
commits = pmb.helpers.http.retrieve_json(
f"{GITHUB_API_BASE}/repos/{repo_name}/commits{ref_arg}",
headers=req_headers_github)
f"{GITHUB_API_BASE}/repos/{repo_name}/commits{ref_arg}", headers=req_headers_github
)
latest_commit = commits[0]
commit_date = latest_commit["commit"]["committer"]["date"]
# Extract the time from the field
@ -69,11 +70,10 @@ def get_package_version_info_github(repo_name: str, ref: Optional[str]):
}
def get_package_version_info_gitlab(gitlab_host: str, repo_name: str,
ref: Optional[str]):
def get_package_version_info_gitlab(gitlab_host: str, repo_name: str, ref: Optional[str]):
logging.debug("Trying GitLab repository: {}".format(repo_name))
repo_name_safe = urllib.parse.quote(repo_name, safe='')
repo_name_safe = urllib.parse.quote(repo_name, safe="")
# Get the URL argument to request a special ref, if needed
ref_arg = ""
@ -82,9 +82,9 @@ def get_package_version_info_gitlab(gitlab_host: str, repo_name: str,
# Get the commits for the repository
commits = pmb.helpers.http.retrieve_json(
f"{gitlab_host}/api/v4/projects/{repo_name_safe}/repository"
f"/commits{ref_arg}",
headers=req_headers)
f"{gitlab_host}/api/v4/projects/{repo_name_safe}/repository" f"/commits{ref_arg}",
headers=req_headers,
)
latest_commit = commits[0]
commit_date = latest_commit["committed_date"]
# Extract the time from the field
@ -108,21 +108,20 @@ def upgrade_git_package(args: PmbArgs, pkgname: str, package) -> None:
if 1 <= len(source) <= 2:
source = source[-1]
else:
raise RuntimeError("Unhandled number of source elements. Please open"
f" a bug report: {source}")
raise RuntimeError(
"Unhandled number of source elements. Please open" f" a bug report: {source}"
)
verinfo = None
github_match = re.match(
r"https://github\.com/(.+)/(?:archive|releases)", source)
gitlab_match = re.match(
fr"({'|'.join(GITLAB_HOSTS)})/(.+)/-/archive/", source)
github_match = re.match(r"https://github\.com/(.+)/(?:archive|releases)", source)
gitlab_match = re.match(rf"({'|'.join(GITLAB_HOSTS)})/(.+)/-/archive/", source)
if github_match:
verinfo = get_package_version_info_github(
github_match.group(1), args.ref)
verinfo = get_package_version_info_github(github_match.group(1), args.ref)
elif gitlab_match:
verinfo = get_package_version_info_gitlab(
gitlab_match.group(1), gitlab_match.group(2), args.ref)
gitlab_match.group(1), gitlab_match.group(2), args.ref
)
if verinfo is None:
# ignore for now
@ -183,11 +182,13 @@ def upgrade_stable_package(args: PmbArgs, pkgname: str, package) -> None:
# Looking up if there's a custom mapping from postmarketOS package name
# to Anitya project name.
mappings = pmb.helpers.http.retrieve_json(
f"{ANITYA_API_BASE}/packages/?distribution=postmarketOS"
f"&name={pkgname}", headers=req_headers)
f"{ANITYA_API_BASE}/packages/?distribution=postmarketOS" f"&name={pkgname}",
headers=req_headers,
)
if mappings["total_items"] < 1:
projects = pmb.helpers.http.retrieve_json(
f"{ANITYA_API_BASE}/projects/?name={pkgname}", headers=req_headers)
f"{ANITYA_API_BASE}/projects/?name={pkgname}", headers=req_headers
)
if projects["total_items"] < 1:
logging.warning(f"{pkgname}: failed to get Anitya project")
return
@ -195,17 +196,19 @@ def upgrade_stable_package(args: PmbArgs, pkgname: str, package) -> None:
project_name = mappings["items"][0]["project"]
ecosystem = mappings["items"][0]["ecosystem"]
projects = pmb.helpers.http.retrieve_json(
f"{ANITYA_API_BASE}/projects/?name={project_name}&"
f"ecosystem={ecosystem}",
headers=req_headers)
f"{ANITYA_API_BASE}/projects/?name={project_name}&" f"ecosystem={ecosystem}",
headers=req_headers,
)
if projects["total_items"] < 1:
logging.warning(f"{pkgname}: didn't find any projects, can't upgrade!")
return
if projects["total_items"] > 1:
logging.warning(f"{pkgname}: found more than one project, can't "
logging.warning(
f"{pkgname}: found more than one project, can't "
f"upgrade! Please create an explicit mapping of "
f"\"project\" to the package name.")
f'"project" to the package name.'
)
return
# Get the first, best-matching item
@ -234,8 +237,7 @@ def upgrade_stable_package(args: PmbArgs, pkgname: str, package) -> None:
pkgrel_new = 0
if not pmb.parse.version.validate(pkgver_new):
logging.warning(f"{pkgname}: would upgrade to invalid pkgver:"
f" {pkgver_new}, ignoring")
logging.warning(f"{pkgname}: would upgrade to invalid pkgver:" f" {pkgver_new}, ignoring")
return
logging.info("{}: upgrading pmaport".format(pkgname))
@ -285,5 +287,4 @@ def upgrade_all(args: PmbArgs) -> None:
if skip:
continue
upgrade(args, pkgname, args.all or args.all_git,
args.all or args.all_stable)
upgrade(args, pkgname, args.all or args.all_git, args.all or args.all_stable)
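
For context: get_package_version_info_github above asks the GitHub commits API for the newest commit and reads its committer date, sending a User-Agent header and, when available, a token to raise the rate limit. A self-contained sketch of the same request with plain urllib; the repository name is only an example:

import json
import urllib.request
from typing import Optional

def latest_commit_date(repo_name: str, token: Optional[str] = None) -> str:
    """Committer date of the newest commit on the default branch of a GitHub repo."""
    request = urllib.request.Request(
        f"https://api.github.com/repos/{repo_name}/commits",
        headers={"User-Agent": "aportupgrade-example"},
    )
    if token:  # optional, raises the API rate limit
        request.add_header("Authorization", f"token {token}")
    with urllib.request.urlopen(request) as response:
        commits = json.load(response)
    return commits[0]["commit"]["committer"]["date"]  # e.g. "2024-06-23T12:20:56Z"

print(latest_commit_date("torvalds/linux"))
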

View file

@ -55,8 +55,9 @@ def init(args: PmbArgs) -> PmbArgs:
config = pmb.config.load(args.config)
if args.aports and not args.aports.exists():
raise ValueError("pmaports path (specified with --aports) does"
f" not exist: {args.aports}")
raise ValueError(
"pmaports path (specified with --aports) does" f" not exist: {args.aports}"
)
# Override config at runtime with command line arguments
for key, _ in vars(config).items():
@ -97,8 +98,16 @@ def init(args: PmbArgs) -> PmbArgs:
pmb.helpers.logging.init(context.log, args.verbose, context.details_to_stdout)
# Initialization code which may raise errors
if args.action not in ["init", "checksum", "config", "bootimg_analyze", "log",
"pull", "shutdown", "zap"]:
if args.action not in [
"init",
"checksum",
"config",
"bootimg_analyze",
"log",
"pull",
"shutdown",
"zap",
]:
pmb.config.pmaports.read_config()
pmb.helpers.git.parse_channels_cfg(pkgrepo_default_path())
@ -143,8 +152,10 @@ def init(args: PmbArgs) -> PmbArgs:
# for key in vars(args_new):
# setattr(args, key, getattr(args_new, key))
def please_i_really_need_args() -> PmbArgs:
import traceback
traceback.print_stack()
print("FIXME: retrieved args where it shouldn't be needed!")
return __args


@ -27,8 +27,7 @@ class ReadlineTabCompleter:
# First time: build match list
if iteration == 0:
if input_text:
self.matches = [s for s in self.options
if s and s.startswith(input_text)]
self.matches = [s for s in self.options if s and s.startswith(input_text)]
else:
self.matches = self.options[:]
@ -38,8 +37,14 @@ class ReadlineTabCompleter:
return None
def ask(question="Continue?", choices=["y", "n"], default="n",
lowercase_answer=True, validation_regex=None, complete=None):
def ask(
question="Continue?",
choices=["y", "n"],
default="n",
lowercase_answer=True,
validation_regex=None,
complete=None,
):
"""Ask a question on the terminal.
:param question: display prompt
@ -62,13 +67,12 @@ def ask(question="Continue?", choices=["y", "n"], default="n",
line = f"[{date}] {line}"
if complete:
readline.parse_and_bind('tab: complete')
readline.parse_and_bind("tab: complete")
delims = readline.get_completer_delims()
if '-' in delims:
delims = delims.replace('-', '')
if "-" in delims:
delims = delims.replace("-", "")
readline.set_completer_delims(delims)
readline.set_completer(
ReadlineTabCompleter(complete).completer_func)
readline.set_completer(ReadlineTabCompleter(complete).completer_func)
ret = input(f"{line_color}: ")
@ -93,8 +97,11 @@ def ask(question="Continue?", choices=["y", "n"], default="n",
if pattern.match(ret):
return ret
logging.fatal("ERROR: Input did not pass validation (regex: " +
validation_regex + "). Please try again.")
logging.fatal(
"ERROR: Input did not pass validation (regex: "
+ validation_regex
+ "). Please try again."
)
def confirm(question="Continue?", default=False, no_assumptions=False):
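
For context: ask() above wires up readline tab completion and removes "-" from the completer delimiters so hyphenated values such as device codenames complete as one word. A minimal sketch of the same setup outside pmbootstrap, with an illustrative completer instead of the ReadlineTabCompleter class:

import readline

def make_completer(options):
    """Prefix completer: rebuild the match list on the first call per input."""
    matches = []

    def complete(text, state):
        nonlocal matches
        if state == 0:
            matches = [o for o in options if o.startswith(text)] if text else list(options)
        return matches[state] if state < len(matches) else None

    return complete

readline.parse_and_bind("tab: complete")
# Drop "-" from the delimiters so "pine64-pinephone" is completed as a single word
readline.set_completer_delims(readline.get_completer_delims().replace("-", ""))
readline.set_completer(make_completer(["pine64-pinephone", "qemu-amd64", "qemu-riscv64"]))
answer = input("Device: ")
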


@ -6,7 +6,7 @@ from typing import Optional
from pmb.core.pkgrepo import pkgrepo_glob_one, pkgrepo_iglob
def find_path(codename: str, file='') -> Optional[Path]:
def find_path(codename: str, file="") -> Optional[Path]:
"""Find path to device APKBUILD under `device/*/device-`.
:param codename: device codename
@ -29,10 +29,10 @@ def list_codenames(vendor=None, archived=True):
"""
ret = []
for path in pkgrepo_iglob("device/*/device-*"):
if not archived and 'archived' in path.parts:
if not archived and "archived" in path.parts:
continue
device = os.path.basename(path).split("-", 1)[1]
if (vendor is None) or device.startswith(vendor + '-'):
if (vendor is None) or device.startswith(vendor + "-"):
ret.append(device)
return ret


@ -6,6 +6,7 @@ class BuildFailedError(Exception):
"""Exception to be raised when pmbootstrap fails to build a package. This is handled
separately from NonBugError as it needs to be treated differently as we want to hint
to users that they can check the log for more information when a build fails."""
pass
@ -13,4 +14,5 @@ class NonBugError(Exception):
"""Exception which originates from a problem not caused by pmbootstrap's code. This
could for example be raised if there is an error in a package pmboostrap is
interacting with in some way."""
pass


@ -39,8 +39,8 @@ def replace_apkbuild(args: PmbArgs, pkgname, key, new, in_quotes=False):
line_old = '{}="{}"'.format(key, old)
line_new = '{}="{}"'.format(key, new)
else:
line_old = '{}={}'.format(key, old)
line_new = '{}={}'.format(key, new)
line_old = "{}={}".format(key, old)
line_new = "{}={}".format(key, new)
# Replace
replace(path, "\n" + line_old + "\n", "\n" + line_new + "\n")
@ -49,10 +49,11 @@ def replace_apkbuild(args: PmbArgs, pkgname, key, new, in_quotes=False):
pmb.parse.apkbuild.cache_clear()
apkbuild = pmb.parse.apkbuild(path)
if apkbuild[key] != str(new):
raise RuntimeError("Failed to set '{}' for pmaport '{}'. Make sure"
raise RuntimeError(
"Failed to set '{}' for pmaport '{}'. Make sure"
" that there's a line with exactly the string '{}'"
" and nothing else in: {}".format(key, pkgname,
line_old, path))
" and nothing else in: {}".format(key, pkgname, line_old, path)
)
def is_up_to_date(path_sources, path_target=None, lastmod_target=None):
@ -67,8 +68,7 @@ def is_up_to_date(path_sources, path_target=None, lastmod_target=None):
"""
if path_target and lastmod_target:
raise RuntimeError(
"Specify path_target *or* lastmod_target, not both!")
raise RuntimeError("Specify path_target *or* lastmod_target, not both!")
lastmod_source = None
for path_source in path_sources:
@ -93,8 +93,7 @@ def is_older_than(path, seconds):
def symlink(file: Path, link: Path):
"""Check if the symlink is already present, otherwise create it."""
if os.path.exists(link):
if (os.path.islink(link) and
os.path.realpath(os.readlink(link)) == os.path.realpath(file)):
if os.path.islink(link) and os.path.realpath(os.readlink(link)) == os.path.realpath(file):
return
raise RuntimeError(f"File exists: {link}")
elif link.is_symlink():
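
For context: symlink() above is idempotent, returning early when the link already resolves to the same file and refusing to overwrite anything else. A short sketch of that check under the assumption of absolute paths; lexists is used here so dangling links are caught in one branch:

import os
from pathlib import Path

def ensure_symlink(file: Path, link: Path) -> None:
    """Create link -> file unless an equivalent symlink is already in place."""
    if os.path.lexists(link):
        if os.path.islink(link) and os.path.realpath(os.readlink(link)) == os.path.realpath(file):
            return  # already points at the right file
        raise RuntimeError(f"File exists: {link}")
    os.symlink(file, link)

ensure_symlink(Path("/tmp/source.txt"), Path("/tmp/source-link.txt"))
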


@ -52,13 +52,13 @@ def _parse_flavor(device: str, autoinstall=True):
# "postmarketos-<manufacturer>-<device/chip>", e.g.
# "postmarketos-qcom-sdm845"
chroot = Chroot(ChrootType.ROOTFS, device)
flavor = pmb.chroot.other.kernel_flavor_installed(
chroot, autoinstall)
flavor = pmb.chroot.other.kernel_flavor_installed(chroot, autoinstall)
if not flavor:
raise RuntimeError(
f"No kernel flavors installed in chroot '{chroot}'! Please let"
" your device package depend on a package starting with 'linux-'.")
" your device package depend on a package starting with 'linux-'."
)
return flavor
@ -93,11 +93,13 @@ def _install_ondev_verify_no_rootfs(device: str, ondev_cp: List[Tuple[str, str]]
if chroot_dest_cp == chroot_dest:
return
raise ValueError(f"--no-rootfs set, but rootfs.img not found in install"
raise ValueError(
f"--no-rootfs set, but rootfs.img not found in install"
" chroot. Either run 'pmbootstrap install' without"
" --no-rootfs first to let it generate the postmarketOS"
" rootfs once, or supply a rootfs file with:"
f" --cp os.img:{chroot_dest}")
f" --cp os.img:{chroot_dest}"
)
def aportgen(args: PmbArgs):
@ -116,8 +118,7 @@ def build(args: PmbArgs):
return
# Set src and force
src = os.path.realpath(os.path.expanduser(args.src[0])) \
if args.src else None
src = os.path.realpath(os.path.expanduser(args.src[0])) if args.src else None
force = True if src else get_context().force
if src and not os.path.exists(src):
raise RuntimeError("Invalid path specified for --src: " + src)
@ -125,19 +126,21 @@ def build(args: PmbArgs):
# Ensure repo_bootstrap is done for all arches we intend to build for
for package in args.packages:
arch_package = args.arch or pmb.build.autodetect.arch(package)
pmb.helpers.pmaports.require_bootstrap(arch_package,
f"build {package} for {arch_package}")
pmb.helpers.pmaports.require_bootstrap(arch_package, f"build {package} for {arch_package}")
context = get_context()
# Build all packages
built = pmb.build.packages(context, args.packages, args.arch, force,
strict=args.strict, src=src)
built = pmb.build.packages(
context, args.packages, args.arch, force, strict=args.strict, src=src
)
# Notify about packages that weren't built
for package in set(args.packages) - set(built):
logging.info("NOTE: Package '" + package + "' is up to date. Use"
logging.info(
"NOTE: Package '" + package + "' is up to date. Use"
" 'pmbootstrap build " + package + " --force'"
" if needed.")
" if needed."
)
def build_init(args: PmbArgs):
@ -157,8 +160,7 @@ def sideload(args: PmbArgs):
arch = args.arch
user = get_context().config.user
host = args.host
pmb.sideload.sideload(args, user, host, args.port, arch, args.install_key,
args.packages)
pmb.sideload.sideload(args, user, host, args.port, arch, args.install_key, args.packages)
def netboot(args: PmbArgs):
@ -171,10 +173,12 @@ def chroot(args: PmbArgs):
# Suffix
chroot = _parse_suffix(args)
user = args.user
if (user and chroot != Chroot.native() and
chroot.type not in [ChrootType.BUILDROOT, ChrootType.IMAGE]):
raise RuntimeError("--user is only supported for native or"
" buildroot_* chroots.")
if (
user
and chroot != Chroot.native()
and chroot.type not in [ChrootType.BUILDROOT, ChrootType.IMAGE]
):
raise RuntimeError("--user is only supported for native or" " buildroot_* chroots.")
if args.xauth and chroot != Chroot.native():
raise RuntimeError("--xauth is only supported for native chroot.")
@ -197,28 +201,26 @@ def chroot(args: PmbArgs):
# Install blockdevice
if args.install_blockdev:
logging.warning("--install-blockdev is deprecated for the chroot command"
logging.warning(
"--install-blockdev is deprecated for the chroot command"
" and will be removed in a future release. If you need this"
" for some reason, please open an issue on"
" https://gitlab.com/postmarketOS/pmbootstrap.git")
" https://gitlab.com/postmarketOS/pmbootstrap.git"
)
size_boot = 128 # 128 MiB
size_root = 4096 # 4 GiB
size_reserve = 2048 # 2 GiB
pmb.install.blockdevice.create_and_mount_image(args, size_boot,
size_root, size_reserve)
pmb.install.blockdevice.create_and_mount_image(args, size_boot, size_root, size_reserve)
pmb.chroot.apk.update_repository_list(chroot, user_repository=True)
# Run the command as user/root
if user:
logging.info(f"({chroot}) % su pmos -c '" +
" ".join(args.command) + "'")
pmb.chroot.user(args.command, chroot, output=args.output,
env=env)
logging.info(f"({chroot}) % su pmos -c '" + " ".join(args.command) + "'")
pmb.chroot.user(args.command, chroot, output=args.output, env=env)
else:
logging.info(f"({chroot}) % " + " ".join(args.command))
pmb.chroot.root(args.command, chroot, output=args.output,
env=env)
pmb.chroot.root(args.command, chroot, output=args.output, env=env)
def config(args: PmbArgs):
@ -259,8 +261,7 @@ def config(args: PmbArgs):
def repo_missing(args: PmbArgs):
missing = pmb.helpers.repo_missing.generate(args.arch, args.overview,
args.package, args.built)
missing = pmb.helpers.repo_missing.generate(args.arch, args.overview, args.package, args.built)
print(json.dumps(missing, indent=4))
@ -273,52 +274,52 @@ def install(args: PmbArgs):
device = config.device
deviceinfo = pmb.parse.deviceinfo(device)
if args.no_fde:
logging.warning("WARNING: --no-fde is deprecated,"
" as it is now the default.")
logging.warning("WARNING: --no-fde is deprecated," " as it is now the default.")
if args.rsync and args.full_disk_encryption:
raise ValueError("Installation using rsync is not compatible with full"
" disk encryption.")
raise ValueError("Installation using rsync is not compatible with full" " disk encryption.")
if args.rsync and not args.disk:
raise ValueError("Installation using rsync only works with --disk.")
if args.rsync and args.filesystem == "btrfs":
raise ValueError("Installation using rsync"
" is not currently supported on btrfs filesystem.")
raise ValueError(
"Installation using rsync" " is not currently supported on btrfs filesystem."
)
pmb.helpers.pmaports.require_bootstrap(deviceinfo.arch,
f"do 'pmbootstrap install' for {deviceinfo.arch}"
" (deviceinfo_arch)")
pmb.helpers.pmaports.require_bootstrap(
deviceinfo.arch, f"do 'pmbootstrap install' for {deviceinfo.arch}" " (deviceinfo_arch)"
)
# On-device installer checks
# Note that this can't be in the mutually exclusive group that has most of
# the conflicting options, because then it would not work with --disk.
if args.on_device_installer:
if args.full_disk_encryption:
raise ValueError("--on-device-installer cannot be combined with"
raise ValueError(
"--on-device-installer cannot be combined with"
" --fde. The user can choose to encrypt their"
" installation later in the on-device installer.")
" installation later in the on-device installer."
)
if args.android_recovery_zip:
raise ValueError("--on-device-installer cannot be combined with"
" --android-recovery-zip (patches welcome)")
raise ValueError(
"--on-device-installer cannot be combined with"
" --android-recovery-zip (patches welcome)"
)
if args.no_image:
raise ValueError("--on-device-installer cannot be combined with"
" --no-image")
raise ValueError("--on-device-installer cannot be combined with" " --no-image")
if args.rsync:
raise ValueError("--on-device-installer cannot be combined with"
" --rsync")
raise ValueError("--on-device-installer cannot be combined with" " --rsync")
if args.filesystem:
raise ValueError("--on-device-installer cannot be combined with"
" --filesystem")
raise ValueError("--on-device-installer cannot be combined with" " --filesystem")
if deviceinfo.cgpt_kpart:
raise ValueError("--on-device-installer cannot be used with"
" ChromeOS devices")
raise ValueError("--on-device-installer cannot be used with" " ChromeOS devices")
else:
if args.ondev_cp:
raise ValueError("--cp can only be combined with --ondev")
if args.ondev_no_rootfs:
raise ValueError("--no-rootfs can only be combined with --ondev."
" Do you mean --no-image?")
raise ValueError(
"--no-rootfs can only be combined with --ondev." " Do you mean --no-image?"
)
if args.ondev_no_rootfs:
_install_ondev_verify_no_rootfs(device, args.ondev_cp)
@ -330,9 +331,11 @@ def install(args: PmbArgs):
# optionally add a new user for SSH that must not have the same
# username etc.)
if config.user != "user":
logging.warning(f"WARNING: custom username '{config.user}' will be"
logging.warning(
f"WARNING: custom username '{config.user}' will be"
" replaced with 'user' for the on-device"
" installer.")
" installer."
)
config.user = "user"
if not args.disk and args.split is None:
@ -343,17 +346,24 @@ def install(args: PmbArgs):
# Android recovery zip related
if args.android_recovery_zip and args.filesystem:
raise ValueError("--android-recovery-zip cannot be combined with"
" --filesystem (patches welcome)")
raise ValueError(
"--android-recovery-zip cannot be combined with" " --filesystem (patches welcome)"
)
if args.android_recovery_zip and args.full_disk_encryption:
logging.info("WARNING: --fde is rarely used in combination with"
logging.info(
"WARNING: --fde is rarely used in combination with"
" --android-recovery-zip. If this does not work, consider"
" using another method (e.g. installing via netcat)")
logging.info("WARNING: the kernel of the recovery system (e.g. TWRP)"
f" must support the cryptsetup cipher '{args.cipher}'.")
logging.info("If you know what you are doing, consider setting a"
" using another method (e.g. installing via netcat)"
)
logging.info(
"WARNING: the kernel of the recovery system (e.g. TWRP)"
f" must support the cryptsetup cipher '{args.cipher}'."
)
logging.info(
"If you know what you are doing, consider setting a"
" different cipher with 'pmbootstrap install --cipher=..."
" --fde --android-recovery-zip'.")
" --fde --android-recovery-zip'."
)
# Don't install locally compiled packages and package signing keys
if not args.install_local_pkgs:
@ -363,9 +373,11 @@ def install(args: PmbArgs):
# Safest way to avoid installing local packages is having none
if (config.work / "packages").glob("*"):
raise ValueError("--no-local-pkgs specified, but locally built"
raise ValueError(
"--no-local-pkgs specified, but locally built"
" packages found. Consider 'pmbootstrap zap -p'"
" to delete them.")
" to delete them."
)
# Verify that the root filesystem is supported by current pmaports branch
pmb.install.get_root_filesystem(args)
@ -384,12 +396,16 @@ def export(args: PmbArgs):
def update(args: PmbArgs):
existing_only = not args.non_existing
if not pmb.helpers.repo.update(args.arch, True, existing_only):
logging.info("No APKINDEX files exist, so none have been updated."
logging.info(
"No APKINDEX files exist, so none have been updated."
" The pmbootstrap command downloads the APKINDEX files on"
" demand.")
logging.info("If you want to force downloading the APKINDEX files for"
" demand."
)
logging.info(
"If you want to force downloading the APKINDEX files for"
" all architectures (not recommended), use:"
" pmbootstrap update --non-existing")
" pmbootstrap update --non-existing"
)
def newapkbuild(args: PmbArgs):
@ -402,8 +418,9 @@ def newapkbuild(args: PmbArgs):
# Sanity check: -n is only allowed with SRCURL
if args.pkgname and not is_url:
raise RuntimeError("You can only specify a pkgname (-n) when using"
" SRCURL as last parameter.")
raise RuntimeError(
"You can only specify a pkgname (-n) when using" " SRCURL as last parameter."
)
# Passthrough: Strings (e.g. -d "my description")
pass_through = []
@ -413,8 +430,10 @@ def newapkbuild(args: PmbArgs):
pass_through += [entry[0], value]
# Passthrough: Switches (e.g. -C for CMake)
for entry in (pmb.config.newapkbuild_arguments_switches_pkgtypes +
pmb.config.newapkbuild_arguments_switches_other):
for entry in (
pmb.config.newapkbuild_arguments_switches_pkgtypes
+ pmb.config.newapkbuild_arguments_switches_other
):
if getattr(args, entry[1]) is True:
pass_through.append(entry[0])
@ -434,8 +453,7 @@ def kconfig(args: PmbArgs):
# Handle passing a file directly
if args.file:
if pmb.parse.kconfig.check_file(args.file, components_list,
details=details):
if pmb.parse.kconfig.check_file(args.file, components_list, details=details):
logging.info("kconfig check succeeded!")
return
raise RuntimeError("kconfig check failed!")
@ -458,15 +476,13 @@ def kconfig(args: PmbArgs):
packages.sort()
for package in packages:
if not get_context().force:
pkgname = package if package.startswith("linux-") \
else "linux-" + package
pkgname = package if package.startswith("linux-") else "linux-" + package
aport = pmb.helpers.pmaports.find(pkgname)
apkbuild = pmb.parse.apkbuild(aport)
if "!pmb:kconfigcheck" in apkbuild["options"]:
skipped += 1
continue
if not pmb.parse.kconfig.check(package, components_list,
details=details):
if not pmb.parse.kconfig.check(package, components_list, details=details):
error = True
# At least one failure
@ -474,8 +490,10 @@ def kconfig(args: PmbArgs):
raise RuntimeError("kconfig check failed!")
else:
if skipped:
logging.info("NOTE: " + str(skipped) + " kernel(s) was skipped"
" (consider 'pmbootstrap kconfig check -f')")
logging.info(
"NOTE: " + str(skipped) + " kernel(s) was skipped"
" (consider 'pmbootstrap kconfig check -f')"
)
logging.info("kconfig check succeeded!")
elif args.action_kconfig in ["edit", "migrate"]:
if args.package:
@ -496,8 +514,7 @@ def deviceinfo_parse(args: PmbArgs):
kernel = args.deviceinfo_parse_kernel
for device in devices:
print(f"{device}, with kernel={kernel}:")
print(json.dumps(pmb.parse.deviceinfo(device, kernel), indent=4,
sort_keys=True))
print(json.dumps(pmb.parse.deviceinfo(device, kernel), indent=4, sort_keys=True))
def apkbuild_parse(args: PmbArgs):
@ -510,8 +527,7 @@ def apkbuild_parse(args: PmbArgs):
for package in packages:
print(package + ":")
aport = pmb.helpers.pmaports.find(package)
print(json.dumps(pmb.parse.apkbuild(aport), indent=4,
sort_keys=True))
print(json.dumps(pmb.parse.apkbuild(aport), indent=4, sort_keys=True))
def apkindex_parse(args: PmbArgs):
@ -574,11 +590,16 @@ def work_migrate(args: PmbArgs):
def zap(args: PmbArgs):
pmb.chroot.zap(dry=args.dry, http=args.http,
distfiles=args.distfiles, pkgs_local=args.pkgs_local,
pmb.chroot.zap(
dry=args.dry,
http=args.http,
distfiles=args.distfiles,
pkgs_local=args.pkgs_local,
pkgs_local_mismatch=args.pkgs_local_mismatch,
pkgs_online_mismatch=args.pkgs_online_mismatch,
rust=args.rust, netboot=args.netboot)
rust=args.rust,
netboot=args.netboot,
)
# Don't write the "Done" message
pmb.helpers.logging.disable()
@ -587,8 +608,7 @@ def zap(args: PmbArgs):
def bootimg_analyze(args: PmbArgs):
bootimg = pmb.parse.bootimg(args.path)
tmp_output = "Put these variables in the deviceinfo file of your device:\n"
for line in pmb.aportgen.device.\
generate_deviceinfo_fastboot_content(bootimg).split("\n"):
for line in pmb.aportgen.device.generate_deviceinfo_fastboot_content(bootimg).split("\n"):
tmp_output += "\n" + line.lstrip()
logging.info(tmp_output)
@ -611,28 +631,33 @@ def status(args: PmbArgs) -> None:
def ci(args: PmbArgs):
topdir = pmb.helpers.git.get_topdir(Path.cwd())
if not os.path.exists(topdir):
logging.error("ERROR: change your current directory to a git"
logging.error(
"ERROR: change your current directory to a git"
" repository (e.g. pmbootstrap, pmaports) before running"
" 'pmbootstrap ci'.")
" 'pmbootstrap ci'."
)
exit(1)
scripts_available = pmb.ci.get_ci_scripts(topdir)
scripts_available = pmb.ci.sort_scripts_by_speed(scripts_available)
if not scripts_available:
logging.error("ERROR: no supported CI scripts found in current git"
" repository, see https://postmarketos.org/pmb-ci")
logging.error(
"ERROR: no supported CI scripts found in current git"
" repository, see https://postmarketos.org/pmb-ci"
)
exit(1)
scripts_selected = {}
if args.scripts:
if args.all:
raise RuntimeError("Combining --all with script names doesn't"
" make sense")
raise RuntimeError("Combining --all with script names doesn't" " make sense")
for script in args.scripts:
if script not in scripts_available:
logging.error(f"ERROR: script '{script}' not found in git"
logging.error(
f"ERROR: script '{script}' not found in git"
" repository, found these:"
f" {', '.join(scripts_available.keys())}")
f" {', '.join(scripts_available.keys())}"
)
exit(1)
scripts_selected[script] = scripts_available[script]
elif args.all:


@ -106,8 +106,11 @@ def get_upstream_remote(aports: Path):
for line in output.split("\n"):
if any(u in line for u in urls):
return line.split("\t", 1)[0]
raise RuntimeError("{}: could not find remote name for any URL '{}' in git"
" repository: {}".format(name_repo, urls, aports))
raise RuntimeError(
"{}: could not find remote name for any URL '{}' in git" " repository: {}".format(
name_repo, urls, aports
)
)
@Cache("aports")
@ -131,11 +134,14 @@ def parse_channels_cfg(aports: Path):
try:
cfg.read_string(stdout)
except configparser.MissingSectionHeaderError:
logging.info("NOTE: fix this by fetching your pmaports.git, e.g."
" with 'pmbootstrap pull'")
raise RuntimeError("Failed to read channels.cfg from"
logging.info(
"NOTE: fix this by fetching your pmaports.git, e.g." " with 'pmbootstrap pull'"
)
raise RuntimeError(
"Failed to read channels.cfg from"
f" '{remote}/master' branch of your local"
" pmaports clone")
" pmaports clone"
)
# Meta section
ret: Dict[str, Dict[str, str | Dict[str, str]]] = {"channels": {}}
@ -149,8 +155,7 @@ def parse_channels_cfg(aports: Path):
channel_new = pmb.helpers.pmaports.get_channel_new(channel)
ret["channels"][channel_new] = {}
for key in ["description", "branch_pmaports", "branch_aports",
"mirrordir_alpine"]:
for key in ["description", "branch_pmaports", "branch_aports", "mirrordir_alpine"]:
value = cfg.get(channel, key)
# FIXME: how to type this properly??
ret["channels"][channel_new][key] = value # type: ignore[index]
@ -199,8 +204,10 @@ def pull(repo_name: str):
official_looking_branches = "master, v24.06, …"
else:
official_looking_branches = "master, 3.20-stable, …"
logging.warning(f"{msg_start} not on one of the official branches"
f" ({official_looking_branches}), skipping pull!")
logging.warning(
f"{msg_start} not on one of the official branches"
f" ({official_looking_branches}), skipping pull!"
)
return -1
# Skip if workdir is not clean
@ -212,9 +219,11 @@ def pull(repo_name: str):
branch_upstream = get_upstream_remote(repo) + "/" + branch
remote_ref = rev_parse(repo, branch + "@{u}", ["--abbrev-ref"])
if remote_ref != branch_upstream:
logging.warning("{} is tracking unexpected remote branch '{}' instead"
" of '{}'".format(msg_start, remote_ref,
branch_upstream))
logging.warning(
"{} is tracking unexpected remote branch '{}' instead" " of '{}'".format(
msg_start, remote_ref, branch_upstream
)
)
return -3
# Fetch (exception on failure, meaning connection to server broke)
@ -229,9 +238,11 @@ def pull(repo_name: str):
# Skip if we can't fast-forward
if not can_fast_forward(repo, branch_upstream):
logging.warning("{} can't fast-forward to {}, looks like you changed"
logging.warning(
"{} can't fast-forward to {}, looks like you changed"
" the git history of your local branch. Skipping pull!"
"".format(msg_start, branch_upstream))
"".format(msg_start, branch_upstream)
)
return -4
# Fast-forward now (should not fail due to checks above, so it's fine to
@ -247,8 +258,9 @@ def get_topdir(repo: Path):
:returns: a string with the top dir of the git repository,
or an empty string if it's not a git repository.
"""
res = pmb.helpers.run.user(["git", "rev-parse", "--show-toplevel"],
repo, output_return=True, check=False)
res = pmb.helpers.run.user(
["git", "rev-parse", "--show-toplevel"], repo, output_return=True, check=False
)
if not isinstance(res, str):
raise RuntimeError("Not a git repository: " + str(repo))
return res.strip()
@ -265,9 +277,9 @@ def get_files(repo: Path):
"""
ret = []
files = pmb.helpers.run.user_output(["git", "ls-files"], repo).split("\n")
files += pmb.helpers.run.user_output(["git", "ls-files",
"--exclude-standard", "--other"],
repo).split("\n")
files += pmb.helpers.run.user_output(
["git", "ls-files", "--exclude-standard", "--other"], repo
).split("\n")
for file in files:
if os.path.exists(f"{repo}/{file}"):
ret += [file]
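
For context: get_topdir and get_files above shell out to git, the first to resolve the repository root and the second to list tracked plus untracked-but-not-ignored files. Roughly equivalent standalone helpers using subprocess directly:

import subprocess
from pathlib import Path
from typing import List

def git_topdir(repo: Path) -> Path:
    """Top-level directory of the work tree that contains `repo`."""
    out = subprocess.run(
        ["git", "rev-parse", "--show-toplevel"],
        cwd=repo, capture_output=True, text=True, check=True,
    )
    return Path(out.stdout.strip())

def git_files(repo: Path) -> List[str]:
    """Tracked files plus untracked files that are not ignored."""
    def ls(*extra: str) -> List[str]:
        out = subprocess.run(["git", "ls-files", *extra],
                             cwd=repo, capture_output=True, text=True, check=True)
        return out.stdout.splitlines()

    return [f for f in ls() + ls("--exclude-standard", "--other") if f]

print(git_files(git_topdir(Path.cwd())))  # run inside a git checkout
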


@ -11,13 +11,13 @@ import urllib.request
from pmb.core.context import get_context
import pmb.helpers.run
def cache_file(prefix: str, url: str) -> Path:
prefix = prefix.replace("/", "_")
return Path(f"{prefix}_{hashlib.sha256(url.encode('utf-8')).hexdigest()}")
def download(url, prefix, cache=True, loglevel=logging.INFO,
allow_404=False):
def download(url, prefix, cache=True, loglevel=logging.INFO, allow_404=False):
"""Download a file to disk.
:param url: the http(s) address of to the file to download
@ -47,8 +47,7 @@ def download(url, prefix, cache=True, loglevel=logging.INFO,
# Offline and not cached
if context.offline:
raise RuntimeError("File not found in cache and offline flag is"
f" enabled: {url}")
raise RuntimeError("File not found in cache and offline flag is" f" enabled: {url}")
# Download the file
logging.log(loglevel, "Download " + url)
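
For context: cache_file() above derives a stable on-disk name from the prefix and a SHA-256 of the URL, which is what makes the offline cache lookup possible. The same computation in isolation, with an illustrative URL:

import hashlib
from pathlib import Path

def cache_name(prefix: str, url: str) -> Path:
    """Cache file name: sanitised prefix plus the SHA-256 hex digest of the URL."""
    prefix = prefix.replace("/", "_")
    return Path(f"{prefix}_{hashlib.sha256(url.encode('utf-8')).hexdigest()}")

print(cache_name("APKINDEX", "https://example.org/alpine/edge/main/x86_64/APKINDEX.tar.gz"))
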


@ -12,6 +12,7 @@ import pmb.build
import pmb.helpers.run
import pmb.helpers.pmaports
# FIXME: dest_paths[repo], repo expected to be a Literal.
# We should really make Config.mirrors not a TypedDict.
# mypy: disable-error-code="index"
@ -57,8 +58,11 @@ def check(pkgnames: Sequence[str]):
# For each pkgrepo run the linter on the relevant packages
for pkgrepo, apkbuild_paths in apkbuilds.items():
pmb.chroot.root(["apkbuild-lint"] + apkbuild_paths,
check=False, output="stdout",
pmb.chroot.root(
["apkbuild-lint"] + apkbuild_paths,
check=False,
output="stdout",
output_return=True,
working_dir=dest_paths[repo],
env={"CUSTOM_VALID_OPTIONS": " ".join(options)})
env={"CUSTOM_VALID_OPTIONS": " ".join(options)},
)


@ -19,6 +19,7 @@ DEBUG = logging.DEBUG
NOTSET = logging.NOTSET
VERBOSE = 5
class log_handler(logging.StreamHandler):
"""Write to stdout and to the already opened log file."""
@ -32,9 +33,7 @@ class log_handler(logging.StreamHandler):
msg = self.format(record)
# INFO or higher: Write to stdout
if (self.details_to_stdout or
(not self.quiet and
record.levelno >= logging.INFO)):
if self.details_to_stdout or (not self.quiet and record.levelno >= logging.INFO):
stream = self.stream
styles = pmb.config.styles
@ -102,11 +101,14 @@ def add_verbose_log_level():
"""
setattr(logging, "VERBOSE", VERBOSE)
logging.addLevelName(VERBOSE, "VERBOSE")
setattr(logging.Logger, "verbose", lambda inst, msg, * \
args, **kwargs: inst.log(VERBOSE, msg, *args, **kwargs))
setattr(logging, "verbose", lambda msg, *args, **kwargs: logging.log(VERBOSE,
msg, *args,
**kwargs))
setattr(
logging.Logger,
"verbose",
lambda inst, msg, *args, **kwargs: inst.log(VERBOSE, msg, *args, **kwargs),
)
setattr(
logging, "verbose", lambda msg, *args, **kwargs: logging.log(VERBOSE, msg, *args, **kwargs)
)
def init(logfile: Path, verbose: bool, details_to_stdout: bool = False):
@ -131,8 +133,7 @@ def init(logfile: Path, verbose: bool, details_to_stdout: bool=False):
# Set log format
root_logger = logging.getLogger()
root_logger.handlers = []
formatter = logging.Formatter("[%(asctime)s] %(message)s",
datefmt="%H:%M:%S")
formatter = logging.Formatter("[%(asctime)s] %(message)s", datefmt="%H:%M:%S")
# Set log level
add_verbose_log_level()
@ -159,6 +160,7 @@ def disable():
# We have our own logging wrappers so we can make mypy happy
# by not calling the (undefined) logging.verbose() function.
def critical(msg: object, *args, **kwargs):
logging.critical(msg, *args, **kwargs)
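
For context: add_verbose_log_level() above registers a custom VERBOSE level below DEBUG and patches verbose() onto both the Logger class and the logging module, which is why the typed wrappers at the end of the file exist. A self-contained sketch of the same trick:

import logging

VERBOSE = 5  # below DEBUG (10)

def add_verbose_log_level() -> None:
    """Register VERBOSE and expose logging.verbose() / Logger.verbose()."""
    logging.addLevelName(VERBOSE, "VERBOSE")
    setattr(logging.Logger, "verbose",
            lambda self, msg, *args, **kwargs: self.log(VERBOSE, msg, *args, **kwargs))
    setattr(logging, "verbose",
            lambda msg, *args, **kwargs: logging.log(VERBOSE, msg, *args, **kwargs))

add_verbose_log_level()
logging.basicConfig(level=VERBOSE, format="[%(asctime)s] %(message)s", datefmt="%H:%M:%S")
logging.verbose("only visible because the root level is VERBOSE")
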


@ -69,8 +69,7 @@ def bind_file(source: Path, destination: Path, create_folders=False):
pmb.helpers.run.root(["touch", destination])
# Mount
pmb.helpers.run.root(["mount", "--bind", source,
destination])
pmb.helpers.run.root(["mount", "--bind", source, destination])
def umount_all_list(prefix: Path, source: Path = Path("/proc/mounts")) -> List[Path]:
@ -112,6 +111,5 @@ def mount_device_rootfs(chroot_rootfs: Chroot) -> PurePath:
:returns: the mountpoint (relative to the native chroot)
"""
mountpoint = PurePath("/mnt", chroot_rootfs.dirname)
pmb.helpers.mount.bind(chroot_rootfs.path,
Chroot.native() / mountpoint)
pmb.helpers.mount.bind(chroot_rootfs.path, Chroot.native() / mountpoint)
return mountpoint


@ -15,7 +15,6 @@ import pmb.helpers.run
from typing import Dict, Any
def folder_size(path: Path):
"""Run `du` to calculate the size of a folder.
@ -24,8 +23,7 @@ def folder_size(path: Path):
:returns: folder size in kilobytes
"""
output = pmb.helpers.run.root(["du", "-ks",
path], output_return=True)
output = pmb.helpers.run.root(["du", "-ks", path], output_return=True)
# Only look at last line to filter out sudo garbage (#1766)
last_line = output.split("\n")[-2]
@ -44,8 +42,9 @@ def check_grsec():
if not os.path.exists(path):
return
raise RuntimeError("You're running a kernel based on the grsec"
" patchset. This is not supported.")
raise RuntimeError(
"You're running a kernel based on the grsec" " patchset. This is not supported."
)
def check_binfmt_misc():
@ -64,8 +63,9 @@ def check_binfmt_misc():
pmb.helpers.run.root(["modprobe", "binfmt_misc"], check=False)
# check=False: we check it below and print a more helpful message on error
pmb.helpers.run.root(["mount", "-t", "binfmt_misc", "none",
"/proc/sys/fs/binfmt_misc"], check=False)
pmb.helpers.run.root(
["mount", "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"], check=False
)
if not os.path.exists(path):
link = "https://postmarketos.org/binfmt_misc"
@ -91,9 +91,10 @@ def migrate_work_folder(args: PmbArgs):
required = pmb.config.work_version
if current == required:
return
logging.info("WARNING: Your work folder version needs to be migrated"
" (from version " + str(current) + " to " + str(required) +
")!")
logging.info(
"WARNING: Your work folder version needs to be migrated"
" (from version " + str(current) + " to " + str(required) + ")!"
)
# 0 => 1
if current == 0:
@ -110,8 +111,7 @@ def migrate_work_folder(args: PmbArgs):
pmb.chroot.zap(False)
conf = context.config.work / "config_abuild/abuild.conf"
if os.path.exists(conf):
pmb.helpers.run.root(["sed", "-i",
"s./home/user/./home/pmos/.g", conf])
pmb.helpers.run.root(["sed", "-i", "s./home/user/./home/pmos/.g", conf])
# Update version file
migrate_success(context.config.work, 1)
current = 1
@ -128,9 +128,11 @@ def migrate_work_folder(args: PmbArgs):
# Fix permissions
dir = "/var/cache/distfiles"
for cmd in [["chown", "-R", "root:abuild", dir],
for cmd in [
["chown", "-R", "root:abuild", dir],
["chmod", "-R", "664", dir],
["chmod", "a+X", dir]]:
["chmod", "a+X", dir],
]:
pmb.chroot.root(cmd)
migrate_success(context.config.work, 2)
current = 2
@ -196,13 +198,14 @@ def migrate_work_folder(args: PmbArgs):
new_path = edge_path / arch
if old_path.exists():
if new_path.exists():
raise RuntimeError(f"Won't move '{old_path}' to"
raise RuntimeError(
f"Won't move '{old_path}' to"
f" '{new_path}', destination already"
" exists! Consider 'pmbootstrap zap -p'"
f" to delete '{context.config.work}/packages'.")
f" to delete '{context.config.work}/packages'."
)
pmb.helpers.run.root(["mv", old_path, new_path])
pmb.helpers.run.root(["chown", pmb.config.chroot_uid_user,
edge_path])
pmb.helpers.run.root(["chown", pmb.config.chroot_uid_user, edge_path])
# Update version file
migrate_success(context.config.work, 5)
@ -238,12 +241,14 @@ def migrate_work_folder(args: PmbArgs):
# Can't migrate, user must delete it
if current != required:
raise RuntimeError("Sorry, we can't migrate that automatically. Please"
raise RuntimeError(
"Sorry, we can't migrate that automatically. Please"
" run 'pmbootstrap shutdown', then delete your"
" current work folder manually ('sudo rm -rf "
f"{context.config.work}') and start over with 'pmbootstrap"
" init'. All your binary packages and caches will"
" be lost.")
" be lost."
)
def validate_hostname(hostname):
@ -259,14 +264,15 @@ def validate_hostname(hostname):
# Check that it only contains valid chars
if not re.match(r"^[0-9a-z-\.]*$", hostname):
logging.fatal("ERROR: Hostname must only contain letters (a-z),"
" digits (0-9), minus signs (-), or periods (.)")
logging.fatal(
"ERROR: Hostname must only contain letters (a-z),"
" digits (0-9), minus signs (-), or periods (.)"
)
return False
# Check that doesn't begin or end with a minus sign or period
if re.search(r"^-|^\.|-$|\.$", hostname):
logging.fatal("ERROR: Hostname must not begin or end with a minus"
" sign or period")
logging.fatal("ERROR: Hostname must not begin or end with a minus" " sign or period")
return False
return True
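
For context: folder_size() above parses `du -ks` output and deliberately reads only the last real line to skip any sudo noise (#1766). A standalone version of the same parsing:

import subprocess
from pathlib import Path

def folder_size_kb(path: Path) -> int:
    """Size of a folder in kilobytes, taken from the last line of `du -ks`."""
    output = subprocess.run(["du", "-ks", str(path)],
                            check=True, capture_output=True, text=True).stdout
    last_line = output.strip().split("\n")[-1]  # skip any leading sudo/doas noise
    return int(last_line.split()[0])

print(folder_size_kb(Path(".")))
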


@ -8,6 +8,7 @@ See also:
- pmb/helpers/repo.py (work with binary package repos)
"""
import copy
from typing import Any, Dict
from pmb.core.arch import Arch
@ -52,11 +53,13 @@ def get(pkgname, arch, replace_subpkgnames=False, must_exist=True):
ret: Dict[str, Any] = {}
pmaport = pmb.helpers.pmaports.get(pkgname, False)
if pmaport:
ret = {"arch": pmaport["arch"],
ret = {
"arch": pmaport["arch"],
"depends": pmb.build._package.get_depends(get_context(), pmaport),
"pkgname": pmaport["pkgname"],
"provides": pmaport["provides"],
"version": pmaport["pkgver"] + "-r" + pmaport["pkgrel"]}
"version": pmaport["pkgver"] + "-r" + pmaport["pkgrel"],
}
# Find in APKINDEX (given arch)
if not ret or not pmb.helpers.pmaports.check_arches(ret["arch"], arch):
@ -93,8 +96,7 @@ def get(pkgname, arch, replace_subpkgnames=False, must_exist=True):
for depend in ret["depends"]:
depend_data = get(depend, arch, must_exist=False)
if not depend_data:
logging.warning(f"WARNING: {pkgname}: failed to resolve"
f" dependency '{depend}'")
logging.warning(f"WARNING: {pkgname}: failed to resolve" f" dependency '{depend}'")
# Can't replace potential subpkgname
if depend not in depends_new:
depends_new += [depend]
@ -111,8 +113,10 @@ def get(pkgname, arch, replace_subpkgnames=False, must_exist=True):
# Could not find the package
if not must_exist:
return None
raise RuntimeError("Package '" + pkgname + "': Could not find aport, and"
" could not find this package in any APKINDEX!")
raise RuntimeError(
"Package '" + pkgname + "': Could not find aport, and"
" could not find this package in any APKINDEX!"
)
@Cache("pkgname", "arch")


@ -25,8 +25,16 @@ def package(args: PmbArgs, pkgname, reason="", dry=False):
pkgrel_new = pkgrel + 1
# Display the message, bail out in dry mode
logging.info("Increase '" + pkgname + "' pkgrel (" + str(pkgrel) + " -> " +
str(pkgrel_new) + ")" + reason)
logging.info(
"Increase '"
+ pkgname
+ "' pkgrel ("
+ str(pkgrel)
+ " -> "
+ str(pkgrel_new)
+ ")"
+ reason
)
if dry:
return
@ -39,9 +47,11 @@ def package(args: PmbArgs, pkgname, reason="", dry=False):
pmb.parse.apkbuild.cache_clear()
apkbuild = pmb.parse.apkbuild(path)
if int(apkbuild["pkgrel"]) != pkgrel_new:
raise RuntimeError(f"Failed to bump pkgrel for package '{pkgname}'."
raise RuntimeError(
f"Failed to bump pkgrel for package '{pkgname}'."
" Make sure that there's a line with exactly the"
f" string '{old.strip()}' and nothing else in: {path}")
f" string '{old.strip()}' and nothing else in: {path}"
)
def auto_apkindex_package(args: PmbArgs, arch, aport, apk, dry=False):
@ -62,42 +72,38 @@ def auto_apkindex_package(args: PmbArgs, arch, aport, apk, dry=False):
# Skip when aport version != binary package version
compare = pmb.parse.version.compare(version_aport, version_apk)
if compare == -1:
logging.warning("{}: skipping, because the aport version {} is lower"
" than the binary version {}".format(pkgname,
version_aport,
version_apk))
logging.warning(
"{}: skipping, because the aport version {} is lower"
" than the binary version {}".format(pkgname, version_aport, version_apk)
)
return
if compare == 1:
logging.verbose("{}: skipping, because the aport version {} is higher"
" than the binary version {}".format(pkgname,
version_aport,
version_apk))
logging.verbose(
"{}: skipping, because the aport version {} is higher"
" than the binary version {}".format(pkgname, version_aport, version_apk)
)
return
# Find missing depends
depends = apk["depends"]
logging.verbose("{}: checking depends: {}".format(pkgname,
", ".join(depends)))
logging.verbose("{}: checking depends: {}".format(pkgname, ", ".join(depends)))
missing = []
for depend in depends:
if depend.startswith("!"):
# Ignore conflict-dependencies
continue
providers = pmb.parse.apkindex.providers(depend, arch,
must_exist=False)
providers = pmb.parse.apkindex.providers(depend, arch, must_exist=False)
if providers == {}:
# We're only interested in missing depends starting with "so:"
# (which means dynamic libraries that the package was linked
# against) and packages for which no aport exists.
if (depend.startswith("so:") or
not pmb.helpers.pmaports.find_optional(depend)):
if depend.startswith("so:") or not pmb.helpers.pmaports.find_optional(depend):
missing.append(depend)
# Increase pkgrel
if len(missing):
package(args, pkgname, reason=", missing depend(s): " +
", ".join(missing), dry=dry)
package(args, pkgname, reason=", missing depend(s): " + ", ".join(missing), dry=dry)
return True
@ -113,13 +119,11 @@ def auto(args: PmbArgs, dry=False):
origin = apk["origin"]
# Only increase once!
if origin in ret:
logging.verbose(
f"{pkgname}: origin '{origin}' found again")
logging.verbose(f"{pkgname}: origin '{origin}' found again")
continue
aport_path = pmb.helpers.pmaports.find_optional(origin)
if not aport_path:
logging.warning("{}: origin '{}' aport not found".format(
pkgname, origin))
logging.warning("{}: origin '{}' aport not found".format(pkgname, origin))
continue
aport = pmb.parse.apkbuild(aport_path)
if auto_apkindex_package(args, arch, aport, apk, dry):


@ -6,6 +6,7 @@ See also:
- pmb/helpers/repo.py (work with binary package repos)
- pmb/helpers/package.py (work with both)
"""
from pmb.core.context import get_context
from pmb.core.arch import Arch
from pmb.core.pkgrepo import pkgrepo_iter_package_dirs
@ -16,6 +17,7 @@ from typing import Any, Optional, Sequence, Dict, Tuple
from pmb.meta import Cache
import pmb.parse
def _find_apkbuilds(skip_extra_repos=False) -> Dict[str, Path]:
# Try to get a cached result first (we assume that the aports don't change
# in one pmbootstrap call)
@ -27,8 +29,10 @@ def _find_apkbuilds(skip_extra_repos=False) -> Dict[str, Path]:
for package in pkgrepo_iter_package_dirs(skip_extra_repos=skip_extra_repos):
pkgname = package.name
if pkgname in apkbuilds:
raise RuntimeError(f"Package {pkgname} found in multiple aports "
"subfolders. Please put it only in one folder.")
raise RuntimeError(
f"Package {pkgname} found in multiple aports "
"subfolders. Please put it only in one folder."
)
apkbuilds[pkgname] = package / "APKBUILD"
# Sort dictionary so we don't need to do it over and over again in
@ -57,13 +61,18 @@ def guess_main_dev(subpkgname) -> Optional[Path]:
pkgname = subpkgname[:-4]
path = _find_apkbuilds().get(pkgname)
if path:
logging.verbose(subpkgname + ": guessed to be a subpackage of " +
pkgname + " (just removed '-dev')")
logging.verbose(
subpkgname + ": guessed to be a subpackage of " + pkgname + " (just removed '-dev')"
)
return path.parent
logging.verbose(subpkgname + ": guessed to be a subpackage of " + pkgname +
", which we can't find in pmaports, so it's probably in"
" Alpine")
logging.verbose(
subpkgname
+ ": guessed to be a subpackage of "
+ pkgname
+ ", which we can't find in pmaports, so it's probably in"
" Alpine"
)
return None
@ -99,12 +108,12 @@ def guess_main(subpkgname) -> Optional[Path]:
# Look in pmaports
path = _find_apkbuilds().get(pkgname)
if path:
logging.verbose(subpkgname + ": guessed to be a subpackage of " +
pkgname)
logging.verbose(subpkgname + ": guessed to be a subpackage of " + pkgname)
return path.parent
return None
def _find_package_in_apkbuild(package: str, path: Path) -> bool:
"""Look through subpackages and all provides to see if the APKBUILD at the specified path
contains (or provides) the specified package.
@ -190,8 +199,7 @@ def find(package, must_exist=True, subpackages=True, skip_extra_repos=False):
# Crash when necessary
if ret is None and must_exist:
raise RuntimeError("Could not find aport for package: " +
package)
raise RuntimeError("Could not find aport for package: " + package)
return ret
@ -205,7 +213,9 @@ def find_optional(package: str) -> Optional[Path]:
# The only caller with subpackages=False is ui.check_option()
@Cache("pkgname", subpackages=True)
def get_with_path(pkgname, must_exist=True, subpackages=True, skip_extra_repos=False) -> Tuple[Optional[Path], Optional[Dict[str, Any]]]:
def get_with_path(
pkgname, must_exist=True, subpackages=True, skip_extra_repos=False
) -> Tuple[Optional[Path], Optional[Dict[str, Any]]]:
"""Find and parse an APKBUILD file.
Run 'pmbootstrap apkbuild_parse hello-world' for a full output example.
@ -257,8 +267,7 @@ def find_providers( provide):
if provides.split("=", 1)[0] == provide:
providers[subpkgname] = subpkg
return sorted(providers.items(), reverse=True,
key=lambda p: p[1].get('provider_priority', 0))
return sorted(providers.items(), reverse=True, key=lambda p: p[1].get("provider_priority", 0))
# FIXME (#2324): split into an _optional variant or drop must_exist
@ -315,6 +324,7 @@ def get_channel_new(channel: str) -> str:
return ret
return channel
def require_bootstrap_error(repo, arch, trigger_str):
"""
Tell the user that they need to do repo_bootstrap, with some context.
@ -323,10 +333,13 @@ def require_bootstrap_error(repo, arch, trigger_str):
:param arch: for which architecture
:param trigger_str: message for the user to understand what caused this
"""
logging.info(f"ERROR: Trying to {trigger_str} with {repo} enabled, but the"
f" {repo} repo needs to be bootstrapped first.")
raise RuntimeError(f"Run 'pmbootstrap repo_bootstrap {repo} --arch={arch}'"
" and then try again.")
logging.info(
f"ERROR: Trying to {trigger_str} with {repo} enabled, but the"
f" {repo} repo needs to be bootstrapped first."
)
raise RuntimeError(
f"Run 'pmbootstrap repo_bootstrap {repo} --arch={arch}'" " and then try again."
)
def require_bootstrap(arch, trigger_str):
@ -338,8 +351,6 @@ def require_bootstrap(arch, trigger_str):
"""
if pmb.config.other.is_systemd_selected(get_context().config):
pmb.helpers.repo.update(arch)
pkg = pmb.parse.apkindex.package("postmarketos-base-systemd",
arch, False)
pkg = pmb.parse.apkindex.package("postmarketos-base-systemd", arch, False)
if not pkg:
require_bootstrap_error("systemd", arch, trigger_str)


@ -7,6 +7,7 @@ See also:
- pmb/helpers/pmaports.py (work with pmaports)
- pmb/helpers/package.py (work with both)
"""
import os
import hashlib
from pmb.core.context import get_context
@ -44,8 +45,8 @@ def apkindex_hash(url: str, length: int=8) -> Path:
ret = ""
for i in range(csum_bytes):
ret += xd[(binary[i] >> 4) & 0xf]
ret += xd[binary[i] & 0xf]
ret += xd[(binary[i] >> 4) & 0xF]
ret += xd[binary[i] & 0xF]
return Path(f"APKINDEX.{ret}.tar.gz")
@ -102,8 +103,9 @@ def urls(user_repository=False, mirrors_exclude: List[str] = []):
return ret
def apkindex_files(arch: Optional[Arch]=None, user_repository=True,
exclude_mirrors: List[str] = []) -> List[Path]:
def apkindex_files(
arch: Optional[Arch] = None, user_repository=True, exclude_mirrors: List[str] = []
) -> List[Path]:
"""Get a list of outside paths to all resolved APKINDEX.tar.gz files for a specific arch.
:param arch: defaults to native
@ -190,14 +192,18 @@ def update(arch: Optional[Arch]=None, force=False, existing_only=False):
# Bail out or show log message
if not len(outdated):
return False
logging.info("Update package index for " + ", ".join([str(a) for a in outdated_arches]) +
" (" + str(len(outdated)) + " file(s))")
logging.info(
"Update package index for "
+ ", ".join([str(a) for a in outdated_arches])
+ " ("
+ str(len(outdated))
+ " file(s))"
)
# Download and move to right location
for (i, (url, target)) in enumerate(outdated.items()):
for i, (url, target) in enumerate(outdated.items()):
pmb.helpers.cli.progress_print(i / len(outdated))
temp = pmb.helpers.http.download(url, "APKINDEX", False,
logging.DEBUG, True)
temp = pmb.helpers.http.download(url, "APKINDEX", False, logging.DEBUG, True)
if not temp:
pmb.helpers.other.cache[cache_key]["404"].append(url)
continue
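
For context: apkindex_hash() above converts the leading bytes of a digest to hex one nibble at a time (the hunk itself only reformats 0xf to 0xF). Since nibble values never exceed 15, only the hex part of the alphabet matters, so the loop is equivalent to a plain hex digest prefix. A sketch of just that conversion, with the alphabet assumed:

def hex_prefix(binary: bytes, length: int = 8) -> str:
    """Nibble-by-nibble hex conversion of the first length/2 bytes, as above."""
    xd = "0123456789abcdef"  # assumption: only the hex portion of the table is reached
    ret = ""
    for i in range(length // 2):
        ret += xd[(binary[i] >> 4) & 0xF]
        ret += xd[binary[i] & 0xF]
    return ret

digest = bytes.fromhex("a1b2c3d4e5f6")
assert hex_prefix(digest, 8) == digest[:4].hex() == "a1b2c3d4"
print(f"APKINDEX.{hex_prefix(digest)}.tar.gz")
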


@ -71,15 +71,19 @@ def get_relevant_packages(arch, pkgname=None, built=False):
if built:
ret = filter_aport_packages(ret)
if not len(ret):
logging.info("NOTE: no aport found for any package in the"
logging.info(
"NOTE: no aport found for any package in the"
" dependency tree, it seems they are all provided by"
" upstream (Alpine).")
" upstream (Alpine)."
)
else:
ret = filter_missing_packages(arch, ret)
if not len(ret):
logging.info("NOTE: all relevant packages are up to date, use"
logging.info(
"NOTE: all relevant packages are up to date, use"
" --built to include the ones that have already been"
" built.")
" built."
)
# Sort alphabetically (to get a deterministic build order)
ret.sort()
@ -105,10 +109,14 @@ def generate_output_format(arch, pkgnames):
ret = []
for pkgname in pkgnames:
entry = pmb.helpers.package.get(pkgname, arch, True)
ret += [{"pkgname": entry["pkgname"],
ret += [
{
"pkgname": entry["pkgname"],
"repo": pmb.helpers.pmaports.get_repo(pkgname),
"version": entry["version"],
"depends": entry["depends"]}]
"depends": entry["depends"],
}
]
return ret
@ -124,8 +132,7 @@ def generate(arch, overview, pkgname=None, built=False):
"""
# Log message
packages_str = pkgname if pkgname else "all packages"
logging.info("Calculate packages that need to be built ({}, {})"
"".format(packages_str, arch))
logging.info("Calculate packages that need to be built ({}, {})" "".format(packages_str, arch))
# Order relevant packages
ret = get_relevant_packages(arch, pkgname, built)


@ -9,8 +9,15 @@ from typing import Optional, Sequence
from pmb.types import Env, PathString
def user(cmd: Sequence[PathString], working_dir: Optional[Path] = None, output: str = "log", output_return: bool = False,
check: Optional[bool] = None, env: Env = {}, sudo: bool = False) -> str | int | subprocess.Popen:
def user(
cmd: Sequence[PathString],
working_dir: Optional[Path] = None,
output: str = "log",
output_return: bool = False,
check: Optional[bool] = None,
env: Env = {},
sudo: bool = False,
) -> str | int | subprocess.Popen:
"""
Run a command on the host system as user.
@ -40,12 +47,20 @@ def user(cmd: Sequence[PathString], working_dir: Optional[Path] = None, output:
pmb.helpers.run_core.add_proxy_env_vars(env)
if env:
cmd_parts = ["sh", "-c", pmb.helpers.run_core.flat_cmd([cmd_parts], env=env)]
return pmb.helpers.run_core.core(msg, cmd_parts, working_dir, output,
output_return, check, sudo)
return pmb.helpers.run_core.core(
msg, cmd_parts, working_dir, output, output_return, check, sudo
)
# FIXME: should probably use some kind of wrapper class / builder pattern for all these parameters...
def user_output(cmd: Sequence[PathString], working_dir: Optional[Path] = None, output: str = "log",
check: Optional[bool] = None, env: Env = {}, sudo: bool = False) -> str:
def user_output(
cmd: Sequence[PathString],
working_dir: Optional[Path] = None,
output: str = "log",
check: Optional[bool] = None,
env: Env = {},
sudo: bool = False,
) -> str:
ret = user(cmd, working_dir, output, output_return=True, check=check, env=env, sudo=sudo)
if not isinstance(ret, str):
raise TypeError("Expected str output, got " + str(ret))
@ -53,8 +68,14 @@ def user_output(cmd: Sequence[PathString], working_dir: Optional[Path] = None, o
return ret
def root(cmd: Sequence[PathString], working_dir=None, output="log", output_return=False,
check=None, env={}):
def root(
cmd: Sequence[PathString],
working_dir=None,
output="log",
output_return=False,
check=None,
env={},
):
"""Run a command on the host system as root, with sudo or doas.
:param env: dict of environment variables to be passed to the command, e.g.
@ -70,5 +91,4 @@ def root(cmd: Sequence[PathString], working_dir=None, output="log", output_retur
cmd = ["sh", "-c", pmb.helpers.run_core.flat_cmd([cmd], env=env)]
cmd = pmb.config.sudo(cmd)
return user(cmd, working_dir, output, output_return, check, env,
True)
return user(cmd, working_dir, output, output_return, check, env, True)


@ -21,7 +21,9 @@ import pmb.helpers.run
called by core(). """
def flat_cmd(cmds: Sequence[Sequence[PathString]], working_dir: Optional[Path]=None, env: Env={}):
def flat_cmd(
cmds: Sequence[Sequence[PathString]], working_dir: Optional[Path] = None, env: Env = {}
):
"""Convert a shell command passed as list into a flat shell string with proper escaping.
:param cmds: list of commands as list, e.g. ["echo", "string with spaces"]
@ -74,24 +76,34 @@ def sanity_checks(output="log", output_return=False, check=None):
def background(cmd, working_dir=None):
"""Run a subprocess in background and redirect its output to the log."""
ret = subprocess.Popen(cmd, stdout=pmb.helpers.logging.logfd,
stderr=pmb.helpers.logging.logfd, cwd=working_dir)
ret = subprocess.Popen(
cmd, stdout=pmb.helpers.logging.logfd, stderr=pmb.helpers.logging.logfd, cwd=working_dir
)
logging.debug(f"New background process: pid={ret.pid}, output=background")
return ret
def pipe(cmd, working_dir=None):
"""Run a subprocess in background and redirect its output to a pipe."""
ret = subprocess.Popen(cmd, stdout=subprocess.PIPE,
ret = subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stdin=subprocess.DEVNULL,
stderr=pmb.helpers.logging.logfd, cwd=working_dir)
stderr=pmb.helpers.logging.logfd,
cwd=working_dir,
)
logging.verbose(f"New background process: pid={ret.pid}, output=pipe")
return ret
# FIXME (#2324): These types make no sense at all
def pipe_read(process, output_to_stdout=False, output_log=True, output_return=False,
output_return_buffer=False):
def pipe_read(
process,
output_to_stdout=False,
output_log=True,
output_return=False,
output_return_buffer=False,
):
"""Read all output from a subprocess, copy it to the log and optionally stdout and a buffer variable.
This is only meant to be called by foreground_pipe() below.
@ -130,13 +142,11 @@ def kill_process_tree(pid, ppids, sudo):
:param sudo: use sudo to kill the process
"""
if sudo:
pmb.helpers.run.root(["kill", "-9", str(pid)],
check=False)
pmb.helpers.run.root(["kill", "-9", str(pid)], check=False)
else:
pmb.helpers.run.user(["kill", "-9", str(pid)],
check=False)
pmb.helpers.run.user(["kill", "-9", str(pid)], check=False)
for (child_pid, child_ppid) in ppids:
for child_pid, child_ppid in ppids:
if child_ppid == str(pid):
kill_process_tree(child_pid, ppids, sudo)
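
For context: kill_process_tree above walks a pid/ppid snapshot recursively, and kill_command in the next hunk produces that snapshot from `ps -e -o pid,ppid`. A condensed sketch of the two combined, without the sudo branch:

import subprocess
from typing import List, Optional, Tuple

def kill_tree(pid: int, ppids: Optional[List[Tuple[str, ...]]] = None) -> None:
    """SIGKILL a process and, recursively, every child found in one ps snapshot."""
    if ppids is None:
        rows = subprocess.run(["ps", "-e", "-o", "pid,ppid"], check=True,
                              capture_output=True, text=True).stdout.rstrip().split("\n")[1:]
        ppids = [tuple(row.split()) for row in rows if len(row.split()) == 2]
    subprocess.run(["kill", "-9", str(pid)], check=False)
    for child_pid, child_ppid in ppids:
        if child_ppid == str(pid):
            kill_tree(int(child_pid), ppids)
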
@ -150,7 +160,7 @@ def kill_command(pid, sudo):
cmd = ["ps", "-e", "-o", "pid,ppid"]
ret = subprocess.run(cmd, check=True, stdout=subprocess.PIPE)
ppids = []
proc_entries = ret.stdout.decode("utf-8").rstrip().split('\n')[1:]
proc_entries = ret.stdout.decode("utf-8").rstrip().split("\n")[1:]
for row in proc_entries:
items = row.split()
if len(items) != 2:
@ -160,9 +170,16 @@ def kill_command(pid, sudo):
kill_process_tree(pid, ppids, sudo)
def foreground_pipe(cmd, working_dir=None, output_to_stdout=False,
output_return=False, output_log=True, output_timeout=True,
sudo=False, stdin=None):
def foreground_pipe(
cmd,
working_dir=None,
output_to_stdout=False,
output_return=False,
output_log=True,
output_timeout=True,
sudo=False,
stdin=None,
):
"""Run a subprocess in foreground with redirected output.
Optionally kill it after being silent for too long.
@ -182,9 +199,9 @@ def foreground_pipe(cmd, working_dir=None, output_to_stdout=False,
"""
context = pmb.core.context.get_context()
# Start process in background (stdout and stderr combined)
process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, cwd=working_dir,
stdin=stdin)
process = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=working_dir, stdin=stdin
)
# Make process.stdout non-blocking
stdout = process.stdout or None
@ -210,16 +227,15 @@ def foreground_pipe(cmd, working_dir=None, output_to_stdout=False,
if output_timeout:
wait_end = time.perf_counter()
if wait_end - wait_start >= timeout:
logging.info("Process did not write any output for " +
str(timeout) + " seconds. Killing it.")
logging.info("NOTE: The timeout can be increased with"
" 'pmbootstrap -t'.")
logging.info(
"Process did not write any output for " + str(timeout) + " seconds. Killing it."
)
logging.info("NOTE: The timeout can be increased with" " 'pmbootstrap -t'.")
kill_command(process.pid, sudo)
continue
# Read all currently available output
pipe_read(process, output_to_stdout, output_log, output_return,
output_buffer)
pipe_read(process, output_to_stdout, output_log, output_return, output_buffer)
# There may still be output after the process quit
pipe_read(process, output_to_stdout, output_log, output_return, output_buffer)
@ -236,8 +252,7 @@ def foreground_tui(cmd, working_dir=None):
This is the only way text-based user interfaces (ncurses programs like
vim, nano or the kernel's menuconfig) work properly.
"""
logging.debug("*** output passed to pmbootstrap stdout, not to this log"
" ***")
logging.debug("*** output passed to pmbootstrap stdout, not to this log" " ***")
process = subprocess.Popen(cmd, cwd=working_dir)
return process.wait()
@ -254,10 +269,11 @@ def check_return_code(code, log_message):
if code:
logging.debug("^" * 70)
log_file = get_context().log
logging.info("NOTE: The failed command's output is above the ^^^ line"
f" in the log file: {log_file}")
raise RuntimeError(f"Command failed (exit code {str(code)}): " +
log_message)
logging.info(
"NOTE: The failed command's output is above the ^^^ line"
f" in the log file: {log_file}"
)
raise RuntimeError(f"Command failed (exit code {str(code)}): " + log_message)
def sudo_timer_iterate():
@ -291,8 +307,7 @@ def add_proxy_env_vars(env):
"FTP_PROXY",
"HTTPS_PROXY",
"HTTP_PROXY",
"HTTP_PROXY_AUTH"
"ftp_proxy",
"HTTP_PROXY_AUTH" "ftp_proxy",
"http_proxy",
"https_proxy",
]
@ -302,8 +317,16 @@ def add_proxy_env_vars(env):
env[var] = os.environ[var]
def core(log_message, cmd, working_dir=None, output="log",
output_return=False, check=None, sudo=False, disable_timeout=False):
def core(
log_message,
cmd,
working_dir=None,
output="log",
output_return=False,
check=None,
sudo=False,
disable_timeout=False,
):
"""Run a command and create a log entry.
This is a low level function not meant to be used directly. Use one of the
@ -397,12 +420,16 @@ def core(log_message, cmd, working_dir=None, output="log",
stdin = subprocess.DEVNULL if output in ["log", "stdout"] else None
(code, output_after_run) = foreground_pipe(cmd, working_dir,
(code, output_after_run) = foreground_pipe(
cmd,
working_dir,
output_to_stdout,
output_return,
output != "null",
output_timeout,
sudo, stdin)
sudo,
stdin,
)
# Check the return code
if check is not False:
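The timeout handling in foreground_pipe() above boils down to: poll the process's combined output, remember when it last produced anything, and kill it once it has been silent for longer than the configured timeout. A self-contained sketch of that loop (select-based, standard library only; the real code uses non-blocking reads, copies output to the log and kills the whole process tree, with sudo if needed):

    import select
    import subprocess
    import time

    def run_with_silence_timeout(cmd, timeout=10):
        # Kill the process if it writes no output for `timeout` seconds
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        buf = b""
        last_output = time.perf_counter()
        while proc.poll() is None:
            ready, _, _ = select.select([proc.stdout], [], [], 1.0)
            if ready:
                chunk = proc.stdout.read1(4096)
                if chunk:
                    buf += chunk
                    last_output = time.perf_counter()
            elif time.perf_counter() - last_output >= timeout:
                proc.kill()
                break
        buf += proc.stdout.read()  # output that arrived right before exit
        return proc.wait(), buf.decode(errors="replace")

    # Prints something like (-9, 'hello\n') after roughly two silent seconds:
    print(run_with_silence_timeout(["sh", "-c", "echo hello; sleep 30"], timeout=2))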

View file

@ -13,9 +13,14 @@ def list_ui(arch):
:param arch: device architecture, for which the UIs must be available
:returns: [("none", "No graphical..."), ("weston", "Wayland reference...")]
"""
ret = [("none", "Bare minimum OS image for testing and manual"
" customization. The \"console\" UI should be selected if"
" a graphical UI is not desired.")]
ret = [
(
"none",
"Bare minimum OS image for testing and manual"
' customization. The "console" UI should be selected if'
" a graphical UI is not desired.",
)
]
for path in sorted(pkgrepo_iglob("main/postmarketos-ui-*")):
apkbuild = pmb.parse.apkbuild(path)
ui = os.path.basename(path).split("-", 2)[2]
@ -29,5 +34,7 @@ def check_option(ui, option, skip_extra_repos=False):
Check if an option, such as pmb:systemd, is inside an UI's APKBUILD.
"""
pkgname = f"postmarketos-ui-{ui}"
apkbuild = pmb.helpers.pmaports.get(pkgname, subpackages=False, skip_extra_repos=skip_extra_repos)
apkbuild = pmb.helpers.pmaports.get(
pkgname, subpackages=False, skip_extra_repos=skip_extra_repos
)
return option in apkbuild["options"]
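list_ui() above derives the UI name from the aport directory name by splitting on the first two dashes. The same step in isolation (the paths are made-up examples; on a real checkout they come from pkgrepo_iglob()):

    import os

    paths = ["main/postmarketos-ui-phosh", "main/postmarketos-ui-sway"]
    for path in sorted(paths):
        # "postmarketos-ui-phosh" -> ["postmarketos", "ui", "phosh"] -> "phosh"
        ui = os.path.basename(path).split("-", 2)[2]
        print(ui)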

View file

@ -67,7 +67,7 @@ def get_nonfree_packages(device):
["device-nokia-n900-nonfree-firmware"]
"""
# Read subpackages
device_path = pmb.helpers.devices.find_path(device, 'APKBUILD')
device_path = pmb.helpers.devices.find_path(device, "APKBUILD")
if not device_path:
raise RuntimeError(f"Device package not found for {device}")
@ -100,9 +100,11 @@ def get_kernel_package(config: Config):
# Sanity check
if config.kernel not in kernels:
raise RuntimeError("Selected kernel (" + config.kernel + ") is not"
raise RuntimeError(
"Selected kernel (" + config.kernel + ") is not"
" valid for device " + config.device + ". Please"
" run 'pmbootstrap init' to select a valid kernel.")
" run 'pmbootstrap init' to select a valid kernel."
)
# Selected kernel subpackage
return ["device-" + config.device + "-kernel-" + config.kernel]
@ -145,24 +147,24 @@ def copy_files_from_chroot(args: PmbArgs, chroot: Chroot):
rsync_flags = "-a"
if args.verbose:
rsync_flags += "vP"
pmb.chroot.root(["rsync", rsync_flags, "--delete"] + folders +
["/mnt/install/"], working_dir=mountpoint)
pmb.chroot.root(
["rsync", rsync_flags, "--delete"] + folders + ["/mnt/install/"], working_dir=mountpoint
)
pmb.chroot.root(["rm", "-rf", "/mnt/install/home"])
else:
pmb.chroot.root(["cp", "-a"] + folders + ["/mnt/install/"],
working_dir=mountpoint)
pmb.chroot.root(["cp", "-a"] + folders + ["/mnt/install/"], working_dir=mountpoint)
def create_home_from_skel(filesystem: str, user: str):
"""
Create /home/{user} from /etc/skel
"""
rootfs = (Chroot.native() / "mnt/install")
rootfs = Chroot.native() / "mnt/install"
# In btrfs, home subvol & home dir is created in format.py
if filesystem != "btrfs":
pmb.helpers.run.root(["mkdir", rootfs / "home"])
home = (rootfs / "home" / user)
home = rootfs / "home" / user
if (rootfs / "etc/skel").exists():
pmb.helpers.run.root(["cp", "-a", (rootfs / "etc/skel"), home])
else:
@ -184,19 +186,21 @@ def configure_apk(args: PmbArgs):
keys_dir = get_context().config.work / "config_apk_keys"
# Copy over keys
rootfs = (Chroot.native() / "mnt/install")
rootfs = Chroot.native() / "mnt/install"
for key in keys_dir.glob("*.pub"):
pmb.helpers.run.root(["cp", key, rootfs / "etc/apk/keys/"])
# Copy over the corresponding APKINDEX files from cache
index_files = pmb.helpers.repo.apkindex_files(arch=pmb.parse.deviceinfo().arch,
user_repository=False)
index_files = pmb.helpers.repo.apkindex_files(
arch=pmb.parse.deviceinfo().arch, user_repository=False
)
for f in index_files:
pmb.helpers.run.root(["cp", f, rootfs / "var/cache/apk/"])
# Disable pmbootstrap repository
pmb.helpers.run.root(["sed", "-i", r"/\/mnt\/pmbootstrap\/packages/d",
rootfs / "etc/apk/repositories"])
pmb.helpers.run.root(
["sed", "-i", r"/\/mnt\/pmbootstrap\/packages/d", rootfs / "etc/apk/repositories"]
)
pmb.helpers.run.user(["cat", rootfs / "etc/apk/repositories"])
@ -210,18 +214,17 @@ def set_user(config: Config):
"""
chroot = Chroot.rootfs(config.device)
if not pmb.chroot.user_exists(config.user, chroot):
pmb.chroot.root(["adduser", "-D", "-u", "10000", config.user],
chroot)
pmb.chroot.root(["adduser", "-D", "-u", "10000", config.user], chroot)
pmaports_cfg = pmb.config.pmaports.read_config()
groups = []
groups += pmaports_cfg.get("install_user_groups",
"audio,input,netdev,plugdev,video,wheel").split(",")
groups += pmaports_cfg.get(
"install_user_groups", "audio,input,netdev,plugdev,video,wheel"
).split(",")
groups += pmb.install.ui.get_groups(config)
for group in groups:
pmb.chroot.root(["addgroup", "-S", group], chroot,
check=False)
pmb.chroot.root(["addgroup", "-S", group], chroot, check=False)
pmb.chroot.root(["addgroup", config.user, group], chroot)
@ -243,8 +246,7 @@ def setup_login_chpasswd_user_from_arg(args: PmbArgs, user: str, chroot: Chroot)
with open(path_outside, "w", encoding="utf-8") as handle:
handle.write(f"{user}:{args.password}")
pmb.chroot.root(["sh", "-c", f"cat {shlex.quote(path)} | chpasswd"],
chroot)
pmb.chroot.root(["sh", "-c", f"cat {shlex.quote(path)} | chpasswd"], chroot)
os.unlink(path_outside)
@ -257,8 +259,9 @@ def is_root_locked(chroot: Chroot):
:param suffix: either rootfs_{args.device} or installer_{args.device}
"""
shadow_root = pmb.chroot.root(["grep", "^root:!:", "/etc/shadow"],
chroot, output_return=True, check=False)
shadow_root = pmb.chroot.root(
["grep", "^root:!:", "/etc/shadow"], chroot, output_return=True, check=False
)
return shadow_root.startswith("root:!:")
@ -278,12 +281,10 @@ def setup_login(args: PmbArgs, config: Config, chroot: Chroot):
else:
while True:
try:
pmb.chroot.root(["passwd", config.user], chroot,
output="interactive")
pmb.chroot.root(["passwd", config.user], chroot, output="interactive")
break
except RuntimeError:
logging.info("WARNING: Failed to set the password. Try it"
" one more time.")
logging.info("WARNING: Failed to set the password. Try it" " one more time.")
# Disable root login
if is_root_locked(chroot):
@ -305,9 +306,11 @@ def copy_ssh_keys(config: Config):
keys += infile.readlines()
if not len(keys):
logging.info("NOTE: Public SSH keys not found. Since no SSH keys "
logging.info(
"NOTE: Public SSH keys not found. Since no SSH keys "
"were copied, you will need to use SSH password "
"authentication!")
"authentication!"
)
return
authorized_keys = Chroot.native() / "tmp/authorized_keys"
@ -333,20 +336,20 @@ def setup_keymap(config: Config):
if not deviceinfo.keymaps or deviceinfo.keymaps.strip() == "":
logging.info("NOTE: No valid keymap specified for device")
return
options = deviceinfo.keymaps.split(' ')
if (config.keymap != "" and
config.keymap is not None and
config.keymap in options):
options = deviceinfo.keymaps.split(" ")
if config.keymap != "" and config.keymap is not None and config.keymap in options:
layout, variant = config.keymap.split("/")
pmb.chroot.root(["setup-keymap", layout, variant], chroot,
output="interactive")
pmb.chroot.root(["setup-keymap", layout, variant], chroot, output="interactive")
# Check xorg config
xconfig = None
if (chroot / "etc/X11/xorg.conf.d").exists():
xconfig = pmb.chroot.root(["grep", "-rl", "XkbLayout",
"/etc/X11/xorg.conf.d/"],
chroot, check=False, output_return=True)
xconfig = pmb.chroot.root(
["grep", "-rl", "XkbLayout", "/etc/X11/xorg.conf.d/"],
chroot,
check=False,
output_return=True,
)
if xconfig:
# Nokia n900 (RX-51) randomly merges some keymaps so we
# have to specify a composite keymap for a few countries. See:
@ -359,10 +362,9 @@ def setup_keymap(config: Config):
layout = "ptes"
# Multiple files can contain the keyboard layout, take last
xconfig = xconfig.splitlines()[-1]
old_text = "Option *\\\"XkbLayout\\\" *\\\".*\\\""
new_text = "Option \\\"XkbLayout\\\" \\\"" + layout + "\\\""
pmb.chroot.root(["sed", "-i", "s/" + old_text + "/" +
new_text + "/", xconfig], chroot)
old_text = 'Option *\\"XkbLayout\\" *\\".*\\"'
new_text = 'Option \\"XkbLayout\\" \\"' + layout + '\\"'
pmb.chroot.root(["sed", "-i", "s/" + old_text + "/" + new_text + "/", xconfig], chroot)
else:
logging.info("NOTE: No valid keymap specified for device")
@ -400,16 +402,18 @@ def setup_hostname(device: str, hostname: Optional[str]):
elif not pmb.helpers.other.validate_hostname(hostname):
# Invalid hostname set by the user e.g., via pmb init, this should
# fail so they can fix it
raise RuntimeError("Hostname '" + hostname + "' is not valid, please"
" run 'pmbootstrap init' to configure it.")
raise RuntimeError(
"Hostname '" + hostname + "' is not valid, please"
" run 'pmbootstrap init' to configure it."
)
suffix = Chroot(ChrootType.ROOTFS, device)
# Generate /etc/hostname
pmb.chroot.root(["sh", "-c", "echo " + shlex.quote(hostname) +
" > /etc/hostname"], suffix)
pmb.chroot.root(["sh", "-c", "echo " + shlex.quote(hostname) + " > /etc/hostname"], suffix)
# Update /etc/hosts
regex = (r"s/^127\.0\.0\.1.*/127.0.0.1\t" + re.escape(hostname) +
" localhost.localdomain localhost/")
regex = (
r"s/^127\.0\.0\.1.*/127.0.0.1\t" + re.escape(hostname) + " localhost.localdomain localhost/"
)
pmb.chroot.root(["sed", "-i", "-e", regex, "/etc/hosts"], suffix)
@ -423,23 +427,31 @@ def setup_appstream(offline: bool, chroot: Chroot):
if "alpine-appstream-downloader" not in installed_pkgs or offline:
return
if not pmb.chroot.root(["alpine-appstream-downloader",
"/mnt/appstream-data"], chroot, check=False):
if not pmb.chroot.root(
["alpine-appstream-downloader", "/mnt/appstream-data"], chroot, check=False
):
pmb.chroot.root(["mkdir", "-p", "/var/lib/swcatalog"], chroot)
pmb.chroot.root(["cp", "-r", "/mnt/appstream-data/icons",
pmb.chroot.root(
[
"cp",
"-r",
"/mnt/appstream-data/icons",
"/mnt/appstream-data/xml",
"-t", "/var/lib/swcatalog"], chroot)
"-t",
"/var/lib/swcatalog",
],
chroot,
)
def disable_sshd(chroot: Chroot):
# check=False: rc-update doesn't exit with 0 if already disabled
pmb.chroot.root(["rc-update", "del", "sshd", "default"], chroot,
check=False)
pmb.chroot.root(["rc-update", "del", "sshd", "default"], chroot, check=False)
# Verify that it's gone
sshd_files = pmb.helpers.run.root(
["find", "-name", "sshd"], output_return=True,
working_dir=chroot / "etc/runlevels")
["find", "-name", "sshd"], output_return=True, working_dir=chroot / "etc/runlevels"
)
if sshd_files:
raise RuntimeError(f"Failed to disable sshd service: {sshd_files}")
@ -453,27 +465,29 @@ def print_sshd_info(args: PmbArgs):
logging.info("SSH daemon is disabled (--no-sshd).")
else:
logging.info("SSH daemon is enabled (disable with --no-sshd).")
logging.info(f"Login as '{get_context().config.user}' with the password given"
" during installation.")
logging.info(
f"Login as '{get_context().config.user}' with the password given"
" during installation."
)
if args.on_device_installer:
# We don't disable sshd in the installer OS. If the device is reachable
# on the network by default (e.g. Raspberry Pi), one can lock down the
# installer OS down by disabling the debug user (see wiki page).
logging.info("SSH daemon is enabled in the installer OS, to allow"
" debugging the installer image.")
logging.info(
"SSH daemon is enabled in the installer OS, to allow" " debugging the installer image."
)
logging.info("More info: https://postmarketos.org/ondev-debug")
def disable_firewall(chroot: Chroot):
# check=False: rc-update doesn't exit with 0 if already disabled
pmb.chroot.root(["rc-update", "del", "nftables", "default"], chroot,
check=False)
pmb.chroot.root(["rc-update", "del", "nftables", "default"], chroot, check=False)
# Verify that it's gone
nftables_files = pmb.helpers.run.root(
["find", "-name", "nftables"], output_return=True,
working_dir=chroot / "etc/runlevels")
["find", "-name", "nftables"], output_return=True, working_dir=chroot / "etc/runlevels"
)
if nftables_files:
raise RuntimeError(f"Failed to disable firewall: {nftables_files}")
@ -499,20 +513,21 @@ def print_firewall_info(disabled: bool, arch: Arch):
logging.info("*** FIREWALL INFORMATION ***")
if not pmaports_ok:
logging.info("Firewall is not supported in checked out pmaports"
" branch.")
logging.info("Firewall is not supported in checked out pmaports" " branch.")
elif disabled:
logging.info("Firewall is disabled (--no-firewall).")
elif not apkbuild_found:
logging.info("Firewall is enabled, but may not work (couldn't"
" determine if kernel supports nftables).")
logging.info(
"Firewall is enabled, but may not work (couldn't"
" determine if kernel supports nftables)."
)
elif apkbuild_has_opt:
logging.info("Firewall is enabled and supported by kernel.")
else:
logging.info("Firewall is enabled, but will not work (no support in"
" kernel config for nftables).")
logging.info("If/when the kernel supports it in the future, it"
" will work automatically.")
logging.info(
"Firewall is enabled, but will not work (no support in" " kernel config for nftables)."
)
logging.info("If/when the kernel supports it in the future, it" " will work automatically.")
logging.info("For more information: https://postmarketos.org/firewall")
@ -532,34 +547,39 @@ def generate_binary_list(args: PmbArgs, chroot: Chroot, step):
binaries = pmb.parse.deviceinfo().sd_embed_firmware.split(",")
for binary_offset in binaries:
binary, _offset = binary_offset.split(':')
binary, _offset = binary_offset.split(":")
try:
offset = int(_offset)
except ValueError:
raise RuntimeError("Value for firmware binary offset is "
f"not valid: {offset}")
raise RuntimeError("Value for firmware binary offset is " f"not valid: {offset}")
binary_path = chroot / "usr/share" / binary
if not os.path.exists(binary_path):
raise RuntimeError("The following firmware binary does not "
raise RuntimeError(
"The following firmware binary does not "
f"exist in the {chroot} chroot: "
f"/usr/share/{binary}")
f"/usr/share/{binary}"
)
# Insure that embedding the firmware will not overrun the
# first partition
boot_part_start = pmb.parse.deviceinfo().boot_part_start or "2048"
max_size = (int(boot_part_start) * 512) - (offset * step)
binary_size = os.path.getsize(binary_path)
if binary_size > max_size:
raise RuntimeError("The firmware is too big to embed in the "
f"disk image {binary_size}B > {max_size}B")
raise RuntimeError(
"The firmware is too big to embed in the "
f"disk image {binary_size}B > {max_size}B"
)
# Insure that the firmware does not conflict with any other firmware
# that will be embedded
binary_start = offset * step
binary_end = binary_start + binary_size
for start, end in binary_ranges.items():
if ((binary_start >= start and binary_start < end) or
(binary_end > start and binary_end <= end)):
raise RuntimeError("The firmware overlaps with at least one "
f"other firmware image: {binary}")
if (binary_start >= start and binary_start < end) or (
binary_end > start and binary_end <= end
):
raise RuntimeError(
"The firmware overlaps with at least one " f"other firmware image: {binary}"
)
binary_ranges[binary_start] = binary_end
binary_list.append((binary, offset))
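The overlap test above can be read in isolation: a new firmware range [binary_start, binary_end) conflicts if either of its endpoints falls inside an already reserved range. A standalone version of the same condition:

    def overlaps(binary_start, binary_end, binary_ranges):
        # binary_ranges maps start offset -> end offset of already placed firmware
        for start, end in binary_ranges.items():
            if (binary_start >= start and binary_start < end) or (
                binary_end > start and binary_end <= end
            ):
                return True
        # Note: a new range that completely encloses an existing one is not
        # caught by this check, since neither of its endpoints lies inside.
        return False

    reserved = {0: 4096}                    # hypothetical: 4 KiB binary at offset 0
    print(overlaps(2048, 6144, reserved))   # True, starts inside the reserved range
    print(overlaps(4096, 8192, reserved))   # False, starts right after it ends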
@ -586,9 +606,11 @@ def embed_firmware(args: PmbArgs, suffix: Chroot):
try:
step = int(pmb.parse.deviceinfo().sd_embed_firmware_step_size)
except ValueError:
raise RuntimeError("Value for "
"deviceinfo_sd_embed_firmware_step_size "
"is not valid: {}".format(step))
raise RuntimeError(
"Value for " "deviceinfo_sd_embed_firmware_step_size " "is not valid: {}".format(
step
)
)
device_rootfs = mount_device_rootfs(suffix)
binary_list = generate_binary_list(args, suffix, step)
@ -596,11 +618,15 @@ def embed_firmware(args: PmbArgs, suffix: Chroot):
# Write binaries to disk
for binary, offset in binary_list:
binary_file = os.path.join("/usr/share", binary)
logging.info("Embed firmware {} in the SD card image at offset {} with"
" step size {}".format(binary, offset, step))
logging.info(
"Embed firmware {} in the SD card image at offset {} with" " step size {}".format(
binary, offset, step
)
)
filename = os.path.join(device_rootfs, binary_file.lstrip("/"))
pmb.chroot.root(["dd", "if=" + filename, "of=/dev/install",
"bs=" + str(step), "seek=" + str(offset)])
pmb.chroot.root(
["dd", "if=" + filename, "of=/dev/install", "bs=" + str(step), "seek=" + str(offset)]
)
def write_cgpt_kpart(args: PmbArgs, layout, suffix: Chroot):
@ -615,8 +641,7 @@ def write_cgpt_kpart(args: PmbArgs, layout, suffix: Chroot):
device_rootfs = mount_device_rootfs(suffix)
filename = f"{device_rootfs}{pmb.parse.deviceinfo()['cgpt_kpart']}"
pmb.chroot.root(
["dd", f"if={filename}", f"of=/dev/installp{layout['kernel']}"])
pmb.chroot.root(["dd", f"if={filename}", f"of=/dev/installp{layout['kernel']}"])
def sanity_check_boot_size():
@ -624,11 +649,14 @@ def sanity_check_boot_size():
config = get_context().config
if int(config.boot_size) >= int(default):
return
logging.error("ERROR: your pmbootstrap has a small/invalid boot_size of"
logging.error(
"ERROR: your pmbootstrap has a small/invalid boot_size of"
f" {config.boot_size} configured, probably because the config"
" has been created with an old version.")
logging.error("This can lead to problems later on, we recommend setting it"
f" to {default} MiB.")
" has been created with an old version."
)
logging.error(
"This can lead to problems later on, we recommend setting it" f" to {default} MiB."
)
logging.error(f"Run 'pmbootstrap config boot_size {default}' and try again.")
sys.exit(1)
@ -638,17 +666,17 @@ def sanity_check_disk(args: PmbArgs):
device_name = os.path.basename(device)
if not os.path.exists(device):
raise RuntimeError(f"{device} doesn't exist, is the disk plugged?")
if os.path.isdir('/sys/class/block/{}'.format(device_name)):
with open('/sys/class/block/{}/ro'.format(device_name), 'r') as handle:
if os.path.isdir("/sys/class/block/{}".format(device_name)):
with open("/sys/class/block/{}/ro".format(device_name), "r") as handle:
ro = handle.read()
if ro == '1\n':
if ro == "1\n":
raise RuntimeError(f"{device} is read-only, maybe a locked SD card?")
def sanity_check_disk_size(args: PmbArgs):
device = args.disk
devpath = os.path.realpath(device)
sysfs = '/sys/class/block/{}/size'.format(devpath.replace('/dev/', ''))
sysfs = "/sys/class/block/{}/size".format(devpath.replace("/dev/", ""))
if not os.path.isfile(sysfs):
# This is a best-effort sanity check, continue if it's not checkable
return
@ -662,11 +690,13 @@ def sanity_check_disk_size(args: PmbArgs):
# Warn if the size is larger than 100GiB
if not args.assume_yes and size > (100 * 2 * 1024 * 1024):
if not pmb.helpers.cli.confirm(f"WARNING: The target disk ({devpath}) "
if not pmb.helpers.cli.confirm(
f"WARNING: The target disk ({devpath}) "
"is larger than a usual SD card "
"(>100GiB). Are you sure you want to "
f"overwrite this {human} disk?",
no_assumptions=True):
no_assumptions=True,
):
raise RuntimeError("Aborted.")
@ -680,10 +710,12 @@ def sanity_check_ondev_version(args: PmbArgs):
ver_pkg = get_ondev_pkgver(args)
ver_min = pmb.config.ondev_min_version
if pmb.parse.version.compare(ver_pkg, ver_min) == -1:
raise RuntimeError("This version of pmbootstrap requires"
raise RuntimeError(
"This version of pmbootstrap requires"
f" postmarketos-ondev version {ver_min} or"
" higher. The postmarketos-ondev found in pmaports"
f" / in the binary packages has version {ver_pkg}.")
f" / in the binary packages has version {ver_pkg}."
)
def get_partition_layout(reserve, kernel):
@ -721,11 +753,13 @@ def get_uuid(args: PmbArgs, partition: Path):
return pmb.chroot.root(
[
"blkid",
"-s", "UUID",
"-o", "value",
"-s",
"UUID",
"-o",
"value",
partition,
],
output_return=True
output_return=True,
).rstrip()
@ -762,8 +796,9 @@ def create_fstab(args: PmbArgs, layout, chroot: Chroot):
root_dev = Path(f"/dev/installp{layout['root']}")
boot_mount_point = f"UUID={get_uuid(args, boot_dev)}"
root_mount_point = "/dev/mapper/root" if args.full_disk_encryption \
else f"UUID={get_uuid(args, root_dev)}"
root_mount_point = (
"/dev/mapper/root" if args.full_disk_encryption else f"UUID={get_uuid(args, root_dev)}"
)
boot_options = "nodev,nosuid,noexec"
boot_filesystem = pmb.parse.deviceinfo().boot_filesystem or "ext2"
@ -798,9 +833,17 @@ def create_fstab(args: PmbArgs, layout, chroot: Chroot):
pmb.chroot.root(["mv", "/tmp/fstab", "/etc/fstab"], chroot)
def install_system_image(args: PmbArgs, size_reserve, chroot: Chroot, step, steps,
boot_label="pmOS_boot", root_label="pmOS_root",
split=False, disk: Optional[Path]=None):
def install_system_image(
args: PmbArgs,
size_reserve,
chroot: Chroot,
step,
steps,
boot_label="pmOS_boot",
root_label="pmOS_root",
split=False,
disk: Optional[Path] = None,
):
"""
:param size_reserve: empty partition between root and boot in MiB (pma#463)
:param suffix: the chroot suffix, where the rootfs that will be installed
@ -818,15 +861,14 @@ def install_system_image(args: PmbArgs, size_reserve, chroot: Chroot, step, step
logging.info(f"*** ({step}/{steps}) PREPARE INSTALL BLOCKDEVICE ***")
pmb.helpers.mount.umount_all(chroot.path)
(size_boot, size_root) = get_subpartitions_size(chroot)
layout = get_partition_layout(size_reserve, pmb.parse.deviceinfo().cgpt_kpart \
and args.install_cgpt)
layout = get_partition_layout(
size_reserve, pmb.parse.deviceinfo().cgpt_kpart and args.install_cgpt
)
if not args.rsync:
pmb.install.blockdevice.create(args, size_boot, size_root,
size_reserve, split, disk)
pmb.install.blockdevice.create(args, size_boot, size_root, size_reserve, split, disk)
if not split:
if pmb.parse.deviceinfo().cgpt_kpart and args.install_cgpt:
pmb.install.partition_cgpt(
args, layout, size_boot, size_reserve)
pmb.install.partition_cgpt(args, layout, size_boot, size_reserve)
else:
pmb.install.partition(args, layout, size_boot, size_reserve)
if not split:
@ -867,8 +909,7 @@ def install_system_image(args: PmbArgs, size_reserve, chroot: Chroot, step, step
write_cgpt_kpart(args, layout, chroot)
if disk:
logging.info(f"Unmounting disk {disk} (this may take a while "
"to sync, please wait)")
logging.info(f"Unmounting disk {disk} (this may take a while " "to sync, please wait)")
pmb.chroot.shutdown(True)
# Convert rootfs to sparse using img2simg
@ -882,10 +923,8 @@ def install_system_image(args: PmbArgs, size_reserve, chroot: Chroot, step, step
pmb.chroot.apk.install(["android-tools"], Chroot.native())
sys_image = device + ".img"
sys_image_sparse = device + "-sparse.img"
pmb.chroot.user(["img2simg", sys_image, sys_image_sparse],
working_dir=workdir)
pmb.chroot.user(["mv", "-f", sys_image_sparse, sys_image],
working_dir=workdir)
pmb.chroot.user(["img2simg", sys_image, sys_image_sparse], working_dir=workdir)
pmb.chroot.user(["mv", "-f", sys_image_sparse, sys_image], working_dir=workdir)
# patch sparse image for Samsung devices if specified
samsungify_strategy = pmb.parse.deviceinfo().flash_sparse_samsung_format
@ -894,11 +933,18 @@ def install_system_image(args: PmbArgs, size_reserve, chroot: Chroot, step, step
pmb.chroot.apk.install(["sm-sparse-image-tool"], Chroot.native())
sys_image = f"{device}.img"
sys_image_patched = f"{device}-patched.img"
pmb.chroot.user(["sm_sparse_image_tool", "samsungify", "--strategy",
samsungify_strategy, sys_image, sys_image_patched],
working_dir=workdir)
pmb.chroot.user(["mv", "-f", sys_image_patched, sys_image],
working_dir=workdir)
pmb.chroot.user(
[
"sm_sparse_image_tool",
"samsungify",
"--strategy",
samsungify_strategy,
sys_image,
sys_image_patched,
],
working_dir=workdir,
)
pmb.chroot.user(["mv", "-f", sys_image_patched, sys_image], working_dir=workdir)
def print_flash_info(device: str, deviceinfo: Deviceinfo, split: bool, have_disk: bool):
@ -916,41 +962,42 @@ def print_flash_info(device: str, deviceinfo: Deviceinfo, split: bool, have_disk
requires_split = flasher.get("split", False)
if method == "none":
logging.info("Refer to the installation instructions of your device,"
" or the generic install instructions in the wiki.")
logging.info("https://wiki.postmarketos.org/wiki/Installation_guide"
"#pmbootstrap_flash")
logging.info(
"Refer to the installation instructions of your device,"
" or the generic install instructions in the wiki."
)
logging.info("https://wiki.postmarketos.org/wiki/Installation_guide" "#pmbootstrap_flash")
return
logging.info("Run the following to flash your installation to the"
" target device:")
logging.info("Run the following to flash your installation to the" " target device:")
if "flash_rootfs" in flasher_actions and not have_disk and \
bool(split) == requires_split:
if "flash_rootfs" in flasher_actions and not have_disk and bool(split) == requires_split:
logging.info("* pmbootstrap flasher flash_rootfs")
logging.info(" Flashes the generated rootfs image to your device:")
if split:
logging.info(f" {Chroot.native() / 'home/pmos/rootfs' / device}-rootfs.img")
else:
logging.info(f" {Chroot.native() / 'home/pmos/rootfs' / device}.img")
logging.info(" (NOTE: This file has a partition table, which"
logging.info(
" (NOTE: This file has a partition table, which"
" contains /boot and / subpartitions. That way we"
" don't need to change the partition layout on your"
" device.)")
" device.)"
)
# if current flasher supports vbmeta and partition is explicitly specified
# in deviceinfo
if "flash_vbmeta" in flasher_actions and \
(deviceinfo.flash_fastboot_partition_vbmeta or
deviceinfo.flash_heimdall_partition_vbmeta):
if "flash_vbmeta" in flasher_actions and (
deviceinfo.flash_fastboot_partition_vbmeta or deviceinfo.flash_heimdall_partition_vbmeta
):
logging.info("* pmbootstrap flasher flash_vbmeta")
logging.info(" Flashes vbmeta image with verification disabled flag.")
# if current flasher supports dtbo and partition is explicitly specified
# in deviceinfo
if "flash_dtbo" in flasher_actions and \
(deviceinfo.flash_fastboot_partition_dtbo or
deviceinfo.flash_heimdall_partition_dtbo):
if "flash_dtbo" in flasher_actions and (
deviceinfo.flash_fastboot_partition_dtbo or deviceinfo.flash_heimdall_partition_dtbo
):
logging.info("* pmbootstrap flasher flash_dtbo")
logging.info(" Flashes dtbo image.")
@ -958,8 +1005,7 @@ def print_flash_info(device: str, deviceinfo: Deviceinfo, split: bool, have_disk
# (e.g. an Android boot image is generated). In that case, "flash_kernel"
# works even when partitions are split or installing to disk. This is not
# possible if the flash method requires split partitions.
if "flash_kernel" in flasher_actions and \
(not requires_split or split):
if "flash_kernel" in flasher_actions and (not requires_split or split):
logging.info("* pmbootstrap flasher flash_kernel")
logging.info(" Flashes the kernel + initramfs to your device:")
if requires_split:
@ -968,21 +1014,29 @@ def print_flash_info(device: str, deviceinfo: Deviceinfo, split: bool, have_disk
logging.info(f" {Chroot(ChrootType.ROOTFS, device) / 'boot'}")
if "boot" in flasher_actions:
logging.info(" (NOTE: " + method + " also supports booting"
logging.info(
" (NOTE: " + method + " also supports booting"
" the kernel/initramfs directly without flashing."
" Use 'pmbootstrap flasher boot' to do that.)")
" Use 'pmbootstrap flasher boot' to do that.)"
)
if "flash_lk2nd" in flasher_actions and \
(Chroot(ChrootType.ROOTFS, device) / "boot/lk2nd.img").exists():
logging.info("* Your device supports and may even require"
if (
"flash_lk2nd" in flasher_actions
and (Chroot(ChrootType.ROOTFS, device) / "boot/lk2nd.img").exists()
):
logging.info(
"* Your device supports and may even require"
" flashing lk2nd. You should flash it before"
" flashing anything else. Use 'pmbootstrap flasher"
" flash_lk2nd' to do that.")
" flash_lk2nd' to do that."
)
# Export information
logging.info("* If the above steps do not work, you can also create"
logging.info(
"* If the above steps do not work, you can also create"
" symlinks to the generated files with 'pmbootstrap export'"
" and flash outside of pmbootstrap.")
" and flash outside of pmbootstrap."
)
def install_recovery_zip(args: PmbArgs, device: str, arch: Arch, steps):
@ -1002,17 +1056,17 @@ def install_on_device_installer(args: PmbArgs, step, steps):
config = get_context().config
if not args.ondev_no_rootfs:
suffix_rootfs = Chroot.rootfs(config.device)
install_system_image(args, 0, suffix_rootfs, step=step, steps=steps,
split=True)
install_system_image(args, 0, suffix_rootfs, step=step, steps=steps, split=True)
step += 2
# Prepare the installer chroot
logging.info(f"*** ({step}/{steps}) CREATE ON-DEVICE INSTALLER ROOTFS ***")
step += 1
packages = ([f"device-{config.device}",
"postmarketos-ondev"] +
get_kernel_package(config) +
get_nonfree_packages(config.device))
packages = (
[f"device-{config.device}", "postmarketos-ondev"]
+ get_kernel_package(config)
+ get_nonfree_packages(config.device)
)
chroot_installer = Chroot(ChrootType.INSTALLER, config.device)
pmb.chroot.apk.install(packages, chroot_installer)
@ -1033,24 +1087,24 @@ def install_on_device_installer(args: PmbArgs, step, steps):
logging.info(f"({chroot_installer}) ondev-prepare")
channel = pmb.config.pmaports.read_config()["channel"]
channel_cfg = pmb.config.pmaports.read_config_channel()
env = {"ONDEV_CHANNEL": channel,
env = {
"ONDEV_CHANNEL": channel,
"ONDEV_CHANNEL_BRANCH_APORTS": channel_cfg["branch_aports"],
"ONDEV_CHANNEL_BRANCH_PMAPORTS": channel_cfg["branch_pmaports"],
"ONDEV_CHANNEL_DESCRIPTION": channel_cfg["description"],
"ONDEV_CHANNEL_MIRRORDIR_ALPINE": channel_cfg["mirrordir_alpine"],
"ONDEV_CIPHER": args.cipher,
"ONDEV_PMBOOTSTRAP_VERSION": pmb.__version__,
"ONDEV_UI": config.ui}
"ONDEV_UI": config.ui,
}
pmb.chroot.root(["ondev-prepare"], chroot_installer, env=env)
# Copy files specified with 'pmbootstrap install --ondev --cp'
if args.ondev_cp:
for host_src, chroot_dest in args.ondev_cp:
host_dest = chroot_installer / chroot_dest
logging.info(f"({chroot_installer}) add {host_src} as"
f" {chroot_dest}")
pmb.helpers.run.root(["install", "-Dm644", host_src,
host_dest])
logging.info(f"({chroot_installer}) add {host_src} as" f" {chroot_dest}")
pmb.helpers.run.root(["install", "-Dm644", host_src, host_dest])
# Remove $DEVICE-boot.img (we will generate a new one if --split was
# specified, otherwise the separate boot image is not needed)
@ -1065,10 +1119,18 @@ def install_on_device_installer(args: PmbArgs, step, steps):
# Generate installer image
size_reserve = round(os.path.getsize(img_path_dest) / 1024 / 1024) + 200
pmaports_cfg = pmb.config.pmaports.read_config()
boot_label = pmaports_cfg.get("supported_install_boot_label",
"pmOS_inst_boot")
install_system_image(args, size_reserve, chroot_installer, step, steps,
boot_label, "pmOS_install", args.split, args.disk)
boot_label = pmaports_cfg.get("supported_install_boot_label", "pmOS_inst_boot")
install_system_image(
args,
size_reserve,
chroot_installer,
step,
steps,
boot_label,
"pmOS_install",
args.split,
args.disk,
)
def get_selected_providers(args: PmbArgs, packages):
@ -1099,11 +1161,10 @@ def get_selected_providers(args: PmbArgs, packages):
apkbuild = pmb.helpers.pmaports.get(package, subpackages=False, must_exist=False)
if not apkbuild:
continue
for select in apkbuild['_pmb_select']:
for select in apkbuild["_pmb_select"]:
if select in get_context().config.providers:
ret += [get_context().config.providers[select]]
logging.verbose(f"{package}: install selected_providers:"
f" {', '.join(ret)}")
logging.verbose(f"{package}: install selected_providers:" f" {', '.join(ret)}")
# Also iterate through dependencies to collect any providers they have
depends = apkbuild["depends"]
if depends:
@ -1157,8 +1218,7 @@ def get_recommends(args: PmbArgs, packages) -> Sequence[str]:
continue
recommends = apkbuild["_pmb_recommends"]
if recommends:
logging.debug(f"{package}: install _pmb_recommends:"
f" {', '.join(recommends)}")
logging.debug(f"{package}: install _pmb_recommends:" f" {', '.join(recommends)}")
ret += recommends
# Call recursively in case recommends have pmb_recommends of their
# own.
@ -1177,8 +1237,7 @@ def create_device_rootfs(args: PmbArgs, step, steps):
context = get_context()
config = context.config
device = context.config.device
logging.info(f'*** ({step}/{steps}) CREATE DEVICE ROOTFS ("{device}")'
' ***')
logging.info(f'*** ({step}/{steps}) CREATE DEVICE ROOTFS ("{device}")' " ***")
chroot = Chroot(ChrootType.ROOTFS, device)
pmb.chroot.init(chroot)
@ -1187,11 +1246,9 @@ def create_device_rootfs(args: PmbArgs, step, steps):
set_user(context.config)
# Fill install_packages
install_packages = (pmb.config.install_device_packages +
["device-" + device])
install_packages = pmb.config.install_device_packages + ["device-" + device]
if not args.install_base:
install_packages = [p for p in install_packages
if p != "postmarketos-base"]
install_packages = [p for p in install_packages if p != "postmarketos-base"]
if config.ui.lower() != "none":
install_packages += ["postmarketos-ui-" + config.ui]
@ -1210,7 +1267,7 @@ def create_device_rootfs(args: PmbArgs, step, steps):
install_packages += context.config.extra_packages.split(",")
if args.add:
install_packages += args.add.split(",")
locale_is_set = (config.locale != Config().locale)
locale_is_set = config.locale != Config().locale
if locale_is_set:
install_packages += ["lang", "musl-locales"]
@ -1226,7 +1283,8 @@ def create_device_rootfs(args: PmbArgs, step, steps):
# Pick the most suitable unlocker depending on the packages
# selected for installation
unlocker = pmb.parse.depends.package_provider(
"postmarketos-fde-unlocker", install_packages, chroot)
"postmarketos-fde-unlocker", install_packages, chroot
)
if unlocker["pkgname"] not in install_packages:
install_packages += [unlocker["pkgname"]]
else:
@ -1260,8 +1318,9 @@ def create_device_rootfs(args: PmbArgs, step, steps):
# alpine-baselayout by /etc/profile. Since they don't override the
# locale if it exists, it warranties we have preference
line = f"export LANG=${{LANG:-{shlex.quote(config.locale)}}}"
pmb.chroot.root(["sh", "-c", f"echo {shlex.quote(line)}"
" > /etc/profile.d/10locale-pmos.sh"], chroot)
pmb.chroot.root(
["sh", "-c", f"echo {shlex.quote(line)}" " > /etc/profile.d/10locale-pmos.sh"], chroot
)
# Set the hostname as the device name
setup_hostname(device, config.hostname)
@ -1303,8 +1362,7 @@ def install(args: PmbArgs):
step = 1
logging.info(f"*** ({step}/{steps}) PREPARE NATIVE CHROOT ***")
pmb.chroot.init(Chroot.native())
pmb.chroot.apk.install(pmb.config.install_native_packages, Chroot.native(),
build=False)
pmb.chroot.apk.install(pmb.config.install_native_packages, Chroot.native(), build=False)
step += 1
if not args.ondev_no_rootfs:
@ -1320,10 +1378,11 @@ def install(args: PmbArgs):
# Runs install_system_image twice
install_on_device_installer(args, step, steps)
else:
install_system_image(args, 0, chroot, step, steps,
split=args.split, disk=args.disk)
install_system_image(args, 0, chroot, step, steps, split=args.split, disk=args.disk)
print_flash_info(device, deviceinfo, args.split, True if args.disk and args.disk.is_absolute() else False)
print_flash_info(
device, deviceinfo, args.split, True if args.disk and args.disk.is_absolute() else False
)
print_sshd_info(args)
print_firewall_info(args.no_firewall, deviceinfo.arch)

View file

@ -25,16 +25,16 @@ def previous_install(path: Path):
if not os.path.exists(blockdevice_outside):
continue
blockdevice_inside = "/dev/diskp1"
pmb.helpers.mount.bind_file(blockdevice_outside,
Chroot.native() / blockdevice_inside)
pmb.helpers.mount.bind_file(blockdevice_outside, Chroot.native() / blockdevice_inside)
try:
label = pmb.chroot.root(["blkid", "-s", "LABEL",
"-o", "value",
blockdevice_inside],
output_return=True)
label = pmb.chroot.root(
["blkid", "-s", "LABEL", "-o", "value", blockdevice_inside], output_return=True
)
except RuntimeError:
logging.info("WARNING: Could not get block device label,"
" assume no previous installation on that partition")
logging.info(
"WARNING: Could not get block device label,"
" assume no previous installation on that partition"
)
pmb.helpers.run.root(["umount", Chroot.native() / blockdevice_inside])
return "pmOS_boot" in label
@ -49,23 +49,20 @@ def mount_disk( path: Path):
raise RuntimeError(f"The disk block device does not exist: {path}")
for path_mount in path.parent.glob(f"{path.name}*"):
if pmb.helpers.mount.ismount(path_mount):
raise RuntimeError(f"{path_mount} is mounted! Will not attempt to"
" format this!")
raise RuntimeError(f"{path_mount} is mounted! Will not attempt to" " format this!")
logging.info(f"(native) mount /dev/install (host: {path})")
pmb.helpers.mount.bind_file(path, Chroot.native() / "dev/install")
if previous_install(path):
if not pmb.helpers.cli.confirm("WARNING: This device has a"
" previous installation of pmOS."
" CONTINUE?"):
if not pmb.helpers.cli.confirm(
"WARNING: This device has a" " previous installation of pmOS." " CONTINUE?"
):
raise RuntimeError("Aborted.")
else:
if not pmb.helpers.cli.confirm(f"EVERYTHING ON {path} WILL BE"
" ERASED! CONTINUE?"):
if not pmb.helpers.cli.confirm(f"EVERYTHING ON {path} WILL BE" " ERASED! CONTINUE?"):
raise RuntimeError("Aborted.")
def create_and_mount_image(args: PmbArgs, size_boot, size_root, size_reserve,
split=False):
def create_and_mount_image(args: PmbArgs, size_boot, size_root, size_reserve, split=False):
"""
Create a new image file, and mount it as /dev/install.
@ -96,8 +93,10 @@ def create_and_mount_image(args: PmbArgs, size_boot, size_root, size_reserve,
disk_data = os.statvfs(get_context().config.work)
free = round((disk_data.f_bsize * disk_data.f_bavail) / (1024**2))
if size_mb > free:
raise RuntimeError("Not enough free space to create rootfs image! "
f"(free: {free}M, required: {size_mb}M)")
raise RuntimeError(
"Not enough free space to create rootfs image! "
f"(free: {free}M, required: {size_mb}M)"
)
# Create empty image files
pmb.chroot.user(["mkdir", "-p", "/home/pmos/rootfs"])
@ -106,18 +105,15 @@ def create_and_mount_image(args: PmbArgs, size_boot, size_root, size_reserve,
size_mb_root = str(round(size_root)) + "M"
images = {img_path_full: size_mb_full}
if split:
images = {img_path_boot: size_mb_boot,
img_path_root: size_mb_root}
images = {img_path_boot: size_mb_boot, img_path_root: size_mb_root}
for img_path, size_mb in images.items():
logging.info(f"(native) create {img_path.name} "
f"({size_mb})")
logging.info(f"(native) create {img_path.name} " f"({size_mb})")
pmb.chroot.root(["truncate", "-s", size_mb, img_path])
# Mount to /dev/install
mount_image_paths = {img_path_full: "/dev/install"}
if split:
mount_image_paths = {img_path_boot: "/dev/installp1",
img_path_root: "/dev/installp2"}
mount_image_paths = {img_path_boot: "/dev/installp1", img_path_root: "/dev/installp2"}
for img_path, mount_point in mount_image_paths.items():
logging.info(f"(native) mount {mount_point} ({img_path.name})")
@ -140,5 +136,4 @@ def create(args: PmbArgs, size_boot, size_root, size_reserve, split, disk: Optio
if disk:
mount_disk(disk)
else:
create_and_mount_image(args, size_boot, size_root, size_reserve,
split)
create_and_mount_image(args, size_boot, size_root, size_reserve, split)

View file

@ -25,20 +25,15 @@ def format_and_mount_boot(args: PmbArgs, device, boot_label):
mountpoint = "/mnt/install/boot"
filesystem = pmb.parse.deviceinfo().boot_filesystem or "ext2"
install_fsprogs(filesystem)
logging.info(f"(native) format {device} (boot, {filesystem}), mount to"
f" {mountpoint}")
logging.info(f"(native) format {device} (boot, {filesystem}), mount to" f" {mountpoint}")
if filesystem == "fat16":
pmb.chroot.root(["mkfs.fat", "-F", "16", "-n", boot_label,
device])
pmb.chroot.root(["mkfs.fat", "-F", "16", "-n", boot_label, device])
elif filesystem == "fat32":
pmb.chroot.root(["mkfs.fat", "-F", "32", "-n", boot_label,
device])
pmb.chroot.root(["mkfs.fat", "-F", "32", "-n", boot_label, device])
elif filesystem == "ext2":
pmb.chroot.root(["mkfs.ext2", "-F", "-q", "-L", boot_label,
device])
pmb.chroot.root(["mkfs.ext2", "-F", "-q", "-L", boot_label, device])
elif filesystem == "btrfs":
pmb.chroot.root(["mkfs.btrfs", "-f", "-q", "-L", boot_label,
device])
pmb.chroot.root(["mkfs.btrfs", "-f", "-q", "-L", boot_label, device])
else:
raise RuntimeError("Filesystem " + filesystem + " is not supported!")
pmb.chroot.root(["mkdir", "-p", mountpoint])
@ -51,21 +46,27 @@ def format_luks_root(args: PmbArgs, device):
"""
mountpoint = "/dev/mapper/pm_crypt"
logging.info(f"(native) format {device} (root, luks), mount to"
f" {mountpoint}")
logging.info(f"(native) format {device} (root, luks), mount to" f" {mountpoint}")
logging.info(" *** TYPE IN THE FULL DISK ENCRYPTION PASSWORD (TWICE!) ***")
# Avoid cryptsetup warning about missing locking directory
pmb.chroot.root(["mkdir", "-p", "/run/cryptsetup"])
pmb.chroot.root(["cryptsetup", "luksFormat",
pmb.chroot.root(
[
"cryptsetup",
"luksFormat",
"-q",
"--cipher", args.cipher,
"--iter-time", args.iter_time,
"--cipher",
args.cipher,
"--iter-time",
args.iter_time,
"--use-random",
device], output="interactive")
pmb.chroot.root(["cryptsetup", "luksOpen", device, "pm_crypt"],
output="interactive")
device,
],
output="interactive",
)
pmb.chroot.root(["cryptsetup", "luksOpen", device, "pm_crypt"], output="interactive")
if not (Chroot.native() / mountpoint).exists():
raise RuntimeError("Failed to open cryptdevice!")
@ -79,11 +80,13 @@ def get_root_filesystem(args: PmbArgs):
supported_list = supported.split(",")
if ret not in supported_list:
raise ValueError(f"Root filesystem {ret} is not supported by your"
raise ValueError(
f"Root filesystem {ret} is not supported by your"
" currently checked out pmaports branch. Update your"
" branch ('pmbootstrap pull'), change it"
" ('pmbootstrap init'), or select one of these"
f" filesystems: {', '.join(supported_list)}")
f" filesystems: {', '.join(supported_list)}"
)
return ret
@ -99,17 +102,10 @@ def prepare_btrfs_subvolumes(args: PmbArgs, device, mountpoint):
/snapshots should be a separate subvol so that changing the root subvol
doesn't affect snapshots
"""
subvolume_list = ["@",
"@home",
"@root",
"@snapshots",
"@srv",
"@tmp",
"@var"]
subvolume_list = ["@", "@home", "@root", "@snapshots", "@srv", "@tmp", "@var"]
for subvolume in subvolume_list:
pmb.chroot.root(["btrfs", "subvol", "create",
f"{mountpoint}/{subvolume}"])
pmb.chroot.root(["btrfs", "subvol", "create", f"{mountpoint}/{subvolume}"])
# Set the default root subvolume to be separate from top level btrfs
# subvol. This lets us easily swap out current root subvol with an
@ -119,12 +115,16 @@ def prepare_btrfs_subvolumes(args: PmbArgs, device, mountpoint):
# Make directories to mount subvols onto
pmb.chroot.root(["umount", mountpoint])
pmb.chroot.root(["mount", device, mountpoint])
pmb.chroot.root(["mkdir",
pmb.chroot.root(
[
"mkdir",
f"{mountpoint}/home",
f"{mountpoint}/root",
f"{mountpoint}/.snapshots",
f"{mountpoint}/srv",
f"{mountpoint}/var"])
f"{mountpoint}/var",
]
)
# snapshots contain sensitive information,
# and should only be readable by root.
@ -132,16 +132,11 @@ def prepare_btrfs_subvolumes(args: PmbArgs, device, mountpoint):
pmb.chroot.root(["chmod", "700", f"{mountpoint}/.snapshots"])
# Mount subvols
pmb.chroot.root(["mount", "-o", "subvol=@var",
device, f"{mountpoint}/var"])
pmb.chroot.root(["mount", "-o", "subvol=@home",
device, f"{mountpoint}/home"])
pmb.chroot.root(["mount", "-o", "subvol=@root",
device, f"{mountpoint}/root"])
pmb.chroot.root(["mount", "-o", "subvol=@srv",
device, f"{mountpoint}/srv"])
pmb.chroot.root(["mount", "-o", "subvol=@snapshots",
device, f"{mountpoint}/.snapshots"])
pmb.chroot.root(["mount", "-o", "subvol=@var", device, f"{mountpoint}/var"])
pmb.chroot.root(["mount", "-o", "subvol=@home", device, f"{mountpoint}/home"])
pmb.chroot.root(["mount", "-o", "subvol=@root", device, f"{mountpoint}/root"])
pmb.chroot.root(["mount", "-o", "subvol=@srv", device, f"{mountpoint}/srv"])
pmb.chroot.root(["mount", "-o", "subvol=@snapshots", device, f"{mountpoint}/.snapshots"])
# Disable CoW for /var, to avoid write multiplication
# and slowdown on databases, containers and VM images.
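prepare_btrfs_subvolumes() above pairs each btrfs subvolume with a directory under the install mountpoint. Spelled out as data, with the mount commands printed instead of executed (device and mountpoint are example values):

    device = "/dev/installp2"      # hypothetical root partition
    mountpoint = "/mnt/install"

    subvol_mounts = {
        "@var": "var",
        "@home": "home",
        "@root": "root",
        "@srv": "srv",
        "@snapshots": ".snapshots",
    }
    for subvol, directory in subvol_mounts.items():
        print(["mount", "-o", f"subvol={subvol}", device, f"{mountpoint}/{directory}"])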
@ -162,8 +157,7 @@ def format_and_mount_root(args: PmbArgs, device, root_label, disk):
# Some downstream kernels don't support metadata_csum (#1364).
# When changing the options of mkfs.ext4, also change them in the
# recovery zip code (see 'grep -r mkfs\.ext4')!
mkfs_root_args = ["mkfs.ext4", "-O", "^metadata_csum", "-F",
"-q", "-L", root_label]
mkfs_root_args = ["mkfs.ext4", "-O", "^metadata_csum", "-F", "-q", "-L", root_label]
# When we don't know the file system size before hand like
# with non-block devices, we need to explicitly set a number of
# inodes. See #1717 and #1845 for details

View file

@ -33,8 +33,7 @@ def mount(img_path: Path):
for i in range(0, 5):
# Retry
if i > 0:
logging.debug("loop module might not be initialized yet, retry in"
" one second...")
logging.debug("loop module might not be initialized yet, retry in" " one second...")
time.sleep(1)
# Mount and return on success
@ -61,8 +60,7 @@ def device_by_back_file(back_file: Path) -> Path:
"""
# Get list from losetup
losetup_output = pmb.chroot.root(["losetup", "--json", "--list"],
output_return=True)
losetup_output = pmb.chroot.root(["losetup", "--json", "--list"], output_return=True)
if not losetup_output:
raise RuntimeError("losetup failed")
@ -86,12 +84,12 @@ def umount(img_path: Path):
logging.debug(f"(native) umount {device}")
pmb.chroot.root(["losetup", "-d", device])
def detach_all():
"""
Detach all loop devices used by pmbootstrap
"""
losetup_output = pmb.helpers.run.root(["losetup", "--json", "--list"],
output_return=True)
losetup_output = pmb.helpers.run.root(["losetup", "--json", "--list"], output_return=True)
if not losetup_output:
return
losetup = json.loads(losetup_output)
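Both device_by_back_file() and detach_all() above work from the JSON that "losetup --json --list" prints. A sketch of the lookup, with the losetup output hard-coded so it runs without root or any active loop devices (assuming util-linux's layout: a top-level "loopdevices" list with "name" and "back-file" keys):

    import json

    losetup_output = """
    { "loopdevices": [
        { "name": "/dev/loop0", "back-file": "/home/pmos/rootfs/qemu-amd64.img" }
    ]}
    """

    def device_by_back_file_sketch(back_file, losetup_json):
        for loopdevice in json.loads(losetup_json)["loopdevices"]:
            if loopdevice["back-file"] == back_file:
                return loopdevice["name"]
        raise RuntimeError(f"No loop device found for {back_file}")

    print(device_by_back_file_sketch("/home/pmos/rootfs/qemu-amd64.img", losetup_output))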

View file

@ -40,13 +40,16 @@ def partitions_mount(device: str, layout, disk: Optional[Path]):
if os.path.exists(f"{partition_prefix}1"):
found = True
break
logging.debug(f"NOTE: ({i + 1}/{tries}) failed to find the install "
"partition. Retrying...")
logging.debug(
f"NOTE: ({i + 1}/{tries}) failed to find the install " "partition. Retrying..."
)
time.sleep(0.1)
if not found:
raise RuntimeError(f"Unable to find the first partition of {disk}, "
f"expected it to be at {partition_prefix}1!")
raise RuntimeError(
f"Unable to find the first partition of {disk}, "
f"expected it to be at {partition_prefix}1!"
)
partitions = [layout["boot"], layout["root"]]
@ -77,8 +80,10 @@ def partition(args: PmbArgs, layout, size_boot, size_reserve):
mb_boot = f"{round(size_boot)}M"
mb_reserved = f"{round(size_reserve)}M"
mb_root_start = f"{round(size_boot) + round(size_reserve)}M"
logging.info(f"(native) partition /dev/install (boot: {mb_boot},"
f" reserved: {mb_reserved}, root: the rest)")
logging.info(
f"(native) partition /dev/install (boot: {mb_boot},"
f" reserved: {mb_reserved}, root: the rest)"
)
filesystem = pmb.parse.deviceinfo().boot_filesystem or "ext2"
@ -92,7 +97,7 @@ def partition(args: PmbArgs, layout, size_boot, size_reserve):
commands = [
["mktable", partition_type],
["mkpart", "primary", filesystem, boot_part_start + 's', mb_boot],
["mkpart", "primary", filesystem, boot_part_start + "s", mb_boot],
]
if size_reserve:
@ -101,7 +106,7 @@ def partition(args: PmbArgs, layout, size_boot, size_reserve):
commands += [
["mkpart", "primary", mb_root_start, "100%"],
["set", str(layout["boot"]), "boot", "on"]
["set", str(layout["boot"]), "boot", "on"],
]
# Not strictly necessary if the device doesn't use EFI boot, but marking
@ -114,8 +119,7 @@ def partition(args: PmbArgs, layout, size_boot, size_reserve):
commands += [["set", str(layout["boot"]), "esp", "on"]]
for command in commands:
pmb.chroot.root(["parted", "-s", "/dev/install"] +
command, check=False)
pmb.chroot.root(["parted", "-s", "/dev/install"] + command, check=False)
def partition_cgpt(args: PmbArgs, layout, size_boot, size_reserve):
@ -131,68 +135,88 @@ def partition_cgpt(args: PmbArgs, layout, size_boot, size_reserve):
pmb.chroot.apk.install(["cgpt"], Chroot.native(), build=False)
cgpt = {
'kpart_start': pmb.parse.deviceinfo().cgpt_kpart_start,
'kpart_size': pmb.parse.deviceinfo().cgpt_kpart_size,
"kpart_start": pmb.parse.deviceinfo().cgpt_kpart_start,
"kpart_size": pmb.parse.deviceinfo().cgpt_kpart_size,
}
# Convert to MB and print info
mb_boot = f"{round(size_boot)}M"
mb_reserved = f"{round(size_reserve)}M"
logging.info(f"(native) partition /dev/install (boot: {mb_boot},"
f" reserved: {mb_reserved}, root: the rest)")
logging.info(
f"(native) partition /dev/install (boot: {mb_boot},"
f" reserved: {mb_reserved}, root: the rest)"
)
boot_part_start = str(int(cgpt['kpart_start']) + int(cgpt['kpart_size']))
boot_part_start = str(int(cgpt["kpart_start"]) + int(cgpt["kpart_size"]))
# Convert to sectors
s_boot = str(int(size_boot * 1024 * 1024 / 512))
s_root_start = str(int(
int(boot_part_start) + int(s_boot) + size_reserve * 1024 * 1024 / 512
))
s_root_start = str(int(int(boot_part_start) + int(s_boot) + size_reserve * 1024 * 1024 / 512))
commands = [
["parted", "-s", "/dev/install", "mktable", "gpt"],
["cgpt", "create", "/dev/install"],
[
"cgpt", "add",
"-i", str(layout["kernel"]),
"-t", "kernel",
"-b", cgpt['kpart_start'],
"-s", cgpt['kpart_size'],
"-l", "pmOS_kernel",
"-S", "1", # Successful flag
"-T", "5", # Tries flag
"-P", "10", # Priority flag
"/dev/install"
"cgpt",
"add",
"-i",
str(layout["kernel"]),
"-t",
"kernel",
"-b",
cgpt["kpart_start"],
"-s",
cgpt["kpart_size"],
"-l",
"pmOS_kernel",
"-S",
"1", # Successful flag
"-T",
"5", # Tries flag
"-P",
"10", # Priority flag
"/dev/install",
],
[
"cgpt", "add",
"cgpt",
"add",
# pmOS_boot is second partition, the first will be ChromeOS kernel
# partition
"-i", str(layout["boot"]), # Partition number
"-t", "efi", # Mark this partition as bootable for u-boot
"-b", boot_part_start,
"-s", s_boot,
"-l", "pmOS_boot",
"/dev/install"
"-i",
str(layout["boot"]), # Partition number
"-t",
"efi", # Mark this partition as bootable for u-boot
"-b",
boot_part_start,
"-s",
s_boot,
"-l",
"pmOS_boot",
"/dev/install",
],
]
dev_size = pmb.chroot.root(
["blockdev", "--getsz", "/dev/install"], output_return=True)
dev_size = pmb.chroot.root(["blockdev", "--getsz", "/dev/install"], output_return=True)
# 33: Sec GPT table (32) + Sec GPT header (1)
root_size = str(int(dev_size) - int(s_root_start) - 33)
commands += [
[
"cgpt", "add",
"-i", str(layout["root"]),
"-t", "data",
"-b", s_root_start,
"-s", root_size,
"-l", "pmOS_root",
"/dev/install"
"cgpt",
"add",
"-i",
str(layout["root"]),
"-t",
"data",
"-b",
s_root_start,
"-s",
root_size,
"-l",
"pmOS_root",
"/dev/install",
],
["partx", "-a", "/dev/install"]
["partx", "-a", "/dev/install"],
]
for command in commands:
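partition_cgpt() above does its math in 512-byte sectors: the deviceinfo kernel partition offsets are already sectors, while the boot size and reserved space start out in MiB. The conversions in isolation (the kernel partition values are made-up examples; real ones come from deviceinfo):

    def mib_to_sectors(size_mib):
        # 512-byte sectors, as used for the cgpt -b/-s arguments above
        return int(size_mib * 1024 * 1024 / 512)

    kpart_start, kpart_size = 8192, mib_to_sectors(8)
    boot_part_start = kpart_start + kpart_size
    s_boot = mib_to_sectors(256)
    s_root_start = boot_part_start + s_boot + mib_to_sectors(0)  # no reserved space

    print(boot_part_start, s_boot, s_root_start)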

View file

@ -24,8 +24,7 @@ def create_zip(args: PmbArgs, chroot: Chroot, device: str):
fvars = pmb.flasher.variables(args, flavor, method)
# Install recovery installer package in buildroot
pmb.chroot.apk.install(["postmarketos-android-recovery-installer"],
chroot)
pmb.chroot.apk.install(["postmarketos-android-recovery-installer"], chroot)
logging.info(f"({chroot}) create recovery zip")
@ -65,12 +64,12 @@ def create_zip(args: PmbArgs, chroot: Chroot, device: str):
# Move config file from /tmp/ to zip root
["mv", "/tmp/install_options", "chroot/install_options"],
# Create tar archive of the rootfs
["tar", "-pcf", "rootfs.tar", "--exclude", "./home", "-C", rootfs,
"."],
["tar", "-pcf", "rootfs.tar", "--exclude", "./home", "-C", rootfs, "."],
# Append packages keys
["tar", "-prf", "rootfs.tar", "-C", "/", "./etc/apk/keys"],
# Compress with -1 for speed improvement
["gzip", "-f1", "rootfs.tar"],
["build-recovery-zip", device]]
["build-recovery-zip", device],
]
for command in commands:
pmb.chroot.root(command, chroot, working_dir=zip_root)

View file

@ -22,8 +22,7 @@ def get_groups(config: Config) -> List[str]:
apkbuild = pmb.helpers.pmaports.get(meta)
groups = apkbuild["_pmb_groups"]
if groups:
logging.debug(f"{meta}: install _pmb_groups:"
f" {', '.join(groups)}")
logging.debug(f"{meta}: install _pmb_groups:" f" {', '.join(groups)}")
ret += groups
# UI-extras subpackage
@ -31,8 +30,7 @@ def get_groups(config: Config) -> List[str]:
if config.ui_extras and meta_extras in apkbuild["subpackages"]:
groups = apkbuild["subpackages"][meta_extras]["_pmb_groups"]
if groups:
logging.debug(f"{meta_extras}: install _pmb_groups:"
f" {', '.join(groups)}")
logging.debug(f"{meta_extras}: install _pmb_groups:" f" {', '.join(groups)}")
ret += groups
return ret

View file

@ -17,7 +17,6 @@ class Wrapper:
self.hits = 0
self.misses = 0
# When someone attempts to call a cached function, they'll
# actually end up here. We first check if we have a cached
# result and if not then we do the actual function call and
@ -74,7 +73,6 @@ class Cache:
self.kwargs = kwargs
self.cache_deepcopy = cache_deepcopy
# Build the cache key, or return None to not cache in the case where
# we only cache when an argument has a specific value
def build_key(self, func: Callable, *args, **kwargs) -> Optional[str]:
@ -95,12 +93,16 @@ class Cache:
elif val.default != inspect.Parameter.empty:
passed_args[k] = val.default
else:
raise ValueError(f"Invalid cache key argument {k}"
f" in function {func.__module__}.{func.__name__}")
raise ValueError(
f"Invalid cache key argument {k}"
f" in function {func.__module__}.{func.__name__}"
)
for k, v in self.kwargs.items():
if k not in signature.parameters.keys():
raise ValueError(f"Cache key attribute {k} is not a valid parameter to {func.__name__}()")
raise ValueError(
f"Cache key attribute {k} is not a valid parameter to {func.__name__}()"
)
passed_val = passed_args[k]
if passed_val != v:
# Don't cache
@ -114,20 +116,22 @@ class Cache:
if v.__str__ != object.__str__:
key += f"{v}~"
else:
raise ValueError(f"Cache key argument {k} to function"
f" {func.__name__} must be a stringable type")
raise ValueError(
f"Cache key argument {k} to function"
f" {func.__name__} must be a stringable type"
)
return key
def __call__(self, func: Callable):
argnames = func.__code__.co_varnames
for a in self.params:
if a not in argnames:
raise ValueError(f"Cache key attribute {a} is not a valid parameter to {func.__name__}()")
raise ValueError(
f"Cache key attribute {a} is not a valid parameter to {func.__name__}()"
)
return Wrapper(self, func)
def clear(self):
self.cache.clear()
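For illustration, a minimal sketch of the wrap-and-memoize flow the comments above describe: the wrapper builds a string key from the call arguments, returns a cached result on a hit, and otherwise calls the real function and stores the result. This is a simplified, hypothetical stand-in, not pmbootstrap's actual Cache/Wrapper implementation:

from typing import Any, Callable, Dict

def memoize(func: Callable) -> Callable:
    store: Dict[str, Any] = {}

    def wrapper(*args: Any, **kwargs: Any) -> Any:
        # Build a stringable cache key from positional and keyword arguments
        key = "~".join([str(a) for a in args]
                       + [f"{k}={v}" for k, v in sorted(kwargs.items())])
        if key not in store:
            store[key] = func(*args, **kwargs)
        return store[key]

    return wrapper

@memoize
def multiply_2(x: int) -> int:
    return x * 2

assert multiply_2(3) == 6
assert multiply_2(3) == 6  # second call is served from the cache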

View file

@ -28,6 +28,7 @@ def test_cache_hits_basic():
assert multiply_2_cached(3) == 6
assert multiply_2_cached.hits == 2
def test_cache_hits_kwargs():
def multiply_2(x: int, y: int = 2, z: List[int] = []) -> int:
return x * y + sum(z)
@ -65,6 +66,7 @@ def test_cache_hits_kwargs():
assert multiply_2_cached_y3(1, 3, [4, 1]) == 8
assert multiply_2_cached_y3.hits == 1
def test_build_key():
def multiply_2(x: int, y: int = 2, z: List[int] = []) -> int:
return x * y + sum(z)

View file

@ -19,7 +19,7 @@ def start_nbd_server(device: str, replace: bool, ip="172.16.42.2", port=9999):
:param port: port of nbd server
"""
pmb.chroot.apk.install(['nbd'], Chroot.native())
pmb.chroot.apk.install(["nbd"], Chroot.native())
chroot = Chroot.native()
@ -27,18 +27,20 @@ def start_nbd_server(device: str, replace: bool, ip="172.16.42.2", port=9999):
if not (chroot / rootfs_path).exists() or replace:
rootfs_path2 = Path("/home/pmos/rootfs") / f"{device}.img"
if not (chroot / rootfs_path2).exists():
raise RuntimeError("The rootfs has not been generated yet, please "
"run 'pmbootstrap install' first.")
if replace and not \
pmb.helpers.cli.confirm(f"Are you sure you want to "
f"replace the rootfs for "
f"{device}?"):
raise RuntimeError(
"The rootfs has not been generated yet, please " "run 'pmbootstrap install' first."
)
if replace and not pmb.helpers.cli.confirm(
f"Are you sure you want to " f"replace the rootfs for " f"{device}?"
):
return
pmb.chroot.root(["cp", rootfs_path2, rootfs_path])
logging.info(f"NOTE: Copied device image to {get_context().config.work}"
f"/images_netboot/. The image will persist \"pmbootstrap "
f"zap\" for your convenience. Use \"pmbootstrap netboot "
f"serve --help\" for more options.")
logging.info(
f"NOTE: Copied device image to {get_context().config.work}"
f'/images_netboot/. The image will persist "pmbootstrap '
f'zap" for your convenience. Use "pmbootstrap netboot '
f'serve --help" for more options.'
)
logging.info(f"Running nbd server for {device} on {ip} port {port}.")
@ -62,8 +64,9 @@ def start_nbd_server(device: str, replace: bool, ip="172.16.42.2", port=9999):
break
logging.info("Found postmarketOS device, serving image...")
pmb.chroot.root(["nbd-server", f"{ip}@{port}", rootfs_path, "-d"],
check=False, disable_timeout=True)
pmb.chroot.root(
["nbd-server", f"{ip}@{port}", rootfs_path, "-d"], check=False, disable_timeout=True
)
logging.info("nbd-server quit. Connection lost?")
# On a reboot nbd-server will quit, but the IP address sticks around
# for a bit longer, so wait.

View file

@ -33,15 +33,19 @@ revar5 = re.compile(r"([a-zA-Z_]+[a-zA-Z0-9_]*)=")
def replace_variable(apkbuild, value: str) -> str:
def log_key_not_found(match):
logging.verbose(f"{apkbuild['pkgname']}: key '{match.group(1)}' for"
f" replacing '{match.group(0)}' not found, ignoring")
logging.verbose(
f"{apkbuild['pkgname']}: key '{match.group(1)}' for"
f" replacing '{match.group(0)}' not found, ignoring"
)
# ${foo}
for match in revar.finditer(value):
try:
logging.verbose("{}: replace '{}' with '{}'".format(
apkbuild["pkgname"], match.group(0),
apkbuild[match.group(1)]))
logging.verbose(
"{}: replace '{}' with '{}'".format(
apkbuild["pkgname"], match.group(0), apkbuild[match.group(1)]
)
)
value = value.replace(match.group(0), apkbuild[match.group(1)], 1)
except KeyError:
log_key_not_found(match)
@ -50,9 +54,9 @@ def replace_variable(apkbuild, value: str) -> str:
for match in revar2.finditer(value):
try:
newvalue = apkbuild[match.group(1)]
logging.verbose("{}: replace '{}' with '{}'".format(
apkbuild["pkgname"], match.group(0),
newvalue))
logging.verbose(
"{}: replace '{}' with '{}'".format(apkbuild["pkgname"], match.group(0), newvalue)
)
value = value.replace(match.group(0), newvalue, 1)
except KeyError:
log_key_not_found(match)
@ -66,8 +70,9 @@ def replace_variable(apkbuild, value: str) -> str:
if replacement is None: # arg 3 is optional
replacement = ""
newvalue = newvalue.replace(search, replacement, 1)
logging.verbose("{}: replace '{}' with '{}'".format(
apkbuild["pkgname"], match.group(0), newvalue))
logging.verbose(
"{}: replace '{}' with '{}'".format(apkbuild["pkgname"], match.group(0), newvalue)
)
value = value.replace(match.group(0), newvalue, 1)
except KeyError:
log_key_not_found(match)
@ -80,8 +85,9 @@ def replace_variable(apkbuild, value: str) -> str:
substr = match.group(2)
if newvalue.startswith(substr):
newvalue = newvalue.replace(substr, "", 1)
logging.verbose("{}: replace '{}' with '{}'".format(
apkbuild["pkgname"], match.group(0), newvalue))
logging.verbose(
"{}: replace '{}' with '{}'".format(apkbuild["pkgname"], match.group(0), newvalue)
)
value = value.replace(match.group(0), newvalue, 1)
except KeyError:
log_key_not_found(match)
@ -123,7 +129,7 @@ def read_file(path: Path):
"""
with path.open(encoding="utf-8") as handle:
lines = handle.readlines()
if handle.newlines != '\n':
if handle.newlines != "\n":
raise RuntimeError(f"Wrong line endings in APKBUILD: {path}")
return lines
@ -160,7 +166,7 @@ def parse_next_attribute(lines, i, path):
# Determine end quote sign
end_char = None
for char in ["'", "\""]:
for char in ["'", '"']:
if value.startswith(char):
end_char = char
value = value[1:]
@ -185,8 +191,9 @@ def parse_next_attribute(lines, i, path):
value += line.strip()
i += 1
raise RuntimeError(f"Can't find closing quote sign ({end_char}) for"
f" attribute '{attribute}' in: {path}")
raise RuntimeError(
f"Can't find closing quote sign ({end_char}) for" f" attribute '{attribute}' in: {path}"
)
def _parse_attributes(path, lines, apkbuild_attributes, ret):
@ -280,13 +287,15 @@ def _parse_subpackage(path, lines, apkbuild, subpackages, subpkg):
subpackages[subpkgname] = None
logging.verbose(
f"{apkbuild['pkgname']}: subpackage function '{subpkgsplit}' for "
f"subpackage '{subpkgname}' not found, ignoring")
f"subpackage '{subpkgname}' not found, ignoring"
)
return
if not end:
raise RuntimeError(
f"Could not find end of subpackage function, no line starts with "
f"'}}' after '{prefix}' in {path}")
f"'}}' after '{prefix}' in {path}"
)
lines = lines[start:end]
# Strip tabs before lines in function
@ -303,8 +312,7 @@ def _parse_subpackage(path, lines, apkbuild, subpackages, subpkg):
apkbuild["_pmb_recommends"] = ""
# Parse relevant attributes for the subpackage
_parse_attributes(
path, lines, pmb.config.apkbuild_package_attributes, apkbuild)
_parse_attributes(path, lines, pmb.config.apkbuild_package_attributes, apkbuild)
# Return only properties interesting for subpackages
ret = {}
@ -347,17 +355,18 @@ def apkbuild(path: Path, check_pkgver=True, check_pkgname=True):
if not os.path.realpath(path).endswith(suffix):
logging.info(f"Folder: '{os.path.dirname(path)}'")
logging.info(f"Pkgname: '{ret['pkgname']}'")
raise RuntimeError("The pkgname must be equal to the name of"
" the folder that contains the APKBUILD!")
raise RuntimeError(
"The pkgname must be equal to the name of" " the folder that contains the APKBUILD!"
)
# Sanity check: pkgver
if check_pkgver:
if not pmb.parse.version.validate(ret["pkgver"]):
logging.info(
"NOTE: Valid pkgvers are described here: "
"https://wiki.alpinelinux.org/wiki/APKBUILD_Reference#pkgver")
raise RuntimeError(f"Invalid pkgver '{ret['pkgver']}' in"
f" APKBUILD: {path}")
"https://wiki.alpinelinux.org/wiki/APKBUILD_Reference#pkgver"
)
raise RuntimeError(f"Invalid pkgver '{ret['pkgver']}' in" f" APKBUILD: {path}")
# Fill cache
return ret
@ -375,7 +384,7 @@ def kernels(device: str):
"downstream": "Downstream description"}
"""
# Read the APKBUILD
apkbuild_path = pmb.helpers.devices.find_path(device, 'APKBUILD')
apkbuild_path = pmb.helpers.devices.find_path(device, "APKBUILD")
if apkbuild_path is None:
return None
subpackages = apkbuild(apkbuild_path)["subpackages"]
@ -387,8 +396,7 @@ def kernels(device: str):
if not subpkgname.startswith(subpackage_prefix):
continue
if subpkg is None:
raise RuntimeError(
f"Cannot find subpackage function for: {subpkgname}")
raise RuntimeError(f"Cannot find subpackage function for: {subpkgname}")
name = subpkgname[len(subpackage_prefix) :]
ret[name] = subpkg["pkgdesc"]
@ -407,7 +415,7 @@ def _parse_comment_tags(lines, tag):
:param tag: the tag to parse, e.g. Maintainer
:returns: array of values of the tag, one per line
"""
prefix = f'# {tag}:'
prefix = f"# {tag}:"
ret = []
for line in lines:
if line.startswith(prefix):
@ -425,7 +433,7 @@ def maintainers(path):
:returns: array of (at least one) maintainer, or None
"""
lines = read_file(path)
maintainers = _parse_comment_tags(lines, 'Maintainer')
maintainers = _parse_comment_tags(lines, "Maintainer")
if not maintainers:
return None
@ -434,8 +442,8 @@ def maintainers(path):
if len(maintainers) > 1:
raise RuntimeError("Multiple Maintainer: lines in APKBUILD")
maintainers += _parse_comment_tags(lines, 'Co-Maintainer')
if '' in maintainers:
maintainers += _parse_comment_tags(lines, "Co-Maintainer")
if "" in maintainers:
raise RuntimeError("Empty (Co-)Maintainer: tag")
return maintainers
@ -448,7 +456,7 @@ def archived(path):
:param path: full path to the APKBUILD
:returns: reason why APKBUILD is archived, or None
"""
archived = _parse_comment_tags(read_file(path), 'Archived')
archived = _parse_comment_tags(read_file(path), "Archived")
if not archived:
return None
return '\n'.join(archived)
return "\n".join(archived)

View file

@ -25,6 +25,7 @@ apkindex_map = {
required_apkindex_keys = ["arch", "pkgname", "version"]
def parse_next_block(path: Path, lines: List[str]):
"""Parse the next block in an APKINDEX.
@ -62,7 +63,7 @@ def parse_next_block(path: Path, lines: List[str]):
# The checksum key is always the FIRST in the block, so when we find
# it we know we're done.
if k == 'C':
if k == "C":
break
if key:
if key in ret:
@ -79,10 +80,11 @@ def parse_next_block(path: Path, lines: List[str]):
if required_found != len(required_apkindex_keys):
for key in required_apkindex_keys:
if key not in ret:
raise RuntimeError(f"Missing required key '{key}' in block "
f"{ret}, file: {path}")
raise RuntimeError(f"Expected {len(required_apkindex_keys)} required keys,"
f" but found {required_found} in block: {ret}, file: {path}")
raise RuntimeError(f"Missing required key '{key}' in block " f"{ret}, file: {path}")
raise RuntimeError(
f"Expected {len(required_apkindex_keys)} required keys,"
f" but found {required_found} in block: {ret}, file: {path}"
)
# Format optional lists
for key in ["provides", "depends"]:
@ -171,8 +173,10 @@ def parse(path: Path, multiple_providers=True):
"""
# Require the file to exist
if not path.is_file():
logging.verbose("NOTE: APKINDEX not found, assuming no binary packages"
f" exist for that architecture: {path}")
logging.verbose(
"NOTE: APKINDEX not found, assuming no binary packages"
f" exist for that architecture: {path}"
)
return {}
# Try to get a cached result first
@ -213,8 +217,7 @@ def parse(path: Path, multiple_providers=True):
# Skip virtual packages
if "timestamp" not in block:
logging.verbose(f"Skipped virtual package {block} in"
f" file: {path}")
logging.verbose(f"Skipped virtual package {block} in" f" file: {path}")
continue
# Add the next package and all aliases
@ -273,8 +276,10 @@ def clear_cache(path: Path):
del pmb.helpers.other.cache["apkindex"][key]
return True
else:
logging.verbose("Nothing to do, path was not in cache:" +
str(pmb.helpers.other.cache["apkindex"].keys()))
logging.verbose(
"Nothing to do, path was not in cache:"
+ str(pmb.helpers.other.cache["apkindex"].keys())
)
return False
@ -311,8 +316,10 @@ def providers(package, arch: Optional[Arch]=None, must_exist=True, indexes=None)
if provider_pkgname in ret:
version_last = ret[provider_pkgname]["version"]
if pmb.parse.version.compare(version, version_last) == -1:
logging.verbose(f"{package}: provided by: {provider_pkgname}-{version}"
f"in {path} (but {version_last} is higher)")
logging.verbose(
f"{package}: provided by: {provider_pkgname}-{version}"
f"in {path} (but {version_last} is higher)"
)
continue
# Add the provider to ret
@ -321,6 +328,7 @@ def providers(package, arch: Optional[Arch]=None, must_exist=True, indexes=None)
if ret == {} and must_exist:
import os
logging.debug(f"Searched in APKINDEX files: {', '.join([os.fspath(x) for x in indexes])}")
raise RuntimeError("Could not find package '" + package + "'!")
@ -346,7 +354,8 @@ def provider_highest_priority(providers, pkgname):
if priority_providers:
logging.debug(
f"{pkgname}: picked provider(s) with highest priority "
f"{max_priority}: {', '.join(priority_providers.keys())}")
f"{max_priority}: {', '.join(priority_providers.keys())}"
)
return priority_providers
# None of the providers seems to have a provider_priority defined
@ -365,7 +374,8 @@ def provider_shortest(providers, pkgname):
if len(providers) != 1:
logging.debug(
f"{pkgname}: has multiple providers ("
f"{', '.join(providers.keys())}), picked shortest: {ret}")
f"{', '.join(providers.keys())}), picked shortest: {ret}"
)
return providers[ret]
@ -398,6 +408,5 @@ def package(package, arch: Optional[Arch]=None, must_exist=True, indexes=None):
# No provider
if must_exist:
raise RuntimeError("Package '" + package + "' not found in any"
" APKINDEX.")
raise RuntimeError("Package '" + package + "' not found in any" " APKINDEX.")
return None
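For illustration, a minimal sketch of the block structure this parser walks: an APKINDEX is a series of blank-line-separated blocks of single-letter "key:value" lines (C = checksum, P = pkgname, V = version, A = arch). A hypothetical, simplified reader, not the real parse_next_block():

def iter_blocks(text: str):
    # Blocks are separated by blank lines; each line is "<letter>:<value>".
    for raw in text.strip().split("\n\n"):
        block = {}
        for line in raw.splitlines():
            key, _, value = line.partition(":")
            block[key] = value
        yield block

sample = ("C:Q1abcdef=\nP:hello-world\nV:1-r6\nA:aarch64\n"
          "\n"
          "C:Q1ghijkl=\nP:hello-world-wrapper\nV:1-r6\nA:aarch64\n")
for block in iter_blocks(sample):
    print(block["P"], block["V"])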

File diff suppressed because it is too large

View file

@ -14,7 +14,7 @@ def binfmt_info(arch_qemu):
logging.verbose(f"parsing: {info}")
with open(info, "r") as handle:
for line in handle:
if line.startswith('#') or "=" not in line:
if line.startswith("#") or "=" not in line:
continue
split = line.split("=")
key = split[0].strip()
@ -26,8 +26,7 @@ def binfmt_info(arch_qemu):
for type in ["mask", "magic"]:
key = arch_qemu + "_" + type
if key not in full:
raise RuntimeError(
f"Could not find key {key} in binfmt info file: {info}")
raise RuntimeError(f"Could not find key {key} in binfmt info file: {info}")
ret[type] = full[key]
logging.verbose("=> " + str(ret))
return ret

View file

@ -14,9 +14,9 @@ from pmb.core import Chroot
def is_dtb(path):
if not os.path.isfile(path):
return False
with open(path, 'rb') as f:
with open(path, "rb") as f:
# Check FDT magic identifier (0xd00dfeed)
return f.read(4) == b'\xd0\x0d\xfe\xed'
return f.read(4) == b"\xd0\x0d\xfe\xed"
def get_mtk_label(path):
@ -28,12 +28,12 @@ def get_mtk_label(path):
if not os.path.exists(path):
return None
with open(path, 'rb') as f:
with open(path, "rb") as f:
# Check Mediatek header (0x88168858)
if not f.read(4) == b'\x88\x16\x88\x58':
if not f.read(4) == b"\x88\x16\x88\x58":
return None
f.seek(8)
label = f.read(32).decode("utf-8").rstrip('\0')
label = f.read(32).decode("utf-8").rstrip("\0")
if label == "RECOVERY":
logging.warning(
@ -44,7 +44,8 @@ def get_mtk_label(path):
" is wrong and it may not boot; in that case, run bootimg_analyze"
" again with a regular boot.img. If this *is* a regular boot.img,"
" replace the value of deviceinfo_bootimg_mtk_label_ramdisk with"
" 'RECOVERY'.")
" 'RECOVERY'."
)
return "ROOTFS"
else:
return label
@ -59,14 +60,14 @@ def get_qcdt_type(path):
if not os.path.exists(path):
return None
with open(path, 'rb') as f:
with open(path, "rb") as f:
fourcc = f.read(4)
if fourcc == b'QCDT':
if fourcc == b"QCDT":
return "qcom"
elif fourcc == b'SPRD':
elif fourcc == b"SPRD":
return "sprd"
elif fourcc == b'DTBH':
elif fourcc == b"DTBH":
return "exynos"
else:
return None
@ -76,9 +77,11 @@ def bootimg(path: Path):
if not path.exists():
raise RuntimeError(f"Could not find file '{path}'")
logging.info("NOTE: You will be prompted for your sudo/doas password, so"
logging.info(
"NOTE: You will be prompted for your sudo/doas password, so"
" we can set up a chroot to extract and analyze your"
" boot.img file")
" boot.img file"
)
pmb.chroot.apk.install(["file", "unpackbootimg"], Chroot.native())
temp_path = pmb.chroot.other.tempfolder(Path("/tmp/bootimg_parser"))
@ -89,81 +92,84 @@ def bootimg(path: Path):
pmb.helpers.run.root(["cp", path, bootimg_path])
pmb.helpers.run.root(["chmod", "a+r", bootimg_path])
file_output = pmb.chroot.user(["file", "-b", "boot.img"],
working_dir=temp_path,
output_return=True).rstrip()
file_output = pmb.chroot.user(
["file", "-b", "boot.img"], working_dir=temp_path, output_return=True
).rstrip()
if "android bootimg" not in file_output.lower():
if get_context().force:
logging.warning("WARNING: boot.img file seems to be invalid, but"
" proceeding anyway (-f specified)")
logging.warning(
"WARNING: boot.img file seems to be invalid, but"
" proceeding anyway (-f specified)"
)
else:
logging.info("NOTE: If you are sure that your file is a valid"
logging.info(
"NOTE: If you are sure that your file is a valid"
" boot.img file, you could force the analysis"
f" with: 'pmbootstrap bootimg_analyze {path} -f'")
if ("linux kernel" in file_output.lower() or
"ARM OpenFirmware FORTH Dictionary" in file_output):
raise RuntimeError("File is a Kernel image, you might need the"
f" with: 'pmbootstrap bootimg_analyze {path} -f'"
)
if (
"linux kernel" in file_output.lower()
or "ARM OpenFirmware FORTH Dictionary" in file_output
):
raise RuntimeError(
"File is a Kernel image, you might need the"
" 'heimdall-isorec' flash method. See also:"
" <https://wiki.postmarketos.org/wiki/"
"Deviceinfo_flash_methods>")
"Deviceinfo_flash_methods>"
)
else:
raise RuntimeError("File is not an Android boot.img. (" +
file_output + ")")
raise RuntimeError("File is not an Android boot.img. (" + file_output + ")")
# Extract all the files
pmb.chroot.user(["unpackbootimg", "-i", "boot.img"],
working_dir=temp_path)
pmb.chroot.user(["unpackbootimg", "-i", "boot.img"], working_dir=temp_path)
output = {}
header_version = 0
# Get base, offsets, pagesize, cmdline and qcdt info
# This file does not exist for example for qcdt images
if os.path.isfile(f"{bootimg_path}-header_version"):
with open(f"{bootimg_path}-header_version", 'r') as f:
header_version = int(f.read().replace('\n', ''))
with open(f"{bootimg_path}-header_version", "r") as f:
header_version = int(f.read().replace("\n", ""))
output["header_version"] = str(header_version)
if header_version >= 3:
output["pagesize"] = "4096"
else:
with open(f"{bootimg_path}-base", 'r') as f:
output["base"] = ("0x%08x" % int(f.read().replace('\n', ''), 16))
with open(f"{bootimg_path}-kernel_offset", 'r') as f:
output["kernel_offset"] = ("0x%08x"
% int(f.read().replace('\n', ''), 16))
with open(f"{bootimg_path}-ramdisk_offset", 'r') as f:
output["ramdisk_offset"] = ("0x%08x"
% int(f.read().replace('\n', ''), 16))
with open(f"{bootimg_path}-second_offset", 'r') as f:
output["second_offset"] = ("0x%08x"
% int(f.read().replace('\n', ''), 16))
with open(f"{bootimg_path}-tags_offset", 'r') as f:
output["tags_offset"] = ("0x%08x"
% int(f.read().replace('\n', ''), 16))
with open(f"{bootimg_path}-pagesize", 'r') as f:
output["pagesize"] = f.read().replace('\n', '')
with open(f"{bootimg_path}-base", "r") as f:
output["base"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-kernel_offset", "r") as f:
output["kernel_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-ramdisk_offset", "r") as f:
output["ramdisk_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-second_offset", "r") as f:
output["second_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-tags_offset", "r") as f:
output["tags_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
with open(f"{bootimg_path}-pagesize", "r") as f:
output["pagesize"] = f.read().replace("\n", "")
if header_version == 2:
with open(f"{bootimg_path}-dtb_offset", 'r') as f:
output["dtb_offset"] = ("0x%08x"
% int(f.read().replace('\n', ''), 16))
with open(f"{bootimg_path}-dtb_offset", "r") as f:
output["dtb_offset"] = "0x%08x" % int(f.read().replace("\n", ""), 16)
if get_mtk_label(f"{bootimg_path}-kernel") is not None:
output["mtk_label_kernel"] = get_mtk_label(f"{bootimg_path}-kernel")
if get_mtk_label(f"{bootimg_path}-ramdisk") is not None:
output["mtk_label_ramdisk"] = get_mtk_label(f"{bootimg_path}-ramdisk")
output["qcdt"] = ("true" if os.path.isfile(f"{bootimg_path}-dt") and
os.path.getsize(f"{bootimg_path}-dt") > 0 else "false")
output["qcdt"] = (
"true"
if os.path.isfile(f"{bootimg_path}-dt") and os.path.getsize(f"{bootimg_path}-dt") > 0
else "false"
)
if get_qcdt_type(f"{bootimg_path}-dt") is not None:
output["qcdt_type"] = get_qcdt_type(f"{bootimg_path}-dt")
output["dtb_second"] = ("true" if is_dtb(f"{bootimg_path}-second")
else "false")
output["dtb_second"] = "true" if is_dtb(f"{bootimg_path}-second") else "false"
with open(f"{bootimg_path}-cmdline", 'r') as f:
output["cmdline"] = f.read().replace('\n', '')
with open(f"{bootimg_path}-cmdline", "r") as f:
output["cmdline"] = f.read().replace("\n", "")
# Cleanup
pmb.chroot.user(["rm", "-r", temp_path])

View file

@ -18,7 +18,7 @@ def arm_big_little_first_group_ncpus() -> Optional[int]:
counter = 0
part = None
with open('/proc/cpuinfo', 'r') as cpuinfo:
with open("/proc/cpuinfo", "r") as cpuinfo:
for line in cpuinfo:
match = pattern.match(line)
if match:

Some files were not shown because too many files have changed in this diff