pmb.parse.apkindex: Introduce proper typing (MR 2425)

Adjust other code accordingly.

Closes https://gitlab.postmarketos.org/postmarketOS/pmbootstrap/-/issues/2455
Stefan Hansson 2024-09-29 20:24:36 +02:00 committed by Newbyte
parent 566b43edd4
commit 71772b9b6b
18 changed files with 279 additions and 179 deletions
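For context: the new ApkindexBlock type (imported below from pmb.core.apkindex_block, but its definition is not part of the hunks shown here) is what replaces the untyped dicts throughout this diff. A minimal sketch, assuming a plain dataclass and inferring the field types from how the blocks are constructed and accessed in the hunks below:

# Sketch only: field names match the constructor calls in this diff; the exact
# types (e.g. whether timestamp is a str) are assumptions, not taken from the MR.
from dataclasses import dataclass

from pmb.core.arch import Arch


@dataclass
class ApkindexBlock:
    arch: Arch                     # parsed via Arch.from_str() in parse_next_block()
    depends: list[str]             # empty list when the package has no dependencies
    origin: str | None             # None for virtual packages (see the zap/pkgrel_bump checks)
    pkgname: str
    provides: list[str]
    provider_priority: int | None  # None when the APKINDEX has no provider_priority
    timestamp: str | None          # None for virtual packages; parse() skips those
    version: str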

@@ -18,7 +18,11 @@ def generate(pkgname: str) -> None:
# Parse version from APKINDEX
package_data = pmb.parse.apkindex.package("busybox")
version = package_data["version"]
if package_data is None:
raise RuntimeError("Couldn't find APKINDEX for busybox!")
version = package_data.version
pkgver = version.split("-r")[0]
pkgrel = version.split("-r")[1]

@@ -210,14 +210,17 @@ def get_upstream_aport(pkgname: str, arch: Arch | None = None, retain_branch: bo
index_path = pmb.helpers.repo.alpine_apkindex_path(repo, arch)
package = pmb.parse.apkindex.package(pkgname, indexes=[index_path])
if package is None:
raise RuntimeError(f"Couldn't find {pkgname} in APKINDEX!")
# Compare version (return when equal)
compare = pmb.parse.version.compare(apkbuild_version, package["version"])
compare = pmb.parse.version.compare(apkbuild_version, package.version)
# APKBUILD > binary: this is fine
if compare == 1:
logging.info(
f"NOTE: {pkgname} {arch} binary package has a lower"
f" version {package['version']} than the APKBUILD"
f" version {package.version} than the APKBUILD"
f" {apkbuild_version}"
)
return aport_path
@@ -229,7 +232,7 @@ def get_upstream_aport(pkgname: str, arch: Arch | None = None, retain_branch: bo
" local checkout of Alpine's aports ("
+ apkbuild_version
+ ") compared to Alpine's binary package ("
+ package["version"]
+ package.version
+ ")!"
)
logging.info("NOTE: You can update your local checkout with: 'pmbootstrap pull'")

@@ -12,12 +12,14 @@ from pmb.core import Chroot
from pmb.core.context import get_context
def generate(pkgname):
def generate(pkgname: str) -> None:
arch = Arch.x86
if pkgname != "grub-efi-x86":
raise RuntimeError("only grub-efi-x86 is available")
package_data = pmb.parse.apkindex.package("grub")
version = package_data["version"]
if package_data is None:
raise RuntimeError("Couldn't find package grub!")
version = package_data.version
pkgver = version.split("-r")[0]
pkgrel = version.split("-r")[1]

@@ -17,7 +17,9 @@ def generate(pkgname: str) -> None:
# Parse musl version from APKINDEX
package_data = pmb.parse.apkindex.package("musl")
version = package_data["version"]
if package_data is None:
raise RuntimeError("Couldn't find package musl!")
version = package_data.version
pkgver = version.split("-r")[0]
pkgrel = version.split("-r")[1]

@@ -51,7 +51,7 @@ def check_build_for_arch(pkgname: str, arch: Arch):
pmaport_version = pmaport["pkgver"] + "-r" + pmaport["pkgrel"]
logging.debug(
pkgname + ": found pmaport (" + pmaport_version + ") and"
" binary package (" + binary["version"] + ", from"
" binary package (" + binary.version + ", from"
" postmarketOS or Alpine), but pmaport can't be built"
f" for {arch} -> using binary package"
)
@@ -274,7 +274,7 @@ def prioritise_build_queue(disarray: list[BuildQueueItem]) -> list[BuildQueueIte
)
if not dep_data:
raise NonBugError(f"{item['name']}: dependency not found: {dep}")
dep = dep_data["pkgname"]
dep = dep_data.pkgname
if dep in all_pkgnames:
unmet_deps.setdefault(item["name"], []).append(dep)
@@ -483,11 +483,11 @@ def packages(
# building with --src with an outdated pmaports checkout.
if (
index_data
and pmb.parse.version.compare(index_data["version"], f"{pkgver}-r{apkbuild['pkgrel']}")
and pmb.parse.version.compare(index_data.version, f"{pkgver}-r{apkbuild['pkgrel']}")
== 1
):
raise NonBugError(
f"A binary package for {name} has a newer version ({index_data['version']})"
f"A binary package for {name} has a newer version ({index_data.version})"
f" than the source ({pkgver}-{apkbuild['pkgrel']}). Please ensure your pmaports branch is up"
" to date and that you don't have a newer version of the package in your local"
f" binary repo ({context.config.work / 'packages' / channel / pkg_arch})."

@@ -107,7 +107,7 @@ def get_status(arch, apkbuild) -> BuildStatus:
return BuildStatus.CANT_BUILD
# a) Binary repo has a newer version
version_binary = index_data["version"]
version_binary = index_data.version
if pmb.parse.version.compare(version_binary, version_pmaports) == 1:
logging.warning(
f"WARNING: about to install {package} {version_binary}"

@@ -98,7 +98,7 @@ def check_min_version(chroot: Chroot = Chroot.native()):
)
# Compare
version_installed = installed_pkgs["apk-tools"]["version"]
version_installed = installed_pkgs["apk-tools"].version
pmb.helpers.apk.check_outdated(
version_installed,
"Delete your http cache and zap all chroots, then try again:" " 'pmbootstrap zap -hc'",
@@ -150,7 +150,7 @@ def packages_get_locally_built_apks(packages, arch: Arch) -> list[Path]:
if not data_repo:
continue
apk_file = f"{data_repo['pkgname']}-{data_repo['version']}.apk"
apk_file = f"{data_repo.pkgname}-{data_repo.version}.apk"
# FIXME: we should know what channel we expect this package to be in
# this will have weird behaviour if you build gnome-shell for edge and
# then checkout out the systemd branch... But there isn't
@@ -163,12 +163,13 @@ def packages_get_locally_built_apks(packages, arch: Arch) -> list[Path]:
break
# Record all the packages we have visited so far
walked |= set([data_repo["pkgname"], package])
# Add all dependencies to the list of packages to check, excluding
# meta-deps like cmd:* and so:* as well as conflicts (!).
packages |= (
set(filter(lambda x: ":" not in x and "!" not in x, data_repo["depends"])) - walked
)
walked |= set([data_repo.pkgname, package])
if data_repo.depends:
# Add all dependencies to the list of packages to check, excluding
# meta-deps like cmd:* and so:* as well as conflicts (!).
packages |= (
set(filter(lambda x: ":" not in x and "!" not in x, data_repo.depends)) - walked
)
return local
@@ -283,21 +284,13 @@ def install(packages, chroot: Chroot, build=True, quiet: bool = False):
install_run_apk(to_add, to_add_local, to_del, chroot)
def installed(suffix: Chroot = Chroot.native()):
def installed(suffix: Chroot = Chroot.native()) -> dict[str, pmb.parse.apkindex.ApkindexBlock]:
"""
Read the list of installed packages (which has almost the same format, as
an APKINDEX, but with more keys).
:returns: a dictionary with the following structure:
{ "postmarketos-mkinitfs":
{
"pkgname": "postmarketos-mkinitfs"
"version": "0.0.4-r10",
"depends": ["busybox-extras", "lddtree", ...],
"provides": ["mkinitfs=0.0.1"]
}, ...
}
{ "postmarketos-mkinitfs": ApkindexBlock }
"""
path = suffix / "lib/apk/db/installed"
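Callers of installed() now get ApkindexBlock values instead of nested dicts, so lookups move from ["version"] to .version. A hypothetical usage sketch; the module path pmb.chroot.apk is assumed from context, not confirmed by this diff:

from pmb.core import Chroot
import pmb.chroot.apk

# Map of pkgname -> ApkindexBlock for the native chroot's installed packages
for pkgname, block in pmb.chroot.apk.installed(Chroot.native()).items():
    print(f"{pkgname} {block.version}")  # attribute access instead of block["version"]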

@@ -157,14 +157,18 @@ def download(file):
return pmb.helpers.http.download(f"{base_url}/{file}", file)
def init():
def init() -> None:
"""
Download, verify, extract $WORK/apk.static.
"""
# Get and parse the APKINDEX
apkindex = pmb.helpers.repo.alpine_apkindex_path("main")
index_data = pmb.parse.apkindex.package("apk-tools-static", indexes=[apkindex])
version = index_data["version"]
if index_data is None:
raise RuntimeError("Could not find apk-tools-static in APKINDEX!")
version = index_data.version
# Verify the apk-tools-static version
pmb.helpers.apk.check_outdated(version, "Run 'pmbootstrap update', then try again.")

@@ -1,14 +1,17 @@
from pathlib import Path
import pytest
from pmb.core.arch import Arch
from pmb.core.context import get_context
from pmb.parse.apkindex import ApkindexBlock
from .apk import packages_get_locally_built_apks
import pmb.config.pmaports
@pytest.fixture
def apk_mocks(monkeypatch):
def apk_mocks(monkeypatch) -> dict | None:
def _pmaports_config(_aports=None):
return {
"channel": "edge",
@@ -16,48 +19,67 @@ def apk_mocks(monkeypatch):
monkeypatch.setattr(pmb.config.pmaports, "read_config", _pmaports_config)
def _apkindex_package(_package, _arch, _must_exist=False, indexes=None):
def _apkindex_package(
_package: str, _arch: Arch, _must_exist: bool = False, indexes=None
) -> ApkindexBlock:
if _package == "package1":
return {
"pkgname": _package,
"version": "5.5-r0",
"arch": str(_arch),
"depends": ["package2"],
}
return ApkindexBlock(
arch=_arch,
depends=["package2"],
origin=None,
pkgname=_package,
provides=[],
provider_priority=None,
timestamp=None,
version="5.5-r0",
)
if _package == "package2":
return {
"pkgname": _package,
"version": "5.5-r0",
"arch": str(_arch),
"depends": [],
}
return ApkindexBlock(
arch=_arch,
depends=[],
origin=None,
pkgname=_package,
provides=[],
provider_priority=None,
timestamp=None,
version="5.5-r0",
)
if _package == "package3":
return {
"pkgname": _package,
"version": "5.5-r0",
"arch": str(_arch),
"depends": ["package1", "package4"],
}
return ApkindexBlock(
arch=_arch,
depends=["package1", "package4"],
origin=None,
pkgname=_package,
provides=[],
provider_priority=None,
timestamp=None,
version="5.5-r0",
)
# Test recursive dependency
if _package == "package4":
return {
"pkgname": _package,
"version": "5.5-r0",
"arch": str(_arch),
"depends": ["package3"],
}
return ApkindexBlock(
arch=_arch,
depends=["package3"],
origin=None,
pkgname=_package,
provides=[],
provider_priority=None,
timestamp=None,
version="5.5-r0",
)
monkeypatch.setattr(pmb.parse.apkindex, "package", _apkindex_package)
return None
def create_apk(pkgname, arch):
def create_apk(pkgname: str, arch: Arch) -> Path:
apk_file = get_context().config.work / "packages" / "edge" / arch / f"{pkgname}-5.5-r0.apk"
apk_file.parent.mkdir(parents=True, exist_ok=True)
apk_file.touch()
return apk_file
def test_get_local_apks(pmb_args, apk_mocks):
def test_get_local_apks(pmb_args, apk_mocks) -> None:
"""Ensure packages_get_locally_built_apks() returns paths for local apks"""
pkgname = "package1"

@@ -116,7 +116,7 @@ def zap(
logging.info("Dry run: nothing has been deleted")
def zap_pkgs_local_mismatch(confirm=True, dry=False):
def zap_pkgs_local_mismatch(confirm: bool = True, dry: bool = False) -> None:
channel = pmb.config.pmaports.read_config()["channel"]
if not os.path.exists(f"{get_context().config.work}/packages/{channel}"):
return
@@ -135,10 +135,10 @@ def zap_pkgs_local_mismatch(confirm=True, dry=False):
# Delete packages without same version in aports
blocks = pmb.parse.apkindex.parse_blocks(apkindex_path)
for block in blocks:
pkgname = block["pkgname"]
origin = block["origin"]
version = block["version"]
arch = block["arch"]
pkgname = block.pkgname
origin = block.origin
version = block.version
arch = block.arch
# Apk path
apk_path_short = f"{arch}/{pkgname}-{version}.apk"
@@ -147,6 +147,9 @@ def zap_pkgs_local_mismatch(confirm=True, dry=False):
logging.info("WARNING: Package mentioned in index not" f" found: {apk_path_short}")
continue
if origin is None:
raise RuntimeError("Can't handle virtual packages")
# Aport path
aport_path = pmb.helpers.pmaports.find_optional(origin)
if not aport_path:

@@ -36,6 +36,7 @@ import pmb.install
import pmb.install.blockdevice
import pmb.netboot
import pmb.parse
import pmb.parse.apkindex
import pmb.qemu
import pmb.sideload
from pmb.core import ChrootType, Chroot
@@ -490,7 +491,12 @@ def apkindex_parse(args: PmbArgs) -> None:
if args.package:
if args.package not in result:
raise RuntimeError(f"Package not found in the APKINDEX: {args.package}")
result = result[args.package]
if isinstance(args.package, list):
raise AssertionError
result_temp = result[args.package]
if isinstance(result_temp, pmb.parse.apkindex.ApkindexBlock):
raise AssertionError
result = result_temp
print(json.dumps(result, indent=4))

@@ -9,10 +9,9 @@ See also:
- pmb/helpers/repo.py (work with binary package repos)
"""
import copy
from typing import Any, overload
from typing import overload
from pmb.core.arch import Arch
from pmb.core.context import get_context
from pmb.core.package_metadata import PackageMetadata
from pmb.helpers import logging
import pmb.build._package
@@ -30,27 +29,33 @@ def remove_operators(package):
@overload
def get(pkgname: str, arch: Arch, replace_subpkgnames: bool = False) -> dict[str, Any]: ...
def get(pkgname: str, arch: Arch, replace_subpkgnames: bool = ...) -> PackageMetadata: ...
@overload
def get(
pkgname: str, arch: Arch, replace_subpkgnames: bool = False, must_exist: bool = True
) -> dict[str, Any] | None: ...
pkgname: str, arch: Arch, replace_subpkgnames: bool = ..., must_exist: bool = ...
) -> PackageMetadata | None: ...
@overload
def get(
pkgname: str,
arch: Arch,
replace_subpkgnames: bool = False,
must_exist: bool = True,
try_other_arches: bool = True,
) -> dict[str, Any] | None: ...
replace_subpkgnames: bool = ...,
must_exist: bool = ...,
try_other_arches: bool = ...,
) -> PackageMetadata | None: ...
@Cache("pkgname", "arch", "replace_subpkgnames", "try_other_arches")
def get(pkgname, arch, replace_subpkgnames=False, must_exist=True, try_other_arches=True):
def get(
pkgname: str,
arch: Arch,
replace_subpkgnames: bool = False,
must_exist: bool = True,
try_other_arches: bool = True,
) -> PackageMetadata | None:
"""Find a package in pmaports, and as fallback in the APKINDEXes of the binary packages.
:param pkgname: package name (e.g. "hello-world")
@@ -71,50 +76,37 @@ def get(pkgname, arch, replace_subpkgnames=False, must_exist=True, try_other_arc
* None if the package was not found
"""
# Find in pmaports
ret: dict[str, Any] = {}
ret: PackageMetadata | None = None
pmaport = pmb.helpers.pmaports.get(pkgname, False)
if pmaport:
ret = {
"arch": pmaport["arch"],
"depends": pmb.build._package.get_depends(get_context(), pmaport),
"pkgname": pmaport["pkgname"],
"provides": pmaport["provides"],
"version": pmaport["pkgver"] + "-r" + pmaport["pkgrel"],
}
ret = PackageMetadata.from_pmaport(pmaport)
# Find in APKINDEX (given arch)
if not ret or not pmb.helpers.pmaports.check_arches(ret["arch"], arch):
if not ret or not pmb.helpers.pmaports.check_arches(ret.arch, arch):
pmb.helpers.repo.update(arch)
ret_repo = pmb.parse.apkindex.package(pkgname, arch, False)
# Save as result if there was no pmaport, or if the pmaport can not be
# built for the given arch, but there is a binary package for that arch
# (e.g. temp/mesa can't be built for x86_64, but Alpine has it)
if not ret or (ret_repo and ret_repo["arch"] == arch):
ret = ret_repo
if ret_repo and (not ret or ret_repo.arch == arch):
ret = PackageMetadata.from_apkindex_block(ret_repo)
# Find in APKINDEX (other arches)
if not ret and try_other_arches:
pmb.helpers.repo.update()
for arch_i in Arch.supported():
if arch_i != arch:
ret = pmb.parse.apkindex.package(pkgname, arch_i, False)
apkindex_block = pmb.parse.apkindex.package(pkgname, arch_i, False)
if apkindex_block is not None:
ret = PackageMetadata.from_apkindex_block(apkindex_block)
if ret:
break
# Copy ret (it might have references to caches of the APKINDEX or APKBUILDs
# and we don't want to modify those!)
if ret:
ret = copy.deepcopy(ret)
# Make sure ret["arch"] is a list (APKINDEX code puts a string there)
if ret and isinstance(ret["arch"], str):
ret["arch"] = [ret["arch"]]
# Replace subpkgnames if desired
if replace_subpkgnames:
if replace_subpkgnames and ret:
depends_new = []
for depend in ret["depends"]:
for depend in ret.depends:
depend_data = get(depend, arch, must_exist=False, try_other_arches=try_other_arches)
if not depend_data:
logging.warning(f"WARNING: {pkgname}: failed to resolve" f" dependency '{depend}'")
@@ -122,10 +114,10 @@ def get(pkgname, arch, replace_subpkgnames=False, must_exist=True, try_other_arc
if depend not in depends_new:
depends_new += [depend]
continue
depend_pkgname = depend_data["pkgname"]
depend_pkgname = depend_data.pkgname
if depend_pkgname not in depends_new:
depends_new += [depend_pkgname]
ret["depends"] = depends_new
ret.depends = depends_new
# Save to cache and return
if ret:
@@ -141,7 +133,7 @@ def get(pkgname, arch, replace_subpkgnames=False, must_exist=True, try_other_arc
@Cache("pkgname", "arch")
def depends_recurse(pkgname, arch):
def depends_recurse(pkgname: str, arch: Arch) -> list[str]:
"""Recursively resolve all of the package's dependencies.
:param pkgname: name of the package (e.g. "device-samsung-i9100")
@@ -158,19 +150,19 @@ def depends_recurse(pkgname, arch):
package = get(pkgname_queue, arch)
# Add its depends to the queue
for depend in package["depends"]:
for depend in package.depends:
if depend not in ret:
queue += [depend]
# Add the pkgname (not possible subpkgname) to ret
if package["pkgname"] not in ret:
ret += [package["pkgname"]]
if package.pkgname not in ret:
ret += [package.pkgname]
ret.sort()
return ret
def check_arch(pkgname, arch, binary=True):
def check_arch(pkgname: str, arch: Arch, binary: bool = True) -> bool:
"""Check if a package be built for a certain architecture, or is there a binary package for it.
:param pkgname: name of the package
@@ -181,7 +173,7 @@ def check_arch(pkgname, arch, binary=True):
:returns: True when the package can be built, or there is a binary package, False otherwise
"""
if binary:
arches = get(pkgname, arch)["arch"]
arches = get(pkgname, arch).arch
else:
arches = pmb.helpers.pmaports.get(pkgname, must_exist=True)["arch"]
return pmb.helpers.pmaports.check_arches(arches, arch)
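The PackageMetadata wrapper used above is imported from pmb.core.package_metadata, a file not shown in this diff. A rough sketch, assuming a dataclass whose fields mirror the dict that get() used to build (see the removed lines above); from_apkindex_block() is a guess at the intended conversion, and the real from_pmaport() presumably also resolves depends via pmb.build._package.get_depends(), as the removed code did:

# Assumed layout, not taken from the MR; the field set mirrors the removed dict in get().
from dataclasses import dataclass
from typing import Any

from pmb.core.apkindex_block import ApkindexBlock


@dataclass
class PackageMetadata:
    arch: list[str]  # always a list here, unlike ApkindexBlock.arch
    depends: list[str]
    pkgname: str
    provides: list[str]
    version: str

    @staticmethod
    def from_apkindex_block(block: ApkindexBlock) -> "PackageMetadata":
        # An APKINDEX block describes exactly one architecture
        return PackageMetadata(
            arch=[str(block.arch)],
            depends=block.depends or [],
            pkgname=block.pkgname,
            provides=block.provides,
            version=block.version,
        )

    @staticmethod
    def from_pmaport(pmaport: dict[str, Any]) -> "PackageMetadata":
        # Dependency resolution is omitted in this sketch
        return PackageMetadata(
            arch=pmaport["arch"],
            depends=pmaport.get("depends", []),
            pkgname=pmaport["pkgname"],
            provides=pmaport["provides"],
            version=pmaport["pkgver"] + "-r" + pmaport["pkgrel"],
        )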

@@ -123,7 +123,7 @@ def auto_apkindex_package(arch, aport, apk, dry: bool = False) -> bool:
return False
def auto(dry=False) -> list[str]:
def auto(dry: bool = False) -> list[str]:
""":returns: list of aport names, where the pkgrel needed to be changed"""
ret = []
for arch in Arch.supported():
@@ -132,11 +132,19 @@ def auto(dry=False) -> list[str]:
logging.info(f"scan {path}")
index = pmb.parse.apkindex.parse(path, False)
for pkgname, apk in index.items():
origin = apk["origin"]
if isinstance(apk, dict):
raise AssertionError("pmb.parse.apkindex.parse returned an illegal structure")
origin = apk.origin
# Only increase once!
if origin in ret:
logging.verbose(f"{pkgname}: origin '{origin}' found again")
continue
if origin is None:
logging.warning(f"{pkgname}: skipping, is a virtual package")
continue
aport_path = pmb.helpers.pmaports.find_optional(origin)
if not aport_path:
logging.warning(f"{pkgname}: origin '{origin}' aport not found")

@@ -215,7 +215,7 @@ def find_optional(package: str) -> Path | None:
# The only caller with subpackages=False is ui.check_option()
@Cache("pkgname", subpackages=True)
def get_with_path(
pkgname, must_exist=True, subpackages=True, skip_extra_repos=False
pkgname: str, must_exist: bool = True, subpackages: bool = True, skip_extra_repos: bool = False
) -> tuple[Path | None, dict[str, Any] | None]:
"""Find and parse an APKBUILD file.

@@ -1,5 +1,8 @@
# Copyright 2023 Oliver Smith
# SPDX-License-Identifier: GPL-3.0-or-later
from typing import Any
from pmb.core.arch import Arch
from pmb.helpers import logging
import pmb.build
@@ -90,7 +93,7 @@ def get_relevant_packages(arch, pkgname=None, built=False):
return ret
def generate_output_format(arch, pkgnames):
def generate_output_format(arch: Arch, pkgnames: list[str]) -> list[dict[str, Any]]:
"""Generate the detailed output format.
:param arch: architecture
@@ -109,12 +112,16 @@ def generate_output_format(arch, pkgnames):
ret = []
for pkgname in pkgnames:
entry = pmb.helpers.package.get(pkgname, arch, True, try_other_arches=False)
if entry is None:
raise RuntimeError(f"Couldn't get package {pkgname} for arch {arch}")
ret += [
{
"pkgname": entry["pkgname"],
"pkgname": entry.pkgname,
"repo": pmb.helpers.pmaports.get_repo(pkgname),
"version": entry["version"],
"depends": entry["depends"],
"version": entry.version,
"depends": entry.depends,
}
]
return ret

@@ -372,7 +372,7 @@ def setup_keymap(config: Config):
def setup_timezone(chroot: Chroot, timezone: str):
# We don't care about the arch since it's built for all!
alpine_conf = pmb.helpers.package.get("alpine-conf", Arch.native())
version = alpine_conf["version"].split("-r")[0]
version = alpine_conf.version.split("-r")[0]
setup_tz_cmd = ["setup-timezone"]
# setup-timezone will, by default, copy the timezone to /etc/zoneinfo
@@ -700,7 +700,7 @@ def sanity_check_disk_size(args: PmbArgs):
def get_ondev_pkgver(args: PmbArgs):
arch = pmb.parse.deviceinfo().arch
package = pmb.helpers.package.get("postmarketos-ondev", arch)
return package["version"].split("-r")[0]
return package.version.split("-r")[0]
def sanity_check_ondev_version(args: PmbArgs):

@@ -1,8 +1,9 @@
# Copyright 2023 Oliver Smith
# SPDX-License-Identifier: GPL-3.0-or-later
import collections
from typing import Any
from typing import cast, overload, Any, Literal
from collections.abc import Sequence
from pmb.core.apkindex_block import ApkindexBlock
from pmb.core.arch import Arch
from pmb.core.context import get_context
from pmb.helpers import logging
@@ -27,7 +28,7 @@ apkindex_map = {
required_apkindex_keys = ["arch", "pkgname", "version"]
def parse_next_block(path: Path, lines: list[str]):
def parse_next_block(path: Path, lines: list[str]) -> ApkindexBlock | None:
"""Parse the next block in an APKINDEX.
:param path: to the APKINDEX.tar.gz
@@ -35,18 +36,7 @@ def parse_next_block(path: Path, lines: list[str]):
function. Wrapped into a list, so it can be modified
"by reference". Example: [5]
:param lines: all lines from the "APKINDEX" file inside the archive
:returns: dictionary with the following structure:
``{ "arch": "noarch", "depends": ["busybox-extras", "lddtree", ... ],
"origin": "postmarketos-mkinitfs",
"pkgname": "postmarketos-mkinitfs",
"provides": ["mkinitfs=0.0.1"],
"timestamp": "1500000000",
"version": "0.0.4-r10" }``
NOTE: "depends" is not set for packages without any dependencies, e.g. ``musl``.
NOTE: "timestamp" and "origin" are not set for virtual packages (#1273).
We use that information to skip these virtual packages in parse().
:returns: ApkindexBlock
:returns: None, when there are no more blocks
"""
# Parse until we hit an empty line or end of file
@@ -101,10 +91,42 @@ def parse_next_block(path: Path, lines: list[str]):
ret[key].append(value)
else:
ret[key] = []
return ret
return ApkindexBlock(
arch=Arch.from_str(ret["arch"]),
depends=ret["depends"],
origin=ret["origin"],
pkgname=ret["pkgname"],
provides=ret["provides"],
provider_priority=ret.get("provider_priority"),
timestamp=ret["timestamp"],
version=ret["version"],
)
def parse_add_block(ret, block, alias=None, multiple_providers=True):
@overload
def parse_add_block(
ret: dict[str, ApkindexBlock],
block: ApkindexBlock,
alias: str | None = ...,
multiple_providers: bool = ..., # FIXME: Type should be Literal[False], but mypy complains?
) -> None: ...
@overload
def parse_add_block(
ret: dict[str, dict[str, ApkindexBlock]],
block: ApkindexBlock,
alias: str | None = ...,
multiple_providers: Literal[True] = ...,
) -> None: ...
def parse_add_block(
ret: dict[str, ApkindexBlock] | dict[str, dict[str, ApkindexBlock]],
block: ApkindexBlock,
alias: str | None = None,
multiple_providers: bool = True,
) -> None:
"""Add one block to the return dictionary of parse().
:param ret: dictionary of all packages in the APKINDEX that is
@@ -118,33 +140,58 @@ def parse_add_block(ret, block, alias=None, multiple_providers=True):
not when parsing apk's installed packages DB.
"""
# Defaults
pkgname = block["pkgname"]
pkgname = block.pkgname
alias = alias or pkgname
# Get an existing block with the same alias
block_old = None
if multiple_providers and alias in ret and pkgname in ret[alias]:
block_old = ret[alias][pkgname]
elif not multiple_providers and alias in ret:
block_old = ret[alias]
if multiple_providers:
ret = cast(dict[str, dict[str, ApkindexBlock]], ret)
if alias in ret and pkgname in ret[alias]:
picked_aliases = ret[alias]
if not isinstance(picked_aliases, dict):
raise AssertionError
block_old = picked_aliases[pkgname]
elif not multiple_providers:
if alias in ret:
ret = cast(dict[str, ApkindexBlock], ret)
picked_alias = ret[alias]
if not isinstance(picked_alias, ApkindexBlock):
raise AssertionError
block_old = picked_alias
# Ignore the block, if the block we already have has a higher version
if block_old:
version_old = block_old["version"]
version_new = block["version"]
version_old = block_old.version
version_new = block.version
if pmb.parse.version.compare(version_old, version_new) == 1:
return
# Add it to the result set
if multiple_providers:
ret = cast(dict[str, dict[str, ApkindexBlock]], ret)
if alias not in ret:
ret[alias] = {}
ret[alias][pkgname] = block
picked_aliases = cast(dict[str, ApkindexBlock], ret[alias])
picked_aliases[pkgname] = block
else:
ret = cast(dict[str, ApkindexBlock], ret)
ret[alias] = block
def parse(path: Path, multiple_providers=True):
@overload
def parse(path: Path, multiple_providers: Literal[False] = ...) -> dict[str, ApkindexBlock]: ...
@overload
def parse(
path: Path, multiple_providers: Literal[True] = ...
) -> dict[str, dict[str, ApkindexBlock]]: ...
def parse(
path: Path, multiple_providers: bool = True
) -> dict[str, ApkindexBlock] | dict[str, dict[str, ApkindexBlock]]:
r"""Parse an APKINDEX.tar.gz file, and return its content as dictionary.
:param path: path to an APKINDEX.tar.gz file or apk package database
@@ -156,18 +203,18 @@ def parse(path: Path, multiple_providers=True):
:returns: (without multiple_providers)
Generic format:
``{ pkgname: block, ... }``
``{ pkgname: ApkindexBlock, ... }``
Example:
``{ "postmarketos-mkinitfs": block, "so:libGL.so.1": block, ...}``
``{ "postmarketos-mkinitfs": ApkindexBlock, "so:libGL.so.1": ApkindexBlock, ...}``
:returns: (with multiple_providers)
Generic format:
``{ provide: { pkgname: block, ... }, ... }``
``{ provide: { pkgname: ApkindexBlock, ... }, ... }``
Example:
``{ "postmarketos-mkinitfs": {"postmarketos-mkinitfs": block},"so:libGL.so.1": {"mesa-egl": block, "libhybris": block}, ...}``
``{ "postmarketos-mkinitfs": {"postmarketos-mkinitfs": ApkindexBlock},"so:libGL.so.1": {"mesa-egl": ApkindexBlock, "libhybris": ApkindexBlock}, ...}``
*NOTE:* ``block`` is the return value from ``parse_next_block()`` above.
@@ -208,7 +255,7 @@ def parse(path: Path, multiple_providers=True):
return {}
# Parse the whole APKINDEX file
ret: dict[str, Any] = collections.OrderedDict()
ret: dict[str, ApkindexBlock] = collections.OrderedDict()
if lines[-1] == "\n":
lines.pop() # Strip the trailing newline
while True:
@@ -217,14 +264,14 @@ def parse(path: Path, multiple_providers=True):
break
# Skip virtual packages
if "timestamp" not in block:
if block.timestamp is None:
logging.verbose(f"Skipped virtual package {block} in" f" file: {path}")
continue
# Add the next package and all aliases
parse_add_block(ret, block, None, multiple_providers)
if "provides" in block:
for alias in block["provides"]:
if block.provides is not None:
for alias in block.provides:
parse_add_block(ret, block, alias, multiple_providers)
# Update the cache
@@ -235,17 +282,14 @@ def parse(path: Path, multiple_providers=True):
return ret
def parse_blocks(path: Path):
def parse_blocks(path: Path) -> list[ApkindexBlock]:
"""
Read all blocks from an APKINDEX.tar.gz into a list.
:path: full path to the APKINDEX.tar.gz file.
:returns: all blocks in the APKINDEX, without restructuring them by
pkgname or removing duplicates with lower versions (use
parse() if you need these features). Structure:
``[block, block, ...]``
NOTE: "block" is the return value from parse_next_block() above.
parse() if you need these features).
"""
# Parse all lines
with tarfile.open(path, "r:gz") as tar:
@@ -253,7 +297,7 @@ def parse_blocks(path: Path):
lines = handle.read().decode().splitlines()
# Parse lines into blocks
ret: list[str] = []
ret: list[ApkindexBlock] = []
while True:
block = pmb.parse.apkindex.parse_next_block(path, lines)
if not block:
@@ -261,11 +305,11 @@ def parse_blocks(path: Path):
ret.append(block)
def cache_key(path: Path):
def cache_key(path: Path) -> str:
return str(path.relative_to(get_context().config.work))
def clear_cache(path: Path):
def clear_cache(path: Path) -> bool:
"""
Clear the APKINDEX parsing cache.
@@ -285,8 +329,12 @@ def clear_cache(path: Path):
def providers(
package, arch: Arch | None = None, must_exist=True, indexes=None, user_repository=True
):
package: str,
arch: Arch | None = None,
must_exist: bool = True,
indexes: list[Path] | None = None,
user_repository: bool = True,
) -> dict[str, ApkindexBlock]:
"""
Get all packages, which provide one package.
@@ -298,27 +346,31 @@ def providers(
(depending on arch)
:param user_repository: add path to index of locally built packages
:returns: list of parsed packages. Example for package="so:libGL.so.1":
``{"mesa-egl": block, "libhybris": block}``
block is the return value from parse_next_block() above.
``{"mesa-egl": ApkindexBlock, "libhybris": ApkindexBlock}``
"""
if not indexes:
indexes = pmb.helpers.repo.apkindex_files(arch, user_repository=user_repository)
package = pmb.helpers.package.remove_operators(package)
ret: dict[str, Any] = collections.OrderedDict()
ret: dict[str, ApkindexBlock] = collections.OrderedDict()
for path in indexes:
# Skip indexes not providing the package
index_packages = parse(path)
if package not in index_packages:
continue
indexed_package = index_packages[package]
if isinstance(indexed_package, ApkindexBlock):
raise AssertionError
# Iterate over found providers
for provider_pkgname, provider in index_packages[package].items():
for provider_pkgname, provider in indexed_package.items():
# Skip lower versions of providers we already found
version = provider["version"]
version = provider.version
if provider_pkgname in ret:
version_last = ret[provider_pkgname]["version"]
version_last = ret[provider_pkgname].version
if pmb.parse.version.compare(version, version_last) == -1:
logging.verbose(
f"{package}: provided by: {provider_pkgname}-{version}"
@@ -339,16 +391,18 @@ def providers(
return ret
def provider_highest_priority(providers, pkgname):
def provider_highest_priority(
providers: dict[str, ApkindexBlock], pkgname: str
) -> dict[str, ApkindexBlock]:
"""Get the provider(s) with the highest provider_priority and log a message.
:param providers: returned dict from providers(), must not be empty
:param pkgname: the package name we are interested in (for the log message)
"""
max_priority = 0
priority_providers: collections.OrderedDict[str, str] = collections.OrderedDict()
priority_providers: collections.OrderedDict[str, ApkindexBlock] = collections.OrderedDict()
for provider_name, provider in providers.items():
priority = int(provider.get("provider_priority", -1))
priority = int(-1 if provider.provider_priority is None else provider.provider_priority)
if priority > max_priority:
priority_providers.clear()
max_priority = priority
@@ -366,7 +420,7 @@ def provider_highest_priority(providers, pkgname):
return providers
def provider_shortest(providers, pkgname):
def provider_shortest(providers: dict[str, ApkindexBlock], pkgname: str) -> ApkindexBlock:
"""Get the provider with the shortest pkgname and log a message. In most cases
this should be sufficient, e.g. 'mesa-purism-gc7000-egl, mesa-egl' or
'gtk+2.0-maemo, gtk+2.0'.
@@ -384,7 +438,9 @@ def provider_shortest(providers, pkgname):
# This can't be cached because the APKINDEX can change during pmbootstrap build!
def package(package, arch: Arch | None = None, must_exist=True, indexes=None, user_repository=True):
def package(
package, arch: Arch | None = None, must_exist=True, indexes=None, user_repository=True
) -> ApkindexBlock | None:
"""
Get a specific package's data from an apkindex.
@@ -395,13 +451,7 @@ def package(package, arch: Arch | None = None, must_exist=True, indexes=None, us
:param indexes: list of APKINDEX.tar.gz paths, defaults to all index files
(depending on arch)
:param user_repository: add path to index of locally built packages
:returns: a dictionary with the following structure:
{ "arch": "noarch",
"depends": ["busybox-extras", "lddtree", ... ],
"pkgname": "postmarketos-mkinitfs",
"provides": ["mkinitfs=0.0.1"],
"version": "0.0.4-r10" }
or None when the package was not found.
:returns: ApkindexBlock or None when the package was not found.
"""
# Provider with the same package
package_providers = providers(

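The point of the Literal overloads on parse() is that mypy can pick the right return shape from the call site when multiple_providers is passed as a literal. A small sketch of the intended narrowing; the path is a placeholder for illustration:

from pathlib import Path

import pmb.parse.apkindex

path = Path("APKINDEX.tar.gz")  # placeholder path, not from the MR

# Literal[True] overload: provide -> {pkgname: ApkindexBlock}
by_provider = pmb.parse.apkindex.parse(path, multiple_providers=True)

# Literal[False] overload: pkgname -> ApkindexBlock (used e.g. for apk's installed DB)
by_pkgname = pmb.parse.apkindex.parse(path, multiple_providers=False)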
@@ -116,7 +116,11 @@ def sideload(
to_build = []
for pkgname in pkgnames:
data_repo = pmb.parse.apkindex.package(pkgname, arch, True)
apk_file = f"{pkgname}-{data_repo['version']}.apk"
if data_repo is None:
raise RuntimeError(f"Couldn't find APKINDEX data for {pkgname}!")
apk_file = f"{pkgname}-{data_repo.version}.apk"
host_path = context.config.work / "packages" / channel / arch / apk_file
if not host_path.is_file():
to_build.append(pkgname)