args.cache: remove (MR 2136)

Replace "args.cache" with a global variable in order to avoid passing "args"
to all functions. This is a step towards getting rid of the
args-passed-to-all-functions pattern in pmbootstrap.
Author: BO41, 2021-10-30 14:20:57 +02:00 (committed by Oliver Smith)
parent f30b1cc3f2
commit ce0f1c2d4a
23 changed files with 127 additions and 115 deletions
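
The pattern applied throughout the diff below, as a minimal sketch: a session
cache that used to live in the args namespace (args.cache) becomes a
module-level dict in pmb.helpers.other, filled once by init_cache(). The
lookup helpers and the "mycache" key mirror the example from the module
docstring added in this commit; expensive_operation() is a hypothetical
stand-in, not a real pmbootstrap function.

    import pmb.helpers.other

    # Old pattern: the cache travels inside args, so every helper needs
    # args just to memoize its result.
    def lookup_old(args, key):
        if key in args.cache["mycache"]:
            return args.cache["mycache"][key]
        ret = expensive_operation(args, key)  # hypothetical stand-in
        args.cache["mycache"][key] = ret
        return ret

    # New pattern: the cache is a module-level dict, created once via
    # pmb.helpers.other.init_cache(), so args is not needed for caching.
    def lookup_new(key):
        if key in pmb.helpers.other.cache["mycache"]:
            return pmb.helpers.other.cache["mycache"][key]
        ret = expensive_operation(key)  # hypothetical stand-in
        pmb.helpers.other.cache["mycache"][key] = ret
        return ret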


@@ -22,13 +22,13 @@ def skip_already_built(args, pkgname, arch):
     :returns: True when it can be skipped or False
     """
-    if arch not in args.cache["built"]:
-        args.cache["built"][arch] = []
-    if pkgname in args.cache["built"][arch]:
+    if arch not in pmb.helpers.other.cache["built"]:
+        pmb.helpers.other.cache["built"][arch] = []
+    if pkgname in pmb.helpers.other.cache["built"][arch]:
         logging.verbose(pkgname + ": already checked this session,"
                         " no need to build it or its dependencies")
         return True
-    args.cache["built"][arch].append(pkgname)
+    pmb.helpers.other.cache["built"][arch].append(pkgname)
     return False


@@ -25,7 +25,7 @@ def update_repository_list(args, suffix="native", check=False):
         True.
     """
     # Skip if we already did this
-    if suffix in args.cache["apk_repository_list_updated"]:
+    if suffix in pmb.helpers.other.cache["apk_repository_list_updated"]:
         return
     # Read old entries or create folder structure
@@ -43,7 +43,7 @@ def update_repository_list(args, suffix="native", check=False):
     # Up to date: Save cache, return
     lines_new = pmb.helpers.repo.urls(args)
     if lines_old == lines_new:
-        args.cache["apk_repository_list_updated"].append(suffix)
+        pmb.helpers.other.cache["apk_repository_list_updated"].append(suffix)
         return
     # Check phase: raise error when still outdated
@@ -67,7 +67,7 @@ def check_min_version(args, suffix="native"):
     """
     # Skip if we already did this
-    if suffix in args.cache["apk_min_version_checked"]:
+    if suffix in pmb.helpers.other.cache["apk_min_version_checked"]:
         return
     # Skip if apk is not installed yet
@@ -84,7 +84,7 @@ def check_min_version(args, suffix="native"):
                            " 'pmbootstrap zap -hc'")
     # Mark this suffix as checked
-    args.cache["apk_min_version_checked"].append(suffix)
+    pmb.helpers.other.cache["apk_min_version_checked"].append(suffix)
 def install_is_necessary(args, build, arch, package, packages_installed):


@@ -81,7 +81,7 @@ def zap(args, confirm=True, dry=False, pkgs_local=False, http=False,
     pmb.config.workdir.clean(args)
     # Chroots were zapped, so no repo lists exist anymore
-    args.cache["apk_repository_list_updated"].clear()
+    pmb.helpers.other.cache["apk_repository_list_updated"].clear()
     # Print amount of cleaned up space
     if dry:


@@ -84,8 +84,8 @@ def read_config(args):
     """ Read and verify pmaports.cfg. """
     # Try cache first
     cache_key = "pmb.config.pmaports.read_config"
-    if args.cache[cache_key]:
-        return args.cache[cache_key]
+    if pmb.helpers.other.cache[cache_key]:
+        return pmb.helpers.other.cache[cache_key]
     # Migration message
     if not os.path.exists(args.aports):
@@ -112,7 +112,7 @@ def read_config(args):
     ret["channel"] = pmb.helpers.pmaports.get_channel_new(ret["channel"])
     # Cache and return
-    args.cache[cache_key] = ret
+    pmb.helpers.other.cache[cache_key] = ret
     return ret
@@ -186,7 +186,7 @@ def switch_to_channel_branch(args, channel_new):
                            f"{args.aports}")
     # Invalidate all caches
-    pmb.helpers.args.add_cache(args)
+    pmb.helpers.other.init_cache()
     # Verify pmaports.cfg on new branch
     read_config(args)


@@ -31,22 +31,7 @@ import pmb.helpers.git
        args.device ("samsung-i9100", "qemu-amd64" etc.)
        args.work ("/home/user/.local/var/pmbootstrap", override with --work)
-    3. Cache
-       pmbootstrap uses this dictionary to save the result of expensive
-       results, so they work a lot faster the next time they are needed in the
-       same session. Usually the cache is written to and read from in the same
-       Python file, with code similar to the following:
-           def lookup(args, key):
-               if key in args.cache["mycache"]:
-                   return args.cache["mycache"][key]
-               ret = expensive_operation(args, key)
-               args.cache["mycache"][key] = ret
-               return ret
-       See add_cache() below for details.
-    4. Parsed configs
+    3. Parsed configs
        Similar to the cache above, specific config files get parsed and added
        to args, so they can get accessed quickly (without parsing the configs
        over and over). These configs are not only used in one specific
@@ -108,23 +93,6 @@ def replace_placeholders(args):
         setattr(args, key, os.path.expanduser(getattr(args, key)))
-def add_cache(args):
-    """ Add a caching dict (caches parsing of files etc. for the current
-        session) """
-    repo_update = {"404": [], "offline_msg_shown": False}
-    setattr(args, "cache", {"apkindex": {},
-                            "apkbuild": {},
-                            "apk_min_version_checked": [],
-                            "apk_repository_list_updated": [],
-                            "built": {},
-                            "find_aport": {},
-                            "pmb.helpers.package.depends_recurse": {},
-                            "pmb.helpers.package.get": {},
-                            "pmb.helpers.repo.update": repo_update,
-                            "pmb.helpers.git.parse_channels_cfg": {},
-                            "pmb.config.pmaports.read_config": None})
 def add_deviceinfo(args):
     """ Add and verify the deviceinfo (only after initialization) """
     setattr(args, "deviceinfo", pmb.parse.deviceinfo(args))
@@ -141,7 +109,7 @@ def init(args):
     fix_mirrors_postmarketos(args)
     pmb.config.merge_with_args(args)
     replace_placeholders(args)
-    add_cache(args)
+    pmb.helpers.other.init_cache()
     # Initialize logs (we could raise errors below)
     pmb.helpers.logging.init(args)


@@ -41,7 +41,7 @@ def replace_apkbuild(args, pkgname, key, new, in_quotes=False):
     replace(path, "\n" + line_old + "\n", "\n" + line_new + "\n")
     # Verify
-    del (args.cache["apkbuild"][path])
+    del (pmb.helpers.other.cache["apkbuild"][path])
     apkbuild = pmb.parse.apkbuild(args, path)
     if apkbuild[key] != str(new):
         raise RuntimeError("Failed to set '{}' for pmaport '{}'. Make sure"


@@ -109,8 +109,8 @@ def parse_channels_cfg(args):
               ...}} """
     # Cache during one pmbootstrap run
     cache_key = "pmb.helpers.git.parse_channels_cfg"
-    if args.cache[cache_key]:
-        return args.cache[cache_key]
+    if pmb.helpers.other.cache[cache_key]:
+        return pmb.helpers.other.cache[cache_key]
     # Read with configparser
     cfg = configparser.ConfigParser()
@@ -147,7 +147,7 @@ def parse_channels_cfg(args):
             value = cfg.get(channel, key)
             ret["channels"][channel_new][key] = value
-    args.cache[cache_key] = ret
+    pmb.helpers.other.cache[cache_key] = ret
     return ret


@@ -275,3 +275,37 @@ def validate_hostname(hostname):
                       " sign")
         return False
     return True
+"""
+pmbootstrap uses this dictionary to save the result of expensive
+results, so they work a lot faster the next time they are needed in the
+same session. Usually the cache is written to and read from in the same
+Python file, with code similar to the following:
+
+def lookup(key):
+    if key in pmb.helpers.other.cache["mycache"]:
+        return pmb.helpers.other.cache["mycache"][key]
+    ret = expensive_operation(args, key)
+    pmb.helpers.other.cache["mycache"][key] = ret
+    return ret
+"""
+cache = None
+
+
+def init_cache():
+    global cache
+    """ Add a caching dict (caches parsing of files etc. for the current
+        session) """
+    repo_update = {"404": [], "offline_msg_shown": False}
+    cache = {"apkindex": {},
+             "apkbuild": {},
+             "apk_min_version_checked": [],
+             "apk_repository_list_updated": [],
+             "built": {},
+             "find_aport": {},
+             "pmb.helpers.package.depends_recurse": {},
+             "pmb.helpers.package.get": {},
+             "pmb.helpers.repo.update": repo_update,
+             "pmb.helpers.git.parse_channels_cfg": {},
+             "pmb.config.pmaports.read_config": None}

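Two usage patterns that the rest of the diff relies on, sketched with lines
taken from the hunks in this commit: resetting the whole cache via
init_cache() (as switch_to_channel_branch() does after checking out another
pmaports branch), and clearing a single sub-cache (as several tests do before
re-parsing an APKBUILD that changed on disk).

    import pmb.helpers.other

    # Throw away all cached results for this session, e.g. after
    # switching the pmaports branch.
    pmb.helpers.other.init_cache()

    # Or reset only one sub-cache, e.g. to force re-parsing an APKBUILD
    # that was modified on disk.
    pmb.helpers.other.cache["apkbuild"] = {}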

@@ -33,10 +33,16 @@ def get(args, pkgname, arch, replace_subpkgnames=False, must_exist=True):
              * None if the package was not found """
     # Cached result
     cache_key = "pmb.helpers.package.get"
-    if (arch in args.cache[cache_key] and
-            pkgname in args.cache[cache_key][arch] and
-            replace_subpkgnames in args.cache[cache_key][arch][pkgname]):
-        return args.cache[cache_key][arch][pkgname][replace_subpkgnames]
+    if (
+        arch in pmb.helpers.other.cache[cache_key] and
+        pkgname in pmb.helpers.other.cache[cache_key][arch] and
+        replace_subpkgnames in pmb.helpers.other.cache[cache_key][arch][
+            pkgname
+        ]
+    ):
+        return pmb.helpers.other.cache[cache_key][arch][pkgname][
+            replace_subpkgnames
+        ]
     # Find in pmaports
     ret = None
@@ -96,11 +102,13 @@ def get(args, pkgname, arch, replace_subpkgnames=False, must_exist=True):
     # Save to cache and return
     if ret:
-        if arch not in args.cache[cache_key]:
-            args.cache[cache_key][arch] = {}
-        if pkgname not in args.cache[cache_key][arch]:
-            args.cache[cache_key][arch][pkgname] = {}
-        args.cache[cache_key][arch][pkgname][replace_subpkgnames] = ret
+        if arch not in pmb.helpers.other.cache[cache_key]:
+            pmb.helpers.other.cache[cache_key][arch] = {}
+        if pkgname not in pmb.helpers.other.cache[cache_key][arch]:
+            pmb.helpers.other.cache[cache_key][arch][pkgname] = {}
+        pmb.helpers.other.cache[cache_key][arch][pkgname][
+            replace_subpkgnames
+        ] = ret
         return ret
     # Could not find the package
@@ -119,9 +127,9 @@ def depends_recurse(args, pkgname, arch):
               "linux-samsung-i9100", ...] """
     # Cached result
     cache_key = "pmb.helpers.package.depends_recurse"
-    if (arch in args.cache[cache_key] and
-            pkgname in args.cache[cache_key][arch]):
-        return args.cache[cache_key][arch][pkgname]
+    if (arch in pmb.helpers.other.cache[cache_key] and
+            pkgname in pmb.helpers.other.cache[cache_key][arch]):
+        return pmb.helpers.other.cache[cache_key][arch][pkgname]
     # Build ret (by iterating over the queue)
     queue = [pkgname]
@@ -141,9 +149,9 @@ def depends_recurse(args, pkgname, arch):
     ret.sort()
     # Save to cache and return
-    if arch not in args.cache[cache_key]:
-        args.cache[cache_key][arch] = {}
-    args.cache[cache_key][arch][pkgname] = ret
+    if arch not in pmb.helpers.other.cache[cache_key]:
+        pmb.helpers.other.cache[cache_key][arch] = {}
+    pmb.helpers.other.cache[cache_key][arch][pkgname] = ret
     return ret

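For orientation, the nesting used by the pmb.helpers.package.get() cache
touched above: results are keyed by architecture, then package name, then the
replace_subpkgnames flag. A small sketch with illustrative values (the same
shape the tests later in this commit fill in by hand):

    import pmb.helpers.other

    # cache["pmb.helpers.package.get"][arch][pkgname][replace_subpkgnames]
    # holds the parsed package dict; values below are made up.
    pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = {
        "hello-world": {
            False: {"pkgname": "hello-world", "depends": [],
                    "version": "1-r2"},
        },
    }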

@@ -34,7 +34,7 @@ def package(args, pkgname, reason="", dry=False):
         pmb.helpers.file.replace(path, old, new)
     # Verify
-    del(args.cache["apkbuild"][path])
+    del(pmb.helpers.other.cache["apkbuild"][path])
     apkbuild = pmb.parse.apkbuild(args, path)
     if int(apkbuild["pkgrel"]) != pkgrel_new:
         raise RuntimeError("Failed to bump pkgrel for package '" + pkgname +


@@ -15,7 +15,7 @@ import pmb.parse
 def _find_apkbuilds(args):
     # Try to get a cached result first (we assume that the aports don't change
     # in one pmbootstrap call)
-    apkbuilds = args.cache.get("pmb.helpers.pmaports.apkbuilds")
+    apkbuilds = pmb.helpers.other.cache.get("pmb.helpers.pmaports.apkbuilds")
     if apkbuilds is not None:
         return apkbuilds
@@ -32,7 +32,7 @@ def _find_apkbuilds(args):
     apkbuilds = dict(sorted(apkbuilds.items()))
     # Save result in cache
-    args.cache["pmb.helpers.pmaports.apkbuilds"] = apkbuilds
+    pmb.helpers.other.cache["pmb.helpers.pmaports.apkbuilds"] = apkbuilds
     return apkbuilds
@@ -109,7 +109,7 @@ def _find_package_in_apkbuild(args, package, path):
     :param path: The path to the apkbuild
     :return: True if the APKBUILD contains or provides the package
     """
-    apkbuild = pmb.parse.apkbuild(args, path)
+    apkbuild = pmb.parse.apkbuild(path)
     # Subpackages
     if package in apkbuild["subpackages"]:
@@ -146,8 +146,8 @@ def find(args, package, must_exist=True):
     # Try to get a cached result first (we assume that the aports don't change
     # in one pmbootstrap call)
     ret = None
-    if package in args.cache["find_aport"]:
-        ret = args.cache["find_aport"][package]
+    if package in pmb.helpers.other.cache["find_aport"]:
+        ret = pmb.helpers.other.cache["find_aport"][package]
     else:
         # Sanity check
         if "*" in package:
@@ -182,7 +182,7 @@ def find(args, package, must_exist=True):
                            package)
     # Save result in cache
-    args.cache["find_aport"][package] = ret
+    pmb.helpers.other.cache["find_aport"][package] = ret
     return ret


@@ -127,9 +127,9 @@ def update(args, arch=None, force=False, existing_only=False):
     # Skip in offline mode, only show once
     cache_key = "pmb.helpers.repo.update"
     if args.offline:
-        if not args.cache[cache_key]["offline_msg_shown"]:
+        if not pmb.helpers.other.cache[cache_key]["offline_msg_shown"]:
             logging.info("NOTE: skipping package index update (offline mode)")
-            args.cache[cache_key]["offline_msg_shown"] = True
+            pmb.helpers.other.cache[cache_key]["offline_msg_shown"] = True
         return False
     # Architectures and retention time
@@ -151,7 +151,7 @@ def update(args, arch=None, force=False, existing_only=False):
         # Find update reason, possibly skip non-existing or known 404 files
         reason = None
-        if url_full in args.cache[cache_key]["404"]:
+        if url_full in pmb.helpers.other.cache[cache_key]["404"]:
             # We already attempted to download this file once in this
             # session
             continue
@@ -184,7 +184,7 @@ def update(args, arch=None, force=False, existing_only=False):
         temp = pmb.helpers.http.download(args, url, "APKINDEX", False,
                                          logging.DEBUG, True)
         if not temp:
-            args.cache[cache_key]["404"].append(url)
+            pmb.helpers.other.cache[cache_key]["404"].append(url)
             continue
         target_folder = os.path.dirname(target)
         if not os.path.exists(target_folder):


@@ -237,9 +237,9 @@ def sudo_timer_start(args):
        needed once.
     """
-    if "sudo_timer_active" in args.cache:
+    if "sudo_timer_active" in pmb.helpers.other.cache:
         return
-    args.cache["sudo_timer_active"] = True
+    pmb.helpers.other.cache["sudo_timer_active"] = True
     sudo_timer_iterate()


@@ -298,8 +298,8 @@ def apkbuild(args, path, check_pkgver=True, check_pkgname=True):
     """
     # Try to get a cached result first (we assume that the aports don't change
     # in one pmbootstrap call)
-    if path in args.cache["apkbuild"]:
-        return args.cache["apkbuild"][path]
+    if path in pmb.helpers.other.cache["apkbuild"]:
+        return pmb.helpers.other.cache["apkbuild"][path]
     # Read the file and check line endings
     lines = read_file(path)
@@ -328,7 +328,7 @@ def apkbuild(args, path, check_pkgver=True, check_pkgname=True):
                            f" APKBUILD: {path}")
     # Fill cache
-    args.cache["apkbuild"][path] = ret
+    pmb.helpers.other.cache["apkbuild"][path] = ret
     return ret


@@ -179,8 +179,8 @@ def parse(args, path, multiple_providers=True):
     # Try to get a cached result first
     lastmod = os.path.getmtime(path)
     cache_key = "multiple" if multiple_providers else "single"
-    if path in args.cache["apkindex"]:
-        cache = args.cache["apkindex"][path]
+    if path in pmb.helpers.other.cache["apkindex"]:
+        cache = pmb.helpers.other.cache["apkindex"][path]
         if cache["lastmod"] == lastmod:
             if cache_key in cache:
                 return cache[cache_key]
@@ -217,9 +217,9 @@ def parse(args, path, multiple_providers=True):
         parse_add_block(ret, block, alias, multiple_providers)
     # Update the cache
-    if path not in args.cache["apkindex"]:
-        args.cache["apkindex"][path] = {"lastmod": lastmod}
-    args.cache["apkindex"][path][cache_key] = ret
+    if path not in pmb.helpers.other.cache["apkindex"]:
+        pmb.helpers.other.cache["apkindex"][path] = {"lastmod": lastmod}
+    pmb.helpers.other.cache["apkindex"][path][cache_key] = ret
     return ret
@@ -257,12 +257,12 @@ def clear_cache(args, path):
     :returns: True on successful deletion, False otherwise
     """
     logging.verbose("Clear APKINDEX cache for: " + path)
-    if path in args.cache["apkindex"]:
-        del args.cache["apkindex"][path]
+    if path in pmb.helpers.other.cache["apkindex"]:
+        del pmb.helpers.other.cache["apkindex"][path]
         return True
     else:
         logging.verbose("Nothing to do, path was not in cache:" +
-                        str(args.cache["apkindex"].keys()))
+                        str(pmb.helpers.other.cache["apkindex"].keys()))
         return False


@@ -512,7 +512,7 @@ def package_completer(prefix, action, parser=None, parsed_args=None):
     args = parsed_args
     pmb.config.merge_with_args(args)
     pmb.helpers.args.replace_placeholders(args)
-    pmb.helpers.args.add_cache(args)
+    pmb.helpers.other.init_cache()
     packages = set(
         package for package in pmb.helpers.pmaports.get_list(args)
         if package.startswith(prefix))


@@ -82,7 +82,7 @@ def generate(args, monkeypatch, answers):
     remove_contributor_maintainer_lines(args, apkbuild_path_linux)
     # Parse the deviceinfo and apkbuilds
-    args.cache["apkbuild"] = {}
+    pmb.helpers.other.cache["apkbuild"] = {}
     apkbuild = pmb.parse.apkbuild(args, apkbuild_path)
     apkbuild_linux = pmb.parse.apkbuild(args, apkbuild_path_linux,
                                         check_pkgver=False)


@@ -23,8 +23,8 @@ def args(request, tmpdir):
     apkindex_path = str(tmpdir) + "/APKINDEX.tar.gz"
     open(apkindex_path, "a").close()
     lastmod = os.path.getmtime(apkindex_path)
-    args.cache["apkindex"][apkindex_path] = {"lastmod": lastmod,
-                                             "multiple": {}}
+    pmb.helpers.other.cache["apkindex"][apkindex_path] = {"lastmod": lastmod,
+                                                          "multiple": {}}
     return args
@@ -34,9 +34,9 @@ def cache_apkindex(args, version):
     for the "hello-world" package.
     :param version: full version string, includes pkgver and pkgrl (e.g. 1-r2)
     """
-    apkindex_path = list(args.cache["apkindex"].keys())[0]
-    providers = args.cache[
+    apkindex_path = list(pmb.helpers.other.cache["apkindex"].keys())[0]
+    providers = pmb.helpers.other.cache[
         "apkindex"][apkindex_path]["multiple"]["hello-world"]
     providers["hello-world"]["version"] = version
@@ -47,11 +47,11 @@ def test_build_is_necessary(args):
     apkbuild = pmb.parse.apkbuild(args, aport + "/APKBUILD")
     apkbuild["pkgver"] = "1"
     apkbuild["pkgrel"] = "2"
-    indexes = list(args.cache["apkindex"].keys())
+    indexes = list(pmb.helpers.other.cache["apkindex"].keys())
     apkindex_path = indexes[0]
     cache = {"hello-world": {"hello-world": {"pkgname": "hello-world",
                                              "version": "1-r2"}}}
-    args.cache["apkindex"][apkindex_path]["multiple"] = cache
+    pmb.helpers.other.cache["apkindex"][apkindex_path]["multiple"] = cache
     # Binary repo has a newer version
     cache_apkindex(args, "999-r1")
@@ -71,7 +71,7 @@ def test_build_is_necessary_no_binary_available(args):
     APKINDEX cache is set up to fake an empty APKINDEX, which means that the
     hello-world package has not been built yet.
     """
-    indexes = list(args.cache["apkindex"].keys())
+    indexes = list(pmb.helpers.other.cache["apkindex"].keys())
     aport = pmb.helpers.pmaports.find(args, "hello-world")
     apkbuild = pmb.parse.apkbuild(args, aport + "/APKBUILD")
     assert pmb.build.is_necessary(args, None, apkbuild, indexes) is True


@@ -58,9 +58,9 @@ def args_patched(monkeypatch, argv):
 def test_skip_already_built(args):
     func = pmb.build._package.skip_already_built
-    assert args.cache["built"] == {}
+    assert pmb.helpers.other.cache["built"] == {}
     assert func(args, "test-package", "armhf") is False
-    assert args.cache["built"] == {"armhf": ["test-package"]}
+    assert pmb.helpers.other.cache["built"] == {"armhf": ["test-package"]}
     assert func(args, "test-package", "armhf") is True
@@ -323,11 +323,11 @@ def test_package(args):
     assert pmb.build.package(args, "hello-world", force=True)
     # Package exists
-    args.cache["built"] = {}
+    pmb.helpers.other.cache["built"] = {}
     assert pmb.build.package(args, "hello-world") is None
     # Force building again
-    args.cache["built"] = {}
+    pmb.helpers.other.cache["built"] = {}
     assert pmb.build.package(args, "hello-world", force=True)
     # Build for another architecture
@@ -363,7 +363,7 @@ def test_build_depends_high_level(args, monkeypatch):
     # Remove hello-world
     pmb.helpers.run.root(args, ["rm", output_hello_outside])
     pmb.build.index_repo(args, pmb.config.arch_native)
-    args.cache["built"] = {}
+    pmb.helpers.other.cache["built"] = {}
     # Ask to build the wrapper. It should not build the wrapper (it exists, not
     # using force), but build/update its missing dependency "hello-world"


@@ -86,7 +86,7 @@ def test_helpers_package_depends_recurse(args):
              "b": {False: {"pkgname": "b", "depends": []}},
              "c": {False: {"pkgname": "c", "depends": ["d"]}},
              "d": {False: {"pkgname": "d", "depends": ["b"]}}}
-    args.cache["pmb.helpers.package.get"]["armhf"] = cache
+    pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = cache
     # Normal runs
     func = pmb.helpers.package.depends_recurse
@@ -94,7 +94,7 @@ def test_helpers_package_depends_recurse(args):
     assert func(args, "d", "armhf") == ["b", "d"]
     # Cached result
-    args.cache["pmb.helpers.package.get"]["armhf"] = {}
+    pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = {}
     assert func(args, "d", "armhf") == ["b", "d"]
@@ -103,7 +103,7 @@ def test_helpers_package_check_arch_package(args):
     # Put fake data into the pmb.helpers.package.get() cache
     func = pmb.helpers.package.check_arch
     cache = {"a": {False: {"arch": []}}}
-    args.cache["pmb.helpers.package.get"]["armhf"] = cache
+    pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = cache
     cache["a"][False]["arch"] = ["all !armhf"]
     assert func(args, "a", "armhf") is False


@@ -64,7 +64,7 @@ def test_newapkbuild(args, monkeypatch, tmpdir):
     monkeypatch.setattr(pmb.helpers.cli, "confirm", confirm_true)
     pkgdesc = "testdescription"
     func(args, "main", ["-d", pkgdesc, pkgname])
-    args.cache["apkbuild"] = {}
+    pmb.helpers.other.cache["apkbuild"] = {}
     apkbuild = pmb.parse.apkbuild(args, apkbuild_path)
     assert apkbuild["pkgname"] == pkgname
     assert apkbuild["pkgdesc"] == pkgdesc


@@ -187,7 +187,7 @@ def test_parse_cached(args, tmpdir):
     lastmod = os.path.getmtime(path)
     # Fill the cache
-    args.cache["apkindex"][path] = {
+    pmb.helpers.other.cache["apkindex"][path] = {
         "lastmod": lastmod,
         "multiple": "cached_result_multiple",
         "single": "cached_result_single",
@@ -199,12 +199,12 @@ def test_parse_cached(args, tmpdir):
     assert func(args, path, False) == "cached_result_single"
     # Make cache invalid
-    args.cache["apkindex"][path]["lastmod"] -= 10
+    pmb.helpers.other.cache["apkindex"][path]["lastmod"] -= 10
     assert func(args, path, True) == {}
     # Delete the cache (run twice for both code paths)
     assert pmb.parse.apkindex.clear_cache(args, path) is True
-    assert args.cache["apkindex"] == {}
+    assert pmb.helpers.other.cache["apkindex"] == {}
     assert pmb.parse.apkindex.clear_cache(args, path) is False
@@ -234,7 +234,7 @@ def test_parse(args):
                   'musl': block_musl,
                   'so:libc.musl-x86_64.so.1': block_musl}
     assert pmb.parse.apkindex.parse(args, path, False) == ret_single
-    assert args.cache["apkindex"][path]["single"] == ret_single
+    assert pmb.helpers.other.cache["apkindex"][path]["single"] == ret_single
     # Test with multiple_providers
     ret_multiple = {'cmd:curl': {"curl": block_curl},
@@ -242,7 +242,9 @@ def test_parse(args):
                     'musl': {"musl": block_musl},
                     'so:libc.musl-x86_64.so.1': {"musl": block_musl}}
     assert pmb.parse.apkindex.parse(args, path, True) == ret_multiple
-    assert args.cache["apkindex"][path]["multiple"] == ret_multiple
+    assert (
+        pmb.helpers.other.cache["apkindex"][path]["multiple"] == ret_multiple
+    )
 def test_parse_virtual(args):
@@ -260,7 +262,7 @@ def test_parse_virtual(args):
              'version': '2-r0'}
     ret = {"hello-world": block, "cmd:hello-world": block}
     assert pmb.parse.apkindex.parse(args, path, False) == ret
-    assert args.cache["apkindex"][path]["single"] == ret
+    assert pmb.helpers.other.cache["apkindex"][path]["single"] == ret
 def test_providers_invalid_package(args, tmpdir):


@@ -110,7 +110,7 @@ def verify_pkgrels(args, tmpdir, pkgrel_testlib, pkgrel_testapp,
     Verify the pkgrels of the three test APKBUILDs ("testlib", "testapp",
     "testsubpkg").
     """
-    args.cache["apkbuild"] = {}
+    pmb.helpers.other.cache["apkbuild"] = {}
     mapping = {"testlib": pkgrel_testlib,
                "testapp": pkgrel_testapp,
                "testsubpkg": pkgrel_testsubpkg}