args.cache: remove (MR 2136)
Replace "args.cache" with a global variable in order to avoid passing "args" to all functions. This is a step to get rid of this args-passed-to-all-functions pattern in pmbootstrap.
parent f30b1cc3f2
commit ce0f1c2d4a
23 changed files with 127 additions and 115 deletions
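To illustrate the pattern change at a glance, here is a minimal, self-contained sketch (the find_aport example and helper names below are illustrative stand-ins, not the actual pmbootstrap code): a cache dict hanging off "args" becomes a module-level dict, initialized once per session, that helpers can reach without taking "args" at all.

# Before: every helper needs "args" just to reach the session cache.
class Args:
    def __init__(self):
        self.cache = {"find_aport": {}}

def find_aport_old(args, package):
    if package in args.cache["find_aport"]:
        return args.cache["find_aport"][package]
    ret = f"/path/to/{package}"          # stand-in for the expensive lookup
    args.cache["find_aport"][package] = ret
    return ret

# After: the cache lives in one module (pmb.helpers.other in the real code),
# initialized once per session; helpers no longer need "args" for caching.
cache = None

def init_cache():
    global cache
    cache = {"find_aport": {}}

def find_aport_new(package):
    if package in cache["find_aport"]:
        return cache["find_aport"][package]
    ret = f"/path/to/{package}"          # stand-in for the expensive lookup
    cache["find_aport"][package] = ret
    return ret

if __name__ == "__main__":
    args = Args()
    print(find_aport_old(args, "hello-world"))
    init_cache()
    print(find_aport_new("hello-world"))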
@@ -22,13 +22,13 @@ def skip_already_built(args, pkgname, arch):
     :returns: True when it can be skipped or False
     """
-    if arch not in args.cache["built"]:
-        args.cache["built"][arch] = []
-    if pkgname in args.cache["built"][arch]:
+    if arch not in pmb.helpers.other.cache["built"]:
+        pmb.helpers.other.cache["built"][arch] = []
+    if pkgname in pmb.helpers.other.cache["built"][arch]:
         logging.verbose(pkgname + ": already checked this session,"
                         " no need to build it or its dependencies")
         return True
-    args.cache["built"][arch].append(pkgname)
+    pmb.helpers.other.cache["built"][arch].append(pkgname)
     return False

@@ -25,7 +25,7 @@ def update_repository_list(args, suffix="native", check=False):
                   True.
     """
     # Skip if we already did this
-    if suffix in args.cache["apk_repository_list_updated"]:
+    if suffix in pmb.helpers.other.cache["apk_repository_list_updated"]:
         return

     # Read old entries or create folder structure

@@ -43,7 +43,7 @@ def update_repository_list(args, suffix="native", check=False):
     # Up to date: Save cache, return
     lines_new = pmb.helpers.repo.urls(args)
     if lines_old == lines_new:
-        args.cache["apk_repository_list_updated"].append(suffix)
+        pmb.helpers.other.cache["apk_repository_list_updated"].append(suffix)
         return

     # Check phase: raise error when still outdated

@@ -67,7 +67,7 @@ def check_min_version(args, suffix="native"):
     """

     # Skip if we already did this
-    if suffix in args.cache["apk_min_version_checked"]:
+    if suffix in pmb.helpers.other.cache["apk_min_version_checked"]:
         return

     # Skip if apk is not installed yet

@@ -84,7 +84,7 @@ def check_min_version(args, suffix="native"):
                          " 'pmbootstrap zap -hc'")

     # Mark this suffix as checked
-    args.cache["apk_min_version_checked"].append(suffix)
+    pmb.helpers.other.cache["apk_min_version_checked"].append(suffix)


 def install_is_necessary(args, build, arch, package, packages_installed):

@@ -81,7 +81,7 @@ def zap(args, confirm=True, dry=False, pkgs_local=False, http=False,
     pmb.config.workdir.clean(args)

     # Chroots were zapped, so no repo lists exist anymore
-    args.cache["apk_repository_list_updated"].clear()
+    pmb.helpers.other.cache["apk_repository_list_updated"].clear()

     # Print amount of cleaned up space
     if dry:

@@ -84,8 +84,8 @@ def read_config(args):
     """ Read and verify pmaports.cfg. """
     # Try cache first
     cache_key = "pmb.config.pmaports.read_config"
-    if args.cache[cache_key]:
-        return args.cache[cache_key]
+    if pmb.helpers.other.cache[cache_key]:
+        return pmb.helpers.other.cache[cache_key]

     # Migration message
     if not os.path.exists(args.aports):

@@ -112,7 +112,7 @@ def read_config(args):
     ret["channel"] = pmb.helpers.pmaports.get_channel_new(ret["channel"])

     # Cache and return
-    args.cache[cache_key] = ret
+    pmb.helpers.other.cache[cache_key] = ret
     return ret

@@ -186,7 +186,7 @@ def switch_to_channel_branch(args, channel_new):
                          f"{args.aports}")

     # Invalidate all caches
-    pmb.helpers.args.add_cache(args)
+    pmb.helpers.other.init_cache()

     # Verify pmaports.cfg on new branch
     read_config(args)

@@ -31,22 +31,7 @@ import pmb.helpers.git
        args.device ("samsung-i9100", "qemu-amd64" etc.)
        args.work ("/home/user/.local/var/pmbootstrap", override with --work)

-    3. Cache
-       pmbootstrap uses this dictionary to save the result of expensive
-       results, so they work a lot faster the next time they are needed in the
-       same session. Usually the cache is written to and read from in the same
-       Python file, with code similar to the following:
-
-       def lookup(args, key):
-           if key in args.cache["mycache"]:
-               return args.cache["mycache"][key]
-           ret = expensive_operation(args, key)
-           args.cache["mycache"][key] = ret
-           return ret
-
-       See add_cache() below for details.
-
-    4. Parsed configs
+    3. Parsed configs
        Similar to the cache above, specific config files get parsed and added
        to args, so they can get accessed quickly (without parsing the configs
        over and over). These configs are not only used in one specific

@@ -108,23 +93,6 @@ def replace_placeholders(args):
             setattr(args, key, os.path.expanduser(getattr(args, key)))


-def add_cache(args):
-    """ Add a caching dict (caches parsing of files etc. for the current
-        session) """
-    repo_update = {"404": [], "offline_msg_shown": False}
-    setattr(args, "cache", {"apkindex": {},
-                            "apkbuild": {},
-                            "apk_min_version_checked": [],
-                            "apk_repository_list_updated": [],
-                            "built": {},
-                            "find_aport": {},
-                            "pmb.helpers.package.depends_recurse": {},
-                            "pmb.helpers.package.get": {},
-                            "pmb.helpers.repo.update": repo_update,
-                            "pmb.helpers.git.parse_channels_cfg": {},
-                            "pmb.config.pmaports.read_config": None})
-
-
 def add_deviceinfo(args):
     """ Add and verify the deviceinfo (only after initialization) """
     setattr(args, "deviceinfo", pmb.parse.deviceinfo(args))

@@ -141,7 +109,7 @@ def init(args):
     fix_mirrors_postmarketos(args)
     pmb.config.merge_with_args(args)
     replace_placeholders(args)
-    add_cache(args)
+    pmb.helpers.other.init_cache()

     # Initialize logs (we could raise errors below)
     pmb.helpers.logging.init(args)

@@ -41,7 +41,7 @@ def replace_apkbuild(args, pkgname, key, new, in_quotes=False):
     replace(path, "\n" + line_old + "\n", "\n" + line_new + "\n")

     # Verify
-    del (args.cache["apkbuild"][path])
+    del (pmb.helpers.other.cache["apkbuild"][path])
     apkbuild = pmb.parse.apkbuild(args, path)
     if apkbuild[key] != str(new):
         raise RuntimeError("Failed to set '{}' for pmaport '{}'. Make sure"

@@ -109,8 +109,8 @@ def parse_channels_cfg(args):
                 ...}} """
     # Cache during one pmbootstrap run
     cache_key = "pmb.helpers.git.parse_channels_cfg"
-    if args.cache[cache_key]:
-        return args.cache[cache_key]
+    if pmb.helpers.other.cache[cache_key]:
+        return pmb.helpers.other.cache[cache_key]

     # Read with configparser
     cfg = configparser.ConfigParser()

@@ -147,7 +147,7 @@ def parse_channels_cfg(args):
             value = cfg.get(channel, key)
             ret["channels"][channel_new][key] = value

-    args.cache[cache_key] = ret
+    pmb.helpers.other.cache[cache_key] = ret
     return ret

@@ -275,3 +275,37 @@ def validate_hostname(hostname):
                       " sign")
         return False
     return True
+
+
+"""
+pmbootstrap uses this dictionary to save the result of expensive
+results, so they work a lot faster the next time they are needed in the
+same session. Usually the cache is written to and read from in the same
+Python file, with code similar to the following:
+
+def lookup(key):
+    if key in pmb.helpers.other.cache["mycache"]:
+        return pmb.helpers.other.cache["mycache"][key]
+    ret = expensive_operation(args, key)
+    pmb.helpers.other.cache["mycache"][key] = ret
+    return ret
+"""
+cache = None
+
+
+def init_cache():
+    global cache
+    """ Add a caching dict (caches parsing of files etc. for the current
+        session) """
+    repo_update = {"404": [], "offline_msg_shown": False}
+    cache = {"apkindex": {},
+             "apkbuild": {},
+             "apk_min_version_checked": [],
+             "apk_repository_list_updated": [],
+             "built": {},
+             "find_aport": {},
+             "pmb.helpers.package.depends_recurse": {},
+             "pmb.helpers.package.get": {},
+             "pmb.helpers.repo.update": repo_update,
+             "pmb.helpers.git.parse_channels_cfg": {},
+             "pmb.config.pmaports.read_config": None}

@@ -33,10 +33,16 @@ def get(args, pkgname, arch, replace_subpkgnames=False, must_exist=True):
                  * None if the package was not found """
     # Cached result
     cache_key = "pmb.helpers.package.get"
-    if (arch in args.cache[cache_key] and
-            pkgname in args.cache[cache_key][arch] and
-            replace_subpkgnames in args.cache[cache_key][arch][pkgname]):
-        return args.cache[cache_key][arch][pkgname][replace_subpkgnames]
+    if (
+        arch in pmb.helpers.other.cache[cache_key] and
+        pkgname in pmb.helpers.other.cache[cache_key][arch] and
+        replace_subpkgnames in pmb.helpers.other.cache[cache_key][arch][
+            pkgname
+        ]
+    ):
+        return pmb.helpers.other.cache[cache_key][arch][pkgname][
+            replace_subpkgnames
+        ]

     # Find in pmaports
     ret = None

@@ -96,11 +102,13 @@ def get(args, pkgname, arch, replace_subpkgnames=False, must_exist=True):

     # Save to cache and return
     if ret:
-        if arch not in args.cache[cache_key]:
-            args.cache[cache_key][arch] = {}
-        if pkgname not in args.cache[cache_key][arch]:
-            args.cache[cache_key][arch][pkgname] = {}
-        args.cache[cache_key][arch][pkgname][replace_subpkgnames] = ret
+        if arch not in pmb.helpers.other.cache[cache_key]:
+            pmb.helpers.other.cache[cache_key][arch] = {}
+        if pkgname not in pmb.helpers.other.cache[cache_key][arch]:
+            pmb.helpers.other.cache[cache_key][arch][pkgname] = {}
+        pmb.helpers.other.cache[cache_key][arch][pkgname][
+            replace_subpkgnames
+        ] = ret
         return ret

     # Could not find the package

@@ -119,9 +127,9 @@ def depends_recurse(args, pkgname, arch):
                   "linux-samsung-i9100", ...] """
     # Cached result
     cache_key = "pmb.helpers.package.depends_recurse"
-    if (arch in args.cache[cache_key] and
-            pkgname in args.cache[cache_key][arch]):
-        return args.cache[cache_key][arch][pkgname]
+    if (arch in pmb.helpers.other.cache[cache_key] and
+            pkgname in pmb.helpers.other.cache[cache_key][arch]):
+        return pmb.helpers.other.cache[cache_key][arch][pkgname]

     # Build ret (by iterating over the queue)
     queue = [pkgname]

@@ -141,9 +149,9 @@ def depends_recurse(args, pkgname, arch):
     ret.sort()

     # Save to cache and return
-    if arch not in args.cache[cache_key]:
-        args.cache[cache_key][arch] = {}
-    args.cache[cache_key][arch][pkgname] = ret
+    if arch not in pmb.helpers.other.cache[cache_key]:
+        pmb.helpers.other.cache[cache_key][arch] = {}
+    pmb.helpers.other.cache[cache_key][arch][pkgname] = ret
     return ret

@@ -34,7 +34,7 @@ def package(args, pkgname, reason="", dry=False):
     pmb.helpers.file.replace(path, old, new)

     # Verify
-    del(args.cache["apkbuild"][path])
+    del(pmb.helpers.other.cache["apkbuild"][path])
     apkbuild = pmb.parse.apkbuild(args, path)
     if int(apkbuild["pkgrel"]) != pkgrel_new:
         raise RuntimeError("Failed to bump pkgrel for package '" + pkgname +

@@ -15,7 +15,7 @@ import pmb.parse
 def _find_apkbuilds(args):
     # Try to get a cached result first (we assume that the aports don't change
     # in one pmbootstrap call)
-    apkbuilds = args.cache.get("pmb.helpers.pmaports.apkbuilds")
+    apkbuilds = pmb.helpers.other.cache.get("pmb.helpers.pmaports.apkbuilds")
     if apkbuilds is not None:
         return apkbuilds

@@ -32,7 +32,7 @@ def _find_apkbuilds(args):
     apkbuilds = dict(sorted(apkbuilds.items()))

     # Save result in cache
-    args.cache["pmb.helpers.pmaports.apkbuilds"] = apkbuilds
+    pmb.helpers.other.cache["pmb.helpers.pmaports.apkbuilds"] = apkbuilds
     return apkbuilds

@@ -109,7 +109,7 @@ def _find_package_in_apkbuild(args, package, path):
     :param path: The path to the apkbuild
     :return: True if the APKBUILD contains or provides the package
     """
-    apkbuild = pmb.parse.apkbuild(args, path)
+    apkbuild = pmb.parse.apkbuild(path)

     # Subpackages
     if package in apkbuild["subpackages"]:

@@ -146,8 +146,8 @@ def find(args, package, must_exist=True):
     # Try to get a cached result first (we assume that the aports don't change
     # in one pmbootstrap call)
     ret = None
-    if package in args.cache["find_aport"]:
-        ret = args.cache["find_aport"][package]
+    if package in pmb.helpers.other.cache["find_aport"]:
+        ret = pmb.helpers.other.cache["find_aport"][package]
     else:
         # Sanity check
         if "*" in package:

@@ -182,7 +182,7 @@ def find(args, package, must_exist=True):
                                package)

     # Save result in cache
-    args.cache["find_aport"][package] = ret
+    pmb.helpers.other.cache["find_aport"][package] = ret
     return ret

@@ -127,9 +127,9 @@ def update(args, arch=None, force=False, existing_only=False):
     # Skip in offline mode, only show once
     cache_key = "pmb.helpers.repo.update"
     if args.offline:
-        if not args.cache[cache_key]["offline_msg_shown"]:
+        if not pmb.helpers.other.cache[cache_key]["offline_msg_shown"]:
             logging.info("NOTE: skipping package index update (offline mode)")
-            args.cache[cache_key]["offline_msg_shown"] = True
+            pmb.helpers.other.cache[cache_key]["offline_msg_shown"] = True
         return False

     # Architectures and retention time

@@ -151,7 +151,7 @@ def update(args, arch=None, force=False, existing_only=False):

         # Find update reason, possibly skip non-existing or known 404 files
         reason = None
-        if url_full in args.cache[cache_key]["404"]:
+        if url_full in pmb.helpers.other.cache[cache_key]["404"]:
             # We already attempted to download this file once in this
             # session
             continue

@@ -184,7 +184,7 @@ def update(args, arch=None, force=False, existing_only=False):
         temp = pmb.helpers.http.download(args, url, "APKINDEX", False,
                                          logging.DEBUG, True)
         if not temp:
-            args.cache[cache_key]["404"].append(url)
+            pmb.helpers.other.cache[cache_key]["404"].append(url)
             continue
         target_folder = os.path.dirname(target)
         if not os.path.exists(target_folder):

@@ -237,9 +237,9 @@ def sudo_timer_start(args):
        needed once.
     """

-    if "sudo_timer_active" in args.cache:
+    if "sudo_timer_active" in pmb.helpers.other.cache:
         return
-    args.cache["sudo_timer_active"] = True
+    pmb.helpers.other.cache["sudo_timer_active"] = True

     sudo_timer_iterate()

@@ -298,8 +298,8 @@ def apkbuild(args, path, check_pkgver=True, check_pkgname=True):
     """
     # Try to get a cached result first (we assume that the aports don't change
     # in one pmbootstrap call)
-    if path in args.cache["apkbuild"]:
-        return args.cache["apkbuild"][path]
+    if path in pmb.helpers.other.cache["apkbuild"]:
+        return pmb.helpers.other.cache["apkbuild"][path]

     # Read the file and check line endings
     lines = read_file(path)

@@ -328,7 +328,7 @@ def apkbuild(args, path, check_pkgver=True, check_pkgname=True):
                                f" APKBUILD: {path}")

     # Fill cache
-    args.cache["apkbuild"][path] = ret
+    pmb.helpers.other.cache["apkbuild"][path] = ret
     return ret

@@ -179,8 +179,8 @@ def parse(args, path, multiple_providers=True):
     # Try to get a cached result first
     lastmod = os.path.getmtime(path)
     cache_key = "multiple" if multiple_providers else "single"
-    if path in args.cache["apkindex"]:
-        cache = args.cache["apkindex"][path]
+    if path in pmb.helpers.other.cache["apkindex"]:
+        cache = pmb.helpers.other.cache["apkindex"][path]
         if cache["lastmod"] == lastmod:
             if cache_key in cache:
                 return cache[cache_key]

@@ -217,9 +217,9 @@ def parse(args, path, multiple_providers=True):
                 parse_add_block(ret, block, alias, multiple_providers)

     # Update the cache
-    if path not in args.cache["apkindex"]:
-        args.cache["apkindex"][path] = {"lastmod": lastmod}
-    args.cache["apkindex"][path][cache_key] = ret
+    if path not in pmb.helpers.other.cache["apkindex"]:
+        pmb.helpers.other.cache["apkindex"][path] = {"lastmod": lastmod}
+    pmb.helpers.other.cache["apkindex"][path][cache_key] = ret
     return ret

@@ -257,12 +257,12 @@ def clear_cache(args, path):
     :returns: True on successful deletion, False otherwise
     """
     logging.verbose("Clear APKINDEX cache for: " + path)
-    if path in args.cache["apkindex"]:
-        del args.cache["apkindex"][path]
+    if path in pmb.helpers.other.cache["apkindex"]:
+        del pmb.helpers.other.cache["apkindex"][path]
         return True
     else:
         logging.verbose("Nothing to do, path was not in cache:" +
-                        str(args.cache["apkindex"].keys()))
+                        str(pmb.helpers.other.cache["apkindex"].keys()))
         return False

@@ -512,7 +512,7 @@ def package_completer(prefix, action, parser=None, parsed_args=None):
     args = parsed_args
     pmb.config.merge_with_args(args)
     pmb.helpers.args.replace_placeholders(args)
-    pmb.helpers.args.add_cache(args)
+    pmb.helpers.other.init_cache()
     packages = set(
         package for package in pmb.helpers.pmaports.get_list(args)
         if package.startswith(prefix))

@@ -82,7 +82,7 @@ def generate(args, monkeypatch, answers):
     remove_contributor_maintainer_lines(args, apkbuild_path_linux)

     # Parse the deviceinfo and apkbuilds
-    args.cache["apkbuild"] = {}
+    pmb.helpers.other.cache["apkbuild"] = {}
     apkbuild = pmb.parse.apkbuild(args, apkbuild_path)
     apkbuild_linux = pmb.parse.apkbuild(args, apkbuild_path_linux,
                                         check_pkgver=False)

@@ -23,8 +23,8 @@ def args(request, tmpdir):
     apkindex_path = str(tmpdir) + "/APKINDEX.tar.gz"
     open(apkindex_path, "a").close()
     lastmod = os.path.getmtime(apkindex_path)
-    args.cache["apkindex"][apkindex_path] = {"lastmod": lastmod,
-                                             "multiple": {}}
+    pmb.helpers.other.cache["apkindex"][apkindex_path] = {"lastmod": lastmod,
+                                                          "multiple": {}}
     return args

@@ -34,9 +34,9 @@ def cache_apkindex(args, version):
        for the "hello-world" package.
     :param version: full version string, includes pkgver and pkgrl (e.g. 1-r2)
     """
-    apkindex_path = list(args.cache["apkindex"].keys())[0]
+    apkindex_path = list(pmb.helpers.other.cache["apkindex"].keys())[0]

-    providers = args.cache[
+    providers = pmb.helpers.other.cache[
         "apkindex"][apkindex_path]["multiple"]["hello-world"]
     providers["hello-world"]["version"] = version

@@ -47,11 +47,11 @@ def test_build_is_necessary(args):
     apkbuild = pmb.parse.apkbuild(args, aport + "/APKBUILD")
     apkbuild["pkgver"] = "1"
     apkbuild["pkgrel"] = "2"
-    indexes = list(args.cache["apkindex"].keys())
+    indexes = list(pmb.helpers.other.cache["apkindex"].keys())
     apkindex_path = indexes[0]
     cache = {"hello-world": {"hello-world": {"pkgname": "hello-world",
                                              "version": "1-r2"}}}
-    args.cache["apkindex"][apkindex_path]["multiple"] = cache
+    pmb.helpers.other.cache["apkindex"][apkindex_path]["multiple"] = cache

     # Binary repo has a newer version
     cache_apkindex(args, "999-r1")

@@ -71,7 +71,7 @@ def test_build_is_necessary_no_binary_available(args):
     APKINDEX cache is set up to fake an empty APKINDEX, which means that the
     hello-world package has not been built yet.
     """
-    indexes = list(args.cache["apkindex"].keys())
+    indexes = list(pmb.helpers.other.cache["apkindex"].keys())
     aport = pmb.helpers.pmaports.find(args, "hello-world")
     apkbuild = pmb.parse.apkbuild(args, aport + "/APKBUILD")
     assert pmb.build.is_necessary(args, None, apkbuild, indexes) is True

@@ -58,9 +58,9 @@ def args_patched(monkeypatch, argv):

 def test_skip_already_built(args):
     func = pmb.build._package.skip_already_built
-    assert args.cache["built"] == {}
+    assert pmb.helpers.other.cache["built"] == {}
     assert func(args, "test-package", "armhf") is False
-    assert args.cache["built"] == {"armhf": ["test-package"]}
+    assert pmb.helpers.other.cache["built"] == {"armhf": ["test-package"]}
     assert func(args, "test-package", "armhf") is True

@@ -323,11 +323,11 @@ def test_package(args):
     assert pmb.build.package(args, "hello-world", force=True)

     # Package exists
-    args.cache["built"] = {}
+    pmb.helpers.other.cache["built"] = {}
     assert pmb.build.package(args, "hello-world") is None

     # Force building again
-    args.cache["built"] = {}
+    pmb.helpers.other.cache["built"] = {}
     assert pmb.build.package(args, "hello-world", force=True)

     # Build for another architecture

@@ -363,7 +363,7 @@ def test_build_depends_high_level(args, monkeypatch):
     # Remove hello-world
     pmb.helpers.run.root(args, ["rm", output_hello_outside])
     pmb.build.index_repo(args, pmb.config.arch_native)
-    args.cache["built"] = {}
+    pmb.helpers.other.cache["built"] = {}

     # Ask to build the wrapper. It should not build the wrapper (it exists, not
     # using force), but build/update its missing dependency "hello-world"

@@ -86,7 +86,7 @@ def test_helpers_package_depends_recurse(args):
              "b": {False: {"pkgname": "b", "depends": []}},
              "c": {False: {"pkgname": "c", "depends": ["d"]}},
              "d": {False: {"pkgname": "d", "depends": ["b"]}}}
-    args.cache["pmb.helpers.package.get"]["armhf"] = cache
+    pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = cache

     # Normal runs
     func = pmb.helpers.package.depends_recurse

@@ -94,7 +94,7 @@ def test_helpers_package_depends_recurse(args):
     assert func(args, "d", "armhf") == ["b", "d"]

     # Cached result
-    args.cache["pmb.helpers.package.get"]["armhf"] = {}
+    pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = {}
     assert func(args, "d", "armhf") == ["b", "d"]

@@ -103,7 +103,7 @@ def test_helpers_package_check_arch_package(args):
     # Put fake data into the pmb.helpers.package.get() cache
     func = pmb.helpers.package.check_arch
     cache = {"a": {False: {"arch": []}}}
-    args.cache["pmb.helpers.package.get"]["armhf"] = cache
+    pmb.helpers.other.cache["pmb.helpers.package.get"]["armhf"] = cache

     cache["a"][False]["arch"] = ["all !armhf"]
     assert func(args, "a", "armhf") is False

@@ -64,7 +64,7 @@ def test_newapkbuild(args, monkeypatch, tmpdir):
     monkeypatch.setattr(pmb.helpers.cli, "confirm", confirm_true)
     pkgdesc = "testdescription"
     func(args, "main", ["-d", pkgdesc, pkgname])
-    args.cache["apkbuild"] = {}
+    pmb.helpers.other.cache["apkbuild"] = {}
     apkbuild = pmb.parse.apkbuild(args, apkbuild_path)
     assert apkbuild["pkgname"] == pkgname
     assert apkbuild["pkgdesc"] == pkgdesc

@@ -187,7 +187,7 @@ def test_parse_cached(args, tmpdir):
     lastmod = os.path.getmtime(path)

     # Fill the cache
-    args.cache["apkindex"][path] = {
+    pmb.helpers.other.cache["apkindex"][path] = {
         "lastmod": lastmod,
         "multiple": "cached_result_multiple",
         "single": "cached_result_single",

@@ -199,12 +199,12 @@ def test_parse_cached(args, tmpdir):
     assert func(args, path, False) == "cached_result_single"

     # Make cache invalid
-    args.cache["apkindex"][path]["lastmod"] -= 10
+    pmb.helpers.other.cache["apkindex"][path]["lastmod"] -= 10
     assert func(args, path, True) == {}

     # Delete the cache (run twice for both code paths)
     assert pmb.parse.apkindex.clear_cache(args, path) is True
-    assert args.cache["apkindex"] == {}
+    assert pmb.helpers.other.cache["apkindex"] == {}
     assert pmb.parse.apkindex.clear_cache(args, path) is False

@@ -234,7 +234,7 @@ def test_parse(args):
                   'musl': block_musl,
                   'so:libc.musl-x86_64.so.1': block_musl}
     assert pmb.parse.apkindex.parse(args, path, False) == ret_single
-    assert args.cache["apkindex"][path]["single"] == ret_single
+    assert pmb.helpers.other.cache["apkindex"][path]["single"] == ret_single

     # Test with multiple_providers
     ret_multiple = {'cmd:curl': {"curl": block_curl},

@@ -242,7 +242,9 @@ def test_parse(args):
                     'musl': {"musl": block_musl},
                     'so:libc.musl-x86_64.so.1': {"musl": block_musl}}
     assert pmb.parse.apkindex.parse(args, path, True) == ret_multiple
-    assert args.cache["apkindex"][path]["multiple"] == ret_multiple
+    assert (
+        pmb.helpers.other.cache["apkindex"][path]["multiple"] == ret_multiple
+    )


 def test_parse_virtual(args):

@@ -260,7 +262,7 @@ def test_parse_virtual(args):
               'version': '2-r0'}
     ret = {"hello-world": block, "cmd:hello-world": block}
     assert pmb.parse.apkindex.parse(args, path, False) == ret
-    assert args.cache["apkindex"][path]["single"] == ret
+    assert pmb.helpers.other.cache["apkindex"][path]["single"] == ret


 def test_providers_invalid_package(args, tmpdir):

@@ -110,7 +110,7 @@ def verify_pkgrels(args, tmpdir, pkgrel_testlib, pkgrel_testapp,
     Verify the pkgrels of the three test APKBUILDs ("testlib", "testapp",
     "testsubpkg").
     """
-    args.cache["apkbuild"] = {}
+    pmb.helpers.other.cache["apkbuild"] = {}
     mapping = {"testlib": pkgrel_testlib,
                "testapp": pkgrel_testapp,
                "testsubpkg": pkgrel_testsubpkg}
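One practical consequence of the module-level cache, inferred from the diff above rather than from project documentation: pmb.helpers.other.cache starts out as None and only becomes a usable dict after pmb.helpers.other.init_cache() has run, which pmb.helpers.args.init() (and the test fixtures) now take care of. A rough sketch of that ordering, assuming a pmbootstrap checkout is importable:

# Rough sketch of the initialization order implied by the diff above.
# Assumption: a pmbootstrap checkout is on the Python path; this is not
# official project documentation.
import pmb.helpers.other

print(pmb.helpers.other.cache)    # None before initialization
pmb.helpers.other.init_cache()    # normally done by pmb.helpers.args.init()
pmb.helpers.other.cache["built"]["x86_64"] = ["hello-world"]
print(pmb.helpers.other.cache["built"])    # {'x86_64': ['hello-world']}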