|
|
4f4af9 |
From b3542a96c6f77e5cc0b5217e586fcc56fde074d8 Mon Sep 17 00:00:00 2001
|
|
|
4f4af9 |
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
|
|
4f4af9 |
Date: Wed, 2 Dec 2020 15:27:13 +0100
|
|
|
4f4af9 |
Subject: [PATCH 1/2] Add api function: fill_sack_from_repos_in_cache
|
|
|
4f4af9 |
(RhBug:1865803)
|
|
|
4f4af9 |
|
|
|
4f4af9 |
= changelog =
|
|
|
4f4af9 |
msg: Add api function fill_sack_from_repos_in_cache to allow loading a repo cache with repomd and (solv file or primary xml) only
|
|
|
4f4af9 |
type: enhancement
|
|
|
4f4af9 |
resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1865803
|
|
|
4f4af9 |
---
|
|
|
4f4af9 |
dnf.spec | 2 +-
|
|
|
4f4af9 |
dnf/base.py | 62 +++++++++++++++++++++++++++++++++++++++++++++++++++++
|
|
|
4f4af9 |
2 files changed, 63 insertions(+), 1 deletion(-)
|
|
|
4f4af9 |
|
|
|
4f4af9 |
diff --git a/dnf/base.py b/dnf/base.py
|
|
|
4f4af9 |
index 075e74265a..a10b837340 100644
|
|
|
4f4af9 |
--- a/dnf/base.py
|
|
|
4f4af9 |
+++ b/dnf/base.py
|
|
|
4f4af9 |
@@ -425,6 +425,68 @@ def fill_sack(self, load_system_repo=True, load_available_repos=True):
|
|
|
4f4af9 |
self._plugins.run_sack()
|
|
|
4f4af9 |
return self._sack
|
|
|
4f4af9 |
|
|
|
4f4af9 |
+ def fill_sack_from_repos_in_cache(self, load_system_repo=True):
|
|
|
4f4af9 |
+ # :api
|
|
|
4f4af9 |
+ """
|
|
|
4f4af9 |
+ Prepare Sack and Goal objects and also load all enabled repositories from cache only,
|
|
|
4f4af9 |
+ it doesn't download anything and it doesn't check if metadata are expired.
|
|
|
4f4af9 |
+ If there is not enough metadata present (repomd.xml or both primary.xml and solv file
|
|
|
4f4af9 |
+ are missing) the given repo is either skipped or a RepoError exception is raised, depending
|
|
|
4f4af9 |
+ on skip_if_unavailable configuration.
|
|
|
4f4af9 |
+ """
|
|
|
4f4af9 |
+ timer = dnf.logging.Timer('sack setup')
|
|
|
4f4af9 |
+ self.reset(sack=True, goal=True)
|
|
|
4f4af9 |
+ self._sack = dnf.sack._build_sack(self)
|
|
|
4f4af9 |
+ lock = dnf.lock.build_metadata_lock(self.conf.cachedir, self.conf.exit_on_lock)
|
|
|
4f4af9 |
+ with lock:
|
|
|
4f4af9 |
+ if load_system_repo is not False:
|
|
|
4f4af9 |
+ try:
|
|
|
4f4af9 |
+ # FIXME: If build_cache=True, @System.solv is incorrectly updated in install-
|
|
|
4f4af9 |
+ # remove loops
|
|
|
4f4af9 |
+ self._sack.load_system_repo(build_cache=False)
|
|
|
4f4af9 |
+ except IOError:
|
|
|
4f4af9 |
+ if load_system_repo != 'auto':
|
|
|
4f4af9 |
+ raise
|
|
|
4f4af9 |
+
|
|
|
4f4af9 |
+ error_repos = []
|
|
|
4f4af9 |
+ # Iterate over installed GPG keys and check their validity using DNSSEC
|
|
|
4f4af9 |
+ if self.conf.gpgkey_dns_verification:
|
|
|
4f4af9 |
+ dnf.dnssec.RpmImportedKeys.check_imported_keys_validity()
|
|
|
4f4af9 |
+ for repo in self.repos.iter_enabled():
|
|
|
4f4af9 |
+ try:
|
|
|
4f4af9 |
+ repo._repo.loadCache(throwExcept=True, ignoreMissing=True)
|
|
|
4f4af9 |
+ mdload_flags = dict(load_filelists=True,
|
|
|
4f4af9 |
+ load_presto=repo.deltarpm,
|
|
|
4f4af9 |
+ load_updateinfo=True)
|
|
|
4f4af9 |
+ if repo.load_metadata_other:
|
|
|
4f4af9 |
+ mdload_flags["load_other"] = True
|
|
|
4f4af9 |
+
|
|
|
4f4af9 |
+ self._sack.load_repo(repo._repo, **mdload_flags)
|
|
|
4f4af9 |
+
|
|
|
4f4af9 |
+ logger.debug(_("%s: using metadata from %s."), repo.id,
|
|
|
4f4af9 |
+ dnf.util.normalize_time(
|
|
|
4f4af9 |
+ repo._repo.getMaxTimestamp()))
|
|
|
4f4af9 |
+ except (RuntimeError, hawkey.Exception) as e:
|
|
|
4f4af9 |
+ if repo.skip_if_unavailable is False:
|
|
|
4f4af9 |
+ raise dnf.exceptions.RepoError(
|
|
|
4f4af9 |
+ _("loading repo '{}' failure: {}").format(repo.id, e))
|
|
|
4f4af9 |
+ else:
|
|
|
4f4af9 |
+ logger.debug(_("loading repo '{}' failure: {}").format(repo.id, e))
|
|
|
4f4af9 |
+ error_repos.append(repo.id)
|
|
|
4f4af9 |
+ repo.disable()
|
|
|
4f4af9 |
+ if error_repos:
|
|
|
4f4af9 |
+ logger.warning(
|
|
|
4f4af9 |
+ _("Ignoring repositories: %s"), ', '.join(error_repos))
|
|
|
4f4af9 |
+
|
|
|
4f4af9 |
+ conf = self.conf
|
|
|
4f4af9 |
+ self._sack._configure(conf.installonlypkgs, conf.installonly_limit, conf.allow_vendor_change)
|
|
|
4f4af9 |
+ self._setup_excludes_includes()
|
|
|
4f4af9 |
+ timer()
|
|
|
4f4af9 |
+ self._goal = dnf.goal.Goal(self._sack)
|
|
|
4f4af9 |
+ self._goal.protect_running_kernel = conf.protect_running_kernel
|
|
|
4f4af9 |
+ self._plugins.run_sack()
|
|
|
4f4af9 |
+ return self._sack
|
|
|
4f4af9 |
+
|
|
|
4f4af9 |
def _finalize_base(self):
|
|
|
4f4af9 |
self._tempfile_persistor = dnf.persistor.TempfilePersistor(
|
|
|
4f4af9 |
self.conf.cachedir)
|
|
|
4f4af9 |
|
|
|
4f4af9 |
From 29ae53918d4a0b65a917aca2f8f43416fee15dfd Mon Sep 17 00:00:00 2001
|
|
|
4f4af9 |
From: =?UTF-8?q?Ale=C5=A1=20Mat=C4=9Bj?= <amatej@redhat.com>
|
|
|
4f4af9 |
Date: Thu, 10 Dec 2020 14:54:16 +0100
|
|
|
4f4af9 |
Subject: [PATCH 2/2] Add api test for new fill_sack_from_repos_in_cache
|
|
|
4f4af9 |
|
|
|
4f4af9 |
---
|
|
|
4f4af9 |
tests/api/test_dnf_base.py | 6 ++++++
|
|
|
4f4af9 |
1 file changed, 6 insertions(+)
|
|
|
4f4af9 |
|
|
|
4f4af9 |
diff --git a/tests/api/test_dnf_base.py b/tests/api/test_dnf_base.py
|
|
|
4f4af9 |
index 656bd22584..335981897e 100644
|
|
|
4f4af9 |
--- a/tests/api/test_dnf_base.py
|
|
|
4f4af9 |
+++ b/tests/api/test_dnf_base.py
|
|
|
4f4af9 |
@@ -107,6 +107,12 @@ def test_fill_sack(self):
|
|
|
4f4af9 |
|
|
|
4f4af9 |
self.base.fill_sack(load_system_repo=False, load_available_repos=False)
|
|
|
4f4af9 |
|
|
|
4f4af9 |
+ def test_fill_sack_from_repos_in_cache(self):
|
|
|
4f4af9 |
+ # Base.fill_sack_from_repos_in_cache(self, load_system_repo=True):
|
|
|
4f4af9 |
+ self.assertHasAttr(self.base, "fill_sack_from_repos_in_cache")
|
|
|
4f4af9 |
+
|
|
|
4f4af9 |
+ self.base.fill_sack_from_repos_in_cache(load_system_repo=False)
|
|
|
4f4af9 |
+
|
|
|
4f4af9 |
def test_close(self):
|
|
|
4f4af9 |
# Base.close()
|
|
|
4f4af9 |
self.assertHasAttr(self.base, "close")
|