Script 'mail_helper' called by obssrc

Hello community,

here is the log from the commit of package dnf for openSUSE:Factory
checked in at 2021-04-26 16:38:14
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/dnf (Old)
 and      /work/SRC/openSUSE:Factory/.dnf.new.12324 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "dnf" Mon Apr 26 16:38:14 2021 rev:27 rq:887046 version:4.7.0 Changes: -------- --- /work/SRC/openSUSE:Factory/dnf/dnf.changes 2021-03-16 15:44:27.329042090 +0100 +++ /work/SRC/openSUSE:Factory/.dnf.new.12324/dnf.changes 2021-04-26 16:38:17.609948527 +0200 @@ -1,0 +2,22 @@ +Tue Apr 13 13:56:53 UTC 2021 - Neal Gompa <ngomp...@gmail.com> + +- Update to version 4.7.0 + + Improve repo config path ordering to fix a comps merging issue (rh#1928181) + + Keep reason when package is removed (rh#1921063) + + Improve mechanism for application of security filters (rh#1918475) + + [doc] Add description for new API + + [API] Add new method for reset of security filters + + [doc] Improve documentation for Hotfix repositories + + [doc] fix: "makecache" command downloads only enabled repositories + + Use libdnf.utils.checksum_{check,value} + + [doc] Add info that maximum parallel downloads is 20 + + Increase loglevel in case of invalid config options + + [doc] installonly_limit documentation follows behavior + + Prevent traceback (catch ValueError) if pkg is from cmdline + + Add documentation for config option sslverifystatus (rh#1814383) + + Check for specific key string when verifing signatures (rh#1915990) + + Use rpmkeys binary to verify package signature (rh#1915990) + + Bugs fixed (rh#1916783) + + Preserve file mode during log rotation (rh#1910084) + +------------------------------------------------------------------- Old: ---- dnf-4.6.1.tar.gz New: ---- dnf-4.7.0.tar.gz ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Other differences: ------------------ ++++++ dnf.spec ++++++ --- /var/tmp/diff_new_pack.ICJr9K/_old 2021-04-26 16:38:18.105949316 +0200 +++ /var/tmp/diff_new_pack.ICJr9K/_new 2021-04-26 16:38:18.105949316 +0200 @@ -17,11 +17,11 @@ # -%global hawkey_version 0.59.0 +%global hawkey_version 0.61.1 %global libcomps_version 0.1.8 %global libmodulemd_version 2.9.3 %global rpm_version 4.14.0 -%global min_plugins_core 4.0.16 +%global min_plugins_core 4.0.20 %global min_plugins_extras 4.0.4 %global confdir %{_sysconfdir}/%{name} @@ -50,7 +50,7 @@ %bcond_with tests Name: dnf -Version: 4.6.1 +Version: 4.7.0 Release: 0 Summary: Package manager forked from Yum, using libsolv as a dependency resolver # For a breakdown of the licensing, see PACKAGE-LICENSING @@ -165,7 +165,6 @@ BuildRequires: python3-hawkey >= %{hawkey_version} BuildRequires: python3-libcomps >= %{libcomps_version} BuildRequires: python3-libmodulemd >= %{libmodulemd_version} -BuildRequires: python3-nose BuildRequires: python3-rpm >= %{rpm_version} Recommends: (python3-dbus-python if NetworkManager) Requires: deltarpm ++++++ dnf-4.6.1.tar.gz -> dnf-4.7.0.tar.gz ++++++ diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/.github/workflows/ci.yaml new/dnf-4.7.0/.github/workflows/ci.yaml --- old/dnf-4.6.1/.github/workflows/ci.yaml 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/.github/workflows/ci.yaml 1970-01-01 01:00:00.000000000 +0100 @@ -1,36 +0,0 @@ ---- -name: DNF CI -on: pull_request_target - -jobs: - integration-tests: - name: Integration Tests - runs-on: ubuntu-latest - container: - image: fedora:latest - options: --privileged - steps: - - name: Check out ci-dnf-stack - uses: actions/checkout@v2 - with: - repository: rpm-software-management/ci-dnf-stack - - - name: Setup CI - id: setup-ci - uses: ./.github/actions/setup-ci - with: - copr-user: ${{secrets.COPR_USER}} - copr-api-token: ${{secrets.COPR_API_TOKEN}} - setup-integration-testing: 
true - - - name: Check out sources - uses: actions/checkout@v2 - with: - path: gits/${{github.event.pull_request.head.repo.name}} - ref: ${{github.event.pull_request.head.sha}} # check out the PR HEAD - fetch-depth: 0 - - - name: Run CI - uses: ./.github/actions/run-ci - with: - copr-user: ${{steps.setup-ci.outputs.copr-user}} diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/.github/workflows/ci.yml new/dnf-4.7.0/.github/workflows/ci.yml --- old/dnf-4.6.1/.github/workflows/ci.yml 1970-01-01 01:00:00.000000000 +0100 +++ new/dnf-4.7.0/.github/workflows/ci.yml 2021-04-12 17:26:33.000000000 +0200 @@ -0,0 +1,73 @@ +--- +name: DNF CI +on: pull_request_target + +jobs: + copr-build: + name: Copr Build + runs-on: ubuntu-latest + container: + image: ghcr.io/rpm-software-management/dnf-ci-host + outputs: + package-urls: ${{steps.copr-build.outputs.package-urls}} + steps: + - name: Check out ci-dnf-stack + uses: actions/checkout@v2 + with: + repository: rpm-software-management/ci-dnf-stack + + - name: Setup CI + id: setup-ci + uses: ./.github/actions/setup-ci + with: + copr-user: ${{secrets.COPR_USER}} + copr-api-token: ${{secrets.COPR_API_TOKEN}} + + - name: Check out sources + uses: actions/checkout@v2 + with: + path: gits/${{github.event.repository.name}} + ref: ${{github.event.pull_request.head.sha}} # check out the PR HEAD + fetch-depth: 0 + + - name: Run Copr Build + id: copr-build + uses: ./.github/actions/copr-build + with: + copr-user: ${{steps.setup-ci.outputs.copr-user}} + + integration-tests: + name: DNF Integration Tests + needs: copr-build + runs-on: ubuntu-latest + container: + image: ghcr.io/rpm-software-management/dnf-ci-host + options: --privileged + steps: + - name: Check out ci-dnf-stack + uses: actions/checkout@v2 + with: + repository: rpm-software-management/ci-dnf-stack + + - name: Run Integration Tests + uses: ./.github/actions/integration-tests + with: + package-urls: ${{needs.copr-build.outputs.package-urls}} + + ansible-tests: + name: Ansible Tests + needs: copr-build + runs-on: ubuntu-latest + container: + image: ghcr.io/rpm-software-management/dnf-ci-host + options: --privileged + steps: + - name: Check out ci-dnf-stack + uses: actions/checkout@v2 + with: + repository: rpm-software-management/ci-dnf-stack + + - name: Run Ansible Tests + uses: ./.github/actions/ansible-tests + with: + package-urls: ${{needs.copr-build.outputs.package-urls}} diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/.packit.yaml new/dnf-4.7.0/.packit.yaml --- old/dnf-4.6.1/.packit.yaml 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/.packit.yaml 1970-01-01 01:00:00.000000000 +0100 @@ -1,9 +0,0 @@ ---- -jobs: -- job: copr_build - trigger: pull_request - metadata: - targets: - - fedora-all - - mageia-cauldron-x86_64 - - opensuse-tumbleweed-x86_64 diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/VERSION.cmake new/dnf-4.7.0/VERSION.cmake --- old/dnf-4.6.1/VERSION.cmake 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/VERSION.cmake 2021-04-12 17:26:33.000000000 +0200 @@ -1,4 +1,4 @@ -set (DEFAULT_DNF_VERSION "4.6.1") +set (DEFAULT_DNF_VERSION "4.7.0") if(DEFINED DNF_VERSION) if(NOT ${DEFAULT_DNF_VERSION} STREQUAL ${DNF_VERSION}) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/bors.toml new/dnf-4.7.0/bors.toml --- old/dnf-4.6.1/bors.toml 2021-03-02 15:05:07.000000000 +0100 +++ 
new/dnf-4.7.0/bors.toml 1970-01-01 01:00:00.000000000 +0100 @@ -1,2 +0,0 @@ -status = ["DNF CI"] -timeout_sec = 10800 # 3 hours diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/base.py new/dnf-4.7.0/dnf/base.py --- old/dnf-4.6.1/dnf/base.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/base.py 2021-04-12 17:26:33.000000000 +0200 @@ -111,7 +111,9 @@ self._trans_success = False self._trans_install_set = False self._tempfile_persistor = None + # self._update_security_filters is used by ansible self._update_security_filters = [] + self._update_security_options = {} self._allow_erasing = False self._repo_set_imported_gpg_keys = set() self.output = None @@ -684,6 +686,8 @@ ts = self.history.rpm all_obsoleted = set(goal.list_obsoleted()) installonly_query = self._get_installonly_query() + installonly_query.apply() + installonly_query_installed = installonly_query.installed().apply() for pkg in goal.list_downgrades(): obs = goal.obsoleted_by_package(pkg) @@ -715,11 +719,11 @@ reason = goal.get_reason(pkg) - if pkg in installonly_query: - reason_installonly = ts.get_reason(pkg) - if libdnf.transaction.TransactionItemReasonCompare( - reason, reason_installonly) == -1: - reason = reason_installonly + # Inherit reason if package is installonly an package with same name is installed + # Use the same logic like upgrade + # Upgrade of installonly packages result in install or install and remove step + if pkg in installonly_query and installonly_query_installed.filter(name=pkg.name): + reason = ts.get_reason(pkg) # inherit the best reason from obsoleted packages for obsolete in obs: @@ -756,10 +760,18 @@ ts.add_upgrade(pkg, upgraded, obs) self._ds_callback.pkg_added(upgraded, 'ud') self._ds_callback.pkg_added(pkg, 'u') - for pkg in goal.list_erasures(): - self._ds_callback.pkg_added(pkg, 'e') - reason = goal.get_reason(pkg) - ts.add_erase(pkg, reason) + erasures = goal.list_erasures() + if erasures: + remaining_installed_query = self.sack.query(flags=hawkey.IGNORE_EXCLUDES).installed() + remaining_installed_query.filterm(pkg__neq=erasures) + for pkg in erasures: + if remaining_installed_query.filter(name=pkg.name): + remaining = remaining_installed_query[0] + ts.get_reason(remaining) + self.history.set_reason(remaining, ts.get_reason(remaining)) + self._ds_callback.pkg_added(pkg, 'e') + reason = goal.get_reason(pkg) + ts.add_erase(pkg, reason) return ts def _query_matches_installed(self, q): @@ -1460,7 +1472,7 @@ elif pkgnarrow == 'upgrades': updates = query_for_repo(q).filterm(upgrades_by_priority=True) # reduce a query to security upgrades if they are specified - updates = self._merge_update_filters(updates) + updates = self._merge_update_filters(updates, upgrade=True) # reduce a query to latest packages updates = updates.latest().run() @@ -2049,12 +2061,18 @@ msg = _("File %s is a source package and cannot be updated, ignoring.") logger.info(msg, pkg.location) return 0 - - q = self.sack.query().installed().filterm(name=pkg.name, arch=[pkg.arch, "noarch"]) + installed = self.sack.query().installed().apply() + if self.conf.obsoletes and self.sack.query().filterm(pkg=[pkg]).filterm(obsoletes=installed): + sltr = dnf.selector.Selector(self.sack) + sltr.set(pkg=[pkg]) + self._goal.upgrade(select=sltr) + return 1 + q = installed.filter(name=pkg.name, arch=[pkg.arch, "noarch"]) if not q: msg = _("Package %s not installed, cannot update it.") logger.warning(msg, pkg.name) - raise dnf.exceptions.MarkingError(_('No match for argument: %s') % 
pkg.location, pkg.name) + raise dnf.exceptions.MarkingError( + _('No match for argument: %s') % pkg.location, pkg.name) elif sorted(q)[-1] < pkg: sltr = dnf.selector.Selector(self.sack) sltr.set(pkg=[pkg]) @@ -2068,20 +2086,21 @@ def _upgrade_internal(self, query, obsoletes, reponame, pkg_spec=None): installed_all = self.sack.query().installed() + # Add only relevant obsoletes to transaction => installed, upgrades q = query.intersection(self.sack.query().filterm(name=[pkg.name for pkg in installed_all])) installed_query = q.installed() if obsoletes: obsoletes = self.sack.query().available().filterm( obsoletes=installed_query.union(q.upgrades())) # add obsoletes into transaction - q = q.union(obsoletes) + query = query.union(obsoletes) if reponame is not None: - q.filterm(reponame=reponame) - q = self._merge_update_filters(q, pkg_spec=pkg_spec) - if q: - q = q.available().union(installed_query.latest()) + query.filterm(reponame=reponame) + query = self._merge_update_filters(query, pkg_spec=pkg_spec, upgrade=True) + if query: + query = query.union(installed_query.latest()) sltr = dnf.selector.Selector(self.sack) - sltr.set(pkg=q) + sltr.set(pkg=query) self._goal.upgrade(select=sltr) return 1 @@ -2096,18 +2115,21 @@ # wildcard shouldn't print not installed packages # only solution with nevra.name provide packages with same name if not wildcard and solution['nevra'] and solution['nevra'].name: - installed = self.sack.query().installed() pkg_name = solution['nevra'].name - installed.filterm(name=pkg_name).apply() - if not installed: - msg = _('Package %s available, but not installed.') - logger.warning(msg, pkg_name) - raise dnf.exceptions.PackagesNotInstalledError( - _('No match for argument: %s') % pkg_spec, pkg_spec) - if solution['nevra'].arch and not dnf.util.is_glob_pattern(solution['nevra'].arch): - if not installed.filter(arch=solution['nevra'].arch): - msg = _('Package %s available, but installed for different architecture.') - logger.warning(msg, "{}.{}".format(pkg_name, solution['nevra'].arch)) + installed = self.sack.query().installed().apply() + obsoleters = q.filter(obsoletes=installed) \ + if self.conf.obsoletes else self.sack.query().filterm(empty=True) + if not obsoleters: + installed_name = installed.filter(name=pkg_name).apply() + if not installed_name: + msg = _('Package %s available, but not installed.') + logger.warning(msg, pkg_name) + raise dnf.exceptions.PackagesNotInstalledError( + _('No match for argument: %s') % pkg_spec, pkg_spec) + elif solution['nevra'].arch and not dnf.util.is_glob_pattern(solution['nevra'].arch): + if not installed_name.filterm(arch=solution['nevra'].arch): + msg = _('Package %s available, but installed for different architecture.') + logger.warning(msg, "{}.{}".format(pkg_name, solution['nevra'].arch)) obsoletes = self.conf.obsoletes and solution['nevra'] \ and solution['nevra'].has_just_name() return self._upgrade_internal(q, obsoletes, reponame, pkg_spec) @@ -2285,36 +2307,89 @@ for prefix in ['/bin/', '/sbin/', '/usr/bin/', '/usr/sbin/']] return self.sack.query().filterm(file__glob=binary_provides), binary_provides - def _merge_update_filters(self, q, pkg_spec=None, warning=True): + def add_security_filters(self, cmp_type, types=(), advisory=(), bugzilla=(), cves=(), severity=()): + # :api + """ + It modifies results of install, upgrade, and distrosync methods according to provided + filters. + + :param cmp_type: only 'eq' or 'gte' allowed + :param types: List or tuple with strings. E.g. 
'bugfix', 'enhancement', 'newpackage', + 'security' + :param advisory: List or tuple with strings. E.g.Eg. FEDORA-2201-123 + :param bugzilla: List or tuple with strings. Include packages that fix a Bugzilla ID, + Eg. 123123. + :param cves: List or tuple with strings. Include packages that fix a CVE + (Common Vulnerabilities and Exposures) ID. Eg. CVE-2201-0123 + :param severity: List or tuple with strings. Includes packages that provide a fix + for an issue of the specified severity. + """ + cmp_dict = {'eq': '__eqg', 'gte': '__eqg__gt'} + if cmp_type not in cmp_dict: + raise ValueError("Unsupported value for `cmp_type`") + cmp = cmp_dict[cmp_type] + if types: + key = 'advisory_type' + cmp + self._update_security_options.setdefault(key, set()).update(types) + if advisory: + key = 'advisory' + cmp + self._update_security_options.setdefault(key, set()).update(advisory) + if bugzilla: + key = 'advisory_bug' + cmp + self._update_security_options.setdefault(key, set()).update(bugzilla) + if cves: + key = 'advisory_cve' + cmp + self._update_security_options.setdefault(key, set()).update(cves) + if severity: + key = 'advisory_severity' + cmp + self._update_security_options.setdefault(key, set()).update(severity) + + def reset_security_filters(self): + # :api + """ + Reset all security filters + """ + self._update_security_options = {} + + def _merge_update_filters(self, q, pkg_spec=None, warning=True, upgrade=False): """ Merge Queries in _update_filters and return intersection with q Query @param q: Query @return: Query """ - if not self._update_security_filters or not q: + if not (self._update_security_options or self._update_security_filters) or not q: return q - merged_queries = self._update_security_filters[0] - for query in self._update_security_filters[1:]: - merged_queries = merged_queries.union(query) + merged_queries = self.sack.query().filterm(empty=True) + if self._update_security_filters: + for query in self._update_security_filters: + merged_queries = merged_queries.union(query) + + self._update_security_filters = [merged_queries] + if self._update_security_options: + for filter_name, values in self._update_security_options.items(): + if upgrade: + filter_name = filter_name + '__upgrade' + kwargs = {filter_name: values} + merged_queries = merged_queries.union(q.filter(**kwargs)) - self._update_security_filters = [merged_queries] merged_queries = q.intersection(merged_queries) if not merged_queries: if warning: q = q.upgrades() count = len(q._name_dict().keys()) - if pkg_spec is None: - msg1 = _("No security updates needed, but {} update " - "available").format(count) - msg2 = _("No security updates needed, but {} updates " - "available").format(count) - logger.warning(P_(msg1, msg2, count)) - else: - msg1 = _('No security updates needed for "{}", but {} ' - 'update available').format(pkg_spec, count) - msg2 = _('No security updates needed for "{}", but {} ' - 'updates available').format(pkg_spec, count) - logger.warning(P_(msg1, msg2, count)) + if count > 0: + if pkg_spec is None: + msg1 = _("No security updates needed, but {} update " + "available").format(count) + msg2 = _("No security updates needed, but {} updates " + "available").format(count) + logger.warning(P_(msg1, msg2, count)) + else: + msg1 = _('No security updates needed for "{}", but {} ' + 'update available').format(pkg_spec, count) + msg2 = _('No security updates needed for "{}", but {} ' + 'updates available').format(pkg_spec, count) + logger.warning(P_(msg1, msg2, count)) return merged_queries def 
_get_key_for_package(self, po, askcb=None, fullaskcb=None): @@ -2489,7 +2564,7 @@ def _get_installonly_query(self, q=None): if q is None: - q = self._sack.query() + q = self._sack.query(flags=hawkey.IGNORE_EXCLUDES) installonly = q.filter(provides=self.conf.installonlypkgs) return installonly diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/cli/cli.py new/dnf-4.7.0/dnf/cli/cli.py --- old/dnf-4.6.1/dnf/cli/cli.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/cli/cli.py 2021-04-12 17:26:33.000000000 +0200 @@ -292,7 +292,7 @@ fn = lambda x, y, z: self.output.userconfirm() try: self._get_key_for_package(po, fn) - except dnf.exceptions.Error as e: + except (dnf.exceptions.Error, ValueError) as e: error_messages.append(str(e)) else: @@ -950,43 +950,30 @@ timer() return conf - def _populate_update_security_filter(self, opts, query, cmp_type='eq', all=None): + def _populate_update_security_filter(self, opts, cmp_type='eq', all=None): """ :param opts: - :param query: base package set for filters - :param cmp_type: string like "eq", "gt", "gte", "lt", "lte" + :param cmp_type: string supported "eq", "gte" :param all: :return: """ if (opts is None) and (all is None): return - filters = [] + types = [] + if opts.bugfix or all: - key = {'advisory_type__' + cmp_type: 'bugfix'} - filters.append(query.filter(**key)) + types.append('bugfix') if opts.enhancement or all: - key = {'advisory_type__' + cmp_type: 'enhancement'} - filters.append(query.filter(**key)) + types.append('enhancement') if opts.newpackage or all: - key = {'advisory_type__' + cmp_type: 'newpackage'} - filters.append(query.filter(**key)) + types.append('newpackage') if opts.security or all: - key = {'advisory_type__' + cmp_type: 'security'} - filters.append(query.filter(**key)) - if opts.advisory: - key = {'advisory__' + cmp_type: opts.advisory} - filters.append(query.filter(**key)) - if opts.bugzilla: - key = {'advisory_bug__' + cmp_type: opts.bugzilla} - filters.append(query.filter(**key)) - if opts.cves: - key = {'advisory_cve__' + cmp_type: opts.cves} - filters.append(query.filter(**key)) - if opts.severity: - key = {'advisory_severity__' + cmp_type: opts.severity} - filters.append(query.filter(**key)) - self.base._update_security_filters = filters + types.append('security') + + self.base.add_security_filters(cmp_type, types=types, advisory=opts.advisory, + bugzilla=opts.bugzilla, cves=opts.cves, + severity=opts.severity) def redirect_logger(self, stdout=None, stderr=None): # :api diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/cli/commands/__init__.py new/dnf-4.7.0/dnf/cli/commands/__init__.py --- old/dnf-4.6.1/dnf/cli/commands/__init__.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/cli/commands/__init__.py 2021-04-12 17:26:33.000000000 +0200 @@ -208,7 +208,7 @@ self.opts.packages_action = 'upgrades' def run(self): - self.cli._populate_update_security_filter(self.opts, self.base.sack.query()) + self.cli._populate_update_security_filter(self.opts) return self.base.output_packages('info', self.opts.packages_action, self.opts.packages) @@ -221,7 +221,7 @@ summary = _('list a package or groups of packages') def run(self): - self.cli._populate_update_security_filter(self.opts, self.base.sack.query()) + self.cli._populate_update_security_filter(self.opts) return self.base.output_packages('list', self.opts.packages_action, self.opts.packages) @@ -274,12 +274,7 @@ _checkEnabledRepo(self.base) def 
run(self): - query = self.base.sack.query().filterm(upgrades_by_priority=True) - if self.base.conf.obsoletes: - obsoleted = query.union(self.base.sack.query().installed()) - obsoletes = self.base.sack.query().filter(obsoletes=obsoleted) - query = query.union(obsoletes) - self.cli._populate_update_security_filter(self.opts, query, cmp_type="gte") + self.cli._populate_update_security_filter(self.opts, cmp_type="gte") found = self.base.check_updates(self.opts.packages, print_=True, changelogs=self.opts.changelogs) @@ -330,7 +325,7 @@ def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" - self.cli._populate_update_security_filter(self.opts, self.base.sack.query()) + self.cli._populate_update_security_filter(self.opts) self.base.output_packages('info', self.opts.pkg_specs_action, self.opts.pkg_specs, self.reponame) @@ -347,7 +342,7 @@ demands.root_user = True def run_on_repo(self): - self.cli._populate_update_security_filter(self.opts, self.base.sack.query()) + self.cli._populate_update_security_filter(self.opts) """Execute the command with respect to given arguments *cli_args*.""" _checkGPGKey(self.base, self.cli) @@ -382,7 +377,7 @@ def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" - self.cli._populate_update_security_filter(self.opts, self.base.sack.query()) + self.cli._populate_update_security_filter(self.opts) self.base.output_packages('list', self.opts.pkg_specs_action, self.opts.pkg_specs, self.reponame) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/cli/commands/install.py new/dnf-4.7.0/dnf/cli/commands/install.py --- old/dnf-4.6.1/dnf/cli/commands/install.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/cli/commands/install.py 2021-04-12 17:26:33.000000000 +0200 @@ -73,7 +73,7 @@ nevra_forms = self._get_nevra_forms_from_command() - self.cli._populate_update_security_filter(self.opts, self.base.sack.query()) + self.cli._populate_update_security_filter(self.opts) if self.opts.command == 'localinstall' and (self.opts.grp_specs or self.opts.pkg_specs): self._log_not_valid_rpm_file_paths(self.opts.grp_specs) if self.base.conf.strict: diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/cli/commands/repoquery.py new/dnf-4.7.0/dnf/cli/commands/repoquery.py --- old/dnf-4.6.1/dnf/cli/commands/repoquery.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/cli/commands/repoquery.py 2021-04-12 17:26:33.000000000 +0200 @@ -437,7 +437,7 @@ print(QUERY_TAGS) return - self.cli._populate_update_security_filter(self.opts, self.base.sack.query()) + self.cli._populate_update_security_filter(self.opts) q = self.base.sack.query( flags=hawkey.IGNORE_MODULAR_EXCLUDES diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/cli/commands/upgrade.py new/dnf-4.7.0/dnf/cli/commands/upgrade.py --- old/dnf-4.6.1/dnf/cli/commands/upgrade.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/cli/commands/upgrade.py 2021-04-12 17:26:33.000000000 +0200 @@ -64,13 +64,8 @@ self.skipped_grp_specs = None def run(self): - query = self.base.sack.query().upgrades() - if self.base.conf.obsoletes: - obsoleted = query.union(self.base.sack.query().installed()) - obsoletes = self.base.sack.query().filter(obsoletes=obsoleted) - query = query.union(obsoletes) cmp_type = "eq" if self.upgrade_minimal else "gte" - self.cli._populate_update_security_filter(self.opts, 
query, cmp_type=cmp_type, + self.cli._populate_update_security_filter(self.opts, cmp_type=cmp_type, all=self.all_security) if self.opts.filenames or self.opts.pkg_specs or self.opts.grp_specs: diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/conf/config.py new/dnf-4.7.0/dnf/conf/config.py --- old/dnf-4.6.1/dnf/conf/config.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/conf/config.py 2021-04-12 17:26:33.000000000 +0200 @@ -148,7 +148,7 @@ try: self._config.optBinds().at(name).newString(priority, value) except RuntimeError as e: - logger.debug(_('Unknown configuration value: %s=%s in %s; %s'), + logger.error(_('Invalid configuration value: %s=%s in %s; %s'), ucd(name), ucd(value), ucd(filename), str(e)) else: if name == 'arch' and hasattr(self, name): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/conf/read.py new/dnf-4.7.0/dnf/conf/read.py --- old/dnf-4.6.1/dnf/conf/read.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/conf/read.py 2021-04-12 17:26:33.000000000 +0200 @@ -27,6 +27,7 @@ import dnf.repo import glob import logging +import os logger = logging.getLogger('dnf') @@ -42,8 +43,16 @@ yield r # read .repo files from directories specified by conf.reposdir - for repofn in (repofn for reposdir in self.conf.reposdir - for repofn in sorted(glob.glob('{}/*.repo'.format(reposdir)))): + repo_configs = [] + for reposdir in self.conf.reposdir: + for path in glob.glob(os.path.join(reposdir, "*.repo")): + repo_configs.append(path) + + # remove .conf suffix before calling the sort function + # also split the path so the separators are not treated as ordinary characters + repo_configs.sort(key=lambda x: dnf.util.split_path(x[:-5])) + + for repofn in repo_configs: try: for r in self._get_repos(repofn): yield r diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/db/group.py new/dnf-4.7.0/dnf/db/group.py --- old/dnf-4.6.1/dnf/db/group.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/db/group.py 2021-04-12 17:26:33.000000000 +0200 @@ -46,14 +46,21 @@ def _get_obj_id(self, obj): raise NotImplementedError + def _add_to_history(self, item, action): + ti = self.history.swdb.addItem(item, "", action, libdnf.transaction.TransactionItemReason_USER) + ti.setState(libdnf.transaction.TransactionItemState_DONE) + def install(self, obj): self._installed[self._get_obj_id(obj)] = obj + self._add_to_history(obj, libdnf.transaction.TransactionItemAction_INSTALL) def remove(self, obj): self._removed[self._get_obj_id(obj)] = obj + self._add_to_history(obj, libdnf.transaction.TransactionItemAction_REMOVE) def upgrade(self, obj): self._upgraded[self._get_obj_id(obj)] = obj + self._add_to_history(obj, libdnf.transaction.TransactionItemAction_UPGRADE) def new(self, obj_id, name, translated_name, pkg_types): raise NotImplementedError @@ -265,7 +272,8 @@ self.add_remove(old, reason) def add_install(self, new, obsoleted=None, reason=None): - reason = reason or libdnf.transaction.TransactionItemReason_USER + if reason is None: + reason = libdnf.transaction.TransactionItemReason_USER ti_new = self.new(new, libdnf.transaction.TransactionItemAction_INSTALL, reason) self._add_obsoleted(obsoleted, replaced_by=ti_new) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/db/history.py new/dnf-4.7.0/dnf/db/history.py --- old/dnf-4.6.1/dnf/db/history.py 2021-03-02 
15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/db/history.py 2021-04-12 17:26:33.000000000 +0200 @@ -389,8 +389,6 @@ rpm_item = self.rpm._pkg_to_swdb_rpm_item(pkg) repoid = self.repo(pkg) action = libdnf.transaction.TransactionItemAction_REASON_CHANGE - reason = reason - replaced_by = None ti = self.swdb.addItem(rpm_item, repoid, action, reason) ti.setState(libdnf.transaction.TransactionItemState_DONE) return ti @@ -426,67 +424,6 @@ except: pass - ''' - for pkg in using_pkgs: - pid = self.pkg2pid(pkg) - self.swdb.trans_with(tid, pid) - ''' - - # add RPMs to the transaction - # TODO: _populate_rpm_ts() ? - - if self.group: - for group_id, group_item in sorted(self.group._installed.items()): - repoid = "" - action = libdnf.transaction.TransactionItemAction_INSTALL - reason = libdnf.transaction.TransactionItemReason_USER - replaced_by = None - ti = self.swdb.addItem(group_item, repoid, action, reason) - ti.setState(libdnf.transaction.TransactionItemState_DONE) - - for group_id, group_item in sorted(self.group._upgraded.items()): - repoid = "" - action = libdnf.transaction.TransactionItemAction_UPGRADE - reason = libdnf.transaction.TransactionItemReason_USER - replaced_by = None - ti = self.swdb.addItem(group_item, repoid, action, reason) - ti.setState(libdnf.transaction.TransactionItemState_DONE) - - for group_id, group_item in sorted(self.group._removed.items()): - repoid = "" - action = libdnf.transaction.TransactionItemAction_REMOVE - reason = libdnf.transaction.TransactionItemReason_USER - replaced_by = None - ti = self.swdb.addItem(group_item, repoid, action, reason) - ti.setState(libdnf.transaction.TransactionItemState_DONE) - - if self.env: - for env_id, env_item in sorted(self.env._installed.items()): - repoid = "" - action = libdnf.transaction.TransactionItemAction_INSTALL - reason = libdnf.transaction.TransactionItemReason_USER - replaced_by = None - ti = self.swdb.addItem(env_item, repoid, action, reason) - ti.setState(libdnf.transaction.TransactionItemState_DONE) - - for env_id, env_item in sorted(self.env._upgraded.items()): - repoid = "" - action = libdnf.transaction.TransactionItemAction_UPGRADE - reason = libdnf.transaction.TransactionItemReason_USER - replaced_by = None - ti = self.swdb.addItem(env_item, repoid, action, reason) - ti.setState(libdnf.transaction.TransactionItemState_DONE) - - for env_id, env_item in sorted(self.env._removed.items()): - repoid = "" - action = libdnf.transaction.TransactionItemAction_REMOVE - reason = libdnf.transaction.TransactionItemReason_USER - replaced_by = None - ti = self.swdb.addItem(env_item, repoid, action, reason) - ti.setState(libdnf.transaction.TransactionItemState_DONE) - - - # save when everything is in memory tid = self.swdb.beginTransaction( int(calendar.timegm(time.gmtime())), str(rpmdb_version), diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/logging.py new/dnf-4.7.0/dnf/logging.py --- old/dnf-4.6.1/dnf/logging.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/logging.py 2021-04-12 17:26:33.000000000 +0200 @@ -125,7 +125,10 @@ try: if self.shouldRollover(record): with self.rotate_lock: + # Do rollover while preserving the mode of the new log file + mode = os.stat(self.baseFilename).st_mode self.doRollover() + os.chmod(self.baseFilename, mode) logging.FileHandler.emit(self, record) return except (dnf.exceptions.ProcessLockError, dnf.exceptions.ThreadLockError): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' 
old/dnf-4.6.1/dnf/module/module_base.py new/dnf-4.7.0/dnf/module/module_base.py --- old/dnf-4.6.1/dnf/module/module_base.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/module/module_base.py 2021-04-12 17:26:33.000000000 +0200 @@ -420,7 +420,7 @@ streams_str = "', '".join( sorted(streamDict.keys(), key=functools.cmp_to_key(self.base.sack.evr_cmp))) msg = _("Argument '{argument}' matches {stream_count} streams ('{streams}') of " - "module '{module}', but non of the streams are enabled or " + "module '{module}', but none of the streams are enabled or " "default").format( argument=spec, stream_count=len(streamDict), streams=streams_str, module=moduleName) @@ -579,6 +579,9 @@ else ", " return profiles_str[:-2] + def _summary_report_formatter(self, summary): + return summary.strip().replace("\n", " ") + def _module_strs_formatter(self, modulePackage, markActive=False): default_str = "" enabled_str = "" @@ -761,7 +764,8 @@ column_stream).setData( modulePackage.getStream() + default_str + enabled_str + disabled_str) line.getColumnCell(column_profiles).setData(profiles_str) - line.getColumnCell(column_info).setData(modulePackage.getSummary()) + summary_str = self._summary_report_formatter(modulePackage.getSummary()) + line.getColumnCell(column_info).setData(summary_str) return table diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/package.py new/dnf-4.7.0/dnf/package.py --- old/dnf-4.6.1/dnf/package.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/package.py 2021-04-12 17:26:33.000000000 +0200 @@ -30,6 +30,8 @@ import dnf.rpm import dnf.yum.misc import hawkey +import libdnf.error +import libdnf.utils import logging import os import rpm @@ -56,7 +58,10 @@ return self._priv_chksum if self._from_cmdline: chksum_type = dnf.yum.misc.get_default_chksum_type() - chksum_val = dnf.yum.misc.checksum(chksum_type, self.location) + try: + chksum_val = libdnf.utils.checksum_value(chksum_type, self.location) + except libdnf.error.Error as e: + raise dnf.exceptions.MiscError(str(e)) return (hawkey.chksum_type(chksum_type), binascii.unhexlify(chksum_val)) return super(Package, self).chksum @@ -330,10 +335,7 @@ if self._from_cmdline: return True # local package always verifies against itself (chksum_type, chksum) = self.returnIdSum() - real_sum = dnf.yum.misc.checksum(chksum_type, self.localPkg(), - datasize=self._size) - if real_sum != chksum: - logger.debug(_('%s: %s check failed: %s vs %s'), - self, chksum_type, real_sum, chksum) - return False - return True + try: + return libdnf.utils.checksum_check(chksum_type, self.localPkg(), chksum) + except libdnf.error.Error as e: + raise dnf.exceptions.MiscError(str(e)) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/rpm/miscutils.py new/dnf-4.7.0/dnf/rpm/miscutils.py --- old/dnf-4.6.1/dnf/rpm/miscutils.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/rpm/miscutils.py 2021-04-12 17:26:33.000000000 +0200 @@ -18,10 +18,40 @@ import rpm import os +import subprocess +import logging from dnf.i18n import ucd +from shutil import which +logger = logging.getLogger('dnf') + + +def _verifyPkgUsingRpmkeys(package, installroot): + rpmkeys_binary = '/usr/bin/rpmkeys' + if not os.path.isfile(rpmkeys_binary): + rpmkeys_binary = which("rpmkeys") + logger.info(_('Using rpmkeys executable from {path} to verify signature for package: {package}.').format( + path=rpmkeys_binary, package=package)) + + if not os.path.isfile(rpmkeys_binary): + 
logger.critical(_('Cannot find rpmkeys executable to verify signatures.')) + return 0 + + args = ('rpmkeys', '--checksig', '--root', installroot, '--define', '_pkgverify_level all', '--', package) + with subprocess.Popen( + args=args, + executable=rpmkeys_binary, + env={'LC_ALL': 'C'}, + stdout=subprocess.PIPE, + cwd='/') as p: + data, _ = p.communicate() + if p.returncode != 0 or data != (package.encode('ascii', 'strict') + b': digests signatures OK\n'): + return 0 + else: + return 1 + def checkSig(ts, package): """Takes a transaction set and a package, check it's sigs, return 0 if they are all fine @@ -30,7 +60,7 @@ return 3 if the key is not trusted return 4 if the pkg is not gpg or pgp signed""" - value = 0 + value = 4 currentflags = ts.setVSFlags(0) fdno = os.open(package, os.O_RDONLY) try: @@ -38,10 +68,12 @@ except rpm.error as e: if str(e) == "public key not available": value = 1 - if str(e) == "public key not trusted": + elif str(e) == "public key not trusted": value = 3 - if str(e) == "error reading package header": + elif str(e) == "error reading package header": value = 2 + else: + raise ValueError('Unexpected error value %r from ts.hdrFromFdno when checking signature.' % str(e)) else: # checks signature from an hdr string = '%|DSAHEADER?{%{DSAHEADER:pgpsig}}:{%|RSAHEADER?{%{RSAHEADER:pgpsig}}:' \ @@ -49,17 +81,19 @@ try: siginfo = hdr.sprintf(string) siginfo = ucd(siginfo) + if siginfo == '(none)': value = 4 + elif "Key ID" in siginfo and _verifyPkgUsingRpmkeys(package, ts.ts.rootDir): + value = 0 + else: + raise ValueError('Unexpected return value %r from hdr.sprintf when checking signature.' % siginfo) except UnicodeDecodeError: pass del hdr - try: - os.close(fdno) - except OSError as e: # if we're not opened, don't scream about it - pass + os.close(fdno) ts.setVSFlags(currentflags) # put things back like they were before return value diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/util.py new/dnf-4.7.0/dnf/util.py --- old/dnf-4.6.1/dnf/util.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/util.py 2021-04-12 17:26:33.000000000 +0200 @@ -149,6 +149,27 @@ if e.errno != errno.EEXIST or not os.path.isdir(dname): raise e + +def split_path(path): + """ + Split path by path separators. + Use os.path.join() to join the path back to string. 
+ """ + result = [] + + head = path + while True: + head, tail = os.path.split(head) + if not tail: + if head or not result: + # if not result: make sure result is [""] so os.path.join(*result) can be called + result.insert(0, head) + break + result.insert(0, tail) + + return result + + def empty(iterable): try: l = len(iterable) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf/yum/misc.py new/dnf-4.7.0/dnf/yum/misc.py --- old/dnf-4.6.1/dnf/yum/misc.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf/yum/misc.py 2021-04-12 17:26:33.000000000 +0200 @@ -22,7 +22,6 @@ from __future__ import print_function, absolute_import from __future__ import unicode_literals -from dnf.exceptions import MiscError from dnf.pycomp import base64_decodebytes, basestring, unicode from stat import * import libdnf.utils @@ -32,7 +31,6 @@ import dnf.i18n import errno import glob -import hashlib import io import os import os.path @@ -41,7 +39,6 @@ import shutil import tempfile -_available_checksums = set(['md5', 'sha1', 'sha256', 'sha384', 'sha512']) _default_checksums = ['sha256'] @@ -68,119 +65,9 @@ return True return False - -class Checksums(object): - """ Generate checksum(s), on given pieces of data. Producing the - Length and the result(s) when complete. """ - - def __init__(self, checksums=None, ignore_missing=False, ignore_none=False): - if checksums is None: - checksums = _default_checksums - self._sumalgos = [] - self._sumtypes = [] - self._len = 0 - - done = set() - for sumtype in checksums: - if sumtype == 'sha': - sumtype = 'sha1' - if sumtype in done: - continue - - if sumtype in _available_checksums: - sumalgo = hashlib.new(sumtype) - elif ignore_missing: - continue - else: - raise MiscError('Error Checksumming, bad checksum type %s' % - sumtype) - done.add(sumtype) - self._sumtypes.append(sumtype) - self._sumalgos.append(sumalgo) - if not done and not ignore_none: - raise MiscError('Error Checksumming, no valid checksum type') - - def __len__(self): - return self._len - - # Note that len(x) is assert limited to INT_MAX, which is 2GB on i686. 
- length = property(fget=lambda self: self._len) - - def update(self, data): - self._len += len(data) - for sumalgo in self._sumalgos: - data = data.encode('utf-8') if isinstance(data, unicode) else data - sumalgo.update(data) - - def read(self, fo, size=2**16): - data = fo.read(size) - self.update(data) - return data - - def hexdigests(self): - ret = {} - for sumtype, sumdata in zip(self._sumtypes, self._sumalgos): - ret[sumtype] = sumdata.hexdigest() - return ret - - def hexdigest(self, checksum=None): - if checksum is None: - if not self._sumtypes: - return None - checksum = self._sumtypes[0] - if checksum == 'sha': - checksum = 'sha1' - return self.hexdigests()[checksum] - - def digests(self): - ret = {} - for sumtype, sumdata in zip(self._sumtypes, self._sumalgos): - ret[sumtype] = sumdata.digest() - return ret - - def digest(self, checksum=None): - if checksum is None: - if not self._sumtypes: - return None - checksum = self._sumtypes[0] - if checksum == 'sha': - checksum = 'sha1' - return self.digests()[checksum] - def get_default_chksum_type(): return _default_checksums[0] -def checksum(sumtype, file, CHUNK=2**16, datasize=None): - """takes filename, hand back Checksum of it - sumtype = md5 or sha/sha1/sha256/sha512 (note sha == sha1) - filename = /path/to/file - CHUNK=65536 by default""" - - # chunking brazenly lifted from Ryan Tomayko - - if isinstance(file, basestring): - try: - with open(file, 'rb', CHUNK) as fo: - return checksum(sumtype, fo, CHUNK, datasize) - except (IOError, OSError): - raise MiscError('Error opening file for checksum: %s' % file) - - try: - # assumes file is a file-like-object - data = Checksums([sumtype]) - while data.read(file, CHUNK): - if datasize is not None and data.length > datasize: - break - - # This screws up the length, but that shouldn't matter. We only care - # if this checksum == what we expect. - if datasize is not None and datasize != data.length: - return '!%u!%s' % (datasize, data.hexdigest(sumtype)) - - return data.hexdigest(sumtype) - except (IOError, OSError) as e: - raise MiscError('Error reading file for checksum: %s' % file) - class GenericHolder(object): """Generic Holder class used to hold other objects of known types It exists purely to be able to do object.somestuff, object.someotherstuff diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/dnf.spec new/dnf-4.7.0/dnf.spec --- old/dnf-4.6.1/dnf.spec 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/dnf.spec 2021-04-12 17:26:33.000000000 +0200 @@ -2,13 +2,13 @@ %define __cmake_in_source_build 1 # default dependencies -%global hawkey_version 0.59.0 +%global hawkey_version 0.61.1 %global libcomps_version 0.1.8 %global libmodulemd_version 2.9.3 %global rpm_version 4.14.0 # conflicts -%global conflicts_dnf_plugins_core_version 4.0.16 +%global conflicts_dnf_plugins_core_version 4.0.20 %global conflicts_dnf_plugins_extras_version 4.0.4 %global conflicts_dnfdaemon_version 0.3.19 @@ -65,7 +65,7 @@ It supports RPMs, modules and comps groups & environments. 
Name: dnf -Version: 4.6.1 +Version: 4.7.0 Release: 1%{?dist} Summary: %{pkg_summary} # For a breakdown of the licensing, see PACKAGE-LICENSING @@ -150,7 +150,6 @@ BuildRequires: python3-libdnf BuildRequires: libmodulemd >= %{libmodulemd_version} Requires: libmodulemd >= %{libmodulemd_version} -BuildRequires: python3-nose BuildRequires: python3-gpg Requires: python3-gpg Requires: %{name}-data = %{version}-%{release} @@ -371,6 +370,25 @@ %{python3_sitelib}/%{name}/automatic/ %changelog +* Mon Apr 12 2021 Nicola Sella <nse...@redhat.com> - 4.7.0-1 +- Improve repo config path ordering to fix a comps merging issue (RhBug:1928181) +- Keep reason when package is removed (RhBug:1921063) +- Improve mechanism for application of security filters (RhBug:1918475) +- [doc] Add description for new API +- [API] Add new method for reset of security filters +- [doc] Improve documentation for Hotfix repositories +- [doc] fix: "makecache" command downloads only enabled repositories +- Use libdnf.utils.checksum_{check,value} +- [doc] Add info that maximum parallel downloads is 20 +- Increase loglevel in case of invalid config options +- [doc] installonly_limit documentation follows behavior +- Prevent traceback (catch ValueError) if pkg is from cmdline +- Add documentation for config option sslverifystatus (RhBug:1814383) +- Check for specific key string when verifing signatures (RhBug:1915990) +- Use rpmkeys binary to verify package signature (RhBug:1915990) +- Bugs fixed (RhBug:1916783) +- Preserve file mode during log rotation (RhBug:1910084) + * Tue Mar 02 2021 Nicola Sella <nse...@redhat.com> - 4.6.1-1 - Fix recreate script - Add unit test for fill_sack_from_repos_in_cache (RhBug:1865803) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/doc/api_base.rst new/dnf-4.7.0/doc/api_base.rst --- old/dnf-4.6.1/doc/api_base.rst 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/doc/api_base.rst 2021-04-12 17:26:33.000000000 +0200 @@ -62,6 +62,22 @@ :exc:`dnf.exceptions.Error` if the :attr:`goal` is not empty. `progress`, if given, should be a :class:`.DownloadProgress` instance which can be used to monitor the progress of the download. + .. method:: add_security_filters(cmp_type, types=(), advisory=(), bugzilla=(), cves=(), severity=()) + + It modifies results of install, upgrade, and distrosync methods according to provided filters. + `cmp_type` - only 'eq' or 'gte' allowed + `types` - List or tuple with strings. Eg. `bugfix`, `enhancement`, `newpackage`, `security` + `advisory` - List or tuple with strings. Eg. `FEDORA-2201-123` + `bugzilla` - List or tuple with strings. Include packages that fix a Bugzilla ID, Eg. `123123`. + `cves` - List or tuple with strings. Include packages that fix a CVE (Common Vulnerabilities + and Exposures) ID. Eg. `CVE-2201-0123` + `severity` - List or tuple with strings. Includes packages that provide a fix for an issue + of the specified severity. + + .. method:: reset_security_filters() + + Reset all security filters + .. method:: close() Close all external handles the object holds. This is called automatically via context manager mechanism if the instance is handled using the ``with`` statement. 
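
[editor's note] The add_security_filters/reset_security_filters methods documented in the doc/api_base.rst hunk above are ordinary calls on a dnf.Base object. A minimal illustrative sketch of how a 4.7.0 script might use them follows; it is not part of this changeset, the surrounding read_all_repos/fill_sack/upgrade_all/resolve calls are just the usual Base workflow, and the CVE identifier is a made-up placeholder:

    import dnf

    with dnf.Base() as base:          # close() runs automatically, as noted above
        base.read_all_repos()
        base.fill_sack()

        # Limit the upcoming upgrade to security advisories of at least
        # "Important" severity, plus anything fixing a given CVE (placeholder id).
        base.add_security_filters('gte', types=['security'], severity=['Important'])
        base.add_security_filters('eq', cves=['CVE-2021-00000'])

        base.upgrade_all()
        base.resolve()
        print([str(pkg) for pkg in base.transaction.install_set])

        # The filters stay set on the Base object; clear them before reusing it.
        base.reset_security_filters()
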
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/doc/api_queries.rst new/dnf-4.7.0/doc/api_queries.rst --- old/dnf-4.6.1/doc/api_queries.rst 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/doc/api_queries.rst 2021-04-12 17:26:33.000000000 +0200 @@ -119,9 +119,9 @@ The key name can be supplemented with a relation-specifying suffix, separated by ``__``: - ========== =========== ========================================================== + ========== =========== =========================================================== key suffix value type semantics - ========== =========== ========================================================== + ========== =========== =========================================================== eq any exact match; This is the default if no suffix is specified. glob string shell-style wildcard match gt integer the actual value is greater than specified @@ -130,7 +130,9 @@ lte integer the actual value is less than or equal to specified neq any does not equal substr string the specified value is contained in the actual value - ========== =========== ========================================================== + eqg string exact match or the first higher, used with advisory filters + upgrade string skips advisory resolved by installed packages + ========== =========== =========================================================== For example, the following creates a query that matches all packages containing the string "club" in its name:: diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/doc/command_ref.rst new/dnf-4.7.0/doc/command_ref.rst --- old/dnf-4.6.1/doc/command_ref.rst 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/doc/command_ref.rst 2021-04-12 17:26:33.000000000 +0200 @@ -938,7 +938,7 @@ | Aliases: ``mc`` ``dnf [options] makecache`` - Downloads and caches metadata for all known repos. Tries to + Downloads and caches metadata for enabled repositories. Tries to avoid downloading whenever possible (e.g. when the local metadata hasn't expired yet or when the metadata timestamp hasn't changed). diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/doc/conf_ref.rst new/dnf-4.7.0/doc/conf_ref.rst --- old/dnf-4.6.1/doc/conf_ref.rst 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/doc/conf_ref.rst 2021-04-12 17:26:33.000000000 +0200 @@ -250,8 +250,9 @@ :ref:`integer <integer-label>` Number of :ref:`installonly packages <installonlypkgs-label>` allowed to be installed - concurrently. Defaults to 3. The minimal number of installonly packages is 2. Value 0 or 1 means - unlimited number of installonly packages. + concurrently. Defaults to 3. The minimal number of installonly packages is 2. Value 0 means + unlimited number of installonly packages. Value 1 is explicitely not allowed since it + complicates kernel upgrades due to protection of the running kernel from removal. ``installroot`` :ref:`string <string-label>` @@ -799,7 +800,7 @@ ``max_parallel_downloads`` :ref:`integer <integer-label>` - Maximum number of simultaneous package downloads. Defaults to 3. + Maximum number of simultaneous package downloads. Defaults to 3. Maximum of 20. .. _metadata_expire-label: @@ -936,6 +937,11 @@ When enabled, remote SSL certificates are verified. If the client can not be authenticated, connecting fails and the repository is not used any further. 
If ``False``, SSL connections can be used, but certificates are not verified. Default is ``True``. +``sslverifystatus`` + :ref:`boolean <boolean-label>` + + When enabled, revocation status of the server certificate is verified using the "Certificate Status Request" TLS extension (aka. OCSP stapling). Default is ``False``. + .. _sslclientcert-label: ``sslclientcert`` diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/doc/modularity.rst new/dnf-4.7.0/doc/modularity.rst --- old/dnf-4.6.1/doc/modularity.rst 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/doc/modularity.rst 2021-04-12 17:26:33.000000000 +0200 @@ -71,7 +71,8 @@ ===================== In special cases, a user wants to cherry-pick individual packages provided outside module streams and make them available on along with packages from the active streams. -Under normal circumstances, such packages are filtered out. +Under normal circumstances, such packages are filtered out or rejected from getting on the system by +Fail-safe mechanisms. To make the system use packages from a repository regardless of their modularity, specify ``module_hotfixes=true`` in the .repo file. This protects the repository from package filtering. @@ -111,4 +112,5 @@ All packages that are built as a part of a module have ``%{modularitylabel}`` RPM header set. If such package becomes part of RPM transaction and cannot be associated with any available modulemd, DNF prevents from getting it on the system (package is available, but cannot be -installed, upgraded, etc.) +installed, upgraded, etc.). Packages from Hotfix repositories or Commandline repository are not +affected by Fail-safe mechanisms. diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/doc/release_notes.rst new/dnf-4.7.0/doc/release_notes.rst --- old/dnf-4.6.1/doc/release_notes.rst 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/doc/release_notes.rst 2021-04-12 17:26:33.000000000 +0200 @@ -20,6 +20,40 @@ ################### =================== +4.7.0 Release Notes +=================== + +- Improve repo config path ordering to fix a comps merging issue (RhBug:1928181) +- Keep reason when package is removed (RhBug:1921063) +- Improve mechanism for application of security filters (RhBug:1918475) +- [doc] Add description for new API +- [API] Add new method for reset of security filters +- [doc] Improve documentation for Hotfix repositories +- [doc] fix: "makecache" command downloads only enabled repositories +- Use libdnf.utils.checksum_{check,value} +- [doc] Add info that maximum parallel downloads is 20 +- Increase loglevel in case of invalid config options +- [doc] installonly_limit documentation follows behavior +- Prevent traceback (catch ValueError) if pkg is from cmdline +- Add documentation for config option sslverifystatus (RhBug:1814383) + +- Security fixes: + - Check for specific key string when verifing signatures (RhBug:1915990) + - Use rpmkeys binary to verify package signature (RhBug:1915990) + +- Bug fixes: + - Bugs fixed (RhBug:1916783) + - Preserve file mode during log rotation (RhBug:1910084) + +Bugs fixed in 4.7.0: + +* :rhbug:`1910084` +* :rhbug:`1921063` +* :rhbug:`1918475` +* :rhbug:`1814383` +* :rhbug:`1928181` + +=================== 4.6.1 Release Notes =================== diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/doc/summaries_cache new/dnf-4.7.0/doc/summaries_cache --- old/dnf-4.6.1/doc/summaries_cache 
2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/doc/summaries_cache 2021-04-12 17:26:33.000000000 +0200 @@ -3314,5 +3314,21 @@ [ 1906970, "dnf history wrong output if piped through more or redirected to file" + ], + [ + 1921063, + "dnf autoremove wants to remove \"kernel-modules-extra\" if you have a rawhide kernel installed" + ], + [ + 1918475, + "dnf --security pulling in packages without security advisory" + ], + [ + 1814383, + "librepo does not do TLS certificate revocation checking" + ], + [ + 1928181, + "comps from the updates repo don't override comps from the fedora repo" ] ] \ No newline at end of file diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/tests/CMakeLists.txt new/dnf-4.7.0/tests/CMakeLists.txt --- old/dnf-4.6.1/tests/CMakeLists.txt 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/tests/CMakeLists.txt 2021-04-12 17:26:33.000000000 +0200 @@ -1,4 +1,7 @@ -ADD_TEST(test ${PYTHON_EXECUTABLE} -m nose -s ${CMAKE_CURRENT_SOURCE_DIR}) +ADD_TEST( + NAME test + COMMAND ${PYTHON_EXECUTABLE} -m unittest discover -s tests + WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}) # For libdnf built with sanitizers, has no effect otherwise. # dnf tests do some wild stuff and cause a lot of leaks, hence turn leak diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/tests/__init__.py new/dnf-4.7.0/tests/__init__.py --- old/dnf-4.6.1/tests/__init__.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/tests/__init__.py 2021-04-12 17:26:33.000000000 +0200 @@ -17,9 +17,11 @@ # Red Hat, Inc. # +import locale import os # run tests with C locales os.environ["LC_ALL"] = "C.UTF-8" os.environ["LANG"] = "C.UTF-8" os.environ["LANGUAGE"] = "en_US:en" +locale.setlocale(locale.LC_ALL, "C.UTF-8") diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/tests/test_groups.py new/dnf-4.7.0/tests/test_groups.py --- old/dnf-4.6.1/tests/test_groups.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/tests/test_groups.py 2021-04-12 17:26:33.000000000 +0200 @@ -313,6 +313,11 @@ COMPS_SEED_HISTORY = True def test_environment_install(self): + # actually commit the pre-mocked comps, as otherwise + # 'sugar-desktop-environment' is already present in the open + # transaction and it wins over the one installed here + self._swdb_commit() + env_id = 'sugar-desktop-environment' comps_env = self.comps.environment_by_pattern(env_id) self.base.environment_install(comps_env.id, ('mandatory',)) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/dnf-4.6.1/tests/test_util.py new/dnf-4.7.0/tests/test_util.py --- old/dnf-4.6.1/tests/test_util.py 2021-03-02 15:05:07.000000000 +0100 +++ new/dnf-4.7.0/tests/test_util.py 2021-04-12 17:26:33.000000000 +0200 @@ -21,6 +21,7 @@ from __future__ import unicode_literals import operator +import os import dnf.util @@ -210,6 +211,31 @@ self.assertRaises(OSError, dnf.util.touch, tests.support.NONEXISTENT_FILE, no_create=True) + def test_split_path(self): + path_orig = "" + path_split = dnf.util.split_path(path_orig) + path_join = os.path.join(*path_split) + self.assertEqual(path_split, [""]) + self.assertEqual(path_join, path_orig) + + path_orig = "/" + path_split = dnf.util.split_path(path_orig) + path_join = os.path.join(*path_split) + self.assertEqual(path_split, ["/"]) + self.assertEqual(path_join, path_orig) + + path_orig = "abc" + path_split = dnf.util.split_path(path_orig) + path_join = 
os.path.join(*path_split) + self.assertEqual(path_split, ["abc"]) + self.assertEqual(path_join, path_orig) + + path_orig = "/a/bb/ccc/dddd.conf" + path_split = dnf.util.split_path(path_orig) + path_join = os.path.join(*path_split) + self.assertEqual(path_split, ["/", "a", "bb", "ccc", "dddd.conf"]) + self.assertEqual(path_join, path_orig) + class TestMultiCall(tests.support.TestCase): def test_multi_call(self):
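
[editor's note] For context on the split_path test cases added above, here is a small self-contained sketch that restates the new dnf.util.split_path helper from this diff and shows how dnf/conf/read.py uses it as a sort key for *.repo files; the example repository file names are hypothetical:

    import os

    def split_path(path):
        """Split a path into components; os.path.join(*parts) restores it."""
        parts = []
        head = path
        while True:
            head, tail = os.path.split(head)
            if not tail:
                if head or not parts:
                    # keep [""] for an empty input so os.path.join(*parts) still works
                    parts.insert(0, head)
                break
            parts.insert(0, tail)
        return parts

    repo_configs = [
        "/etc/yum.repos.d/updates.repo",
        "/etc/yum.repos.d/fedora.repo",
    ]
    # Strip the 5-character ".repo" suffix before sorting, as the patched reader
    # does, so the extension never influences the ordering; sorting on the split
    # components keeps path separators from being compared as ordinary characters.
    repo_configs.sort(key=lambda p: split_path(p[:-5]))
    print(repo_configs)  # ['/etc/yum.repos.d/fedora.repo', '/etc/yum.repos.d/updates.repo']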