Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package rpmlint for openSUSE:Factory checked 
in at 2026-04-29 19:17:24
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/rpmlint (Old)
 and      /work/SRC/openSUSE:Factory/.rpmlint.new.30200 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "rpmlint"

Wed Apr 29 19:17:24 2026 rev:536 rq:1349542 version:2.9.0+git20260423.8742fdc6

Changes:
--------
--- /work/SRC/openSUSE:Factory/rpmlint/rpmlint.changes  2026-04-26 
21:13:02.841672272 +0200
+++ /work/SRC/openSUSE:Factory/.rpmlint.new.30200/rpmlint.changes       
2026-04-29 19:17:31.884317254 +0200
@@ -1,0 +2,10 @@
+Mon Apr 27 08:29:36 UTC 2026 - Matthias Gerstner <[email protected]>
+
+- Update to version 2.9.0+git20260423.8742fdc6:
+  * FileDigestCheck: some more harmonization and type annotation
+  * test: FileDigestCheck: verify only files from the same group are allowed
+  * FileDigestCheck: operate on digest groups, not on a global view
+  * FileDigestCheck: refactoring
+  * opensuse.toml: fix typo
+
+-------------------------------------------------------------------

Old:
----
  rpmlint-2.9.0+git20260417.eb98d54d.tar.xz

New:
----
  rpmlint-2.9.0+git20260423.8742fdc6.tar.xz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ rpmlint.spec ++++++
--- /var/tmp/diff_new_pack.t9s9f9/_old  2026-04-29 19:17:33.100366854 +0200
+++ /var/tmp/diff_new_pack.t9s9f9/_new  2026-04-29 19:17:33.100366854 +0200
@@ -24,7 +24,7 @@
 %define name_suffix -%{flavor}
 %endif
 Name:           rpmlint%{name_suffix}
-Version:        2.9.0+git20260417.eb98d54d
+Version:        2.9.0+git20260423.8742fdc6
 Release:        0
 Summary:        RPM file correctness checker
 License:        GPL-2.0-or-later

++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.t9s9f9/_old  2026-04-29 19:17:33.200370932 +0200
+++ /var/tmp/diff_new_pack.t9s9f9/_new  2026-04-29 19:17:33.208371260 +0200
@@ -1,6 +1,6 @@
 <servicedata>
 <service name="tar_scm">
                 <param 
name="url">https://github.com/rpm-software-management/rpmlint.git</param>
-              <param 
name="changesrevision">eb98d54d7712f7717ab91d887b47fb0cc23e4249</param></service></servicedata>
+              <param 
name="changesrevision">8742fdc6250c24a8521244970dcf2c8ef7e9f2d2</param></service></servicedata>
 (No newline at EOF)
 

++++++ rpmlint-2.9.0+git20260417.eb98d54d.tar.xz -> 
rpmlint-2.9.0+git20260423.8742fdc6.tar.xz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/rpmlint-2.9.0+git20260417.eb98d54d/configs/openSUSE/opensuse.toml 
new/rpmlint-2.9.0+git20260423.8742fdc6/configs/openSUSE/opensuse.toml
--- old/rpmlint-2.9.0+git20260417.eb98d54d/configs/openSUSE/opensuse.toml       
2026-04-17 10:34:14.000000000 +0200
+++ new/rpmlint-2.9.0+git20260423.8742fdc6/configs/openSUSE/opensuse.toml       
2026-04-23 10:59:14.000000000 +0200
@@ -337,7 +337,7 @@
 [[SymlinkExceptions]]
 packages = ["systemd", "systemd-mini"]
 paths = [
-    # compability symlink towards /etc/sysctl.conf for systemd-sysctl
+    # compatibility symlink towards /etc/sysctl.conf for systemd-sysctl
     "/usr/lib/sysctl.d/99-sysctl.conf"
 ]
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/rpmlint-2.9.0+git20260417.eb98d54d/rpmlint/checks/FileDigestCheck.py 
new/rpmlint-2.9.0+git20260423.8742fdc6/rpmlint/checks/FileDigestCheck.py
--- old/rpmlint-2.9.0+git20260417.eb98d54d/rpmlint/checks/FileDigestCheck.py    
2026-04-17 10:34:14.000000000 +0200
+++ new/rpmlint-2.9.0+git20260423.8742fdc6/rpmlint/checks/FileDigestCheck.py    
2026-04-23 10:59:14.000000000 +0200
@@ -21,7 +21,7 @@
 # target files depending on their file type. Currently we have two common
 # special cases:
 #
-# - shell like files (configuration files, shell scripts, Python
+# - shell-like files (configuration files, shell scripts, Python
 # scripts, Perl scripts etc) that can contain empty lines or comments
 # introduced by '#'.
 # - XML files for things like D-Bus configuration files or polkit policies.
@@ -124,121 +124,148 @@
 
 
 class FileDigestCheck(AbstractCheck):
+
     def __init__(self, config, output):
         super().__init__(config, output)
-        self.digest_configurations = {}
-        # Build trie for fast lookup
-        self.digest_configuration_trie = {}
-        self.follow_symlinks_in_group = {}
-        self.name_patterns_in_group = {}
-        for group, values in 
self.config.configuration['FileDigestLocation'].items():
-            self.digest_configurations[group] = [Path(p) for p in 
values['Locations']]
-            self.follow_symlinks_in_group[group] = values['FollowSymlinks']
-            self.name_patterns_in_group[group] = values.get('NamePatterns')
+        # gather different FileDigestCheck configurations in a list. Each 
entry
+        # will be a dictionary like:
+        #
+        # {
+        #     "FollowSymlinks": True,
+        #     "Locations: ["/path/one", "/path/two"],
+        #     "NamePatterns: ["*.so"],
+        #     "type": "pam"
+        # }
+        self.checks = []
+        # maps from a check type name like "pam" to its configuration dict as
+        # found in self.checks
+        self.known_check_types: dict[str, dict] = {}
+
+        for check_type, config in 
self.config.configuration['FileDigestLocation'].items():
+            # convert the Location paths into Path objects
+            config['Locations'] = [Path(p) for p in config['Locations']]
+            # make sure these keys always exist
+            config.setdefault('NamePatterns', [])
+            config.setdefault('FollowSymlinks', False)
+            config['type'] = check_type
+            self.checks.append(config)
+            self.known_check_types[check_type] = config
 
         self._setup_digest_location_trie()
-        self.ghost_file_exceptions = 
self.config.configuration.get('GhostFilesExceptions', [])
-        self.symlink_exceptions = 
self.config.configuration.get('SymlinkExceptions', [])
 
-        self.digest_groups = self.config.configuration.get('FileDigestGroup', 
[])
         self.digest_cache = {}
+
+        self.digest_groups = self.config.configuration.get('FileDigestGroup', 
[])
         for digest_group in self.digest_groups:
             self._normalize_digest_group(digest_group)
-            self._verify_digest_group(digest_group)
+            self._sanity_check_digest_group(digest_group)
+
+        # lists of dictionaries describing symlink and ghostfile exception 
configuration
+        self.ghost_file_exceptions = 
self.config.configuration.get('GhostFilesExceptions', [])
+        self.symlink_exceptions = 
self.config.configuration.get('SymlinkExceptions', [])
 
         for exc in self.ghost_file_exceptions:
-            self._verify_package_keys_in_dict('GhostFilesExceptions', exc)
+            self._sanity_check_package_keys('GhostFilesExceptions', exc)
         for exc in self.symlink_exceptions:
-            self._verify_package_keys_in_dict('SymlinkExceptions', exc)
+            self._sanity_check_package_keys('SymlinkExceptions', exc)
 
-    def _get_digester(self, entry):
-        name = entry.get('digester', 'default')
+    def _get_digester(self, digest_info: dict):
+        """Returns the proper digester instance for the given digest_info
+        dictionary as found in a file digest group."""
+        name = digest_info.get('digester', 'default')
         try:
             return DIGESTERS[name]
         except KeyError:
-            path = entry['path']
+            path = digest_info['path']
             raise Exception(f'Invalid digester {name} encountered for path 
{path}')
 
     def _setup_digest_location_trie(self):
-        # Build trie of Locations that are present in FileDigestLocation
-        for config_locations in self.digest_configurations.values():
-            for location in config_locations:
-                path = Path(location)
+        """Builds a trie data structure for quickly determining if a path is
+        in a restricted location."""
+        self.restricted_paths_trie = {}
+        # Build a trie of restricted paths across all checks
+        for config in self.checks:
+            for path in config['Locations']:
                 if not path.is_absolute():
                     raise Exception(f'Absolute path expected: {path}')
-                node = self.digest_configuration_trie
+                node = self.restricted_paths_trie
                 # Skip initial '/'.
                 parts = path.parts[1:]
                 for part in parts[:-1]:
                     node = node.setdefault(part, {})
                     if node is None:
-                        raise Exception(f'Conflicting paths in trie 
{location}')
+                        # a terminator was already set here, so we cannot
+                        # enter this additional path into the trie.
+                        raise Exception(f'Conflicting paths in trie {path}')
                 node[parts[-1]] = None
 
     def _is_path_restricted(self, path):
-        """
-        Return true if there is a digest configuration that covers
-        provided file at given path.
-        """
+        """Return true if there is a check configuration that restricts the 
given
+        path."""
+        if isinstance(path, str):
+            path = Path(path)
 
+        node = self.restricted_paths_trie
         # Skip initial '/'
-        parts = path.parts[1:]
-        node = self.digest_configuration_trie
-        for part in parts:
+        for part in path.parts[1:]:
             if node is None:
+                # terminator found, this path prefix is indeed restricted by a 
check
                 return True
-            if part not in node:
+            elif part not in node:
+                # path element not in trie, not restricted
                 return False
             node = node[part]
+        # `path` is a prefix of a restricted path, but not within a restricted 
location
         return False
 
-    def _normalize_digest_group(self, digest_group):
+    def _normalize_digest_group(self, digest_group: dict):
         """Perform any operations on the digest_group to make it match the
-        format excepted by the rest of the checker.
+        structure expected by the rest of the checker.
 
         Some convenience functionality like the nodigests list needs to be
         translated into something that is easier to process by the check.
         """
-        # expand skip digests
-        for skip_digest in digest_group.get('nodigests', []):
+        # expand short form 'nodigests' into full configuration entries which
+        # are easier for us to process.
+        for no_digest_path in digest_group.get('nodigests', []):
             digests = digest_group.setdefault('digests', [])
             digests.append(
                 {
-                    'path': skip_digest,
+                    'path': no_digest_path,
                     'algorithm': 'skip',
                     'hash': '',
                 }
             )
 
         for digest in digest_group.get('digests', []):
-            # imply sha256 digests by default
-            digest.setdefault('algorithm', 'sha256')
+            # imply default algorithm
+            digest.setdefault('algorithm', DEFAULT_DIGEST_ALG)
 
-    def _verify_digest_group(self, digest_group):
-        dg_type = digest_group['type']
-        if dg_type not in self.digest_configurations:
-            raise KeyError(f'FileDigestGroup type "{dg_type}" is not '
-                           f'supported, known values: 
{list(self.digest_configurations)}')
+    def _sanity_check_digest_group(self, digest_group: dict):
+        check_type = digest_group['type']
+        if check_type not in self.known_check_types:
+            raise KeyError(f'FileDigestGroup type "{check_type}" is not '
+                           f'supported, known values: 
{self.known_check_types.keys()}')
 
-        for digest in digest_group['digests']:
+        for digest_info in digest_group['digests']:
             # verify digest algorithm
-            algorithm = digest['algorithm']
+            algorithm = digest_info['algorithm']
             if algorithm == 'skip':
                 pass
             else:
                 # this will raise on bad algorithm names
                 hashlib.new(algorithm)
 
-            if 'path' not in digest:
+            if 'path' not in digest_info:
                 raise KeyError('FileDigestCheck: missing "path" key in 
FileDigestGroup entry')
 
             # verify a valid digester is selected, if any
-            self._get_digester(digest)
+            self._get_digester(digest_info)
 
-        self._verify_package_keys_in_dict('FileDigestGroup', digest_group)
+        self._sanity_check_package_keys('FileDigestGroup', digest_group)
 
-    def _verify_package_keys_in_dict(self, context, d):
-        """verifies package/packages keys in the given dictionary.
+    def _sanity_check_package_keys(self, context: str, d: dict):
+        """Verifies package/packages keys in the given dictionary `d`.
 
         This supports a single `package = "name"` key as well as a `packages =
         ["one", "two"]` list. Sanity checks are performed verifying that at
@@ -273,8 +300,17 @@
         else:
             return d['packages']
 
-    def _matches_pkg(self, config_dict, pkg):
-        for candidate in self._gather_packages_from_dict(config_dict):
+    def _matches_pkg(self, pkg, config: dict):
+        """Checks whether the given `config` applies to the given package
+        name.
+
+        `config` can be different types of configuration dicts for
+        file digest groups, symlink and ghost exceptions as long as it
+        supports `package` / `packages` keys.
+
+        This function also resolves glob patterns.
+        """
+        for candidate in self._gather_packages_from_dict(config):
             if pkg.name == candidate:
                 return True
             elif candidate.startswith('glob:'):
@@ -284,52 +320,68 @@
 
         return False
 
-    def _get_digest_configuration_group(self, pkgfile):
+    def _lookup_check_for_file(self, pkgfile):
+        """Returns the dictionary of the FileDigestCheck config which applies
+        to `pkgfile` or None if none applies."""
         if stat.S_ISDIR(pkgfile.mode):
+            # directories are generally not restricted by this check
             return None
 
-        # Make quick lookup in the built trie of configuration locations
+        # Make a fast lookup in the prepared trie of configuration locations
         path = Path(pkgfile.name)
         if not self._is_path_restricted(path):
             return None
 
-        for group, locations in self.digest_configurations.items():
-            for location in locations:
+        for config in self.checks:
+            for location in config['Locations']:
                 with contextlib.suppress(ValueError):
                     if path.relative_to(location):
-                        if not self.name_patterns_in_group[group]:
-                            return group
+                        if not config['NamePatterns']:
+                            # files in this location are unconditionally 
subject to the check.
+                            return config
                         else:
-                            for glob in self.name_patterns_in_group[group]:
+                            # we need to check if the filename matches the 
configured pattern.
+                            for glob in config['NamePatterns']:
                                 if fnmatch(path.name, glob):
-                                    return group
+                                    return config
+
+        # must be a file which isn't matched by a name pattern.
         return None
 
-    def _is_valid_digest(self, path, digest, pkg):
-        algorithm = digest['algorithm']
+    def _check_digest(self, pkg, digest_info: dict):
+        """Returns a tuple of (bool, str), where the boolean indicates whether
+        the digest of the package's file matches the one recorded in the
+        digest_info; the str contains the actual hash digest calculated
+        from the package's file or None if no hash digest is configured or an
+        error occurred."""
+        algorithm = digest_info['algorithm']
         if algorithm == 'skip':
             return (True, None)
 
-        pkgfile = self._resolve_links(pkg, path)
+        pkgfile = self._resolve_links(pkg, digest_info['path'])
         if pkgfile is None:
             return (False, None)
 
-        digester = self._get_digester(digest)
+        digester = self._get_digester(digest_info)
         file_digest = self._calc_digest(digester, pkgfile, algorithm)
-        return (file_digest == digest['hash'], file_digest)
+        return (file_digest == digest_info['hash'], file_digest)
 
     def _resolve_links(self, pkg, path):
+        """Follows symbolic links within the package until a non-symlink entry
+        is found. This cannot follow symlinks outside of the current
+        package. If no valid target is found then None is returned, the
+        resolved file entry otherwise."""
         pkgfile = pkg.files[path]
-        while stat.S_ISLNK(pkgfile.mode):
+        while pkgfile and stat.S_ISLNK(pkgfile.mode):
             pkgfile = pkg.readlink(pkgfile)
-            if not pkgfile:
-                return None
 
         return pkgfile
 
     def _calc_digest(self, digester, pkgfile, algorithm):
+        """Calculates the hash digest of `pkgfile` based on `algorithm` and
+        `digester`."""
         # include the digester in the cache key, because different entries
-        # might be using different digester types
+        # might be using different digester types for the same paths.
         cache_key = (id(digester), pkgfile.name, algorithm)
 
         digest = self.digest_cache.get(cache_key, None)
@@ -342,13 +394,16 @@
         return digest
 
     def _get_digest_hint(self, pkg, path):
+        """Returns a string listing the observed digests of a path in case of
+        whitelisting violations."""
         pkgfile = self._resolve_links(pkg, path)
         digest_hint = f'{DEFAULT_DIGEST_ALG} file digest'
         if not pkgfile:
-            return digest_hint + ': <failed to resolve pkgfile>'
+            return f'{digest_hint}: <failed to resolve pkgfile>'
 
         if pkgfile.name != path:
-            digest_hint += ' of resolved path ' + pkgfile.name
+            # make it clear that we resolved a symlink here
+            digest_hint += f' of resolved path {pkgfile.name}'
 
         for dtype, digester in DIGESTERS.items():
             try:
@@ -362,8 +417,8 @@
 
     def _check_paths_match(self, rpm_path, whitelist_path):
         """This checks a whitelisted path against a file path found in the RPM
-        if they match. This also handles special cases like globbing
-        characters in the whitelisting."""
+        if they match. This handles special cases like globbing characters in
+        the whitelisting."""
         if rpm_path == whitelist_path:
             # exact match
             return True
@@ -373,127 +428,269 @@
         else:
             return False
 
-    def _check_group_type(self, pkg, group_type, secured_paths):
-        """ Check all secured files of a group type
+    def _find_digest_groups(self, pkg, check: dict):
+        """Return all digest group entries matching the given check type and
+        package name."""
+        ret = []
+        for group in self.digest_groups:
+            if group['type'] == check['type'] and self._matches_pkg(pkg, 
group):
+                ret.append(group)
+        return ret
 
-        Ensures that all files in secured paths have to be whitelisted in an
-        digest whitelisting belonging to group_type.
+    def _check_for_unauthorized(self, pkg, check: dict, restricted_paths: 
list):
+        """Detect and complain about files with missing whitelistings.
+
+        Returns the list of paths which are basically whitelisted (but digests
+        still need to be checked).
 
         Params:
-        - group_type: type of digest group type e.g. "cron", "dbus", ...
-        - secured_paths: all secured paths found in this package e.g. 
['/usr/share/dbus-1/system-services/org.freedesktop.PolicyKit1.service', 
'/usr/share/dbus-1/system.d/org.freedesktop.PolicyKit1.conf']
+        - check: check configuration dictionary.
+        - restricted_paths: list of restricted paths found in this package 
e.g. [
+            
'/usr/share/dbus-1/system-services/org.freedesktop.PolicyKit1.service',
+            '/usr/share/dbus-1/system.d/org.freedesktop.PolicyKit1.conf'
+          ]
         """
-        # Find all digest whitelisted paths that belong to group_type and 
focus on current package
-        digests = []
-        for digest_group in self.digest_groups:
-            if digest_group['type'] == group_type and 
self._matches_pkg(digest_group, pkg):
-                digests.extend(digest_group['digests'])
-
-        # For all files in this package that fall into the secured paths: 
check if they are whitelisted
-        # If not whitelisted print error: file-unauthorized
-        whitelisted_paths = {dg['path'] for dg in digests}
-        for spath in secured_paths:
-            for wpath in whitelisted_paths:
-                # filepath is whitelisted
-                if self._check_paths_match(spath, wpath):
+        # Collect all paths for the given check and package that are 
whitelisted
+        known_paths = []
+        for group in self._find_digest_groups(pkg, check):
+            known_paths.extend(info['path'] for info in group['digests'])
+
+        known_paths = sorted(set(known_paths))
+
+        # here the remaining paths that have whitelisting entries will be 
collected
+        whitelisted = []
+
+        for restricted in restricted_paths:
+            for known in known_paths:
+                # path is basically whitelisted somewhere, digest might still 
be wrong
+                if self._check_paths_match(restricted, known):
+                    whitelisted.append(restricted)
                     break
             else:
-                digest_hint = self._get_digest_hint(pkg, spath)
-                self.output.add_info('E', pkg, 
f'{group_type}-file-unauthorized', spath, f'({digest_hint})')
+                digest_hint = self._get_digest_hint(pkg, restricted)
+                check_type = check['type']
+                self.output.add_info('E', pkg, 
f'{check_type}-file-unauthorized', restricted, f'({digest_hint})')
 
-        # For all digest whitelisted files check if the digests in the package 
are correct
-        # If not correct print error: file-digest-mismatch
-        for path in whitelisted_paths:
-            # Find all digests with same path
-            # This is needed because there could be an older and a newer
-            # version of this package with same whitelisted paths and 
different digests
-            digests_of_path = []
-            for digest in digests:
-                if self._check_paths_match(path, digest['path']):
-                    digests_of_path.append(digest)
-            # If *any* digest with the same path matches the package's file
-            # digest of that path, then we assume the file is correctly 
whitelisted
-            error_digests = []
-            for digest in digests_of_path:
-                # Check if digest whitelist path has a matching file in our 
package
-                if not pkg.files.get(path):
-                    # This digest entry is not needed anymore and could be 
dropped
-                    continue
-                try:
-                    valid_digest, file_digest = self._is_valid_digest(path, 
digest, pkg)
-                except Exception as e:
-                    self.output.add_info('E', pkg, 
f'{group_type}-file-parse-error', path, f'failed to calculate digest: {e}')
-                    continue
-                if valid_digest:
-                    # Valid digest found, no mismatch error will be printed
-                    error_digests = []
-                    break
-                # Gather all digest mismatches for error message
-                if file_digest:
-                    error_digests.append(digest)
-            if error_digests:
-                for digest in error_digests:
-                    digest_algorithm = digest['algorithm']
-                    digest_hash = digest['hash']
-                    self.output.add_info('E', pkg, 
f'{group_type}-file-digest-mismatch', path,
-                                         f'expected 
{digest_algorithm}:{digest_hash}, has:{file_digest}')
+        return whitelisted
 
-    def _check_ghost_exceptions(self, pkg, name):
-        """ Check if a ghosted file is whilelisted
+    def _check_digest_group(self, pkg, check: dict, group: dict,
+                            verified_paths: set, mismatches: dict[str, list]):
+        """Verify digests from the given digest group.
 
-        In general we don't allow files we want to secure to be included as 
%ghost. And of course there are exceptions,
-        e.g. polkit-default-privs
+        The caller must ensure that `group` is valid for `pkg`.
+
+        This verifies a digest group as a set of related paths: The
+        whitelisting will only be considered complete if all files in the
+        group have valid digests.
+
+        If the group matches then the validated files will be added to
+        `verified_paths`. Otherwise if digest mismatches have been found
+        then these mismatches will be recorded in the dictionary `mismatches`.
+        """
+
+        missing_files = []
+        valid_files = []
+        found_mismatch = False
+        check_type = check['type']
+        # we need to handle mismatches for paths in unrelated locations 
specially
+        unrelated_mismatches = {}
+
+        # check whether all digests of the group have matching files in the 
package
+        for digest_info in group['digests']:
+            path = digest_info['path']
+            # Check if digest whitelist path has a matching file in our package
+            if not pkg.files.get(path):
+                # This digest entry might not be needed anymore.
+                #
+                # We are tolerating that some of the listed files are no
+                # longer present as long as any existing files have valid
+                # digests.
+                #
+                # NOTE: glob patterns not supported here yet, but coupling
+                # glob patterns with digests would be weird anyway.
+                if not path.startswith('glob:'):
+                    missing_files.append(path)
+                continue
+
+            try:
+                valid, hashsum = self._check_digest(pkg, digest_info)
+            except Exception as e:
+                self.output.add_info('E', pkg, 
f'{check_type}-file-parse-error', path, f'failed to calculate digest: {e}')
+                continue
+
+            if valid:
+                # Valid digest found, continue with the rest of the paths in 
this group
+                valid_files.append(path)
+            else:
+                found_mismatch = True
+
+            if hashsum:
+                # also store the digest we encountered for later reference
+                digest_info['encountered'] = hashsum
+                # Record this digest mismatch for later error messages
+                if self._is_path_restricted(path):
+                    mismatch_list = mismatches.setdefault(path, [])
+                else:
+                    mismatch_list = unrelated_mismatches.setdefault(path, [])
+
+                mismatch_list.append(digest_info)
+
+        if valid_files and unrelated_mismatches:
+            # only store unrelated file mismatches if any actually
+            # restricted path was valid, otherwise this could create confusion
+            # and too much noise.
+            for unrelated, infos in unrelated_mismatches.items():
+                mismatch_list = mismatches.setdefault(unrelated, [])
+                mismatch_list.extend(infos)
+
+        if found_mismatch:
+            # this digest group is not fully valid for this package
+            return
+
+        for missing in missing_files:
+            self.output.add_info('W', pkg, 
f'{check_type}-whitelisted-file-missing', missing, 'path present in whitelist 
but not in package')
+
+        # the digest group was fully verified, we can add all files to the set
+        # of verified paths.
+        for file in valid_files:
+            verified_paths.add(file)
+
+    def _check_for_valid_digests(self, pkg, check: dict, restricted_paths: 
list):
+        """Check for valid digest entries for the given restricted paths.
+
+        Ensures that all files in restricted locations are whitelisted in an
+        appropriate file digest group. `restricted_paths` needs to contain
+        paths that are known to be whitelisted, but digest verification and
+        group verification are still pending.
 
         Params:
-        - pkg: name of the package
-        - name: paths of the ghosted file
+        - check: check configuration dictionary.
+        - restricted_paths: list of restricted and whitelisted paths found in 
this package e.g. [
+            
'/usr/share/dbus-1/system-services/org.freedesktop.PolicyKit1.service',
+            '/usr/share/dbus-1/system.d/org.freedesktop.PolicyKit1.conf'
+          ]
+        """
+
+        # digest mismatches will be recorded here with paths as keys and a
+        # list of digest info dictionaries as values.
+        mismatches: dict[str, list] = {}
+        verified_paths = set()
+
+        for group in self._find_digest_groups(pkg, check):
+            self._check_digest_group(pkg, check, group, verified_paths, 
mismatches)
+
+        violations = set()
+
+        # now gather all whitelisting violations
+        for restricted in restricted_paths:
+            if restricted not in verified_paths:
+                violations.add(restricted)
+
+        # this will collect "unrelated files", i.e. files that are not
+        # directly in a restricted path but are listed as additional files
+        # to verify in a digest group.
+        for mismatch in mismatches:
+            if mismatch not in verified_paths:
+                violations.add(mismatch)
+
+        for violation in sorted(violations):
+            mismatch_list = mismatches.get(violation, [])
+            if not mismatch_list:
+                # This can happen if a group has mixed digest-coupled and
+                # nodigest files. If the digest-coupled files fail to verify
+                # then the nodigest files also fail to verify, since they are
+                # treated as a group. No point in printing a
+                # file-digest-mismatch in this case, though.
+                continue
+
+            hashes_seen = set()
+
+            # in case multiple groups exist for the same file then we are
+            # potentially printing multiple errors with multiple expected
+            # hashes here, because we cannot really know which of the groups
+            # is supposed to be the correct / desired one.
+            for digest_info in mismatch_list:
+                # print an error for each mismatch in case we have multiple
+                # digest groups in which the path is listed.
+                alg = digest_info['algorithm']
+                expected = digest_info['hash']
+                hashsum = digest_info['encountered']
+                if hashsum in hashes_seen:
+                    # avoid printing duplicate errors
+                    continue
+                hashes_seen.add(hashsum)
+                check_type = check['type']
+                self.output.add_info('E', pkg, 
f'{check_type}-file-digest-mismatch', digest_info['path'],
+                                     f'expected {alg}:{expected}, 
has:{hashsum}')
+
+    def _is_ghost_allowed(self, pkg, path):
+        """Check if a ghost file is in the exception list.
+
+        In general we don't allow files we want to restrict to be included as
+        %ghost. Some corner cases e.g. in polkit-default-privs make it
+        necessary to ignore %ghost files. For these cases an exception list is
+        maintained.
+
+        Params:
+        - pkg: package information
+        - path: path of the %ghost file in the package
         """
         for ghost_exception in self.ghost_file_exceptions:
-            if not self._matches_pkg(ghost_exception, pkg):
+            if not self._matches_pkg(pkg, ghost_exception):
                 continue
-            if name in ghost_exception['paths']:
+            if path in ghost_exception['paths']:
                 return True
         return False
 
-    def _check_symlink_exceptions(self, pkg, name):
-        """ Check if a symlink'ed file is in the exception list
+    def _is_symlink_allowed(self, pkg, path):
+        """Check if a symlink file is in the exception list.
 
         For some checks we generally don't want to support symlinks at all,
         but certain corner cases make it necessary to ignore certain
-        package/path combinations. For these cases a separate exception list
-        is maintained.
+        package/path combinations. For these cases an exception list is
+        maintained.
         """
-
         for symlink_exception in self.symlink_exceptions:
-            if not self._matches_pkg(symlink_exception, pkg):
+            if not self._matches_pkg(pkg, symlink_exception):
                 continue
-            if name in symlink_exception['paths']:
+            if path in symlink_exception['paths']:
                 return True
         return False
 
     def check_binary(self, pkg):
-        """
-        Check that all files in secured locations are covered by a file digest 
group
-        in which all files have valid digest.
+        """Entry point for digest checks. Check that all files in restricted
+        locations are covered by a file digest group in which all files have
+        valid digests.
         """
 
-        # Find all files in this package that fall in a digest secured path
-        secured_paths = {}
-        for pkgfile in pkg.files.values():
-            group = self._get_digest_configuration_group(pkgfile)
+        # Find all files in this package that are placed in restricted 
locations.
 
-            if not group:
+        # this maps the check name to the list of paths affected by it.
+        restricted_paths: dict[str, list] = {}
+        for pkgfile in pkg.files.values():
+            check = self._lookup_check_for_file(pkgfile)
+            if not check:
                 continue
-            elif pkgfile.name in pkg.ghost_files:
-                if not self._check_ghost_exceptions(pkg, pkgfile.name):
-                    self.output.add_info('E', pkg, f'{group}-file-ghost', 
pkgfile.name)
-            elif stat.S_ISLNK(pkgfile.mode) and not 
self.follow_symlinks_in_group[group]:
-                if not self._check_symlink_exceptions(pkg, pkgfile.name):
-                    self.output.add_info('E', pkg, f'{group}-file-symlink', 
pkgfile.name)
+
+            check_type = check['type']
+
+            path = pkgfile.name
+            if path in pkg.ghost_files:
+                if not self._is_ghost_allowed(pkg, path):
+                    self.output.add_info('E', pkg, f'{check_type}-file-ghost', 
path)
+            elif stat.S_ISLNK(pkgfile.mode) and not check['FollowSymlinks']:
+                if not self._is_symlink_allowed(pkg, path):
+                    self.output.add_info('E', pkg, 
f'{check_type}-file-symlink', path)
             else:
-                file_list = secured_paths.setdefault(group, [])
-                file_list.append(pkgfile.name)
+                file_list = restricted_paths.setdefault(check_type, [])
+                file_list.append(path)
+
+        # Check all found restricted files for every check type
+        for check_type, files in restricted_paths.items():
+            check = self.known_check_types[check_type]
+            # filter out duplicates and sort the list of files to achieve
+            # predictable behaviour of this check
+            files = sorted(set(files))
 
-        # Check all found secured files for every group type
-        for group, files in secured_paths.items():
-            self._check_group_type(pkg, group, set(files))
+            files = self._check_for_unauthorized(pkg, check, files)
+            self._check_for_valid_digests(pkg, check, files)
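
The refactored FileDigestCheck keeps the trie based lookup of restricted
locations (_setup_digest_location_trie() / _is_path_restricted() above). As
a rough standalone sketch of that data structure -- without the conflict
detection of the real check and with locations made up for the example:

    from pathlib import Path

    def build_trie(locations):
        """Nested-dict trie of absolute locations; None marks a terminator."""
        trie = {}
        for location in locations:
            parts = Path(location).parts[1:]  # skip the leading '/'
            node = trie
            for part in parts[:-1]:
                node = node.setdefault(part, {})
            node[parts[-1]] = None
        return trie

    def is_restricted(trie, path):
        """Return True if `path` lies below one of the configured locations."""
        node = trie
        for part in Path(path).parts[1:]:
            if node is None:
                return True   # we already walked past a terminator
            if part not in node:
                return False  # path leaves the configured locations
            node = node[part]
        return False          # prefix of a location, not inside it

    trie = build_trie(['/usr/share/dbus-1/system.d', '/usr/lib/systemd/system'])
    assert is_restricted(trie, '/usr/share/dbus-1/system.d/org.example.conf')
    assert not is_restricted(trie, '/usr/share/doc/README')
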
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/rpmlint-2.9.0+git20260417.eb98d54d/test/configs/digests.config 
new/rpmlint-2.9.0+git20260423.8742fdc6/test/configs/digests.config
--- old/rpmlint-2.9.0+git20260417.eb98d54d/test/configs/digests.config  
2026-04-17 10:34:14.000000000 +0200
+++ new/rpmlint-2.9.0+git20260423.8742fdc6/test/configs/digests.config  
2026-04-23 10:59:14.000000000 +0200
@@ -46,4 +46,32 @@
 [[FileDigestGroup.digests]]
 path = "/restricted/1/afile"
 algorithm = "sha256"
-hash = "b05ffa4eea8fb5609d576a68c1066be3f99e4dc53d365a0ac2a78259b2dd91f9"
\ No newline at end of file
+hash = "b05ffa4eea8fb5609d576a68c1066be3f99e4dc53d365a0ac2a78259b2dd91f9"
+
+[[FileDigestGroup]]
+package = "testpkg3"
+type = "somerestriction"
+note = "simulates a package which contains valid digests from one group and 
another group, which should fail"
+bug = "bsc#5667"
+[[FileDigestGroup.digests]]
+path = "/restricted/1/firstfile"
+algorithm = "sha256"
+hash = "f06ec93de4272610d2b8e9a3d654fc8d74de5caaf164a3793d99d4c2e041cdc8"
+[[FileDigestGroup.digests]]
+path = "/restricted/1/secondfile"
+algorithm = "sha256"
+hash = "fcca25469ced85943ed5953bf3fd7d8c27661c6009feee05a4dbfed746d89fc0"
+
+[[FileDigestGroup]]
+package = "testpkg3"
+type = "somerestriction"
+note = "simulates a package which contains valid digests from one group and 
another group, which should fail"
+bug = "bsc#8799"
+[[FileDigestGroup.digests]]
+path = "/restricted/1/firstfile"
+algorithm = "sha256"
+hash = "fcca25469ced85943ed5953bf3fd7d8c27661c6009feee05a4dbfed746d89fc0"
+[[FileDigestGroup.digests]]
+path = "/restricted/1/secondfile"
+algorithm = "sha256"
+hash = "94b2b7b43dc37e068bfb110c1bc67c3aec31c77e6b9e062ec5d129239205fbae"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/rpmlint-2.9.0+git20260417.eb98d54d/test/test_file_digest.py 
new/rpmlint-2.9.0+git20260423.8742fdc6/test/test_file_digest.py
--- old/rpmlint-2.9.0+git20260417.eb98d54d/test/test_file_digest.py     
2026-04-17 10:34:14.000000000 +0200
+++ new/rpmlint-2.9.0+git20260423.8742fdc6/test/test_file_digest.py     
2026-04-23 10:59:14.000000000 +0200
@@ -20,6 +20,10 @@
     return output, test
 
 
+def filter_missing_file_warnings(output):
+    return [line for line in output if 'whitelisted-file-missing' not in line]
+
+
 @pytest.fixture(scope='function', autouse=True)
 def digestcheck():
     return get_digestcheck(Testing.TEST_CONFIG[0])
@@ -77,9 +81,8 @@
         pkg.add_file_with_content('/other/place/suspicious.txt', 'really good 
stuff')
         pkg.add_file_with_content('/related/and/also/sensitive', 'related 
sensitive stuff')
         test.check(pkg)
-        assert len(output.results) == 1
-        error = output.results[0]
-        assert error.startswith('testpkg: E: 
somerestriction-file-digest-mismatch /alsorestricted/2/suspicious expected 
sha256:')
+        assert len(output.results) >= 1
+        assert any(error.startswith('testpkg: E: 
somerestriction-file-digest-mismatch /alsorestricted/2/suspicious expected 
sha256:') for error in output.results)
 
 
 def test_related_mismatch():
@@ -91,9 +94,8 @@
         pkg.add_file_with_content('/other/place/suspicious.txt', 'really 
suspicious stuff')
         pkg.add_file_with_content('/related/and/also/sensitive', 'related fine 
stuff')
         test.check(pkg)
-        assert len(output.results) == 1
-        error = output.results[0]
-        assert error.startswith('testpkg: E: 
somerestriction-file-digest-mismatch /related/and/also/sensitive expected 
sha1:')
+        assert len(output.results) > 1
+        assert any(error.startswith('testpkg: E: 
somerestriction-file-digest-mismatch /related/and/also/sensitive expected 
sha1:') for error in output.results)
 
 
 def test_glob_support():
@@ -185,9 +187,10 @@
         pkg.add_file_with_content('/other/place/suspicious.txt', 'really 
suspicious stuff')
         pkg.add_file_with_content('/related/and/also/sensitive', 'related 
sensitive stuff')
         test.check(pkg)
-        assert len(output.results) == 2
-        assert 'otherpkg: E: somerestriction-file-unauthorized 
/alsorestricted/2/suspicious (sha256 file digest of resolved path 
/other/place/suspicious.txt default 
filter:a412bca55af87ea264063df10d08a40ff3b8e68106f4a48a3c4a1cacb6394c94 shell 
filter:26f40cba5d4f8d6ff9815a12890fb1dbc9e32771a29ccc4ecbb300475dfeb057 xml 
filter:<failed-to-calculate>)' in output.results
-        assert 'otherpkg: E: somerestriction-file-unauthorized 
/restricted/1/dangerous (sha256 file digest default 
filter:537b320f9c3b30276bd54b838b6e6b72e923e70dbdb126926f992d594a30256c shell 
filter:eb372739a52b2c1a400038e1bea1ff3e194ed2a2986d098d01fd631fb3f29c81 xml 
filter:<failed-to-calculate>)' in output.results
+        results = filter_missing_file_warnings(output.results)
+        assert len(results) == 2
+        assert 'otherpkg: E: somerestriction-file-unauthorized 
/alsorestricted/2/suspicious (sha256 file digest of resolved path 
/other/place/suspicious.txt default 
filter:a412bca55af87ea264063df10d08a40ff3b8e68106f4a48a3c4a1cacb6394c94 shell 
filter:26f40cba5d4f8d6ff9815a12890fb1dbc9e32771a29ccc4ecbb300475dfeb057 xml 
filter:<failed-to-calculate>)' in results
+        assert 'otherpkg: E: somerestriction-file-unauthorized 
/restricted/1/dangerous (sha256 file digest default 
filter:537b320f9c3b30276bd54b838b6e6b72e923e70dbdb126926f992d594a30256c shell 
filter:eb372739a52b2c1a400038e1bea1ff3e194ed2a2986d098d01fd631fb3f29c81 xml 
filter:<failed-to-calculate>)' in results
 
 
 def test_unaffected_pkg():
@@ -209,7 +212,7 @@
         pkg.add_file_with_content('/other/place/suspicious.txt', 'whatever')
         pkg.add_file_with_content('/related/and/also/sensitive', 'related 
sensitive stuff')
         test.check(pkg)
-        assert len(output.results) == 0
+        assert len(filter_missing_file_warnings(output.results)) == 0
 
 
 def test_missing_nodigests_entry():
@@ -223,8 +226,9 @@
         pkg.add_file_with_content('/related/and/also/sensitive', 'related 
sensitive stuff')
         pkg.add_file_with_content('/restricted/1/evil', 'evil stuff')
         test.check(pkg)
-        assert len(output.results) == 1
-        error = output.results[0]
+        results = filter_missing_file_warnings(output.results)
+        assert len(results) == 1
+        error = results[0]
         assert error == 'testpkg: E: somerestriction-file-unauthorized 
/restricted/1/evil (sha256 file digest default 
filter:f2175fc16d5a482baa71b2e77831b354afa91a5fda5ef0df59d8e87376598d4f shell 
filter:8336ce7a3fb22e404a767f7b7302cfc6637083fa1b4fd63fe11c2d977d65dfa2 xml 
filter:<failed-to-calculate>)'
 
 
@@ -254,11 +258,23 @@
         pkg.add_file_with_content('/other/place/suspicious.txt', 'whatever')
         pkg.add_file_with_content('/related/and/also/sensitive', 'wrong 
content')
         test.check(pkg)
-        assert len(output.results) == 1
-        error = output.results[0]
+        results = filter_missing_file_warnings(output.results)
+        assert len(results) == 1
+        error = results[0]
         assert error == 'testpkg: E: somerestriction-file-digest-mismatch 
/related/and/also/sensitive expected 
sha1:ab5ec199027247773d2d617895f49179d7b3186e, 
has:a6abec9ea1e13ca93d1c704758bd52f62ef16433'
 
 
+def test_partial_match():
+    # test what happens when a package partially matches one digest group and
+    # partially another one. This should still be rejected.
+    output, test = get_digestcheck('digests.config')
+    with FakePkg('testpkg3') as pkg:
+        pkg.add_file_with_content('/restricted/1/firstfile', 'first file here')
+        pkg.add_file_with_content('/restricted/1/secondfile', 'second file 
here')
+        test.check(pkg)
+        assert len(output.results) == 2
+
+
 def test_multiple_packages():
     # the first two should match, the last one shouldn't
     for pkgname in ('testpkg2', 'otherpkg', 'badpkg'):
@@ -282,7 +298,8 @@
     with FakePkg('shellpkg') as pkg:
         pkg.add_file_with_content('/shell/test.sh', shell_script)
         test.check(pkg)
-        assert len(output.results) == 0
+        results = filter_missing_file_warnings(output.results)
+        assert len(results) == 0
 
     # the same file with removed empty lines and whitespace should result in
     # the same digest
@@ -291,7 +308,8 @@
         trimmed_script = '\n'.join([line.rstrip() for line in 
shell_script.splitlines() if line])
         pkg.add_file_with_content('/shell/test.sh', trimmed_script)
         test.check(pkg)
-        assert len(output.results) == 0
+        results = filter_missing_file_warnings(output.results)
+        assert len(results) == 0
 
     # the file with changed actual code should result in a digest mismatch
     output, test = get_digestcheck('digests_filtered.config')
@@ -310,7 +328,8 @@
     with FakePkg('shellpkg') as pkg:
         pkg.add_file_with_content('/shell/test.py', python_script)
         test.check(pkg)
-        assert len(output.results) == 0
+        results = filter_missing_file_warnings(output.results)
+        assert len(results) == 0
 
     # These shebangs should yield the same hash
     equivalent_shebangs = [
@@ -331,7 +350,8 @@
                 '\n'.join(python_script.splitlines()[1:])
             pkg.add_file_with_content('/shell/test.py', changed_script)
             test.check(pkg)
-            assert len(output.results) == 0
+            results = filter_missing_file_warnings(output.results)
+            assert len(results) == 0
 
     for shebang in different_shebangs:
         output, test = get_digestcheck('digests_filtered.config')
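
The filtered-digest tests above feed shell and Python scripts through
digests_filtered.config and expect cosmetic edits (blank lines, trailing
whitespace, comments) to keep the recorded digest stable. A minimal sketch
of such a content-normalizing digester, with a made-up helper name and not
the actual DIGESTERS implementation (which, as the shebang tests show, also
special-cases '#!' lines):

    import hashlib

    def filtered_digest(content: str, algorithm: str = 'sha256') -> str:
        """Hash shell-like content after dropping blank lines, trailing
        whitespace and '#' comment lines."""
        kept = []
        for line in content.splitlines():
            stripped = line.strip()
            if not stripped or stripped.startswith('#'):
                # blank lines and comments do not influence the digest
                continue
            kept.append(line.rstrip())
        digest = hashlib.new(algorithm)
        digest.update('\n'.join(kept).encode('utf-8'))
        return digest.hexdigest()

    # a comment-only change keeps the digest stable, a code change does not
    assert filtered_digest('echo hi\n# note\n') == filtered_digest('echo hi\n')
    assert filtered_digest('echo hi\n') != filtered_digest('echo bye\n')
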
