Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package osc for openSUSE:Factory checked in 
at 2025-12-18 18:36:04
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/osc (Old)
 and      /work/SRC/openSUSE:Factory/.osc.new.1928 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "osc"

Thu Dec 18 18:36:04 2025 rev:221 rq:1323505 version:1.23.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/osc/osc.changes  2025-12-16 16:02:02.533475034 
+0100
+++ /work/SRC/openSUSE:Factory/.osc.new.1928/osc.changes        2025-12-18 
18:37:10.804050401 +0100
@@ -1,0 +2,30 @@
+Thu Dec 18 09:32:40 UTC 2025 - Daniel Mach <[email protected]>
+
+- 1.23.0
+  - Command-line:
+    - Add '--target-owner' option to 'git-obs pr create' to specify the target 
owner explicitly
+    - Add '--target-branch' option to 'git-obs staging search' command
+    - Added 'git-obs staging search' command to find project PRs with 
referenced package PRs that have all been approved
+    - Change 'git-obs pr dump' to produce directories that match the specified 
pull request IDs
+    - Change 'git-obs pr dump' to write STATUS file
+    - Properly error out on invalid 'PR:' references in 'git-obs pr dump'
+    - Fix 'git-obs pr create' when the source repo is not a fork
+    - Fix 'git-obs api' command when server returns 'null'
+    - Fix 'osc build --alternative-project=...' when there's no .osc in the 
current directory
+    - Fix argument and store handling in 'osc results' command
+  - Library:
+    - Add Manifest.get_package_paths() method that lists all paths to 
existing packages in a project
+    - Fix Manifest class to handle loading empty YAML files or strings
+    - Fix working with meta during git rebase by determining the current 
branch from rebase head
+    - Fix handling local branch when fetching remote
+    - Move get_label_ids() from PullRequest to Repo class
+    - Change GitStore not to require apiurl anymore
+    - Fix storing last_buildroot for git packages
+    - Store the last buildroot only if there's a store detected
+    - Fix BuildRoot so it acts as a tuple and the individual values are 
accessible via indexes
+    - Make PullRequest.parse_id() more permissive by accepting trailing 
whitespace
+    - Fix 'missingok' argument in server_diff()
+    - Fix gitea_api.PullRequest ordering methods
+    - Add return to gitea_api.Branch.list()
+
+-------------------------------------------------------------------

Old:
----
  osc-1.22.0.tar.gz

New:
----
  osc-1.23.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ osc.spec ++++++
--- /var/tmp/diff_new_pack.qWwYQY/_old  2025-12-18 18:37:11.980099809 +0100
+++ /var/tmp/diff_new_pack.qWwYQY/_new  2025-12-18 18:37:11.984099976 +0100
@@ -80,7 +80,7 @@
 %endif
 
 Name:           osc
-Version:        1.22.0
+Version:        1.23.0
 Release:        0
 Summary:        Command-line client for the Open Build Service
 License:        GPL-2.0-or-later

++++++ PKGBUILD ++++++
--- /var/tmp/diff_new_pack.qWwYQY/_old  2025-12-18 18:37:12.020101489 +0100
+++ /var/tmp/diff_new_pack.qWwYQY/_new  2025-12-18 18:37:12.028101825 +0100
@@ -1,6 +1,6 @@
 pkgname=osc
-pkgver=1.22.0
-pkgrel=1
+pkgver=1.23.0
+pkgrel=0
 pkgdesc="Command-line client for the Open Build Service"
 arch=('x86_64')
url="https://www.github.com/openSUSE/osc"

++++++ debian.changelog ++++++
--- /var/tmp/diff_new_pack.qWwYQY/_old  2025-12-18 18:37:12.076103841 +0100
+++ /var/tmp/diff_new_pack.qWwYQY/_new  2025-12-18 18:37:12.080104010 +0100
@@ -1,4 +1,4 @@
-osc (1.22.0-0) unstable; urgency=low
+osc (1.23.0-0) unstable; urgency=low
 
   * Placeholder
 

++++++ osc-1.22.0.tar.gz -> osc-1.23.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/NEWS new/osc-1.23.0/NEWS
--- old/osc-1.22.0/NEWS 2025-11-10 16:49:39.000000000 +0100
+++ new/osc-1.23.0/NEWS 2025-12-18 10:30:46.000000000 +0100
@@ -1,3 +1,30 @@
+- 1.23.0
+  - Command-line:
+    - Add '--target-owner' option to 'git-obs pr create' to specify the target 
owner explicitly
+    - Add '--target-branch' option to 'git-obs staging search' command
+    - Added 'git-obs staging search' command to find project PRs with 
referenced package PRs that have all been approved
+    - Change 'git-obs pr dump' to produce directories that match the specified 
pull request IDs
+    - Change 'git-obs pr dump' to write STATUS file
+    - Properly error out on invalid 'PR:' references in 'git-obs pr dump'
+    - Fix 'git-obs pr create' when the source repo is not a fork
+    - Fix 'git-obs api' command when server returns 'null'
+    - Fix 'osc build --alternative-project=...' when there's no .osc in the 
current directory
+    - Fix argument and store handling in 'osc results' command
+  - Library:
+    - Add Manifest.get_package_paths() method that lists all paths to 
existing packages in a project
+    - Fix Manifest class to handle loading empty YAML files or strings
+    - Fix working with meta during git rebase by determining the current 
branch from rebase head
+    - Fix handling local branch when fetching remote
+    - Move get_label_ids() from PullRequest to Repo class
+    - Change GitStore not to require apiurl anymore
+    - Fix storing last_buildroot for git packages
+    - Store the last buildroot only if there's a store detected
+    - Fix BuildRoot so it acts as a tuple and the individual values are 
accessible via indexes
+    - Make PullRequest.parse_id() more permissive by accepting trailing 
whitespace
+    - Fix 'missingok' argument in server_diff()
+    - Fix gitea_api.PullRequest ordering methods
+    - Add return to gitea_api.Branch.list()
+
 - 1.22.0
   - Command-line:
     - Add 'git-obs staging' commands
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/__init__.py 
new/osc-1.23.0/osc/__init__.py
--- old/osc-1.22.0/osc/__init__.py      2025-11-10 16:49:39.000000000 +0100
+++ new/osc-1.23.0/osc/__init__.py      2025-12-18 10:30:46.000000000 +0100
@@ -13,7 +13,7 @@
 
 
 from .util import git_version
-__version__ = git_version.get_version('1.22.0')
+__version__ = git_version.get_version('1.23.0')
 
 
 # vim: sw=4 et
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/build.py new/osc-1.23.0/osc/build.py
--- old/osc-1.22.0/osc/build.py 2025-11-10 16:49:39.000000000 +0100
+++ new/osc-1.23.0/osc/build.py 2025-12-18 10:30:46.000000000 +0100
@@ -1622,7 +1622,7 @@
         cmd = [change_personality[bi.buildarch]] + cmd
 
     # record our settings for later builds
-    if not opts.local_package and store.is_package:
+    if store and store.is_package:
         store.last_buildroot = repo, arch, vm_type
 
     try:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/commandline.py 
new/osc-1.23.0/osc/commandline.py
--- old/osc-1.22.0/osc/commandline.py   2025-11-10 16:49:39.000000000 +0100
+++ new/osc-1.23.0/osc/commandline.py   2025-12-18 10:30:46.000000000 +0100
@@ -6439,6 +6439,7 @@
             osc results PROJECT [PACKAGE[:FLAVOR]]
         """
 
+        from . import store as osc_store
         from .core import MultibuildFlavorResolver
         from .core import csv
         from .core import decode_it
@@ -6449,24 +6450,25 @@
         from .core import is_project_dir
         from .core import result_xml_to_dicts
         from .core import slash_split
-        from .core import store_read_package
-        from .core import store_read_project
 
         args = slash_split(args)
 
         apiurl = self.get_api_url()
         if len(args) > 2:
             raise oscerr.WrongArgs('Too many arguments (required none, one, or 
two)')
-        project = package = None
-        wd = Path.cwd()
-        if is_project_dir(wd):
-            project = store_read_project(wd)
-        elif is_package_dir(wd):
-            project = store_read_project(wd)
-            package = store_read_package(wd)
-        if len(args) > 0:
+
+        project = None
+        package = None
+
+        if len(args) == 0:
+            store = osc_store.get_store(Path.cwd())
+            project = store.project
+            if store.is_package:
+                package = store.package
+        elif len(args) == 1:
+            project = self._process_project_name(args[0])
+        elif len(args) == 2:
             project = self._process_project_name(args[0])
-        if len(args) > 1:
             package = args[1]
 
         if project is None:
@@ -7552,7 +7554,9 @@
         self._debug("arg_repository: ", arg_repository)
         self._debug("arg_descr: ", arg_descr)
 
-        store_obj = osc_store.get_store(".")
+        # we shouldn't make any assumptions if the working copy is valid or 
not, we simply *try* to read the store and get the metadata from it
+        # the code that calls parse_repoarchdescr() is responsible for 
checking the store if necessary
+        store_obj = osc_store.get_store(".", check=False)
 
         repositories = []
         # store list of repos for potential offline use
@@ -7905,7 +7909,12 @@
             except oscerr.NoWorkingCopy:
                 if opts.apiurl and opts.alternative_project:
                     # HACK: ignore invalid working copy and run the build 
anyway if --alternative-project is specified
-                    store = git_scm.GitStore(Path.cwd(), check=False)
+                    try:
+                        store = git_scm.GitStore(Path.cwd(), check=False)
+                    except oscerr.NoWorkingCopy:
+                        # HACK: if running from an empty directory that has no 
.git in the parent tree, initialize an empty Store() object
+                        #       this allows running the build with 
--alternative-project even if .osc doesn't exist at all
+                        store = osc_store.Store(Path.cwd(), check=False)
                 else:
                     raise
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/commands_git/pr_create.py 
new/osc-1.23.0/osc/commands_git/pr_create.py
--- old/osc-1.22.0/osc/commands_git/pr_create.py        2025-11-10 
16:49:39.000000000 +0100
+++ new/osc-1.23.0/osc/commands_git/pr_create.py        2025-12-18 
10:30:46.000000000 +0100
@@ -63,6 +63,11 @@
             help="Source branch (default: the current branch in local git 
repo)",
         )
         self.add_argument(
+            "--target-owner",
+            metavar="OWNER",
+            help="Target owner (default: parent of the source repo)",
+        )
+        self.add_argument(
             "--target-branch",
             metavar="BRANCH",
             help="Target branch (default: derived from the current branch in 
local git repo)",
@@ -99,9 +104,23 @@
         source_repo_obj = gitea_api.Repo.get(self.gitea_conn, source_owner, 
source_repo)
         source_branch_obj = gitea_api.Branch.get(self.gitea_conn, 
source_owner, source_repo, source_branch)
 
-        # remote git repo - target
-        target_owner = source_repo_obj.parent_obj.owner
-        target_repo = source_repo_obj.parent_obj.repo
+        if args.target_owner:
+            target_owner = args.target_owner
+
+            target_repo = None
+            parents = gitea_api.Repo.get_parent_repos(self.gitea_conn, 
source_owner, source_repo)
+            for parent in parents:
+                if parent.owner.lower() == args.target_owner.lower():
+                    target_repo = parent.repo
+                    break
+            if not target_repo:
+                raise gitea_api.GitObsRuntimeError(f"Unable to create a pull 
request because owner '{target_owner}' has no matching parent repo for 
'{source_owner}/{source_repo}'")
+        elif source_repo_obj.parent_obj is None:
+            raise gitea_api.GitObsRuntimeError(f"Unable to create a pull 
request because repo '{source_owner}/{source_repo}' is not a fork")
+        else:
+            # remote git repo - target
+            target_owner = source_repo_obj.parent_obj.owner
+            target_repo = source_repo_obj.parent_obj.repo
 
         if args.target_branch:
             target_branch = args.target_branch
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/commands_git/pr_dump.py 
new/osc-1.23.0/osc/commands_git/pr_dump.py
--- old/osc-1.22.0/osc/commands_git/pr_dump.py  2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/osc/commands_git/pr_dump.py  2025-12-18 10:30:46.000000000 
+0100
@@ -1,4 +1,5 @@
 import os
+import sys
 from typing import Optional
 
 import osc.commandline_git
@@ -12,6 +13,9 @@
     - 0:   default return code
     - 1-9: reserved for error states
     - 11:  pull request(s) skipped due to no longer being open
+
+    In addition to the return codes, a STATUS file is written in each target 
directory,
+    the values are: RUNNING, FAILED, SUCCESS
     """
     # NOTE: the return codes are according to `git-obs pr review interactive`
 
@@ -24,11 +28,12 @@
         self.add_argument(
             "--subdir-fmt",
             metavar="FMT",
-            default="{pr.base_owner}/{pr.base_repo}/{pr.number}",
+            default="{owner}/{repo}/{number}",
             help=(
                 "Formatting string for a subdir associated with each pull 
request\n"
-                "(default: '{pr.base_owner}/{pr.base_repo}/{pr.number}')\n"
+                "(default: '{owner}/{repo}/{number}')\n"
                 "Available values:\n"
+                "  - 'owner', 'repo' and 'number' parsed from the specified 
pull request ID\n"
                 "  - 'pr' object which is an instance of 
'osc.gitea_api.PullRequest'\n"
                 "  - 'login_name', 'login_user' from the currently used Gitea 
login entry"
             ),
@@ -41,14 +46,8 @@
         ).completer = complete_checkout_pr
 
     def run(self, args):
-        import json
-        import shutil
-        import sys
         from osc import gitea_api
-        from osc import obs_api
         from osc.output import tty
-        from osc.util.xml import xml_indent
-        from osc.util.xml import ET
 
         self.print_gitea_settings()
 
@@ -64,6 +63,9 @@
                 continue
 
             path = args.subdir_fmt.format(
+                owner=owner,
+                repo=repo,
+                number=number,
                 pr=pr_obj,
                 login_name=self.gitea_login.name,
                 login_user=self.gitea_login.user,
@@ -71,242 +73,266 @@
             # sanitize path for os.path.join()
             path = path.strip("/")
 
-            metadata_dir = os.path.join(path, "metadata")
+            def write_status(value: str):
+                os.makedirs(path, exist_ok=True)
+                with open(os.path.join(path, "STATUS"), "w", encoding="utf-8") 
as f:
+                    f.write(value)
+
+            write_status("RUNNING")
             try:
-                with open(os.path.join(metadata_dir, "pr.json")) as f:
-                    pr_data = json.load(f)
-                    if pr_data["updated_at"] == pr_obj.updated_at:
-                        # no update, skip the dump
-                        continue
-            except FileNotFoundError:
-                # no local metadata cached, we can't skip the dump
-                pass
-
-            review_obj_list = pr_obj.get_reviews(self.gitea_conn)
-
-            # see 
https://github.com/go-gitea/gitea/blob/main/modules/structs/pull_review.go - 
look for "type ReviewStateType string"
-            state_map = {
-                "APPROVED": "accepted",
-                "REQUEST_CHANGES": "declined",
-                "REQUEST_REVIEW": "new",  # review hasn't started
-                "PENDING": "review",  # review is in progress
-                "COMMENT": "deleted",  # just to make XML validation happy, 
we'll replace it with "comment" later
-            }
-
-            xml_review_list = []
-            for review_obj in review_obj_list:
-                xml_review_list.append(
-                    {
-                        "state": state_map[review_obj.state],
-                        "who": review_obj.who,
-                        "created": review_obj.created_at,
-                        "when": review_obj.updated_at,
-                        "comment": review_obj.body,
-                    }
-                )
+                self.dump_pr(owner, repo, number, pr_obj=pr_obj, path=path)
+            except:
+                write_status("FAILED")
+                raise
+            write_status("SUCCESS")
 
-            # store timeline as <history/> entries
-            timeline = gitea_api.IssueTimelineEntry.list(self.gitea_conn, 
owner, repo, number)
-            xml_history_list = []
-            for entry in timeline:
-                if entry.is_empty():
-                    xml_history_list.append(
-                        {
-                            "who": "",
-                            "when": "",
-                            "description": "ERROR: Gitea returned ``None`` 
instead of a timeline entry",
-                            "comment": "",
-                        }
-                    )
-                    continue
+        if skipped:
+            print(f"{tty.colorize('WARNING', 'yellow,bold')}: Skipped pull 
requests that were no longer open: {' '.join(skipped)}", file=sys.stderr)
+            return 11
 
-                text, body = entry.format()
-                if text is None:
-                    continue
+        return 0
+
+    def dump_pr(self, owner: str, repo: str, number: str, *, pr_obj, path):
+        import json
+        import shutil
+        import sys
+        from osc import gitea_api
+        from osc import obs_api
+        from osc.util.xml import xml_indent
+        from osc.util.xml import ET
+
+        metadata_dir = os.path.join(path, "metadata")
+        try:
+            with open(os.path.join(metadata_dir, "pr.json")) as f:
+                pr_data = json.load(f)
+                if pr_data["updated_at"] == pr_obj.updated_at:
+                    # no update, skip the dump
+                    return
+        except FileNotFoundError:
+            # no local metadata cached, we can't skip the dump
+            pass
+
+        review_obj_list = pr_obj.get_reviews(self.gitea_conn)
+
+        # see 
https://github.com/go-gitea/gitea/blob/main/modules/structs/pull_review.go - 
look for "type ReviewStateType string"
+        state_map = {
+            "APPROVED": "accepted",
+            "REQUEST_CHANGES": "declined",
+            "REQUEST_REVIEW": "new",  # review hasn't started
+            "PENDING": "review",  # review is in progress
+            "COMMENT": "deleted",  # just to make XML validation happy, we'll 
replace it with "comment" later
+        }
+
+        xml_review_list = []
+        for review_obj in review_obj_list:
+            xml_review_list.append(
+                {
+                    "state": state_map[review_obj.state],
+                    "who": review_obj.who,
+                    "created": review_obj.created_at,
+                    "when": review_obj.updated_at,
+                    "comment": review_obj.body,
+                }
+            )
+
+        # store timeline as <history/> entries
+        timeline = gitea_api.IssueTimelineEntry.list(self.gitea_conn, owner, 
repo, number)
+        xml_history_list = []
+        for entry in timeline:
+            if entry.is_empty():
                 xml_history_list.append(
                     {
-                        "who": entry.user,
-                        "when": gitea_api.dt_sanitize(entry.created_at),
-                        "description": text,
-                        "comment": body or "",
+                        "who": "",
+                        "when": "",
+                        "description": "ERROR: Gitea returned ``None`` instead 
of a timeline entry",
+                        "comment": "",
                     }
                 )
+                continue
 
-            req = obs_api.Request(
-                id=pr_id,
-                title=pr_obj.title,
-                description=pr_obj.body,
-                creator=pr_obj.user,
-                # each pull request maps to only one action
-                action_list=[
-                    {
-                        "type": "submit",
-                        "source": {
-                            "project": pr_obj.head_owner,
-                            "package": pr_obj.head_repo,
-                            "rev": pr_obj.head_commit,
-                        },
-                        "target": {
-                            "project": pr_obj.base_owner,
-                            "package": pr_obj.base_repo,
-                        },
-                    },
-                ],
-                review_list=xml_review_list,
-                history_list=xml_history_list,
+            text, body = entry.format()
+            if text is None:
+                continue
+            xml_history_list.append(
+                {
+                    "who": entry.user,
+                    "when": gitea_api.dt_sanitize(entry.created_at),
+                    "description": text,
+                    "comment": body or "",
+                }
             )
 
-            # HACK: changes to request XML that are not compatible with OBS
-            req_xml = req.to_xml()
+        req = obs_api.Request(
+            id="{owner}/{repo}#{number}",
+            title=pr_obj.title,
+            description=pr_obj.body,
+            creator=pr_obj.user,
+            # each pull request maps to only one action
+            action_list=[
+                {
+                    "type": "submit",
+                    "source": {
+                        "project": pr_obj.head_owner,
+                        "package": pr_obj.head_repo,
+                        "rev": pr_obj.head_commit,
+                    },
+                    "target": {
+                        "project": pr_obj.base_owner,
+                        "package": pr_obj.base_repo,
+                    },
+                },
+            ],
+            review_list=xml_review_list,
+            history_list=xml_history_list,
+        )
 
-            req_xml_action = req_xml.find("action")
-            assert req_xml_action is not None
-            req_xml_action.attrib["type"] = "gitea-pull-request"
-            req_xml_action.insert(
-                0,
-                ET.Comment(
-                    "The type='gitea-pull-request' attribute value is a custom 
extension to the OBS XML schema."
-                ),
-            )
+        # HACK: changes to request XML that are not compatible with OBS
+        req_xml = req.to_xml()
 
-            req_xml_action_source = req_xml_action.find("source")
-            assert req_xml_action_source is not None
-            req_xml_action_source.append(
-                ET.Comment("The 'branch' attribute is a custom extension to 
the OBS XML schema.")
-            )
-            req_xml_action_source.attrib["branch"] = pr_obj.head_branch
+        req_xml_action = req_xml.find("action")
+        assert req_xml_action is not None
+        req_xml_action.attrib["type"] = "gitea-pull-request"
+        req_xml_action.insert(
+            0,
+            ET.Comment(
+                "The type='gitea-pull-request' attribute value is a custom 
extension to the OBS XML schema."
+            ),
+        )
 
-            req_xml_action_target = req_xml_action.find("target")
-            assert req_xml_action_target is not None
-            req_xml_action_target.append(
-                ET.Comment("The 'rev' and 'branch' attributes are custom 
extensions to the OBS XML schema.")
-            )
-            req_xml_action_target.attrib["rev"] = pr_obj.base_commit
-            req_xml_action_target.attrib["branch"] = pr_obj.base_branch
+        req_xml_action_source = req_xml_action.find("source")
+        assert req_xml_action_source is not None
+        req_xml_action_source.append(
+            ET.Comment("The 'branch' attribute is a custom extension to the 
OBS XML schema.")
+        )
+        req_xml_action_source.attrib["branch"] = pr_obj.head_branch
 
-            req_xml_review_list = req_xml.findall("review")
-            for req_xml_review in req_xml_review_list:
-                if req_xml_review.attrib["state"] == "deleted":
-                    req_xml_review.attrib["state"] = "comment"
-                    req_xml_review.insert(
-                        0,
-                        ET.Comment("The state='comment' attribute value is a 
custom extension to the OBS XML schema."),
-                    )
+        req_xml_action_target = req_xml_action.find("target")
+        assert req_xml_action_target is not None
+        req_xml_action_target.append(
+            ET.Comment("The 'rev' and 'branch' attributes are custom 
extensions to the OBS XML schema.")
+        )
+        req_xml_action_target.attrib["rev"] = pr_obj.base_commit
+        req_xml_action_target.attrib["branch"] = pr_obj.base_branch
 
-            try:
-                # remove old metadata first to ensure that we never keep any 
of the old files on an update
-                shutil.rmtree(metadata_dir)
-            except FileNotFoundError:
-                pass
-            os.makedirs(metadata_dir, exist_ok=True)
-
-            with open(os.path.join(metadata_dir, "obs-request.xml"), "wb") as 
f:
-                xml_indent(req_xml)
-                ET.ElementTree(req_xml).write(f, encoding="utf-8")
-
-            with open(os.path.join(metadata_dir, "pr.json"), "w", 
encoding="utf-8") as f:
-                json.dump(pr_obj._data, f, indent=4, sort_keys=True)
-
-            with open(os.path.join(metadata_dir, "base.json"), "w", 
encoding="utf-8") as f:
-                json.dump(pr_obj._data["base"], f, indent=4, sort_keys=True)
-
-            with open(os.path.join(metadata_dir, "head.json"), "w", 
encoding="utf-8") as f:
-                json.dump(pr_obj._data["head"], f, indent=4, sort_keys=True)
-
-            with open(os.path.join(metadata_dir, "reviews.json"), "w", 
encoding="utf-8") as f:
-                json.dump([i._data for i in review_obj_list], f, indent=4, 
sort_keys=True)
-
-            with open(os.path.join(metadata_dir, "timeline.json"), "w", 
encoding="utf-8") as f:
-                # the list doesn't come from Gitea API but is post-processed 
for our overall sanity
-                json.dump(xml_history_list, f, indent=4, sort_keys=True)
-
-            base_dir = os.path.join(path, "base")
-            # we must use the `merge_base` instead of `head_commit`, because 
the latter changes after merging the PR and the `base` directory would contain 
incorrect data
-            gitea_api.Repo.clone_or_update(self.gitea_conn, owner, repo, 
branch=pr_obj.base_branch, commit=pr_obj.merge_base, directory=base_dir)
-
-            head_dir = os.path.join(path, "head")
-            gitea_api.Repo.clone_or_update(
-                self.gitea_conn, owner, repo, pr_number=pr_obj.number, 
commit=pr_obj.head_commit, directory=head_dir, reference=base_dir
-            )
+        req_xml_review_list = req_xml.findall("review")
+        for req_xml_review in req_xml_review_list:
+            if req_xml_review.attrib["state"] == "deleted":
+                req_xml_review.attrib["state"] = "comment"
+                req_xml_review.insert(
+                    0,
+                    ET.Comment("The state='comment' attribute value is a 
custom extension to the OBS XML schema."),
+                )
 
-            with open(os.path.join(metadata_dir, "submodules-base.json"), "w", 
encoding="utf-8") as f:
-                base_submodules = gitea_api.Git(base_dir).get_submodules()
-                json.dump(base_submodules, f, indent=4, sort_keys=True)
-
-            with open(os.path.join(metadata_dir, "submodules-head.json"), "w", 
encoding="utf-8") as f:
-                head_submodules = gitea_api.Git(head_dir).get_submodules()
-                json.dump(head_submodules, f, indent=4, sort_keys=True)
-
-            submodule_diff = {
-                "added": {},
-                "removed": {},
-                "unchanged": {},
-                "changed": {},
-            }
-
-            # TODO: determine if the submodules point to packages or something 
else; submodules may point to arbitrary git repos such as other packages, 
projects or anything else
-            all_submodules = sorted(set(base_submodules) | 
set(head_submodules))
-            for i in all_submodules:
-
-                if i in base_submodules:
-                    url = base_submodules[i].get("url", "")
-                    if not url.startswith("../../"):
-                        print(f"Warning: incorrect path '{url}' in base 
submodule '{i}'", file=sys.stderr)
-
-                if i in head_submodules:
-                    url = head_submodules[i].get("url", "")
-                    if not url.startswith("../../"):
-                        print(f"Warning: incorrect path '{url}' in head 
submodule '{i}'", file=sys.stderr)
-
-                if i in base_submodules and i not in head_submodules:
-                    submodule_diff["removed"][i] = base_submodules[i]
-                elif i not in base_submodules and i in head_submodules:
-                    submodule_diff["added"][i] = head_submodules[i]
-                else:
-                    for key in ["branch", "path", "url"]:
-                        # we don't expect migrating packages to another paths, 
branches etc.
-                        if key not in base_submodules[i] and key in 
head_submodules[i]:
-                            # we allow adding new keys in the pull request to 
fix missing data
-                            pass
-                        else:
-                            base_value = base_submodules[i].get(key, None)
-                            head_value = head_submodules[i].get(key, None)
-                            assert base_value == head_value, f"Submodule 
metadata has changed: key='{key}', base_value='{base_value}', 
head_value='{head_value}'"
-
-                    base_commit = base_submodules[i].get("commit","")
-                    head_commit = head_submodules[i].get("commit","")
-
-                    if base_commit == head_commit:
-                        submodule_diff["unchanged"][i] = base_submodules[i]
-                        continue
-
-                    # we expect the data to be identical in base and head with 
the exception of the commit
-                    # we also drop `commit` and add `base_commit` and 
`head_commit`
-                    data = base_submodules[i].copy()
-                    if base_commit:
-                        del data["commit"]
-                    data["base_commit"] = base_commit
-                    data["head_commit"] = head_commit
-                    submodule_diff["changed"][i] = data
-
-            with open(os.path.join(metadata_dir, "submodules-diff.json"), "w", 
encoding="utf-8") as f:
-                json.dump(submodule_diff, f, indent=4, sort_keys=True)
-
-            referenced_pull_requests = {}
-            for ref_owner, ref_repo, ref_number in 
pr_obj.parse_pr_references():
-                ref_id = f"{ref_owner}/{ref_repo}#{ref_number}"
-                referenced_pr_obj = gitea_api.PullRequest.get(self.gitea_conn, 
ref_owner, ref_repo, ref_number)
-                referenced_pull_requests[ref_id] = referenced_pr_obj.dict()
-
-            with open(
-                os.path.join(metadata_dir, "referenced-pull-requests.json"),
-                "w",
-                encoding="utf-8",
-            ) as f:
-                json.dump(referenced_pull_requests, f, indent=4, 
sort_keys=True)
+        try:
+            # remove old metadata first to ensure that we never keep any of 
the old files on an update
+            shutil.rmtree(metadata_dir)
+        except FileNotFoundError:
+            pass
+        os.makedirs(metadata_dir, exist_ok=True)
+
+        with open(os.path.join(metadata_dir, "obs-request.xml"), "wb") as f:
+            xml_indent(req_xml)
+            ET.ElementTree(req_xml).write(f, encoding="utf-8")
+
+        with open(os.path.join(metadata_dir, "base.json"), "w", 
encoding="utf-8") as f:
+            json.dump(pr_obj._data["base"], f, indent=4, sort_keys=True)
+
+        with open(os.path.join(metadata_dir, "head.json"), "w", 
encoding="utf-8") as f:
+            json.dump(pr_obj._data["head"], f, indent=4, sort_keys=True)
+
+        with open(os.path.join(metadata_dir, "reviews.json"), "w", 
encoding="utf-8") as f:
+            json.dump([i._data for i in review_obj_list], f, indent=4, 
sort_keys=True)
+
+        with open(os.path.join(metadata_dir, "timeline.json"), "w", 
encoding="utf-8") as f:
+            # the list doesn't come from Gitea API but is post-processed for 
our overall sanity
+            json.dump(xml_history_list, f, indent=4, sort_keys=True)
+
+        base_dir = os.path.join(path, "base")
+        # we must use the `merge_base` instead of `head_commit`, because the 
latter changes after merging the PR and the `base` directory would contain 
incorrect data
+        gitea_api.Repo.clone_or_update(self.gitea_conn, owner, repo, 
branch=pr_obj.base_branch, commit=pr_obj.merge_base, directory=base_dir)
+
+        head_dir = os.path.join(path, "head")
+        gitea_api.Repo.clone_or_update(
+            self.gitea_conn, owner, repo, pr_number=pr_obj.number, 
commit=pr_obj.head_commit, directory=head_dir, reference=base_dir
+        )
 
-        if skipped:
-            print(f"{tty.colorize('WARNING', 'yellow,bold')}: Skipped pull 
requests that were no longer open: {' '.join(skipped)}", file=sys.stderr)
-            return 11
+        with open(os.path.join(metadata_dir, "submodules-base.json"), "w", 
encoding="utf-8") as f:
+            base_submodules = gitea_api.Git(base_dir).get_submodules()
+            json.dump(base_submodules, f, indent=4, sort_keys=True)
+
+        with open(os.path.join(metadata_dir, "submodules-head.json"), "w", 
encoding="utf-8") as f:
+            head_submodules = gitea_api.Git(head_dir).get_submodules()
+            json.dump(head_submodules, f, indent=4, sort_keys=True)
+
+        submodule_diff = {
+            "added": {},
+            "removed": {},
+            "unchanged": {},
+            "changed": {},
+        }
+
+        # TODO: determine if the submodules point to packages or something 
else; submodules may point to arbitrary git repos such as other packages, 
projects or anything else
+        all_submodules = sorted(set(base_submodules) | set(head_submodules))
+        for i in all_submodules:
+
+            if i in base_submodules:
+                url = base_submodules[i].get("url", "")
+                if not url.startswith("../../"):
+                    print(f"Warning: incorrect path '{url}' in base submodule 
'{i}'", file=sys.stderr)
+
+            if i in head_submodules:
+                url = head_submodules[i].get("url", "")
+                if not url.startswith("../../"):
+                    print(f"Warning: incorrect path '{url}' in head submodule 
'{i}'", file=sys.stderr)
+
+            if i in base_submodules and i not in head_submodules:
+                submodule_diff["removed"][i] = base_submodules[i]
+            elif i not in base_submodules and i in head_submodules:
+                submodule_diff["added"][i] = head_submodules[i]
+            else:
+                for key in ["branch", "path", "url"]:
+                    # we don't expect migrating packages to another paths, 
branches etc.
+                    if key not in base_submodules[i] and key in 
head_submodules[i]:
+                        # we allow adding new keys in the pull request to fix 
missing data
+                        pass
+                    else:
+                        base_value = base_submodules[i].get(key, None)
+                        head_value = head_submodules[i].get(key, None)
+                        assert base_value == head_value, f"Submodule metadata 
has changed: key='{key}', base_value='{base_value}', head_value='{head_value}'"
 
-        return 0
+                base_commit = base_submodules[i].get("commit","")
+                head_commit = head_submodules[i].get("commit","")
+
+                if base_commit == head_commit:
+                    submodule_diff["unchanged"][i] = base_submodules[i]
+                    continue
+
+                # we expect the data to be identical in base and head with the 
exception of the commit
+                # we also drop `commit` and add `base_commit` and `head_commit`
+                data = base_submodules[i].copy()
+                if base_commit:
+                    del data["commit"]
+                data["base_commit"] = base_commit
+                data["head_commit"] = head_commit
+                submodule_diff["changed"][i] = data
+
+        with open(os.path.join(metadata_dir, "submodules-diff.json"), "w", 
encoding="utf-8") as f:
+            json.dump(submodule_diff, f, indent=4, sort_keys=True)
+
+        referenced_pull_requests = {}
+        for ref_owner, ref_repo, ref_number in pr_obj.parse_pr_references():
+            ref_id = f"{ref_owner}/{ref_repo}#{ref_number}"
+            referenced_pr_obj = gitea_api.PullRequest.get(self.gitea_conn, 
ref_owner, ref_repo, ref_number)
+            referenced_pull_requests[ref_id] = referenced_pr_obj.dict()
+
+        with open(
+            os.path.join(metadata_dir, "referenced-pull-requests.json"),
+            "w",
+            encoding="utf-8",
+        ) as f:
+            json.dump(referenced_pull_requests, f, indent=4, sort_keys=True)
+
+        # pr.json must come last, because we use it to detect if the pull 
request has changed since the last dump
+        # and storing it earlier would lead to potentially partial data on 
disk that never gets updated
+        with open(os.path.join(metadata_dir, "pr.json"), "w", 
encoding="utf-8") as f:
+            json.dump(pr_obj._data, f, indent=4, sort_keys=True)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/commands_git/staging_search.py 
new/osc-1.23.0/osc/commands_git/staging_search.py
--- old/osc-1.22.0/osc/commands_git/staging_search.py   1970-01-01 
01:00:00.000000000 +0100
+++ new/osc-1.23.0/osc/commands_git/staging_search.py   2025-12-18 
10:30:46.000000000 +0100
@@ -0,0 +1,108 @@
+import sys
+
+import osc.commandline_git
+
+
+class StagingSearchCommand(osc.commandline_git.GitObsCommand):
+    """
+    Search staging pull requests.
+    """
+
+    name = "search"
+    parent = "StagingCommand"
+
+    def init_arguments(self):
+        self.add_argument_owner_repo()
+        self.add_argument(
+            "--type",
+            dest="type",
+            # the choices must match the *_LABEL constants in 
StagingPullRequestWrapper class
+            choices=("BACKLOG", "INPROGRESS", "ONHOLD"),
+            required=True,
+            help="Filter by review state.",
+        )
+        self.add_argument(
+            "--package-review-state",
+            dest="package_review_state",
+            choices=("APPROVED", "ALL"),
+            default="APPROVED",
+            help="Filter by review state on *all* referenced *package* PRs.",
+        )
+        self.add_argument(
+            "--target-branch",
+            dest="target_branches",
+            action="append",
+            help="Filter by target branch.",
+        )
+        self.add_argument(
+            "--export",
+            action="store_true",
+            help="Show json objects instead of human readable text",
+        )
+
+    def run(self, args):
+        from osc import gitea_api
+        from osc.output import KeyValueTable
+        from osc.output import tty
+
+        self.print_gitea_settings()
+
+        pr_state = "open"
+        owner, repo = args.owner_repo
+
+        labels = gitea_api.Repo.get_label_ids(self.gitea_conn, owner, repo)
+
+        label = getattr(gitea_api.StagingPullRequestWrapper, 
f"{args.type}_LABEL")
+        label_id = labels.get(label, None)
+        if label_id is None:
+            raise gitea_api.GitObsRuntimeError(f"Label '{label}' doesn't exist 
in '{owner}/{repo}'")
+
+        pr_obj_list = gitea_api.PullRequest.list(self.gitea_conn, owner, repo, 
state=pr_state, labels=[label_id])
+        pr_obj_list.sort()
+
+        if args.target_branches:
+            pr_obj_list = [i for i in pr_obj_list if i.base_branch in 
args.target_branches]
+
+        table = KeyValueTable()
+        result = []
+        skipped = []
+        for pr in pr_obj_list:
+            ref_prs = pr.parse_pr_references()
+            if len(ref_prs) == 0:
+                skipped.append(pr)
+                continue
+
+            package_review_state_matched = True
+
+            if args.package_review_state != "ALL":
+                for ref_owner, ref_repo, ref_pr_number in ref_prs:
+                    ref_pr = gitea_api.PullRequest.get(self.gitea_conn, 
ref_owner, ref_repo, ref_pr_number)
+
+                    all_reviews = ref_pr.get_reviews(self.gitea_conn)
+                    for review_obj in all_reviews:
+                        if review_obj.state != args.package_review_state:
+                            package_review_state_matched = False
+                            break
+
+                    if not package_review_state_matched:
+                        break
+
+            if package_review_state_matched:
+                if args.export:
+                    result.append(pr.dict())
+                else:
+                    table.add(pr.id, pr.title)
+                    table.add("", pr.url)
+
+        if args.export:
+            from json import dumps
+
+            print(dumps(result, indent=4, sort_keys=True))
+        else:
+            print(str(table))
+
+        # print warnings at the end to make them more obvious
+        if skipped:
+            print(file=sys.stderr)
+            for pr_obj in skipped:
+                print(f"{tty.colorize('WARNING', 'yellow,bold')}: Skipped 
'{pr_obj.id}' due to empty or invalid 'PR:' references.", file=sys.stderr)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/core.py new/osc-1.23.0/osc/core.py
--- old/osc-1.22.0/osc/core.py  2025-11-10 16:49:39.000000000 +0100
+++ new/osc-1.23.0/osc/core.py  2025-12-18 10:30:46.000000000 +0100
@@ -2954,13 +2954,13 @@
     new_project: str,
     new_package: str,
     new_revision: str,
-    unified=False,
-    missingok=False,
-    meta=False,
-    expand=True,
-    onlyissues=False,
-    full=True,
-    xml=False,
+    unified: bool = False,
+    missingok: bool = False,
+    meta: bool = False,
+    expand: bool = True,
+    onlyissues: bool = False,
+    full: bool = True,
+    xml: bool = False,
     files: Optional[list] = None,
 ):
     query: Dict[str, Union[str, int]] = {"cmd": "diff"}
@@ -2991,7 +2991,13 @@
         query["file"] = UrlQueryArray(files)
 
     u = makeurl(apiurl, ['source', new_project, new_package], query=query)
-    f = http_POST(u)
+    try:
+        f = http_POST(u)
+    except HTTPError as e:
+        if e.status == 404 and missingok:
+            return b"# diff failed: " + e.read()
+        raise
+
     if onlyissues and not xml:
         del_issue_list = []
         add_issue_list = []
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/git_scm/manifest.py 
new/osc-1.23.0/osc/git_scm/manifest.py
--- old/osc-1.22.0/osc/git_scm/manifest.py      2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/osc/git_scm/manifest.py      2025-12-18 10:30:46.000000000 
+0100
@@ -16,6 +16,10 @@
 
         with open(path, "r", encoding="utf-8") as f:
             data = osc_yaml.yaml_load(f)
+
+        # empty file gets loaded as None and we need a dictionary
+        data = data or {}
+
         obj = cls(data)
         return obj
 
@@ -24,6 +28,10 @@
         from ..util import yaml as osc_yaml
 
         data = osc_yaml.yaml_loads(text)
+
+        # empty string gets loaded as None and we need a dictionary
+        data = data or {}
+
         obj = cls(data)
         return obj
 
@@ -81,6 +89,28 @@
 
         return None
 
+    def get_package_paths(self, project_path: str):
+        """
+        Return all paths to the existing package directories in a project.
+        """
+        result = []
+
+        for path in self.packages:
+            package_path = os.path.normpath(os.path.join(project_path, path))
+            if os.path.isdir(package_path):
+                result.append(package_path)
+
+        for path in self.package_directories:
+            topdir_path = os.path.normpath(os.path.join(project_path, path))
+            for fn in os.listdir(topdir_path):
+                if fn.startswith("."):
+                    continue
+                package_path = os.path.join(topdir_path, fn)
+                if os.path.isdir(package_path):
+                    result.append(package_path)
+
+        return result
+
 
 class Subdirs(Manifest):
     """
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/git_scm/store.py 
new/osc-1.23.0/osc/git_scm/store.py
--- old/osc-1.22.0/osc/git_scm/store.py 2025-11-10 16:49:39.000000000 +0100
+++ new/osc-1.23.0/osc/git_scm/store.py 2025-12-18 10:30:46.000000000 +0100
@@ -77,6 +77,9 @@
         for field in self.__fields__:
             yield getattr(self, field)
 
+    def __getitem__(self, indx):
+        return list(self)[indx]
+
     def __eq__(self, other):
         if isinstance(other, tuple) and len(other) == 3:
             return (self.repo, self.arch, self.vm_type) == other
@@ -260,7 +263,7 @@
             raise oscerr.NoWorkingCopy(msg)
 
         missing = []
-        for name in ["apiurl", "project"]:
+        for name in ["project"]:
             if not getattr(self, name):
                 missing.append(name)
 
@@ -310,7 +313,7 @@
                 print(msg, file=sys.stderr)
 
         missing = []
-        for name in ["apiurl", "project", "package"]:
+        for name in ["project", "package"]:
             if not getattr(self, name):
                 missing.append(name)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/gitea_api/branch.py 
new/osc-1.23.0/osc/gitea_api/branch.py
--- old/osc-1.22.0/osc/gitea_api/branch.py      2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/osc/gitea_api/branch.py      2025-12-18 10:30:46.000000000 
+0100
@@ -59,6 +59,7 @@
         obj_list = []
         for response in conn.request_all_pages("GET", url):
             obj_list.extend([cls(i, response=response, conn=conn) for i in 
response.json() or []])
+        return obj_list
 
     @classmethod
     def create(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/gitea_api/connection.py 
new/osc-1.23.0/osc/gitea_api/connection.py
--- old/osc-1.22.0/osc/gitea_api/connection.py  2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/osc/gitea_api/connection.py  2025-12-18 10:30:46.000000000 
+0100
@@ -114,7 +114,13 @@
         return urllib.parse.urlunsplit(("", "", url_path_str, url_query_str, 
""))
 
     def request(
-        self, method, url, json_data: Optional[dict] = None, *, context: 
Optional[dict] = None
+        self,
+        method,
+        url,
+        json_data: Optional[dict] = None,
+        *,
+        context: Optional[dict] = None,
+        exception_map: Optional[dict] = None,
     ) -> GiteaHTTPResponse:
         """
         Make a request and return ``GiteaHTTPResponse``.
@@ -167,7 +173,7 @@
         if response.status // 100 != 2:
             from .exceptions import response_to_exception
 
-            raise response_to_exception(response, context=context)
+            raise response_to_exception(response, context=context, 
exception_map=exception_map)
 
         return response
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/gitea_api/exceptions.py 
new/osc-1.23.0/osc/gitea_api/exceptions.py
--- old/osc-1.22.0/osc/gitea_api/exceptions.py  2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/osc/gitea_api/exceptions.py  2025-12-18 10:30:46.000000000 
+0100
@@ -7,7 +7,12 @@
 from .connection import GiteaHTTPResponse
 
 
-def response_to_exception(response: GiteaHTTPResponse, *, context: 
Optional[dict] = None):
+def response_to_exception(
+    response: GiteaHTTPResponse,
+    *,
+    context: Optional[dict] = None,
+    exception_map: Optional[dict] = None,
+):
     """
     Throw an appropriate exception based on the contents of ``response``.
     Raise generic ``GiteaException`` if no exception matches the ``response``.
@@ -20,10 +25,21 @@
     """
     try:
         data = response.json()
-        messages = [data["message"]] + (data.get("errors", None) or [])
+        if data:
+            messages = [data["message"]] + (data.get("errors", None) or [])
+        else:
+            messages = []
     except json.JSONDecodeError:
         messages = [response.data.decode("utf-8")]
 
+    # use the specified status -> exception_class mapping
+    if exception_map and response.status in exception_map:
+        cls = exception_map.get(response.status, None)
+        if cls:
+            kwargs = context.copy() if context else {}
+            return cls(response, **kwargs)
+
+    # match exception based on status and response message
     for cls in EXCEPTION_CLASSES:
         if cls.RESPONSE_STATUS is not None and cls.RESPONSE_STATUS != 
response.status:
             continue
@@ -220,5 +236,17 @@
         return result
 
 
+class PullRequestDoesNotExist(GiteaException):
+    def __init__(self, response: GiteaHTTPResponse, owner: str, repo: str, 
number: str):
+        super().__init__(response)
+        self.owner = owner
+        self.repo = repo
+        self.number = number
+
+    def __str__(self):
+        result = f"Pull request '{self.owner}/{self.repo}#{self.number}' does 
not exist"
+        return result
+
+
 # gather all exceptions from this module that inherit from GiteaException
 EXCEPTION_CLASSES = [i for i in globals().values() if hasattr(i, 
"RESPONSE_MESSAGE_RE") and inspect.isclass(i) and issubclass(i, GiteaException)]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/gitea_api/git.py 
new/osc-1.23.0/osc/gitea_api/git.py
--- old/osc-1.22.0/osc/gitea_api/git.py 2025-11-10 16:49:39.000000000 +0100
+++ new/osc-1.23.0/osc/gitea_api/git.py 2025-12-18 10:30:46.000000000 +0100
@@ -99,6 +99,13 @@
 
         return path
 
+    @property
+    def git_dir(self) -> Optional[str]:
+        try:
+            return self._run_git(["rev-parse", "--git-dir"])
+        except subprocess.CalledProcessError:
+            return None
+
     def init(self, *, initial_branch: Optional[str] = None, quiet: bool = 
True, mute_stderr: bool = False):
         cmd = ["init"]
         if initial_branch:
@@ -131,9 +138,29 @@
     @property
     def current_branch(self) -> Optional[str]:
         try:
-            return self._run_git(["branch", "--show-current"], 
mute_stderr=True)
+            result = self._run_git(["branch", "--show-current"], 
mute_stderr=True)
         except subprocess.CalledProcessError:
-            return None
+            result = None
+
+        if not result:
+            # try to determine the branch during rebase
+            git_dir = self.git_dir
+            if git_dir:
+                paths = [
+                    os.path.join(git_dir, "rebase-apply", "head-name"),
+                    os.path.join(git_dir, "rebase-merge", "head-name"),
+                ]
+                for path in paths:
+                    try:
+                        with open(path, "r", encoding="utf-8") as f:
+                            line = f.readline()
+                            # parse "refs/heads/<branch>"
+                            result = line.strip().split("/", 2)[-1]
+                            break
+                    except FileNotFoundError:
+                        pass
+
+        return result
 
     def branch(self, branch: str, set_upstream_to: Optional[str] = None):
         cmd = ["branch"]
@@ -251,9 +278,29 @@
     def get_current_remote(self, fallback_to_origin: bool = True) -> 
Optional[str]:
         result = None
         try:
-            result = self._run_git(["rev-parse", "--abbrev-ref", "@{u}"], 
mute_stderr=True)
-            if result:
-                result = result.split("/")[0]
+            # get the upstream branch that the current branch is tracking:
+            #   case 1: upstream is a remote-tracking branch    origin/main
+            #   case 2: upstream is a local branch              slfo-main
+            upstream = self._run_git(
+                ["rev-parse", "--abbrev-ref", "@{u}"], mute_stderr=True
+            )
+            if "/" in upstream:
+                result = upstream.split("/")[0]
+                try:
+                    self._run_git(["remote", "get-url", result], 
mute_stderr=True)
+                except subprocess.CalledProcessError:
+                    result = None
+            else:
+                # case 2: upstream is a local branch
+                # look up the remote that the local branch tracks
+                try:
+                    remote_ref = self._run_git(
+                        ["config", f"branch.{upstream}.remote"], 
mute_stderr=True
+                    )
+                    if remote_ref:
+                        result = remote_ref
+                except subprocess.CalledProcessError:
+                    pass
         except subprocess.CalledProcessError:
             pass
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/gitea_api/pr.py 
new/osc-1.23.0/osc/gitea_api/pr.py
--- old/osc-1.22.0/osc/gitea_api/pr.py  2025-11-10 16:49:39.000000000 +0100
+++ new/osc-1.23.0/osc/gitea_api/pr.py  2025-12-18 10:30:46.000000000 +0100
@@ -10,6 +10,7 @@
 from .connection import Connection
 from .connection import GiteaHTTPResponse
 from .user import User
+from .repo import Repo
 
 if typing.TYPE_CHECKING:
     from .pr_review import PullRequestReview
@@ -18,19 +19,19 @@
 @functools.total_ordering
 class PullRequest(GiteaModel):
     def __eq__(self, other):
-        (self.base_owner, self.base_repo, self.number) == (other.base_owner, 
other.base_repo, other.number)
+        return (self.base_owner, self.base_repo, self.number) == 
(other.base_owner, other.base_repo, other.number)
 
     def __lt__(self, other):
-        (self.base_owner, self.base_repo, self.number) < (other.base_owner, 
other.base_repo, other.number)
+        return (self.base_owner, self.base_repo, self.number) < 
(other.base_owner, other.base_repo, other.number)
 
     @classmethod
     def split_id(cls, pr_id: str) -> Tuple[str, str, int]:
         """
         Split <owner>/<repo>#<number> or <owner>/<repo>!<number> into 
individual components and return them in a tuple.
         """
-        match = re.match(r"^([^/]+)/([^/]+)[#!]([0-9]+)$", pr_id)
+        match = re.match(r"^([^/]+)/([^/]+)[#!]([0-9]+)\s*$", pr_id)
         if not match:
-            match = re.match(r"^([^/]+)/([^/]+)/pulls/([0-9]+)$", pr_id)
+            match = re.match(r"^([^/]+)/([^/]+)/pulls/([0-9]+)\s*$", pr_id)
 
         if not match:
             raise ValueError(f"Invalid pull request id: {pr_id}")
@@ -363,7 +364,7 @@
         """
         url = conn.makeurl("repos", target_owner, target_repo, "pulls")
         if labels:
-            ids = cls._get_label_ids(conn, target_owner, target_repo)
+            ids = Repo.get_label_ids(conn, target_owner, target_repo)
             labels = [ids[i] for i in labels]
         data = {
             "base": target_branch,
@@ -392,8 +393,18 @@
         :param repo: Name of the repo.
         :param number: Number of the pull request in the repo.
         """
+        from .exceptions import PullRequestDoesNotExist
+
         url = conn.makeurl("repos", owner, repo, "pulls", str(number))
-        response = conn.request("GET", url)
+        response = conn.request(
+            "GET",
+            url,
+            context={"owner": owner, "repo": repo, "number": number},
+            exception_map={
+                # the 404 exception class cannot be identified from the "not 
found" message
+                404: PullRequestDoesNotExist,
+            },
+        )
         obj = cls(response.json(), response=response, conn=conn)
         return obj
 
@@ -420,13 +431,24 @@
         :param description: Change pull request description.
         :param allow_maintainer_edit: Change whether users with write access 
to the base branch can also push to the pull request's head branch.
         """
+        from .exceptions import PullRequestDoesNotExist
+
         json_data = {
             "title": title,
             "body": description,
             "allow_maintainer_edit": allow_maintainer_edit,
         }
         url = conn.makeurl("repos", owner, repo, "pulls", str(number))
-        response = conn.request("PATCH", url, json_data=json_data)
+        response = conn.request(
+            "PATCH",
+            url,
+            json_data=json_data,
+            context={"owner": owner, "repo": repo, "number": number},
+            exception_map={
+                # the 404 exception class cannot be identified from the "not 
found" message
+                404: PullRequestDoesNotExist,
+            },
+        )
         obj = cls(response.json(), response=response, conn=conn)
         return obj
 
@@ -438,6 +460,7 @@
         repo: str,
         *,
         state: Optional[str] = "open",
+        labels: Optional[List[int]] = None,
     ) -> List["PullRequest"]:
         """
         List pull requests in a repo.
@@ -454,6 +477,10 @@
             "state": state,
             "limit": 50,
         }
+        
+        if labels:
+            q["labels"] = labels
+                        
         url = conn.makeurl("repos", owner, repo, "pulls", query=q)
         obj_list = []
         for response in conn.request_all_pages("GET", url):
@@ -718,19 +745,6 @@
         return obj
 
     @classmethod
-    def _get_label_ids(cls, conn: Connection, owner: str, repo: str) -> 
Dict[str, int]:
-        """
-        Helper to map labels to their IDs
-        """
-        result = {}
-        url = conn.makeurl("repos", owner, repo, "labels")
-        response = conn.request("GET", url)
-        labels = response.json()
-        for label in labels:
-            result[label["name"]] = label["id"]
-        return result
-
-    @classmethod
     def add_labels(
         cls,
         conn: Connection,
@@ -752,7 +766,7 @@
 
         label_id_list = []
         invalid_labels = []
-        label_name_id_map = cls._get_label_ids(conn, owner, repo)
+        label_name_id_map = Repo.get_label_ids(conn, owner, repo)
         for label in labels:
             label_id = label_name_id_map.get(label, None)
             if not label_id:
@@ -791,7 +805,7 @@
 
         label_id_list = []
         invalid_labels = []
-        label_name_id_map = cls._get_label_ids(conn, owner, repo)
+        label_name_id_map = Repo.get_label_ids(conn, owner, repo)
         for label in labels:
             label_id = label_name_id_map.get(label, None)
             if not label_id:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/gitea_api/repo.py 
new/osc-1.23.0/osc/gitea_api/repo.py
--- old/osc-1.22.0/osc/gitea_api/repo.py        2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/osc/gitea_api/repo.py        2025-12-18 10:30:46.000000000 
+0100
@@ -2,7 +2,7 @@
 import os
 import re
 import subprocess
-from typing import List
+from typing import Dict, List
 from typing import Optional
 from typing import Tuple
 
@@ -312,3 +312,28 @@
         for response in conn.request_all_pages("GET", url):
             obj_list.extend([cls(i, response=response) for i in 
response.json()])
         return obj_list
+
+    @classmethod
+    def get_label_ids(cls, conn: Connection, owner: str, repo: str) -> 
Dict[str, int]:
+        """
+        Helper to map labels to their IDs
+        """
+        result = {}
+        url = conn.makeurl("repos", owner, repo, "labels")
+        response = conn.request("GET", url)
+        labels = response.json()
+        for label in labels:
+            result[label["name"]] = label["id"]
+        return result
+
+    @classmethod
+    def get_parent_repos(cls, conn: Connection, owner: str, repo: str) -> 
List["Repo"]:
+        """
+        Get a list of all parent repositories
+        """
+        result = []
+        obj = cls.get(conn, owner, repo)
+        while obj.parent_obj is not None:
+            obj = cls.get(conn, obj.parent_obj.owner, obj.parent_obj.repo)
+            result.append(obj)
+        return result
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/gitea_api/staging.py 
new/osc-1.23.0/osc/gitea_api/staging.py
--- old/osc-1.22.0/osc/gitea_api/staging.py     2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/osc/gitea_api/staging.py     2025-12-18 10:30:46.000000000 
+0100
@@ -5,7 +5,8 @@
 class StagingPullRequestWrapper:
     BACKLOG_LABEL = "staging/Backlog"
     INPROGRESS_LABEL = "staging/In Progress"
-
+    ONHOLD_LABEL = "staging/On Hold"
+    
     def __init__(self, conn, owner: str, repo: str, number: int, *, topdir: 
str, cache_directory: Optional[str] = None):
         from . import PullRequest
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/osc/util/git_version.py 
new/osc-1.23.0/osc/util/git_version.py
--- old/osc-1.22.0/osc/util/git_version.py      2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/osc/util/git_version.py      2025-12-18 10:30:46.000000000 
+0100
@@ -9,7 +9,7 @@
     """
     # the `version` variable contents get substituted during `git archive`
     # it requires adding this to .gitattributes: <path to this file> 
export-subst
-    version = "1.22.0"
+    version = "1.23.0"
     if version.startswith(("$", "%")):
         # "$": version hasn't been substituted during `git archive`
         # "%": "Format:" and "$" characters get removed from the version 
string (a GitHub bug?)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/tests/test_git_scm_store.py 
new/osc-1.23.0/tests/test_git_scm_store.py
--- old/osc-1.22.0/tests/test_git_scm_store.py  2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/tests/test_git_scm_store.py  2025-12-18 10:30:46.000000000 
+0100
@@ -368,6 +368,21 @@
             GitStore(pkg_path)
         self.assertIn("WARNING", stderr.getvalue())
 
+    def test_project_with_empty_manifest(self):
+        prj_path = os.path.join(self.tmpdir, "project")
+        self._git_init(prj_path)
+        self._setup_project(prj_path, project="PROJ")
+        self._write(os.path.join(prj_path, "_manifest"), "")
+
+        pkg_path = os.path.join(prj_path, "package")
+        os.makedirs(pkg_path)
+
+        store = GitStore(prj_path)
+        self.assertEqual(store.project, "PROJ")
+
+        paths = store.manifest.get_package_paths(store.topdir)
+        self.assertEqual(paths, [pkg_path])
+
 
 if __name__ == "__main__":
     unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.22.0/tests/test_gitea_api_pr.py 
new/osc-1.23.0/tests/test_gitea_api_pr.py
--- old/osc-1.22.0/tests/test_gitea_api_pr.py   2025-11-10 16:49:39.000000000 
+0100
+++ new/osc-1.23.0/tests/test_gitea_api_pr.py   2025-12-18 10:30:46.000000000 
+0100
@@ -167,7 +167,7 @@
 class TestGiteaApiPullRequestReferences(unittest.TestCase):
     PR_BODY = """
 PR: foo/bar!1
-PR:  foo/bar#2
+PR:  foo/bar#2\r
 text
 PR: bar/baz#3
 text
@@ -190,7 +190,7 @@
         actual = PullRequest.add_pr_references(self.PR_BODY, [('xxx', 'xxx', 
4), ('yyy', 'yyy', 5)])
         expected = """
 PR: foo/bar!1
-PR:  foo/bar#2
+PR:  foo/bar#2\r
 text
 PR: bar/baz#3
 PR: xxx/xxx!4
@@ -207,7 +207,7 @@
         actual = PullRequest.add_pr_references(self.PR_BODY + "\nPR: a/b#123", 
[('xxx', 'xxx', 4), ('yyy', 'yyy', 5)])
         expected = """
 PR: foo/bar!1
-PR:  foo/bar#2
+PR:  foo/bar#2\r
 text
 PR: bar/baz#3
 text

++++++ osc.dsc ++++++
--- /var/tmp/diff_new_pack.qWwYQY/_old  2025-12-18 18:37:13.236152577 +0100
+++ /var/tmp/diff_new_pack.qWwYQY/_new  2025-12-18 18:37:13.240152745 +0100
@@ -1,6 +1,6 @@
 Format: 1.0
 Source: osc
-Version: 1.22.0-0
+Version: 1.23.0-0
 Binary: osc
 Maintainer: Adrian Schroeter <[email protected]>
 Architecture: any

Reply via email to