Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package streamlink for openSUSE:Factory 
checked in at 2025-12-18 18:31:33
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/streamlink (Old)
 and      /work/SRC/openSUSE:Factory/.streamlink.new.1928 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "streamlink"

Thu Dec 18 18:31:33 2025 rev:28 rq:1323361 version:8.1.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/streamlink/streamlink.changes    2025-11-17 
12:22:11.954786453 +0100
+++ /work/SRC/openSUSE:Factory/.streamlink.new.1928/streamlink.changes  
2025-12-18 18:32:29.580236866 +0100
@@ -1,0 +2,9 @@
+Tue Dec 16 18:38:06 UTC 2025 - Richard Rahl <[email protected]>
+
+- Update to version 8.1.0:
+  * Deprecated: --hls-segment-queue-threshold in favor of
+    --stream-segmented-queue-deadline
+  * Improved: debug logging of segmented streams
+  * twitch: fixed clips GQL API query
+
+-------------------------------------------------------------------

Old:
----
  streamlink-8.0.0.tar.gz
  streamlink-8.0.0.tar.gz.asc

New:
----
  streamlink-8.1.0.tar.gz
  streamlink-8.1.0.tar.gz.asc

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ streamlink.spec ++++++
--- /var/tmp/diff_new_pack.Uh53cX/_old  2025-12-18 18:32:30.228264088 +0100
+++ /var/tmp/diff_new_pack.Uh53cX/_new  2025-12-18 18:32:30.228264088 +0100
@@ -24,7 +24,7 @@
 %endif
 %{?sle15_python_module_pythons}%{!?sle15_python_module_pythons:%define pythons 
python3}
 Name:           streamlink%{psuffix}
-Version:        8.0.0
+Version:        8.1.0
 Release:        0
 Summary:        Program to pipe streams from services into a video player
 License:        Apache-2.0 AND BSD-2-Clause

++++++ streamlink-8.0.0.tar.gz -> streamlink-8.1.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/CHANGELOG.md 
new/streamlink-8.1.0/CHANGELOG.md
--- old/streamlink-8.0.0/CHANGELOG.md   2025-11-11 17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/CHANGELOG.md   2025-12-14 19:07:02.000000000 +0100
@@ -1,5 +1,16 @@
 # Changelog
 
+## streamlink 8.1.0 (2025-12-14)
+
+- Deprecated: `--hls-segment-queue-threshold` in favor of 
`--stream-segmented-queue-deadline` 
([#6734](https://github.com/streamlink/streamlink/pull/6734))
+- Improved: debug logging of segmented streams 
([#6730](https://github.com/streamlink/streamlink/pull/6730))
+- Updated plugins:
+  - twitch: fixed clips GQL API query 
([#6744](https://github.com/streamlink/streamlink/pull/6744))
+- Tests: removed accidental `pytest_cov` dependency when running tests 
([#6727](https://github.com/streamlink/streamlink/pull/6727))
+
+[Full 
changelog](https://github.com/streamlink/streamlink/compare/8.0.0...8.1.0)
+
+
 ## streamlink 8.0.0 (2025-11-11)
 
 - BREAKING: dropped support for [EOL Python 
3.9](https://peps.python.org/pep-0596/#lifespan) 
([#6674](https://github.com/streamlink/streamlink/pull/6674))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/PKG-INFO 
new/streamlink-8.1.0/PKG-INFO
--- old/streamlink-8.0.0/PKG-INFO       2025-11-11 17:12:14.485784800 +0100
+++ new/streamlink-8.1.0/PKG-INFO       2025-12-14 19:07:27.963754200 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: streamlink
-Version: 8.0.0
+Version: 8.1.0
 Summary: Streamlink is a command-line utility that extracts streams from 
various services and pipes them into a video player of choice.
 Author: Streamlink
 Author-email: [email protected]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/completions/bash/streamlink 
new/streamlink-8.1.0/completions/bash/streamlink
--- old/streamlink-8.0.0/completions/bash/streamlink    2025-11-11 
17:12:10.000000000 +0100
+++ new/streamlink-8.1.0/completions/bash/streamlink    2025-12-14 
19:07:24.000000000 +0100
@@ -2,7 +2,7 @@
 
 
 
-_shtab_streamlink_cli_option_strings=('-h' '--help' '-V' '--version' 
'--version-check' '--auto-version-check' '--config' '--no-config' '--locale' 
'-l' '--loglevel' '--logformat' '--logdateformat' '--logfile' '-Q' '--quiet' 
'-j' '--json' '--plugins' '--show-matchers' '--can-handle-url' 
'--can-handle-url-no-redirect' '--no-plugin-cache' '--no-plugin-sideloading' 
'--plugin-dir' '--plugin-dirs' '--interface' '-4' '--ipv4' '-6' '--ipv6' '-p' 
'--player' '-a' '--player-args' '--player-env' '-v' '--player-verbose' 
'--verbose-player' '-n' '--player-fifo' '--fifo' '--player-http' 
'--player-continuous-http' '--player-external-http' 
'--player-external-http-continuous' '--player-external-http-interface' 
'--player-external-http-port' '--player-passthrough' '--player-no-close' '-t' 
'--title' '-O' '--stdout' '-o' '--output' '-r' '--record' '-R' 
'--record-and-pipe' '--fs-safe-rules' '-f' '--force' '--skip' '--progress' 
'--url' '--default-stream' '--stream-url' '--retry-streams' '--retry-max' 
'--retr
 y-open' '--stream-types' '--stream-priority' '--stream-sorting-excludes' 
'--ringbuffer-size' '--stream-segment-attempts' '--stream-segment-threads' 
'--stream-segment-timeout' '--stream-segmented-duration' '--stream-timeout' 
'--mux-subtitles' '--hls-live-edge' '--hls-segment-stream-data' 
'--hls-playlist-reload-attempts' '--hls-playlist-reload-time' 
'--hls-segment-queue-threshold' '--hls-segment-ignore-names' 
'--hls-segment-key-uri' '--hls-audio-select' '--hls-start-offset' 
'--hls-duration' '--hls-live-restart' '--dash-manifest-reload-attempts' 
'--ffmpeg-ffmpeg' '--ffmpeg-no-validation' '--ffmpeg-verbose' 
'--ffmpeg-verbose-path' '--ffmpeg-loglevel' '--ffmpeg-fout' 
'--ffmpeg-video-transcode' '--ffmpeg-audio-transcode' '--ffmpeg-copyts' 
'--ffmpeg-start-at-zero' '--ffmpeg-validation-timeout' '--http-proxy' 
'--http-cookie' '--http-header' '--http-query-param' '--http-ignore-env' 
'--http-no-ssl-verify' '--http-disable-dh' '--http-ssl-cert' 
'--http-ssl-cert-crt-key' '--http-timeout' '--webb
 rowser' '--webbrowser-executable' '--webbrowser-timeout' 
'--webbrowser-cdp-host' '--webbrowser-cdp-port' '--webbrowser-cdp-timeout' 
'--webbrowser-headless' '--bbciplayer-username' '--bbciplayer-password' 
'--bbciplayer-hd' '--clubbingtv-username' '--clubbingtv-password' 
'--kick-low-latency' '--niconico-email' '--niconico-password' 
'--niconico-user-session' '--niconico-purge-credentials' 
'--niconico-timeshift-offset' '--openrectv-email' '--openrectv-password' 
'--pixiv-sessionid' '--pixiv-devicetoken' '--pixiv-purge-credentials' 
'--pixiv-performer' '--raiplay-email' '--raiplay-password' 
'--raiplay-purge-credentials' '--soop-username' '--soop-password' 
'--soop-purge-credentials' '--soop-stream-password' '--steam-email' 
'--steam-password' '--streann-url' '--tf1-email' '--tf1-password' 
'--tf1-purge-credentials' '--twitcasting-password' '--twitch-low-latency' 
'--twitch-supported-codecs' '--twitch-api-header' '--twitch-access-token-param' 
'--twitch-force-client-integrity' '--twitch-purge-cl
 ient-integrity' '--ustream-password' '--ustvnow-username' '--ustvnow-password' 
'--wwenetwork-email' '--wwenetwork-password' '--yupptv-boxid' 
'--yupptv-yuppflixtoken' '--yupptv-purge-credentials' '--zattoo-email' 
'--zattoo-password' '--zattoo-purge-credentials' '--zattoo-stream-types')
+_shtab_streamlink_cli_option_strings=('-h' '--help' '-V' '--version' 
'--version-check' '--auto-version-check' '--config' '--no-config' '--locale' 
'-l' '--loglevel' '--logformat' '--logdateformat' '--logfile' '-Q' '--quiet' 
'-j' '--json' '--plugins' '--show-matchers' '--can-handle-url' 
'--can-handle-url-no-redirect' '--no-plugin-cache' '--no-plugin-sideloading' 
'--plugin-dir' '--plugin-dirs' '--interface' '-4' '--ipv4' '-6' '--ipv6' '-p' 
'--player' '-a' '--player-args' '--player-env' '-v' '--player-verbose' 
'--verbose-player' '-n' '--player-fifo' '--fifo' '--player-http' 
'--player-continuous-http' '--player-external-http' 
'--player-external-http-continuous' '--player-external-http-interface' 
'--player-external-http-port' '--player-passthrough' '--player-no-close' '-t' 
'--title' '-O' '--stdout' '-o' '--output' '-r' '--record' '-R' 
'--record-and-pipe' '--fs-safe-rules' '-f' '--force' '--skip' '--progress' 
'--url' '--default-stream' '--stream-url' '--retry-streams' '--retry-max' 
'--retr
 y-open' '--stream-types' '--stream-priority' '--stream-sorting-excludes' 
'--ringbuffer-size' '--stream-segment-attempts' '--stream-segment-threads' 
'--stream-segment-timeout' '--stream-segmented-duration' 
'--stream-segmented-queue-deadline' '--stream-timeout' '--mux-subtitles' 
'--hls-live-edge' '--hls-segment-stream-data' '--hls-playlist-reload-attempts' 
'--hls-playlist-reload-time' '--hls-segment-queue-threshold' 
'--hls-segment-ignore-names' '--hls-segment-key-uri' '--hls-audio-select' 
'--hls-start-offset' '--hls-duration' '--hls-live-restart' 
'--dash-manifest-reload-attempts' '--ffmpeg-ffmpeg' '--ffmpeg-no-validation' 
'--ffmpeg-verbose' '--ffmpeg-verbose-path' '--ffmpeg-loglevel' '--ffmpeg-fout' 
'--ffmpeg-video-transcode' '--ffmpeg-audio-transcode' '--ffmpeg-copyts' 
'--ffmpeg-start-at-zero' '--ffmpeg-validation-timeout' '--http-proxy' 
'--http-cookie' '--http-header' '--http-query-param' '--http-ignore-env' 
'--http-no-ssl-verify' '--http-disable-dh' '--http-ssl-cert' '--http-ssl-ce
 rt-crt-key' '--http-timeout' '--webbrowser' '--webbrowser-executable' 
'--webbrowser-timeout' '--webbrowser-cdp-host' '--webbrowser-cdp-port' 
'--webbrowser-cdp-timeout' '--webbrowser-headless' '--bbciplayer-username' 
'--bbciplayer-password' '--bbciplayer-hd' '--clubbingtv-username' 
'--clubbingtv-password' '--kick-low-latency' '--niconico-email' 
'--niconico-password' '--niconico-user-session' '--niconico-purge-credentials' 
'--niconico-timeshift-offset' '--openrectv-email' '--openrectv-password' 
'--pixiv-sessionid' '--pixiv-devicetoken' '--pixiv-purge-credentials' 
'--pixiv-performer' '--raiplay-email' '--raiplay-password' 
'--raiplay-purge-credentials' '--soop-username' '--soop-password' 
'--soop-purge-credentials' '--soop-stream-password' '--steam-email' 
'--steam-password' '--streann-url' '--tf1-email' '--tf1-password' 
'--tf1-purge-credentials' '--twitcasting-password' '--twitch-low-latency' 
'--twitch-supported-codecs' '--twitch-api-header' '--twitch-access-token-param' 
'--twitch-force-
 client-integrity' '--twitch-purge-client-integrity' '--ustream-password' 
'--ustvnow-username' '--ustvnow-password' '--wwenetwork-email' 
'--wwenetwork-password' '--yupptv-boxid' '--yupptv-yuppflixtoken' 
'--yupptv-purge-credentials' '--zattoo-email' '--zattoo-password' 
'--zattoo-purge-credentials' '--zattoo-stream-types')
 
 
 
@@ -173,6 +173,7 @@
 
       if [[ "$current_action_nargs" != "*" ]] && \
          [[ "$current_action_nargs" != "+" ]] && \
+         [[ "$current_action_nargs" != "?" ]] && \
          [[ "$current_action_nargs" != *"..." ]] && \
          (( $word_index + 1 - $current_action_args_start_index - $pos_only >= \
             $current_action_nargs )); then
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/completions/zsh/_streamlink 
new/streamlink-8.1.0/completions/zsh/_streamlink
--- old/streamlink-8.0.0/completions/zsh/_streamlink    2025-11-11 
17:12:11.000000000 +0100
+++ new/streamlink-8.1.0/completions/zsh/_streamlink    2025-12-14 
19:07:24.000000000 +0100
@@ -484,6 +484,23 @@
 
             Default is 0.
         ]:stream_segmented_duration:"
+  "--stream-segmented-queue-deadline[
+            A multiplication factor of the time frame in which new segments 
must be queued in order to prevent playback issues
+            due to lack of video\/audio data. If this segment-queue-deadline 
has not been met, the stream will be stopped early.
+
+            The intention of this segment-queue-deadline is to be able to stop 
early when the end of a stream is not announced
+            by the server, so Streamlink doesn\'t have to wait until a buffer 
read-timeout occurs. See --stream-timeout.
+
+            The base time this multiplication factor is applied to depends on 
the specific
+            stream implementation and the respective values returned by the 
streaming server.
+            This deadline check is done after trying to fetch new data.
+
+            Set to \`\`0\`\` to disable.
+
+            Default is 3.0.
+
+            By default, wait three times as long for new segments to be made 
available than the server\'s advertised time frame.
+        ]:stream_segmented_queue_deadline:"
   "--stream-timeout[
             The maximum time to wait for an unfiltered stream to continue 
outputting data.
 
@@ -529,17 +546,7 @@
             Default is default.
         ]:hls_playlist_reload_time:"
   "--hls-segment-queue-threshold[
-            The multiplication factor of the HLS playlist\'s target duration 
after which the stream will be stopped early
-            if no new segments were queued after refreshing the playlist 
(multiple times). The target duration defines the
-            maximum duration a single segment can have, meaning new segments 
must be available during this time frame,
-            otherwise playback issues can occur.
-
-            The intention of this queue threshold is to be able to stop early 
when the end of a stream doesn\'t get
-            announced by the server, so Streamlink doesn\'t have to wait until 
a read-timeout occurs. See --stream-timeout.
-
-            Set to \`\`0\`\` to disable.
-
-            Default is 3.
+            Deprecated in favor of --stream-segmented-queue-deadline.
         ]:hls_segment_queue_threshold:"
   "--hls-segment-ignore-names[
             A comma-delimited list of segment names that will get filtered out.
@@ -856,12 +863,19 @@
   ":Stream to play.:"
 )
 
+# guard to ensure default positional specs are added only once per session
+_shtab_streamlink_cli_defaults_added=0
+
 
 _shtab_streamlink_cli() {
-  local context state line curcontext="$curcontext" one_or_more='(-)*' 
remainder='(*)'
+  local context state line curcontext="$curcontext" one_or_more='(*)' 
remainder='(-)*' default='*::: :->streamlink'
 
-  if ((${_shtab_streamlink_cli_options[(I)${(q)one_or_more}*]} + 
${_shtab_streamlink_cli_options[(I)${(q)remainder}*]} == 0)); then  # noqa: E501
-    _shtab_streamlink_cli_options+=(': :_shtab_streamlink_cli_commands' '*::: 
:->streamlink')
+  # Add default positional/remainder specs only if none exist, and only once 
per session
+  if (( ! _shtab_streamlink_cli_defaults_added )); then
+    if (( ${_shtab_streamlink_cli_options[(I)${(q)one_or_more}*]} +          
${_shtab_streamlink_cli_options[(I)${(q)remainder}*]} +          
${_shtab_streamlink_cli_options[(I)${(q)default}]} == 0 )); then
+      _shtab_streamlink_cli_options+=(': :_shtab_streamlink_cli_commands' 
'*::: :->streamlink')
+    fi
+    _shtab_streamlink_cli_defaults_added=1
   fi
   _arguments -C -s $_shtab_streamlink_cli_options
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/docs/_build/man/streamlink.1 
new/streamlink-8.1.0/docs/_build/man/streamlink.1
--- old/streamlink-8.0.0/docs/_build/man/streamlink.1   2025-11-11 
17:12:09.000000000 +0100
+++ new/streamlink-8.1.0/docs/_build/man/streamlink.1   2025-12-14 
19:07:23.000000000 +0100
@@ -27,7 +27,7 @@
 .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
 .in \\n[rst2man-indent\\n[rst2man-indent-level]]u
 ..
-.TH "STREAMLINK" "1" "Nov 11, 2025" "8.0.0" "Streamlink"
+.TH "STREAMLINK" "1" "Dec 14, 2025" "8.1.0" "Streamlink"
 .SH NAME
 streamlink \- extracts streams from various services and pipes them into a 
video player of choice
 .SH SYNOPSIS
@@ -806,6 +806,25 @@
 .UNINDENT
 .INDENT 0.0
 .TP
+.B \-\-stream\-segmented\-queue\-deadline FACTOR
+A multiplication factor of the time frame in which new segments must be queued 
in order to prevent playback issues
+due to lack of video/audio data. If this segment\-queue\-deadline has not been 
met, the stream will be stopped early.
+.sp
+The intention of this segment\-queue\-deadline is to be able to stop early 
when the end of a stream is not announced
+by the server, so Streamlink doesn\(aqt have to wait until a buffer 
read\-timeout occurs. See \fI\%\-\-stream\-timeout\fP\&.
+.sp
+The base time this multiplication factor is applied to depends on the specific
+stream implementation and the respective values returned by the streaming 
server.
+This deadline check is done after trying to fetch new data.
+.sp
+Set to \fB0\fP to disable.
+.sp
+Default is: \fB3.0\fP\&.
+.sp
+By default, wait three times as long for new segments to be made available 
than the server\(aqs advertised time frame.
+.UNINDENT
+.INDENT 0.0
+.TP
 .B \-\-stream\-timeout TIMEOUT
 The maximum time to wait for an unfiltered stream to continue outputting data.
 .sp
@@ -873,17 +892,7 @@
 .INDENT 0.0
 .TP
 .B \-\-hls\-segment\-queue\-threshold FACTOR
-The multiplication factor of the HLS playlist\(aqs target duration after which 
the stream will be stopped early
-if no new segments were queued after refreshing the playlist (multiple times). 
The target duration defines the
-maximum duration a single segment can have, meaning new segments must be 
available during this time frame,
-otherwise playback issues can occur.
-.sp
-The intention of this queue threshold is to be able to stop early when the end 
of a stream doesn\(aqt get
-announced by the server, so Streamlink doesn\(aqt have to wait until a 
read\-timeout occurs. See \fI\%\-\-stream\-timeout\fP\&.
-.sp
-Set to \fB0\fP to disable.
-.sp
-Default is: \fB3\fP\&.
+Deprecated in favor of \fI\%\-\-stream\-segmented\-queue\-deadline\fP\&.
 .UNINDENT
 .INDENT 0.0
 .TP
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/docs/changelog.md 
new/streamlink-8.1.0/docs/changelog.md
--- old/streamlink-8.0.0/docs/changelog.md      2025-11-11 17:11:49.000000000 
+0100
+++ new/streamlink-8.1.0/docs/changelog.md      2025-12-14 19:07:02.000000000 
+0100
@@ -1,5 +1,16 @@
 # Changelog
 
+## streamlink 8.1.0 (2025-12-14)
+
+- Deprecated: `--hls-segment-queue-threshold` in favor of 
`--stream-segmented-queue-deadline` 
([#6734](https://github.com/streamlink/streamlink/pull/6734))
+- Improved: debug logging of segmented streams 
([#6730](https://github.com/streamlink/streamlink/pull/6730))
+- Updated plugins:
+  - twitch: fixed clips GQL API query 
([#6744](https://github.com/streamlink/streamlink/pull/6744))
+- Tests: removed accidental `pytest_cov` dependency when running tests 
([#6727](https://github.com/streamlink/streamlink/pull/6727))
+
+[Full 
changelog](https://github.com/streamlink/streamlink/compare/8.0.0...8.1.0)
+
+
 ## streamlink 8.0.0 (2025-11-11)
 
 - BREAKING: dropped support for [EOL Python 
3.9](https://peps.python.org/pep-0596/#lifespan) 
([#6674](https://github.com/streamlink/streamlink/pull/6674))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/docs/deprecations.rst 
new/streamlink-8.1.0/docs/deprecations.rst
--- old/streamlink-8.0.0/docs/deprecations.rst  2025-11-11 17:11:49.000000000 
+0100
+++ new/streamlink-8.1.0/docs/deprecations.rst  2025-12-14 19:07:02.000000000 
+0100
@@ -1,6 +1,15 @@
 Deprecations
 ============
 
+streamlink 8.1.0
+----------------
+
+--hls-segment-queue-threshold
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``--hls-segment-queue-threshold`` CLI argument has been deprecated in 
favor of :option:`--stream-segmented-queue-deadline`.
+
+
 streamlink 8.0.0
 ----------------
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/pyproject.toml 
new/streamlink-8.1.0/pyproject.toml
--- old/streamlink-8.0.0/pyproject.toml 2025-11-11 17:12:14.487784600 +0100
+++ new/streamlink-8.1.0/pyproject.toml 2025-12-14 19:07:27.966754200 +0100
@@ -106,12 +106,12 @@
   "freezegun >=1.5.0",
 ]
 lint = [
-  "ruff ==0.14.1",
+  "ruff ==0.14.7",
 ]
 typing = [
   { include-group = "test" },
   { include-group = "docs" },
-  "mypy[faster-cache] ==1.18.2",
+  "mypy[faster-cache] ==1.19.0",
   "typing-extensions >=4.0.0",
   "lxml-stubs",
   "trio-typing",
@@ -195,7 +195,6 @@
 minversion = "8.4.0"
 addopts = """
 --disable-plugin-autoload
--p pytest_cov
 -p trio
 -p requests_mock
 """
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/setup.py 
new/streamlink-8.1.0/setup.py
--- old/streamlink-8.0.0/setup.py       2025-11-11 17:12:14.488784600 +0100
+++ new/streamlink-8.1.0/setup.py       2025-12-14 19:07:27.966754200 +0100
@@ -85,5 +85,5 @@
         cmdclass=get_cmdclasses(cmdclass),
         entry_points=entry_points,
         data_files=data_files,
-        version="8.0.0",
+        version="8.1.0",
     )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink/_version.py 
new/streamlink-8.1.0/src/streamlink/_version.py
--- old/streamlink-8.0.0/src/streamlink/_version.py     2025-11-11 
17:12:14.488784600 +0100
+++ new/streamlink-8.1.0/src/streamlink/_version.py     2025-12-14 
19:07:27.967754100 +0100
@@ -1 +1 @@
-__version__ = "8.0.0"
+__version__ = "8.1.0"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink/plugins/tv3cat.py 
new/streamlink-8.1.0/src/streamlink/plugins/tv3cat.py
--- old/streamlink-8.0.0/src/streamlink/plugins/tv3cat.py       2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/plugins/tv3cat.py       2025-12-14 
19:07:02.000000000 +0100
@@ -87,8 +87,8 @@
         )
 
         log.debug(f"{streams=}")
-        for _geo, data in streams:
-            if _geo == geo:
+        for key, data in streams:
+            if key == geo:
                 return data
 
         log.error("The content is geo-blocked")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink/plugins/twitch.py 
new/streamlink-8.1.0/src/streamlink/plugins/twitch.py
--- old/streamlink-8.0.0/src/streamlink/plugins/twitch.py       2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/plugins/twitch.py       2025-12-14 
19:07:02.000000000 +0100
@@ -134,7 +134,7 @@
         daterange = self.m3u8.dateranges[-1]
         if self._is_daterange_ad(daterange):
             self.m3u8.dateranges_ads.append(daterange)
-            log.trace(f"Ad daterange: {daterange!r}")  # type: 
ignore[attr-defined]
+            log.trace("Advertisement: %r", daterange)  # type: 
ignore[attr-defined]
 
     def get_segment(self, uri: str, **data) -> TwitchHLSSegment:
         ad = self._is_segment_ad(self._date, self._extinf.title if 
self._extinf else None)
@@ -234,8 +234,6 @@
     stream: TwitchHLSStream
 
     def should_filter_segment(self, segment: TwitchHLSSegment) -> bool:  # 
type: ignore[override]
-        if segment.ad:  # pragma: no cover
-            log.trace(f"Filtering out segment: {segment.num=} {segment.title=} 
{segment.date=}")  # type: ignore[attr-defined]
         return segment.ad
 
 
@@ -553,8 +551,9 @@
     def clips(self, clipname):
         query = self._gql_persisted_query(
             "VideoAccessToken_Clip",
-            "36b89d2507fce29e5ca551df756d27c1cfe079e2609642b4390aa4c35796eb11",
+            "993d9a5131f15a37bd16f32342c44ed1e0b1a9b968c6afdb662d2cddd595f6c5",
             slug=clipname,
+            platform="web",
         )
 
         return self.call(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/streamlink-8.0.0/src/streamlink/session/http_useragents.py 
new/streamlink-8.1.0/src/streamlink/session/http_useragents.py
--- old/streamlink-8.0.0/src/streamlink/session/http_useragents.py      
2025-11-11 17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/session/http_useragents.py      
2025-12-14 19:07:02.000000000 +0100
@@ -1,11 +1,11 @@
-ANDROID = "Mozilla/5.0 (Linux; Android 16) AppleWebKit/537.36 (KHTML, like 
Gecko) Chrome/141.0.7390.123 Mobile Safari/537.36"
-CHROME = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, 
like Gecko) Chrome/141.0.0.0 Safari/537.36"
+ANDROID = "Mozilla/5.0 (Linux; Android 16) AppleWebKit/537.36 (KHTML, like 
Gecko) Chrome/142.0.7444.172 Mobile Safari/537.36"
+CHROME = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, 
like Gecko) Chrome/142.0.0.0 Safari/537.36"
 CHROME_OS = "Mozilla/5.0 (X11; CrOS x86_64 16181.61.0) AppleWebKit/537.36 
(KHTML, like Gecko) Chrome/134.0.6998.198 Safari/537.36"
-FIREFOX = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:144.0) Gecko/20100101 
Firefox/144.0"
+FIREFOX = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:145.0) Gecko/20100101 
Firefox/145.0"
 IE_11 = "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko"
-IPHONE = "Mozilla/5.0 (iPhone; CPU iPhone OS 18_7_1 like Mac OS X) 
AppleWebKit/605.1.15 (KHTML, like Gecko) Version/26.0 Mobile/15E148 
Safari/604.1"
-OPERA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, 
like Gecko) Chrome/141.0.0.0 Safari/537.36 OPR/123.0.0.0"
-SAFARI = "Mozilla/5.0 (Macintosh; Intel Mac OS X 15_7_1) AppleWebKit/605.1.15 
(KHTML, like Gecko) Version/26.0 Safari/605.1.15"
+IPHONE = "Mozilla/5.0 (iPhone; CPU iPhone OS 18_7_2 like Mac OS X) 
AppleWebKit/605.1.15 (KHTML, like Gecko) Version/26.0 Mobile/15E148 
Safari/604.1"
+OPERA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, 
like Gecko) Chrome/142.0.0.0 Safari/537.36 OPR/124.0.0.0"
+SAFARI = "Mozilla/5.0 (Macintosh; Intel Mac OS X 15_7_2) AppleWebKit/605.1.15 
(KHTML, like Gecko) Version/26.0 Safari/605.1.15"
 
 # Backwards compatibility
 EDGE = CHROME
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink/session/options.py 
new/streamlink-8.1.0/src/streamlink/session/options.py
--- old/streamlink-8.0.0/src/streamlink/session/options.py      2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/session/options.py      2025-12-14 
19:07:02.000000000 +0100
@@ -149,6 +149,10 @@
           - ``float``
           - ``0.0``
           - Limit the playback duration of segmented streams, rounded to the 
nearest segment
+        * - stream-segmented-queue-deadline
+          - ``float``
+          - ``3``
+          - Multiplication factor of the deadline for new segments to be queued
         * - stream-timeout
           - ``float``
           - ``60.0``
@@ -182,10 +186,10 @@
             - ``segment``: duration of the last segment
             - ``live-edge``: sum of segment durations of the ``hls-live-edge`` 
value minus one
             - ``default``: the playlist's target duration
-        * - hls-segment-queue-threshold
+        * - hls-segment-queue-threshold *(deprecated)*
           - ``float``
           - ``3``
-          - Factor of the playlist's targetduration which sets the threshold 
for stopping early on missing segments
+          - See ``stream-segmented-queue-deadline``
         * - hls-segment-stream-data
           - ``bool``
           - ``False``
@@ -298,13 +302,13 @@
             "stream-segment-threads": 1,
             "stream-segment-timeout": 10.0,
             "stream-segmented-duration": 0.0,
+            "stream-segmented-queue-deadline": 3,
             "stream-timeout": 60.0,
             "hls-live-edge": 3,
             "hls-live-restart": False,
             "hls-start-offset": 0.0,
             "hls-playlist-reload-attempts": 3,
             "hls-playlist-reload-time": "default",
-            "hls-segment-queue-threshold": 3,
             "hls-segment-stream-data": False,
             "hls-segment-ignore-names": [],
             "hls-segment-key-uri": None,
@@ -462,4 +466,5 @@
         "http-trust-env": _set_http_attr,
         "http-timeout": _set_http_attr,
         "hls-duration": _factory_set_deprecated("stream-segmented-duration", 
float),
+        "hls-segment-queue-threshold": 
_factory_set_deprecated("stream-segmented-queue-deadline", float),
     }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink/stream/dash/dash.py 
new/streamlink-8.1.0/src/streamlink/stream/dash/dash.py
--- old/streamlink-8.0.0/src/streamlink/stream/dash/dash.py     2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/stream/dash/dash.py     2025-12-14 
19:07:02.000000000 +0100
@@ -102,6 +102,10 @@
         if time_to_sleep > 0:
             self.wait(time_to_sleep)
 
+    @property
+    def _queue_deadline_wait(self) -> float:
+        return self.mpd.minimumUpdatePeriod.total_seconds()
+
     def iter_segments(self):
         init = True
         back_off_factor = 1
@@ -124,6 +128,7 @@
                 if not representation:
                     continue
 
+                queued = False
                 iter_segments = representation.segments(
                     sequence=self.sequence,
                     init=init,
@@ -134,13 +139,17 @@
                     if init and not segment.init:
                         self.sequence = segment.num
                         init = False
-                    yield segment
+                    queued |= yield segment
 
                 # close worker if type is not dynamic (all segments were put 
into writer queue)
                 if self.mpd.type != "dynamic":
                     self.close()
                     return
 
+                # Implicit end of stream
+                if self.check_queue_deadline(queued):
+                    return
+
                 if not self.reload():
                     back_off_factor = max(back_off_factor * 1.3, 10.0)
                 else:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/streamlink-8.0.0/src/streamlink/stream/dash/manifest.py 
new/streamlink-8.1.0/src/streamlink/stream/dash/manifest.py
--- old/streamlink-8.0.0/src/streamlink/stream/dash/manifest.py 2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/stream/dash/manifest.py 2025-12-14 
19:07:02.000000000 +0100
@@ -372,11 +372,17 @@
         """
         Find the first Representation instance with a matching ident
         """
+        p, a, r = ident
         for period in self.periods:
+            if p != period.id:
+                continue
             for adaptationset in period.adaptationSets:
+                if a != adaptationset.id:
+                    continue
                 for representation in adaptationset.representations:
-                    if representation.ident == ident:
-                        return representation
+                    if r != representation.id:
+                        continue
+                    return representation
 
 
 class ProgramInformation(MPDNode):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink/stream/hls/hls.py 
new/streamlink-8.1.0/src/streamlink/stream/hls/hls.py
--- old/streamlink-8.0.0/src/streamlink/stream/hls/hls.py       2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/stream/hls/hls.py       2025-12-14 
19:07:02.000000000 +0100
@@ -306,8 +306,6 @@
     writer: HLSStreamWriter
     stream: HLSStream
 
-    SEGMENT_QUEUE_TIMING_THRESHOLD_MIN = 5.0
-
     reload_attempts: int
     reload_time: float | Literal["segment", "live-edge"]
 
@@ -320,10 +318,8 @@
         self.playlist_changed = False
         self.playlist_end: int | None = None
         self.playlist_targetduration: float = 0
-        self.playlist_sequence_last: datetime = now()
         self.playlist_segments: list[HLSSegment] = []
 
-        self.segment_queue_timing_threshold_factor = 
self.session.options.get("hls-segment-queue-threshold")
         self.live_edge = self.session.options.get("hls-live-edge")
         self.duration_offset_start = int(self.stream.start_offset + 
(self.session.options.get("hls-start-offset") or 0))
         self.hls_live_restart = self.stream.force_restart or 
self.session.options.get("hls-live-restart")
@@ -434,20 +430,6 @@
     def valid_segment(self, segment: HLSSegment) -> bool:
         return segment.num >= self.sequence
 
-    def _segment_queue_timing_threshold_reached(self) -> bool:
-        if self.segment_queue_timing_threshold_factor <= 0:
-            return False
-
-        threshold = max(
-            self.SEGMENT_QUEUE_TIMING_THRESHOLD_MIN,
-            self.playlist_targetduration * 
self.segment_queue_timing_threshold_factor,
-        )
-        if now() <= self.playlist_sequence_last + timedelta(seconds=threshold):
-            return False
-
-        log.warning(f"No new segments in playlist for more than 
{threshold:.2f}s. Stopping...")
-        return True
-
     @staticmethod
     def duration_to_sequence(duration: float, segments: list[HLSSegment]) -> 
int:
         d = 0.0
@@ -464,10 +446,31 @@
         # could not skip far enough, so return the default
         return default
 
+    @property
+    def _queue_deadline_wait(self) -> float:
+        return self.playlist_targetduration
+
+    def wait_and_reload(self):
+        # Exclude fetch+processing time from the overall reload time and 
reload in a strict time interval
+        time_completed = now()
+        time_elapsed = max(0.0, (time_completed - 
self._reload_last).total_seconds())
+        time_wait = max(0.0, self._reload_time - time_elapsed)
+        if self.wait(time_wait):
+            if time_wait > 0:
+                # If we had to wait, then don't call now() twice and instead 
reference the timestamp from before
+                # the wait() call, to prevent a shifting time offset due to 
the execution time
+                self._reload_last = time_completed + 
timedelta(seconds=time_wait)
+            else:
+                # Otherwise, get the current time, as the reload interval 
already has shifted
+                self._reload_last = now()
+
+            try:
+                self.reload()
+            except StreamError as err:
+                log.warning(f"Reloading failed: {err}")
+
     def iter_segments(self):
-        self._reload_last \
-            = self.playlist_sequence_last \
-            = now()  # fmt: skip
+        self._reload_last = now()
 
         try:
             self.reload()
@@ -506,42 +509,17 @@
             for segment in self.playlist_segments:
                 if not self.valid_segment(segment):
                     continue
-
-                log.debug(f"Adding segment {segment.num} to queue")
-
-                yield segment
-                queued = True
-
-                if self.closed:  # pragma: no cover
-                    return
+                queued |= yield segment
 
             # End of stream
             if self.closed or self.playlist_end is not None and (not queued or 
self.sequence > self.playlist_end):
                 return
 
-            if queued:
-                self.playlist_sequence_last = now()
-            elif self._segment_queue_timing_threshold_reached():
+            # Implicit end of stream
+            if self.check_queue_deadline(queued):
                 return
 
-            # Exclude playlist fetch+processing time from the overall playlist 
reload time
-            # and reload playlist in a strict time interval
-            time_completed = now()
-            time_elapsed = max(0.0, (time_completed - 
self._reload_last).total_seconds())
-            time_wait = max(0.0, self._reload_time - time_elapsed)
-            if self.wait(time_wait):
-                if time_wait > 0:
-                    # If we had to wait, then don't call now() twice and 
instead reference the timestamp from before
-                    # the wait() call, to prevent a shifting time offset due 
to the execution time.
-                    self._reload_last = time_completed + 
timedelta(seconds=time_wait)
-                else:
-                    # Otherwise, get the current time, as the reload interval 
already has shifted.
-                    self._reload_last = now()
-
-                try:
-                    self.reload()
-                except StreamError as err:
-                    log.warning(f"Failed to reload playlist: {err}")
+            self.wait_and_reload()
 
 
 class HLSStreamReader(FilteredStream, SegmentedStreamReader[HLSSegment, 
Response]):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/streamlink-8.0.0/src/streamlink/stream/hls/segment.py 
new/streamlink-8.1.0/src/streamlink/stream/hls/segment.py
--- old/streamlink-8.0.0/src/streamlink/stream/hls/segment.py   2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/stream/hls/segment.py   2025-12-14 
19:07:02.000000000 +0100
@@ -6,6 +6,7 @@
 from typing import TYPE_CHECKING, NamedTuple
 
 from streamlink.stream.segmented.segment import Segment
+from streamlink.utils.dataclass import FormattedDataclass
 from streamlink.utils.l10n import Language
 
 
@@ -37,7 +38,8 @@
 
 
 # EXT-X-DATERANGE
-class DateRange(NamedTuple):
+@dataclass(kw_only=True)
+class DateRange(metaclass=FormattedDataclass):
     id: str | None
     classname: str | None
     start_date: datetime | None
@@ -45,27 +47,29 @@
     duration: timedelta | None
     planned_duration: timedelta | None
     end_on_next: bool
-    x: dict[str, str]
+    x: dict[str, str] = field(repr=False)
 
 
 # EXT-X-KEY
-class Key(NamedTuple):
+@dataclass(kw_only=True)
+class Key:
     method: str
-    uri: str | None
-    iv: bytes | None  # version >= 2
+    uri: str | None = field(repr=False)
+    iv: bytes | None = field(repr=False)  # version >= 2
     key_format: str | None  # version >= 5
     key_format_versions: str | None  # version >= 5
 
 
 # EXT-X-MAP
-class Map(NamedTuple):
-    uri: str
+@dataclass(kw_only=True)
+class Map:
+    uri: str = field(repr=False)
     key: Key | None
     byterange: ByteRange | None
 
 
 # EXT-X-MEDIA
-@dataclass
+@dataclass(kw_only=True)
 class Media:
     uri: str | None
     type: str
@@ -102,7 +106,8 @@
 
 
 # EXT-X-STREAM-INF
-class StreamInfo(NamedTuple):
+@dataclass(kw_only=True)
+class StreamInfo:
     bandwidth: int
     program_id: str | None  # version < 6
     codecs: list[str]
@@ -113,7 +118,8 @@
 
 
 # EXT-X-I-FRAME-STREAM-INF
-class IFrameStreamInfo(NamedTuple):
+@dataclass(kw_only=True)
+class IFrameStreamInfo:
     bandwidth: int
     program_id: str | None
     codecs: list[str]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/streamlink-8.0.0/src/streamlink/stream/segmented/segment.py 
new/streamlink-8.1.0/src/streamlink/stream/segmented/segment.py
--- old/streamlink-8.0.0/src/streamlink/stream/segmented/segment.py     
2025-11-11 17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/stream/segmented/segment.py     
2025-12-14 19:07:02.000000000 +0100
@@ -1,10 +1,21 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
+from urllib.parse import urlparse
+
+from streamlink.utils.dataclass import FormattedDataclass
 
 
 @dataclass(kw_only=True)
-class Segment:
+class Segment(metaclass=FormattedDataclass, extra=["fileext"]):
     num: int
     init: bool = False
     discontinuity: bool = False
-    uri: str
+    uri: str = field(repr=False)
     duration: float
+
+    @property
+    def fileext(self) -> str | None:
+        path = urlparse(self.uri).path
+        ext = path.split(".")[-1]
+        if 4 >= len(ext) >= 2:
+            return ext
+        return None
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/streamlink-8.0.0/src/streamlink/stream/segmented/segmented.py 
new/streamlink-8.1.0/src/streamlink/stream/segmented/segmented.py
--- old/streamlink-8.0.0/src/streamlink/stream/segmented/segmented.py   
2025-11-11 17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/stream/segmented/segmented.py   
2025-12-14 19:07:02.000000000 +0100
@@ -4,6 +4,7 @@
 import queue
 from concurrent import futures
 from concurrent.futures import ThreadPoolExecutor
+from datetime import timedelta
 from threading import Event, current_thread
 from typing import TYPE_CHECKING, ClassVar, Generic, TypeAlias, TypeVar
 
@@ -11,11 +12,13 @@
 from streamlink.stream.segmented.segment import Segment
 from streamlink.stream.stream import StreamIO
 from streamlink.utils.thread import NamedThread
+from streamlink.utils.times import now
 
 
 if TYPE_CHECKING:
     from collections.abc import Generator
     from concurrent.futures import Future
+    from datetime import datetime
 
     from streamlink.stream.stream import Stream
 
@@ -182,6 +185,8 @@
     writer: SegmentedStreamWriter[TSegment, TResult]
     stream: Stream
 
+    _QUEUE_DEADLINE_MIN = 5.0
+
     def __init__(self, reader: SegmentedStreamReader, name: str | None = None, 
**kwargs) -> None:
         super().__init__(daemon=True, name=name)
 
@@ -196,6 +201,9 @@
         self.duration: float = 0.0
         self.duration_limit: float = 
self.session.options.get("stream-segmented-duration")
 
+        self._queue_deadline_factor: float = 
self.session.options.get("stream-segmented-queue-deadline")
+        self._queue_last: datetime = now()
+
     def close(self) -> None:
         """
         Shuts down the thread.
@@ -209,6 +217,37 @@
         self.closed = True
         self._wait.set()
 
+    @property
+    def _queue_deadline_wait(self) -> float:  # pragma: no cover
+        """
+        The max time in seconds to wait for new segments while fetching data 
in a polling implementation.
+        Will be multiplied by the ``stream-segmented-queue-deadline`` session 
option in the queue deadline check.
+        Needs to be overridden by subclasses which intend to support queue 
deadlines.
+        """
+        return 0.0
+
+    def check_queue_deadline(self, queued: bool) -> bool:
+        """
+        Check whether new segments were queued in a specific time frame during 
the current iteration of resource fetching,
+        so the stream can be stopped early. Should be called in a subclass's 
``iter_segments()`` after fetching data.
+        :return: True if the stream should be stopped early, False otherwise.
+        """
+
+        if queued:
+            self._queue_last = now()
+            return False
+
+        deadline = max(0.0, self._queue_deadline_wait) * 
self._queue_deadline_factor
+        if deadline <= 0.0:
+            return False
+
+        deadline = max(self._QUEUE_DEADLINE_MIN, deadline)
+        if now() <= self._queue_last + timedelta(seconds=deadline):
+            return False
+
+        log.warning(f"No new segments for more than {deadline:.2f}s. 
Stopping...")
+        return True
+
     def check_sequence_gap(self, segment: TSegment) -> None:
         size = segment.num - self.sequence
         if size > 0:
@@ -219,31 +258,47 @@
             warning = "This is unsupported and will result in incoherent 
output data."
             log.warning(f"{msg}{warning}")
 
-    def iter_segments(self) -> Generator[TSegment, None, None]:
+    def iter_segments(self) -> Generator[TSegment, bool, None]:
         """
         The iterator that generates segments for the worker thread.
         Should be overridden by the inheriting class.
         """
 
         return
-        # noinspection PyUnreachableCode
+        # noinspection PyUnreachableCode,PyTypeChecker
         yield
 
     def run(self) -> None:
-        for segment in self.iter_segments():
-            if self.closed:  # pragma: no cover
-                break
+        self._queue_last = now()
 
-            self.check_sequence_gap(segment)
+        iter_segments = self.iter_segments()
+        queued: bool | None = None
 
-            self.sequence = segment.num + 1
-            self.duration += segment.duration
+        try:
+            while True:
+                if queued is None:
+                    segment = next(iter_segments)
+                else:
+                    segment = iter_segments.send(queued)
 
-            self.writer.put(segment)
+                if self.closed:  # pragma: no cover
+                    break
 
-            if self.duration >= self.duration_limit > 0.0:
-                log.info(f"Stopping stream early after 
{self.duration_limit:.2f}s")
-                break
+                log.debug("Queuing %r", segment)
+
+                self.check_sequence_gap(segment)
+
+                self.sequence = segment.num + 1
+                self.duration += segment.duration
+
+                self.writer.put(segment)
+                queued = True
+
+                if self.duration >= self.duration_limit > 0.0:
+                    log.info(f"Stopping stream early after 
{self.duration_limit:.2f}s")
+                    break
+        except StopIteration:
+            pass
 
         # End of stream, tells the writer to exit
         self.writer.put(None)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink/utils/dataclass.py 
new/streamlink-8.1.0/src/streamlink/utils/dataclass.py
--- old/streamlink-8.0.0/src/streamlink/utils/dataclass.py      1970-01-01 
01:00:00.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink/utils/dataclass.py      2025-12-14 
19:07:02.000000000 +0100
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+import dataclasses
+from datetime import datetime, timedelta
+from typing import TYPE_CHECKING, Callable, TypeAlias, TypeVar
+from weakref import WeakKeyDictionary
+
+
+try:
+    from typing import dataclass_transform  # type: ignore[attr-defined]
+except ImportError:  # pragma: no cover
+    from typing_extensions import dataclass_transform
+
+
+if TYPE_CHECKING:
+    from _typeshed import DataclassInstance
+
+    _Ta = TypeVar("_Ta")
+    _Tb = TypeVar("_Tb")
+
+    _TFormatters: TypeAlias = dict[type[_Tb], Callable[[_Tb], str]]
+
+
+_DEFAULT_FORMATTERS: _TFormatters = {
+    float: lambda n: f"{n:.3f}",
+    datetime: lambda dt: dt.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
+    timedelta: str,
+}
+_FORMATTER_REFS: WeakKeyDictionary[FormattedDataclass, tuple[_TFormatters, 
list[str]] | None] = WeakKeyDictionary()
+
+
+@dataclass_transform()
+class FormattedDataclass(type):
+    def __new__(
+        cls,
+        name,
+        bases,
+        namespace,
+        formatters: _TFormatters | None = None,
+        extra: list[str] | None = None,
+        **kwargs,
+    ) -> FormattedDataclass:
+        obj = super().__new__(cls, name, bases, namespace, **kwargs)
+
+        formatters = formatters or {}
+        extra = extra or []
+
+        frmttrs: _TFormatters = _DEFAULT_FORMATTERS.copy()
+        for base in bases:
+            if base_data := _FORMATTER_REFS.get(base):  # pragma: no branch
+                frmttrs.update(base_data[0])
+                extra = [*base_data[1], *extra]
+        frmttrs.update(formatters)
+
+        _FORMATTER_REFS[obj] = frmttrs, extra
+
+        def serialize(self: DataclassInstance) -> str:
+            items: list[str] = []
+
+            def add_item(key: str) -> None:
+                value = getattr(self, key, None)
+                if formatter := frmttrs.get(type(value)):
+                    value = formatter(value)
+                else:
+                    value = repr(value)
+                items.append(f"{key}={value}")
+
+            # noinspection PyDataclass
+            for fld in dataclasses.fields(self):
+                if fld.repr:
+                    add_item(fld.name)
+            for ex in extra:
+                add_item(ex)
+
+            return f"{self.__class__.__name__}({', '.join(items)})"
+
+        obj.__str__ = obj.__repr__ = serialize  # type: ignore[assignment]
+
+        return obj
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink.egg-info/PKG-INFO 
new/streamlink-8.1.0/src/streamlink.egg-info/PKG-INFO
--- old/streamlink-8.0.0/src/streamlink.egg-info/PKG-INFO       2025-11-11 
17:12:14.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink.egg-info/PKG-INFO       2025-12-14 
19:07:27.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: streamlink
-Version: 8.0.0
+Version: 8.1.0
 Summary: Streamlink is a command-line utility that extracts streams from 
various services and pipes them into a video player of choice.
 Author: Streamlink
 Author-email: [email protected]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink.egg-info/SOURCES.txt 
new/streamlink-8.1.0/src/streamlink.egg-info/SOURCES.txt
--- old/streamlink-8.0.0/src/streamlink.egg-info/SOURCES.txt    2025-11-11 
17:12:14.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink.egg-info/SOURCES.txt    2025-12-14 
19:07:27.000000000 +0100
@@ -271,6 +271,7 @@
 src/streamlink/utils/cache.py
 src/streamlink/utils/crypto.py
 src/streamlink/utils/data.py
+src/streamlink/utils/dataclass.py
 src/streamlink/utils/formatter.py
 src/streamlink/utils/l10n.py
 src/streamlink/utils/module.py
@@ -599,6 +600,7 @@
 tests/utils/test_cache.py
 tests/utils/test_crypto.py
 tests/utils/test_data.py
+tests/utils/test_dataclass.py
 tests/utils/test_formatter.py
 tests/utils/test_l10n.py
 tests/utils/test_module.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/src/streamlink_cli/argparser.py 
new/streamlink-8.1.0/src/streamlink_cli/argparser.py
--- old/streamlink-8.0.0/src/streamlink_cli/argparser.py        2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/src/streamlink_cli/argparser.py        2025-12-14 
19:07:02.000000000 +0100
@@ -1018,6 +1018,28 @@
         """,
     )
     transport.add_argument(
+        "--stream-segmented-queue-deadline",
+        metavar="FACTOR",
+        type=num(float, ge=0.0),
+        help="""
+            A multiplication factor of the time frame in which new segments 
must be queued in order to prevent playback issues
+            due to lack of video/audio data. If this segment-queue-deadline 
has not been met, the stream will be stopped early.
+
+            The intention of this segment-queue-deadline is to be able to stop 
early when the end of a stream is not announced
+            by the server, so Streamlink doesn't have to wait until a buffer 
read-timeout occurs. See --stream-timeout.
+
+            The base time this multiplication factor is applied to depends on 
the specific
+            stream implementation and the respective values returned by the 
streaming server.
+            This deadline check is done after trying to fetch new data.
+
+            Set to ``0`` to disable.
+
+            Default is 3.0.
+
+            By default, wait three times as long for new segments to be made 
available as the server's advertised time frame.
+        """,
+    )
+    transport.add_argument(
         "--stream-timeout",
         type=num(float, gt=0),
         metavar="TIMEOUT",
@@ -1094,19 +1116,9 @@
     transport_hls.add_argument(
         "--hls-segment-queue-threshold",
         metavar="FACTOR",
-        type=num(float, ge=0),
+        type=num(float, ge=0.0),
         help="""
-            The multiplication factor of the HLS playlist's target duration 
after which the stream will be stopped early
-            if no new segments were queued after refreshing the playlist 
(multiple times). The target duration defines the
-            maximum duration a single segment can have, meaning new segments 
must be available during this time frame,
-            otherwise playback issues can occur.
-
-            The intention of this queue threshold is to be able to stop early 
when the end of a stream doesn't get
-            announced by the server, so Streamlink doesn't have to wait until 
a read-timeout occurs. See --stream-timeout.
-
-            Set to ``0`` to disable.
-
-            Default is 3.
+            Deprecated in favor of --stream-segmented-queue-deadline.
         """,
     )
     transport_hls.add_argument(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/tests/stream/dash/test_dash.py 
new/streamlink-8.1.0/tests/stream/dash/test_dash.py
--- old/streamlink-8.0.0/tests/stream/dash/test_dash.py 2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/tests/stream/dash/test_dash.py 2025-12-14 
19:07:02.000000000 +0100
@@ -2,7 +2,7 @@
 
 from contextlib import nullcontext
 from datetime import datetime, timezone
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Generator, Iterator
 from unittest.mock import ANY, Mock, call
 
 import freezegun
@@ -486,6 +486,19 @@
     def worker(self, reader: DASHStreamReader):
         return DASHStreamWorker(reader)
 
+    @staticmethod
+    def _iter_segments(iter_segments: Generator[DASHSegment, bool, None]) -> 
Iterator[DASHSegment]:
+        queued: bool | None = None
+        try:
+            while True:
+                if queued is None:
+                    yield next(iter_segments)
+                else:
+                    yield iter_segments.send(queued)
+                queued = True
+        except StopIteration:
+            pass
+
     def test_dynamic_reload(
         self,
         monkeypatch: pytest.MonkeyPatch,
@@ -502,7 +515,7 @@
         mpd.type = "dynamic"
         monkeypatch.setattr("streamlink.stream.dash.dash.MPD", lambda *args, 
**kwargs: mpd)
 
-        segment_iter = worker.iter_segments()
+        segment_iter = self._iter_segments(worker.iter_segments())
 
         def next_segments(num):
             items = []
@@ -546,7 +559,7 @@
         mpd.type = "static"
 
         representation.segments.return_value = segments
-        assert list(worker.iter_segments()) == segments
+        assert list(self._iter_segments(worker.iter_segments())) == segments
         assert representation.segments.call_args_list == [call(sequence=-1, 
init=True, timestamp=timestamp)]
         assert worker._wait.is_set()
 
@@ -574,7 +587,7 @@
         mpd.periods[0].duration.total_seconds.return_value = period_duration
 
         representation.segments.return_value = segments
-        assert list(worker.iter_segments()) == segments
+        assert list(self._iter_segments(worker.iter_segments())) == segments
         assert representation.segments.call_args_list == [call(sequence=-1, 
init=True, timestamp=timestamp)]
         assert mock_wait.call_args_list == [call(5)]
         assert worker._wait.is_set()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/tests/stream/hls/test_hls.py 
new/streamlink-8.1.0/tests/stream/hls/test_hls.py
--- old/streamlink-8.0.0/tests/stream/hls/test_hls.py   2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/tests/stream/hls/test_hls.py   2025-12-14 
19:07:02.000000000 +0100
@@ -319,7 +319,7 @@
     def get_session(self, options=None, *args, **kwargs):
         return super().get_session({**self.OPTIONS, **(options or {})}, *args, 
**kwargs)
 
-    def test_segment_queue_timing_threshold_reached(self) -> None:
+    def test_queue_deadline_reached(self) -> None:
         self.subject(
             start=False,
             playlists=[
@@ -333,13 +333,13 @@
 
         with (
             freezegun.freeze_time(EPOCH) as frozen_time,
-            patch("streamlink.stream.hls.hls.log") as mock_log,
+            patch("streamlink.stream.segmented.segmented.log") as mock_log,
         ):
             self.start()
 
             assert worker.handshake_reload.wait_ready(1), "Loads playlist for 
the first time"
             assert worker.sequence == -1, "Initial sequence number"
-            assert worker.playlist_sequence_last == EPOCH, "Sets the initial 
last queue time"
+            assert worker._queue_last == EPOCH, "Sets the initial last queue 
time"
 
             # first playlist reload has taken one second
             frozen_time.tick(ONE_SECOND)
@@ -347,7 +347,7 @@
 
             assert worker.handshake_wait.wait_ready(1), "Arrives at wait() 
call #1"
             assert worker.sequence == 1, "Updates the sequence number"
-            assert worker.playlist_sequence_last == EPOCH + ONE_SECOND, 
"Updates the last queue time"
+            assert worker._queue_last == EPOCH + ONE_SECOND, "Updates the last 
queue time"
             assert worker.playlist_targetduration == 5.0
 
             # trigger next reload when the target duration has passed
@@ -357,7 +357,7 @@
 
             assert worker.handshake_wait.wait_ready(1), "Arrives at wait() 
call #2"
             assert worker.sequence == 2, "Updates the sequence number again"
-            assert worker.playlist_sequence_last == EPOCH + ONE_SECOND + 
targetduration, "Updates the last queue time again"
+            assert worker._queue_last == EPOCH + ONE_SECOND + targetduration, 
"Updates the last queue time again"
             assert worker.playlist_targetduration == 5.0
 
             for num in range(3, 6):
@@ -368,7 +368,7 @@
 
                 assert worker.handshake_wait.wait_ready(1), f"Arrives at 
wait() call #{num}"
                 assert worker.sequence == 2, "Sequence number is unchanged"
-                assert worker.playlist_sequence_last == EPOCH + ONE_SECOND + 
targetduration, "Last queue time is unchanged"
+                assert worker._queue_last == EPOCH + ONE_SECOND + 
targetduration, "Last queue time is unchanged"
                 assert worker.playlist_targetduration == 5.0
 
             assert mock_log.warning.call_args_list == []
@@ -381,12 +381,12 @@
             self.await_read(read_all=True)
             self.await_close(1)
 
-            assert mock_log.warning.call_args_list == [call("No new segments 
in playlist for more than 15.00s. Stopping...")]
+            assert mock_log.warning.call_args_list == [call("No new segments 
for more than 15.00s. Stopping...")]
 
-    def test_segment_queue_timing_threshold_reached_ignored(self) -> None:
+    def test_queue_deadline_reached_ignored(self) -> None:
         segments = self.subject(
             start=False,
-            options={"hls-segment-queue-threshold": 0},
+            options={"stream-segmented-queue-deadline": 0.0},
             playlists=[
                # no EXT-X-ENDLIST, last mocked playlist response will be 
repeated forever 
                 Playlist(0, targetduration=5, segments=[Segment(0)]),
@@ -400,7 +400,7 @@
 
             assert worker.handshake_reload.wait_ready(1), "Loads playlist for 
the first time"
             assert worker.sequence == -1, "Initial sequence number"
-            assert worker.playlist_sequence_last == EPOCH, "Sets the initial 
last queue time"
+            assert worker._queue_last == EPOCH, "Sets the initial last queue 
time"
 
             # first playlist reload has taken one second
             frozen_time.tick(ONE_SECOND)
@@ -408,7 +408,7 @@
 
             assert worker.handshake_wait.wait_ready(1), "Arrives at first 
wait() call"
             assert worker.sequence == 1, "Updates the sequence number"
-            assert worker.playlist_sequence_last == EPOCH + ONE_SECOND, 
"Updates the last queue time"
+            assert worker._queue_last == EPOCH + ONE_SECOND, "Updates the last 
queue time"
             assert worker.playlist_targetduration == 5.0
             assert self.await_read() == self.content(segments)
 
@@ -420,7 +420,7 @@
 
                 assert worker.handshake_wait.wait_ready(1), f"Arrives at 
wait() #{num + 1}"
                 assert worker.sequence == 1, "Sequence number is unchanged"
-                assert worker.playlist_sequence_last == EPOCH + ONE_SECOND, 
"Last queue time is unchanged"
+                assert worker._queue_last == EPOCH + ONE_SECOND, "Last queue 
time is unchanged"
 
         assert self.thread.data == [], "No new data"
         assert worker.is_alive()
@@ -428,7 +428,7 @@
         # make stream end gracefully to avoid any unnecessary thread blocking
         self.thread.reader.writer.put(None)
 
-    def test_segment_queue_timing_threshold_reached_min(self) -> None:
+    def test_queue_deadline_reached_min(self) -> None:
         self.subject(
             start=False,
             playlists=[
@@ -441,13 +441,13 @@
 
         with (
             freezegun.freeze_time(EPOCH) as frozen_time,
-            patch("streamlink.stream.hls.hls.log") as mock_log,
+            patch("streamlink.stream.segmented.segmented.log") as mock_log,
         ):
             self.start()
 
             assert worker.handshake_reload.wait_ready(1), "Loads playlist for 
the first time"
             assert worker.sequence == -1, "Initial sequence number"
-            assert worker.playlist_sequence_last == EPOCH, "Sets the initial 
last queue time"
+            assert worker._queue_last == EPOCH, "Sets the initial last queue 
time"
 
             # first playlist reload has taken one second
             frozen_time.tick(ONE_SECOND)
@@ -455,7 +455,7 @@
 
             assert worker.handshake_wait.wait_ready(1), "Arrives at wait() 
call #1"
             assert worker.sequence == 1, "Updates the sequence number"
-            assert worker.playlist_sequence_last == EPOCH + ONE_SECOND, 
"Updates the last queue time"
+            assert worker._queue_last == EPOCH + ONE_SECOND, "Updates the last 
queue time"
             assert worker.playlist_targetduration == 1.0
 
             for num in range(2, 7):
@@ -466,7 +466,7 @@
 
                 assert worker.handshake_wait.wait_ready(1), f"Arrives at 
wait() call #{num}"
                 assert worker.sequence == 1, "Sequence number is unchanged"
-                assert worker.playlist_sequence_last == EPOCH + ONE_SECOND, 
"Last queue time is unchanged"
+                assert worker._queue_last == EPOCH + ONE_SECOND, "Last queue 
time is unchanged"
                 assert worker.playlist_targetduration == 1.0
 
             assert mock_log.warning.call_args_list == []
@@ -484,7 +484,7 @@
             self.thread.close()
             self.thread.join(1)
 
-            assert mock_log.warning.call_args_list == [call("No new segments 
in playlist for more than 5.00s. Stopping...")]
+            assert mock_log.warning.call_args_list == [call("No new segments 
for more than 5.00s. Stopping...")]
 
     def test_playlist_reload_offset(self) -> None:
         segments = self.subject(
@@ -641,6 +641,50 @@
         assert worker.duration_limit == expected
         assert [(record.category, str(record.message)) for record in 
recwarn.list] == warning
 
+    @pytest.mark.parametrize(
+        ("session", "expected", "warning"),
+        [
+            pytest.param(
+                {},
+                3.0,
+                [],
+                id="default-value",
+            ),
+            pytest.param(
+                {"stream-segmented-queue-deadline": 5.0},
+                5.0,
+                [],
+                id="stream-segmented-queue-deadline",
+            ),
+            pytest.param(
+                {"hls-segment-queue-threshold": 5.0},
+                5.0,
+                [
+                    (
+                        StreamlinkDeprecationWarning,
+                        "`hls-segment-queue-threshold` has been deprecated in 
favor of "
+                        + "the `stream-segmented-queue-deadline` option",
+                    ),
+                ],
+                id="hls-segment-queue-threshold",
+            ),
+        ],
+        indirect=["session"],
+    )
+    def test_queue_deadline(
+        self,
+        recwarn: pytest.WarningsRecorder,
+        session: Streamlink,
+        expected: float,
+        warning: list,
+    ):
+        stream = HLSStream(session, "https://foo/";)
+        reader = HLSStreamReader(stream)
+        worker = HLSStreamWorker(reader)
+
+        assert worker._queue_deadline_factor == expected
+        assert [(record.category, str(record.message)) for record in 
recwarn.list] == warning
+
 
 class TestHLSStreamWorkerPlaylistSequenceWarning:
     warns = pytest.mark.parametrize(
@@ -1217,7 +1261,7 @@
 
 @patch("streamlink.stream.hls.hls.log")
 @patch("streamlink.stream.hls.hls.HLSStreamWorker.wait", 
Mock(return_value=True))
-@patch("streamlink.stream.hls.hls.HLSStreamWorker._segment_queue_timing_threshold_reached",
 Mock(return_value=False))
+@patch("streamlink.stream.hls.hls.HLSStreamWorker.check_queue_deadline", 
Mock(return_value=False))
 class TestHlsPlaylistParseErrors(TestMixinStreamHLS, unittest.TestCase):
     __stream__ = EventedWriterHLSStream
 
@@ -1250,8 +1294,8 @@
         self.close()
         self.await_close()
         assert mock_log.warning.mock_calls == [
-            call("Failed to reload playlist: Missing #EXTM3U header"),
-            call("Failed to reload playlist: Missing #EXTM3U header"),
+            call("Reloading failed: Missing #EXTM3U header"),
+            call("Reloading failed: Missing #EXTM3U header"),
         ]
 
     @patch("streamlink.stream.hls.hls.parse_m3u8", 
Mock(return_value=FakePlaylist(is_master=True)))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/tests/stream/test_segmented.py 
new/streamlink-8.1.0/tests/stream/test_segmented.py
--- old/streamlink-8.0.0/tests/stream/test_segmented.py 2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/tests/stream/test_segmented.py 2025-12-14 
19:07:02.000000000 +0100
@@ -1,5 +1,40 @@
+import pytest
+
+from streamlink.stream.segmented.segment import Segment
 from streamlink.stream.segmented.segmented import log
 
 
 def test_logger_name():
     assert log.name == "streamlink.stream.segmented"
+
+
[email protected](
+    ("data", "expected"),
+    [
+        pytest.param(
+            dict(
+                num=1,
+                init=False,
+                discontinuity=False,
+                uri="/path/to/segment.ts?query#fragment",
+                duration=4.0,
+            ),
+            "Segment(num=1, init=False, discontinuity=False, duration=4.000, 
fileext='ts')",
+            id="with-fileext",
+        ),
+        pytest.param(
+            dict(
+                num=1,
+                init=False,
+                discontinuity=False,
+                uri="/path/to/segment.other?query#fragment",
+                duration=4.0,
+            ),
+            "Segment(num=1, init=False, discontinuity=False, duration=4.000, 
fileext=None)",
+            id="without-fileext",
+        ),
+    ],
+)
+def test_segment_serialization(data: dict, expected: str):
+    segment = Segment(**data)
+    assert repr(segment) == expected
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/tests/utils/test_dataclass.py 
new/streamlink-8.1.0/tests/utils/test_dataclass.py
--- old/streamlink-8.0.0/tests/utils/test_dataclass.py  1970-01-01 
01:00:00.000000000 +0100
+++ new/streamlink-8.1.0/tests/utils/test_dataclass.py  2025-12-14 
19:07:02.000000000 +0100
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+import math
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING
+
+import pytest
+
+from streamlink.utils.dataclass import FormattedDataclass
+from streamlink.utils.times import fromtimestamp
+
+
+if TYPE_CHECKING:
+    from datetime import datetime, timedelta
+
+
[email protected](scope="module")
+def baseclass() -> type:
+    @dataclass(kw_only=True)
+    class Foo(
+        metaclass=FormattedDataclass,
+        formatters={
+            str: lambda s: f"{s.upper()!r}",
+        },
+    ):
+        foo: str
+        bar: int
+        baz: float
+        qux: str = field(repr=False)
+        abc: datetime = field(default=fromtimestamp(0.5))
+        xyz: timedelta = field(default=fromtimestamp(1.5) - fromtimestamp(0))
+
+    return Foo
+
+
[email protected](scope="module")
+def subclass(baseclass: type) -> type:
+    @dataclass(kw_only=True)
+    class Bar(
+        baseclass,
+        metaclass=FormattedDataclass,
+        formatters={
+            float: lambda x: f"{(x * 2.0):.3f}",
+        },
+        extra=["oof"],
+    ):
+        @property
+        def oof(self) -> str:
+            return self.foo[::-1]
+
+    return Bar
+
+
[email protected](scope="module")
+def subsubclass(subclass: type) -> type:
+    @dataclass(kw_only=True)
+    class Baz(subclass, metaclass=FormattedDataclass, extra=["barbar"]):
+        @property
+        def barbar(self) -> int:
+            return self.bar * self.bar
+
+    return Baz
+
+
[email protected](
+    ("fixture", "expected"),
+    [
+        pytest.param(
+            "baseclass",
+            "Foo(foo='FOO', bar=123, baz=3.142, 
abc=1970-01-01T00:00:00.500000Z, xyz=0:00:01.500000)",
+            id="baseclass",
+        ),
+        pytest.param(
+            "subclass",
+            "Bar(foo='FOO', bar=123, baz=6.283, 
abc=1970-01-01T00:00:00.500000Z, xyz=0:00:01.500000, oof='OOF')",
+            id="subclass",
+        ),
+        pytest.param(
+            "subsubclass",
+            "Baz(foo='FOO', bar=123, baz=6.283, 
abc=1970-01-01T00:00:00.500000Z, xyz=0:00:01.500000, oof='OOF', barbar=15129)",
+            id="subsubclass",
+        ),
+    ],
+)
+def test_serialize(request: pytest.FixtureRequest, fixture: str, expected: 
str):
+    dc = request.getfixturevalue(fixture)
+    item = dc(
+        foo="foo",
+        bar=123,
+        baz=math.pi,
+        qux="qux",
+    )
+    assert str(item) == repr(item) == expected
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/streamlink-8.0.0/tests/webbrowser/cdp/test_client.py 
new/streamlink-8.1.0/tests/webbrowser/cdp/test_client.py
--- old/streamlink-8.0.0/tests/webbrowser/cdp/test_client.py    2025-11-11 
17:11:49.000000000 +0100
+++ new/streamlink-8.1.0/tests/webbrowser/cdp/test_client.py    2025-12-14 
19:07:02.000000000 +0100
@@ -611,10 +611,10 @@
         mock_on_fetch_request_paused = AsyncMock()
         monkeypatch.setattr(cdp_client_session, "_on_fetch_request_paused", 
mock_on_fetch_request_paused)
 
-        for _on_request in on_request:
-            cdp_client_session.add_request_handler(async_handler(), 
on_request=_on_request)
-            cdp_client_session.add_request_handler(async_handler(), 
on_request=_on_request)
-            cdp_client_session.add_request_handler(async_handler(), 
url_pattern="http://foo", on_request=_on_request)
+        for on_request_item in on_request:
+            cdp_client_session.add_request_handler(async_handler(), 
on_request=on_request_item)
+            cdp_client_session.add_request_handler(async_handler(), 
on_request=on_request_item)
+            cdp_client_session.add_request_handler(async_handler(), 
url_pattern="http://foo", on_request=on_request_item)
 
         async def navigate():
            async with cdp_client_session.navigate("https://foo"):

Reply via email to