Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package sacad for openSUSE:Factory checked 
in at 2024-08-13 13:25:22
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/sacad (Old)
 and      /work/SRC/openSUSE:Factory/.sacad.new.7232 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "sacad"

Tue Aug 13 13:25:22 2024 rev:12 rq:1193648 version:2.8.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/sacad/sacad.changes      2023-10-25 
18:04:42.974263997 +0200
+++ /work/SRC/openSUSE:Factory/.sacad.new.7232/sacad.changes    2024-08-13 
13:25:46.412572711 +0200
@@ -1,0 +2,10 @@
+Tue Aug 13 06:12:30 UTC 2024 - Dirk Müller <dmuel...@suse.com>
+
+- update to 2.8.0:
+  * remove Amazon sources
+  * fix: remove use of imghdr
+  * boost exact album matches ranking for Deezer source
+  * fix: path sanitization removing spaces
+  * feat: add option to convert progressive JPEG to baseline
+
+-------------------------------------------------------------------

Old:
----
  sacad-2.7.5.tar.gz

New:
----
  sacad-2.8.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ sacad.spec ++++++
--- /var/tmp/diff_new_pack.zkfWV8/_old  2024-08-13 13:25:46.996597126 +0200
+++ /var/tmp/diff_new_pack.zkfWV8/_new  2024-08-13 13:25:47.000597293 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package sacad
 #
-# Copyright (c) 2023 SUSE LLC
+# Copyright (c) 2024 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -17,7 +17,7 @@
 
 
 Name:           sacad
-Version:        2.7.5
+Version:        2.8.0
 Release:        0
 Summary:        Search and download music album covers
 License:        MPL-2.0

++++++ sacad-2.7.5.tar.gz -> sacad-2.8.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/.github/workflows/ci.yml 
new/sacad-2.8.0/.github/workflows/ci.yml
--- old/sacad-2.7.5/.github/workflows/ci.yml    2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/.github/workflows/ci.yml    2024-07-28 22:16:52.000000000 
+0200
@@ -12,7 +12,7 @@
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
 
     steps:
       - uses: actions/checkout@v2
@@ -21,10 +21,6 @@
           python-version: ${{matrix.python-version}}
       - run: |
           sudo apt-get install libxml2-dev libxslt1-dev
-          pip install -U pip
-          pip install coveralls
+          pip install -U requests pip setuptools
           pip install -r requirements.txt
-      - run: coverage run --source=sacad setup.py test
-      - run: coveralls --service=github
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - run: python -m unittest discover -v .
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/.pre-commit-config.yaml 
new/sacad-2.8.0/.pre-commit-config.yaml
--- old/sacad-2.7.5/.pre-commit-config.yaml     2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/.pre-commit-config.yaml     2024-07-28 22:16:52.000000000 
+0200
@@ -1,7 +1,7 @@
 # https://pre-commit.com
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.6.0
     hooks:
       - id: check-added-large-files
       - id: check-case-conflict
@@ -24,7 +24,7 @@
           - --markdown-linebreak-ext=md
 
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.971
+    rev: v1.10.0
     hooks:
       - id: mypy
         args:
@@ -32,48 +32,18 @@
         additional_dependencies:
           - types-requests
 
-  - repo: https://github.com/pycqa/flake8
-    rev: 3.9.2
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.4.4
     hooks:
-      - id: flake8
-        args:
-          # https://www.flake8rules.com/
-          # E203 and W503 and are not compatible with black,
-          # see 
https://black.readthedocs.io/en/stable/compatible_configs.html#flake8
-          - --extend-ignore=E203,W503
-          - --max-complexity=20
-          - --max-line-length=120
-
-  - repo: https://github.com/pycqa/pydocstyle
-    rev: 6.1.1
-    hooks:
-      - id: pydocstyle
-        args:
-          # http://www.pydocstyle.org/en/5.1.1/error_codes.html
-          - --ignore=D105,D107,D202,D210,D211,D212
+      - id: ruff
+        args: [--fix, --exit-non-zero-on-fix]
+      - id: ruff-format
 
   - repo: https://github.com/shellcheck-py/shellcheck-py
-    rev: v0.8.0.4
+    rev: v0.10.0.1
     hooks:
       - id: shellcheck
 
-  - repo: https://github.com/pre-commit/mirrors-isort
-    rev: v5.10.1
-    hooks:
-      - id: isort
-        args:
-          - -l=120
-          # black compatibility, see 
https://black.readthedocs.io/en/stable/compatible_configs.html#isort
-          - --profile=black
-
-  - repo: https://github.com/psf/black
-    rev: 22.6.0
-    hooks:
-      - id: black
-        language_version: python3
-        args:
-          - --line-length=120
-
   - repo: https://github.com/pre-commit/mirrors-prettier
     rev: v2.7.1
     hooks:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/README.md new/sacad-2.8.0/README.md
--- old/sacad-2.7.5/README.md   2023-06-13 22:55:01.000000000 +0200
+++ new/sacad-2.8.0/README.md   2024-07-28 22:16:52.000000000 +0200
@@ -4,9 +4,7 @@
 
 [![PyPI 
version](https://img.shields.io/pypi/v/sacad.svg?style=flat)](https://pypi.python.org/pypi/sacad/)
 [![AUR 
version](https://img.shields.io/aur/version/sacad.svg?style=flat)](https://aur.archlinux.org/packages/sacad/)
-[![Tests 
status](https://github.com/desbma/sacad/actions/workflows/ci.yml/badge.svg)](https://github.com/desbma/sacad/actions)
-[![Coverage](https://img.shields.io/coveralls/desbma/sacad/master.svg?style=flat)](https://coveralls.io/github/desbma/sacad?branch=master)
-[![Lines of 
code](https://tokei.rs/b1/github/desbma/sacad)](https://github.com/desbma/sacad)
+[![CI 
status](https://img.shields.io/github/actions/workflow/status/desbma/sacad/ci.yml)](https://github.com/desbma/sacad/actions)
 [![Supported Python 
versions](https://img.shields.io/pypi/pyversions/sacad.svg?style=flat)](https://pypi.python.org/pypi/sacad/)
 
[![License](https://img.shields.io/github/license/desbma/sacad.svg?style=flat)](https://github.com/desbma/sacad/blob/master/LICENSE)
 
@@ -20,8 +18,7 @@
 - Support JPEG and PNG formats
 - Customizable output: save image along with the audio files / in a different 
directory named by artist/album / embed cover in audio files...
 - Currently support the following cover sources:
-  - Amazon CD (.com, .ca, .cn, .fr, .de, .co.jp and .co.uk variants)
-  - Amazon digital music
+  - ~~Amazon CD (.com, .ca, .cn, .fr, .de, .co.jp and .co.uk variants) & 
Amazon digital music~~ (removed, too unreliable)
   - ~~CoverLib~~ (site is dead)
   - Deezer
   - Discogs
@@ -47,7 +44,7 @@
 
 ## Installation
 
-SACAD requires [Python](https://www.python.org/downloads/) >= 3.7.
+SACAD requires [Python](https://www.python.org/downloads/) >= 3.8.
 
 ### Standalone Windows executable
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/freeze.py new/sacad-2.8.0/freeze.py
--- old/sacad-2.7.5/freeze.py   2023-06-13 22:55:01.000000000 +0200
+++ new/sacad-2.8.0/freeze.py   2024-07-28 22:16:52.000000000 +0200
@@ -1,4 +1,4 @@
-""" Package freezing for Windows. """
+"""Package freezing for Windows."""
 
 import os
 import re
@@ -19,7 +19,7 @@
     packages=["sacad"],
     options={"build_exe": build_exe_options},
     executables=[
-        Executable(os.path.join("sacad", "__main__.py"), 
targetName="sacad.exe"),
-        Executable(os.path.join("sacad", "recurse.py"), 
targetName="sacad_r.exe"),
+        Executable(os.path.join("sacad", "__main__.py"), 
target_name="sacad.exe"),
+        Executable(os.path.join("sacad", "recurse.py"), 
target_name="sacad_r.exe"),
     ],
 )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/pyproject.toml 
new/sacad-2.8.0/pyproject.toml
--- old/sacad-2.7.5/pyproject.toml      1970-01-01 01:00:00.000000000 +0100
+++ new/sacad-2.8.0/pyproject.toml      2024-07-28 22:16:52.000000000 +0200
@@ -0,0 +1,2 @@
+[tool.ruff]
+line-length = 120
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/__init__.py 
new/sacad-2.8.0/sacad/__init__.py
--- old/sacad-2.7.5/sacad/__init__.py   2023-06-13 22:55:01.000000000 +0200
+++ new/sacad-2.8.0/sacad/__init__.py   2024-07-28 22:16:52.000000000 +0200
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 
-""" Smart Automatic Cover Art Downloader : search and download music album 
covers. """
+"""Smart Automatic Cover Art Downloader : search and download music album 
covers."""
 
-__version__ = "2.7.5"
+__version__ = "2.8.0"
 __author__ = "desbma"
 __license__ = "MPL 2.0"
 
@@ -39,9 +39,9 @@
     out_filepath: str,
     *,
     size_tolerance_prct: int,
-    amazon_tlds: Sequence[str] = (),
     source_classes: Optional[Sequence[Any]] = None,
     preserve_format: bool = False,
+    convert_progressive_jpeg: bool = False,
 ) -> bool:
     """Search and download a cover, return True if success, False instead."""
     logger = logging.getLogger("Main")
@@ -52,9 +52,6 @@
         source_classes = tuple(COVER_SOURCE_CLASSES.values())
     assert source_classes is not None  # makes MyPy chill
     cover_sources = [cls(*source_args) for cls in source_classes]
-    if sources.AmazonCdCoverSource in source_classes:
-        for tld in amazon_tlds:
-            cover_sources.append(sources.AmazonCdCoverSource(*source_args, 
tld=tld))
 
     # schedule search work
     search_futures = []
@@ -100,6 +97,7 @@
                 size_tolerance_prct,
                 out_filepath,
                 preserve_format=preserve_format,
+                convert_progressive_jpeg=convert_progressive_jpeg,
             )
         except Exception as e:
             logger.warning(f"Download of {result} failed: 
{e.__class__.__qualname__} {e}")
@@ -130,15 +128,6 @@
                 if available""",
     )
     arg_parser.add_argument(
-        "-a",
-        "--amazon-sites",
-        nargs="+",
-        choices=sources.AmazonCdCoverSource.TLDS[1:],
-        default=(),
-        dest="amazon_tlds",
-        help="""Amazon site TLDs to use as search source, in addition to 
amazon.com""",
-    )
-    arg_parser.add_argument(
         "-s",
         "--cover-sources",
         choices=tuple(COVER_SOURCE_CLASSES.keys()),
@@ -153,6 +142,12 @@
         default=False,
         help="Preserve source image format if possible. Target format will 
still be prefered when sorting results.",
     )
+    arg_parser.add_argument(
+        "--convert-progressive-jpeg",
+        action="store_true",
+        default=False,
+        help="Convert progressive JPEG to baseline if needed. May result in 
bigger files and loss of quality.",
+    )
 
 
 def cl_main() -> None:
@@ -219,9 +214,9 @@
         args.size,
         args.out_filepath,
         size_tolerance_prct=args.size_tolerance_prct,
-        amazon_tlds=args.amazon_tlds,
         source_classes=args.cover_sources,
         preserve_format=args.preserve_format,
+        convert_progressive_jpeg=args.convert_progressive_jpeg,
     )
     future = asyncio.ensure_future(coroutine)
     asyncio.get_event_loop().run_until_complete(future)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/__main__.py 
new/sacad-2.8.0/sacad/__main__.py
--- old/sacad-2.7.5/sacad/__main__.py   2023-06-13 22:55:01.000000000 +0200
+++ new/sacad-2.8.0/sacad/__main__.py   2024-07-28 22:16:52.000000000 +0200
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 
-""" Command line entry point for sacad program. """
+"""Command line entry point for sacad program."""
 
 import sacad
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/colored_logging.py 
new/sacad-2.8.0/sacad/colored_logging.py
--- old/sacad-2.7.5/sacad/colored_logging.py    2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/colored_logging.py    2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" Formatter for the logging module, coloring terminal output according to 
error criticity. """
+"""Formatter for the logging module, coloring terminal output according to 
error criticity."""
 
 import enum
 import logging
@@ -11,7 +11,6 @@
 
 
 class ColoredFormatter(logging.Formatter):
-
     """Logging formatter coloring terminal output according to error 
criticity."""
 
     def format(self, record):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/cover.py 
new/sacad-2.8.0/sacad/cover.py
--- old/sacad-2.7.5/sacad/cover.py      2023-06-13 22:55:01.000000000 +0200
+++ new/sacad-2.8.0/sacad/cover.py      2024-07-28 22:16:52.000000000 +0200
@@ -1,8 +1,7 @@
-""" Sacad album cover. """
+"""Sacad album cover."""
 
 import asyncio
 import enum
-import imghdr
 import io
 import itertools
 import logging
@@ -13,6 +12,7 @@
 import pickle
 import shutil
 import urllib.parse
+from typing import Dict
 
 import appdirs
 import bitarray
@@ -35,7 +35,6 @@
 
 
 class CoverSourceQuality(enum.IntFlag):
-
     """Flags to describe cover source quality."""
 
     # whether or not the search query matching is fuzzy (does a typo return 
results ?)
@@ -61,7 +60,6 @@
 
 
 class CoverImageMetadata(enum.IntFlag):
-
     """Flags to describe image metadata."""
 
     NONE = 0
@@ -83,7 +81,6 @@
 
 
 class CoverSourceResult:
-
     """Cover image returned by a source, candidate to be downloaded."""
 
     METADATA_PEEK_SIZE_INCREMENT = 2**12
@@ -166,20 +163,33 @@
             s += f" [x{len(self.urls)}]"
         return s
 
-    async def get(self, target_format, target_size, size_tolerance_prct, 
out_filepath, *, preserve_format=False):
+    async def get(
+        self,
+        target_format: CoverImageFormat,
+        target_size: int,
+        size_tolerance_prct: float,
+        out_filepath: str,
+        *,
+        preserve_format: bool = False,
+        convert_progressive_jpeg: bool = False,
+    ) -> None:
         """Download cover and process it."""
         images_data = []
         for i, url in enumerate(self.urls):
             # download
             logging.getLogger("Cover").info(f"Downloading cover {url!r} (part 
{i + 1}/{len(self.urls)})...")
-            headers = {}
+            headers: Dict[str, str] = {}
             self.source.updateHttpHeaders(headers)
 
             async def pre_cache_callback(img_data):
                 return await __class__.crunch(img_data, self.format)
 
             store_in_cache_callback, image_data = await self.source.http.query(
-                url, headers=headers, verify=False, 
cache=__class__.image_cache, pre_cache_callback=pre_cache_callback
+                url,
+                headers=headers,
+                verify=False,
+                cache=__class__.image_cache,  # type: ignore
+                pre_cache_callback=pre_cache_callback,
             )
 
             # store immediately in cache
@@ -193,14 +203,18 @@
             abs(max(self.size) - target_size) > target_size * 
size_tolerance_prct / 100
         )
         need_join = len(images_data) > 1
-        if (need_format_change and (not preserve_format)) or need_join or 
need_size_change:
+        need_post_process = (need_format_change and (not preserve_format)) or 
need_join or need_size_change
+        need_post_process = need_post_process or (
+            __class__.isProgressiveJpegData(images_data[0]) and 
convert_progressive_jpeg  # type: ignore
+        )
+        if need_post_process:
             # post process
             image_data = self.postProcess(
                 images_data, target_format if need_format_change else None, 
target_size if need_size_change else None
             )
 
             # crunch image again
-            image_data = await __class__.crunch(image_data, target_format)
+            image_data = await __class__.crunch(image_data, target_format)  # 
type: ignore
 
             format_changed = need_format_change
         else:
@@ -213,6 +227,16 @@
         with open(out_filepath, "wb") as file:
             file.write(image_data)
 
+    @staticmethod
+    def isProgressiveJpegData(data: bytes) -> bool:
+        """Return True if data is from a progressive JPEG."""
+        in_bytes = io.BytesIO(data)
+        try:
+            img = PIL.Image.open(in_bytes)
+            return bool(img.info["progressive"])
+        except Exception:
+            return False
+
     def postProcess(self, images_data, new_format, new_size):
         """
         Convert image binary data.
@@ -560,8 +584,7 @@
         try:
             img = PIL.Image.open(img_stream)
         except (IOError, OSError, RuntimeError):  # PIL.UnidentifiedImageError 
inherits from OSError
-            format = imghdr.what(None, h=img_data)
-            format = SUPPORTED_IMG_FORMATS.get(format, None)
+            pass
         else:
             format = img.format.lower()
             format = SUPPORTED_IMG_FORMATS.get(format, None)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/http_helpers.py 
new/sacad-2.8.0/sacad/http_helpers.py
--- old/sacad-2.7.5/sacad/http_helpers.py       2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/http_helpers.py       2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" Common HTTP code. """
+"""Common HTTP code."""
 
 import asyncio
 import logging
@@ -26,7 +26,6 @@
 
 
 class Http:
-
     """Async HTTP client code."""
 
     def __init__(
@@ -222,7 +221,6 @@
                         response_headers.update(response.headers)
 
                     break  # http retry loop
-
         except aiohttp.ClientResponseError as e:
             self.logger.debug(f"Probing {url!r} failed: 
{e.__class__.__qualname__} {e}")
             resp_ok = False
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/mkstemp_ctx.py 
new/sacad-2.8.0/sacad/mkstemp_ctx.py
--- old/sacad-2.7.5/sacad/mkstemp_ctx.py        2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/mkstemp_ctx.py        2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" Additions to the tempfile module. """
+"""Additions to the tempfile module."""
 
 import contextlib
 import os
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/rate_watcher.py 
new/sacad-2.8.0/sacad/rate_watcher.py
--- old/sacad-2.7.5/sacad/rate_watcher.py       2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/rate_watcher.py       2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" Provide a class with a context manager to help avoid overloading web 
servers. """
+"""Provide a class with a context manager to help avoid overloading web 
servers."""
 
 import asyncio
 import logging
@@ -10,7 +10,6 @@
 
 
 class AccessRateWatcher:
-
     """Access rate limiter, supporting concurrent access by threads and/or 
processes."""
 
     def __init__(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/recurse.py 
new/sacad-2.8.0/sacad/recurse.py
--- old/sacad-2.7.5/sacad/recurse.py    2023-06-13 22:55:01.000000000 +0200
+++ new/sacad-2.8.0/sacad/recurse.py    2024-07-28 22:16:52.000000000 +0200
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 
-""" Recursively search and download album covers for a music library. """
+"""Recursively search and download album covers for a music library."""
 
 import argparse
 import asyncio
@@ -36,7 +36,6 @@
 
 # TODO use a dataclasses.dataclass when Python < 3.7 is dropped
 class Work:
-
     """Represent a single search & download work item."""
 
     def __init__(self, cover_filepath, audio_filepaths, metadata):
@@ -167,7 +166,7 @@
     return r
 
 
-VALID_PATH_CHARS = frozenset(r"-_.()!#$%&'@^{}~" + string.ascii_letters + 
string.digits)
+VALID_PATH_CHARS = frozenset(r"-_.()!#$%&'@^{}~" + string.ascii_letters + 
string.digits + " ")
 
 
 def sanitize_for_path(s):
@@ -288,7 +287,6 @@
 def get_covers(work, args):
     """Get missing covers."""
     with contextlib.ExitStack() as cm:
-
         if args.cover_pattern == EMBEDDED_ALBUM_ART_SYMBOL:
             tmp_prefix = 
f"{os.path.splitext(os.path.basename(inspect.getfile(inspect.currentframe())))[0]}_"
             tmp_dir = 
cm.enter_context(tempfile.TemporaryDirectory(prefix=tmp_prefix))
@@ -358,9 +356,9 @@
                     args.size,
                     cover_filepath,
                     size_tolerance_prct=args.size_tolerance_prct,
-                    amazon_tlds=args.amazon_tlds,
                     source_classes=args.cover_sources,
                     preserve_format=args.preserve_format,
+                    convert_progressive_jpeg=args.convert_progressive_jpeg,
                 )
                 future = asyncio.ensure_future(coroutine)
                 futures[future] = cur_work
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/sources/__init__.py 
new/sacad-2.8.0/sacad/sources/__init__.py
--- old/sacad-2.7.5/sacad/sources/__init__.py   2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/sources/__init__.py   2024-07-28 22:16:52.000000000 
+0200
@@ -1,7 +1,5 @@
-""" SACAD cover sources. """
+"""SACAD cover sources."""
 
-from sacad.sources.amazoncd import AmazonCdCoverSource, 
AmazonCdCoverSourceResult  # noqa: F401
-from sacad.sources.amazondigital import AmazonDigitalCoverSource, 
AmazonDigitalCoverSourceResult  # noqa: F401
 from sacad.sources.deezer import DeezerCoverSource, DeezerCoverSourceResult  # 
noqa: F401
 from sacad.sources.discogs import DiscogsCoverSource, DiscogsCoverSourceResult 
 # noqa: F401
 from sacad.sources.itunes import ItunesCoverSource, ItunesCoverSourceResult  # 
noqa: F401
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/sources/amazonbase.py 
new/sacad-2.8.0/sacad/sources/amazonbase.py
--- old/sacad-2.7.5/sacad/sources/amazonbase.py 2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/sources/amazonbase.py 1970-01-01 01:00:00.000000000 
+0100
@@ -1,30 +0,0 @@
-""" Base class for Amazon cover sources. """
-
-from sacad.sources.base import CoverSource
-
-
-class AmazonBaseCoverSource(CoverSource):
-
-    """Base class for Amazon cover sources."""
-
-    def __init__(self, *args, base_domain, **kwargs):
-        super().__init__(
-            *args,
-            allow_cookies=True,
-            min_delay_between_accesses=2,
-            jitter_range_ms=(0, 3000),
-            rate_limited_domains=(base_domain,),
-            **kwargs,
-        )
-        self.base_domain = base_domain
-
-    def processQueryString(self, s):
-        """See CoverSource.processQueryString."""
-        return __class__.unaccentuate(__class__.unpunctuate(s.lower()))
-
-    def isBlocked(self, html):
-        """Return True if Amazon source has blocked our IP (temporarily), and 
is sending a captcha."""
-        blocked_titles = ("Robot Check", "Bot Check", "Amazon CAPTCHA")
-        title = html.find("head/title")
-        assert title is not None
-        return title.text in blocked_titles
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/sources/amazoncd.py 
new/sacad-2.8.0/sacad/sources/amazoncd.py
--- old/sacad-2.7.5/sacad/sources/amazoncd.py   2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/sources/amazoncd.py   1970-01-01 01:00:00.000000000 
+0100
@@ -1,143 +0,0 @@
-""" Amazon CD cover source. """
-
-import collections
-import urllib.parse
-
-import lxml.cssselect
-import lxml.etree
-
-from sacad.cover import CoverImageFormat, CoverImageMetadata, 
CoverSourceQuality, CoverSourceResult
-from sacad.sources.amazonbase import AmazonBaseCoverSource
-
-
-class AmazonCdCoverSourceResult(CoverSourceResult):
-
-    """Amazon CD cover search result."""
-
-    def __init__(self, *args, **kwargs):
-        super().__init__(
-            *args, source_quality=CoverSourceQuality.FUZZY_SEARCH | 
CoverSourceQuality.UNRELATED_RESULT_RISK, **kwargs
-        )
-
-
-class AmazonCdCoverSource(AmazonBaseCoverSource):
-
-    """Cover source returning Amazon.com audio CD images."""
-
-    TLDS = ("com", "ca", "cn", "fr", "de", "co.jp", "co.uk")
-    RESULTS_SELECTORS = (
-        
lxml.cssselect.CSSSelector("span.rush-component[data-component-type='s-product-image']"),
-        lxml.cssselect.CSSSelector("#resultsCol li.s-result-item"),
-    )
-    IMG_SELECTORS = (lxml.cssselect.CSSSelector("img.s-image"), 
lxml.cssselect.CSSSelector("img.s-access-image"))
-    PRODUCT_LINK_SELECTORS = (lxml.cssselect.CSSSelector("a"), 
lxml.cssselect.CSSSelector("a.s-access-detail-page"))
-    PRODUCT_PAGE_IMG_SELECTOR = lxml.cssselect.CSSSelector("img#landingImage")
-
-    def __init__(self, *args, tld="com", **kwargs):
-        assert tld in __class__.TLDS
-        self.base_url = f"https://www.amazon.{tld}/s";
-        super().__init__(*args, 
base_domain=urllib.parse.urlsplit(self.base_url).netloc, **kwargs)
-
-    def getSearchUrl(self, album, artist):
-        """See CoverSource.getSearchUrl."""
-        params = collections.OrderedDict()
-        params["i"] = "popular"
-        params["rh"] = f"p_32:{artist},p_28:{album}"
-        params["s"] = "relevancerank"
-        return __class__.assembleUrl(self.base_url, params)
-
-    async def parseResults(self, api_data):
-        """See CoverSource.parseResults."""
-        results = []
-
-        # parse page
-        parser = lxml.etree.HTMLParser()
-        html = lxml.etree.XML(api_data.decode("utf-8", "ignore"), parser)
-        if self.isBlocked(html):
-            self.logger.warning("Source is sending a captcha")
-            return results
-
-        for page_struct_version, result_selector in 
enumerate(__class__.RESULTS_SELECTORS):
-            result_nodes = result_selector(html)
-            if result_nodes:
-                break
-
-        for rank, result_node in enumerate(result_nodes, 1):
-            try:
-                img_node = 
__class__.IMG_SELECTORS[page_struct_version](result_node)[0]
-            except IndexError:
-                # no image for that product
-                continue
-            # get thumbnail & full image url
-            thumbnail_url = img_node.get("src")
-            url_parts = thumbnail_url.rsplit(".", 2)
-            img_url = ".".join((url_parts[0], url_parts[2]))
-            # assume size is fixed
-            size = (500, 500)
-            check_metadata = CoverImageMetadata.SIZE
-            # try to get higher res image...
-            if (self.target_size > size[0]) and (  # ...only if needed
-                rank <= 3
-            ):  # and only for first 3 results because this is time
-                # consuming (1 more GET request per result)
-                product_url = 
__class__.PRODUCT_LINK_SELECTORS[page_struct_version](result_node)[0].get("href")
-                product_url_split = urllib.parse.urlsplit(product_url)
-                if not product_url_split.scheme:
-                    # relative redirect url
-                    product_url_query = 
urllib.parse.parse_qsl(product_url_split.query)
-                    product_url_query = 
collections.OrderedDict(product_url_query)
-                    try:
-                        # needed if page_struct_version == 1
-                        product_url = product_url_query["url"]
-                    except KeyError:
-                        # page_struct_version == 0, make url absolute
-                        product_url = urllib.parse.urljoin(self.base_url, 
product_url)
-                    product_url_split = urllib.parse.urlsplit(product_url)
-                product_url_query = 
urllib.parse.parse_qsl(product_url_split.query)
-                product_url_query = collections.OrderedDict(product_url_query)
-                try:
-                    # remove timestamp from url to improve future cache hit 
rate
-                    del product_url_query["qid"]
-                except KeyError:
-                    pass
-                product_url_query = urllib.parse.urlencode(product_url_query)
-                product_url_no_ts = urllib.parse.urlunsplit(
-                    product_url_split[:3] + (product_url_query,) + 
product_url_split[4:]
-                )
-                store_in_cache_callback, product_page_data = await 
self.fetchResults(product_url_no_ts)
-                product_page_html = 
lxml.etree.XML(product_page_data.decode("latin-1"), parser)
-                try:
-                    img_node = 
__class__.PRODUCT_PAGE_IMG_SELECTOR(product_page_html)[0]
-                except IndexError:
-                    # unable to get better image
-                    pass
-                else:
-                    better_img_url = img_node.get("data-old-hires")
-                    # img_node.get("data-a-dynamic-image") contains json with 
image urls too, but they are not larger
-                    # than previous 500px image and are often covered by 
autorip badges (can be removed by cleaning url
-                    # though)
-                    if better_img_url:
-                        img_url = better_img_url
-                        size_url_hint = img_url.rsplit(".", 
2)[1].strip("_").rsplit("_", 1)[-1]
-                        assert size_url_hint.startswith("SL")
-                        size_url_hint = int(size_url_hint[2:])
-                        size = (size_url_hint, size_url_hint)
-                        check_metadata = CoverImageMetadata.NONE
-                    await store_in_cache_callback()
-
-            # assume format is always jpg
-            format = CoverImageFormat.JPEG
-            # add result
-            results.append(
-                AmazonCdCoverSourceResult(
-                    img_url,
-                    size,
-                    format,
-                    thumbnail_url=thumbnail_url,
-                    source=self,
-                    rank=rank,
-                    check_metadata=check_metadata,
-                )
-            )
-
-        return results
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/sources/amazondigital.py 
new/sacad-2.8.0/sacad/sources/amazondigital.py
--- old/sacad-2.7.5/sacad/sources/amazondigital.py      2023-06-13 
22:55:01.000000000 +0200
+++ new/sacad-2.8.0/sacad/sources/amazondigital.py      1970-01-01 
01:00:00.000000000 +0100
@@ -1,160 +0,0 @@
-""" Amazon digital cover source. """
-
-import collections
-import operator
-import urllib.parse
-
-import lxml.cssselect
-import lxml.etree
-
-from sacad.cover import CoverImageFormat, CoverImageMetadata, 
CoverSourceQuality, CoverSourceResult
-from sacad.sources.amazonbase import AmazonBaseCoverSource
-
-AmazonDigitalImageFormat = collections.namedtuple("AmazonDigitalImageFormat", 
("id", "slice_count", "total_res"))
-AMAZON_DIGITAL_IMAGE_FORMATS = [
-    AmazonDigitalImageFormat(
-        0, 1, 600
-    ),  # 
http://z2-ec2.images-amazon.com/R/1/a=B00BJ93R7O+c=A17SFUTIVB227Z+d=_SCR(0,0,0)_=.jpg
-    AmazonDigitalImageFormat(
-        1, 2, 700
-    ),  # 
http://z2-ec2.images-amazon.com/R/1/a=B00BJ93R7O+c=A17SFUTIVB227Z+d=_SCR(1,1,1)_=.jpg
-    AmazonDigitalImageFormat(
-        1, 4, 1280
-    ),  # 
http://z2-ec2.images-amazon.com/R/1/a=B01NBTSVDN+c=A17SFUTIVB227Z+d=_SCR(1,3,3)_=.jpg
-    AmazonDigitalImageFormat(
-        2, 3, 1025
-    ),  # 
http://z2-ec2.images-amazon.com/R/1/a=B00BJ93R7O+c=A17SFUTIVB227Z+d=_SCR(2,2,2)_=.jpg
-    AmazonDigitalImageFormat(
-        2, 5, 1920
-    ),  # 
http://z2-ec2.images-amazon.com/R/1/a=B01NBTSVDN+c=A17SFUTIVB227Z+d=_SCR(2,4,4)_=.jpg
-    AmazonDigitalImageFormat(
-        3, 4, 1500
-    ),  # 
http://z2-ec2.images-amazon.com/R/1/a=B00BJ93R7O+c=A17SFUTIVB227Z+d=_SCR(3,3,3)_=.jpg
-    AmazonDigitalImageFormat(3, 7, 2560),
-]  # 
http://z2-ec2.images-amazon.com/R/1/a=B01NBTSVDN+c=A17SFUTIVB227Z+d=_SCR(3,6,6)_=.jpg
-AMAZON_DIGITAL_IMAGE_FORMATS.sort(key=operator.attrgetter("total_res"), 
reverse=True)
-
-
-class AmazonDigitalCoverSourceResult(CoverSourceResult):
-
-    """Amazon digital cover search result."""
-
-    def __init__(self, *args, **kwargs):
-        super().__init__(
-            *args, source_quality=CoverSourceQuality.FUZZY_SEARCH | 
CoverSourceQuality.UNRELATED_RESULT_RISK, **kwargs
-        )
-
-
-class AmazonDigitalCoverSource(AmazonBaseCoverSource):
-
-    """Cover source returning Amazon.com digital music images."""
-
-    BASE_URL = "https://www.amazon.com";
-    DYNAPI_KEY = "A17SFUTIVB227Z"
-    RESULTS_SELECTORS = (
-        
lxml.cssselect.CSSSelector("span.rush-component[data-component-type='s-product-image']"),
-        lxml.cssselect.CSSSelector("div#dm_mp3Player 
li.s-mp3-federated-bar-item"),
-    )
-    IMG_SELECTORS = (lxml.cssselect.CSSSelector("img.s-image"), 
lxml.cssselect.CSSSelector("img.s-access-image"))
-    LINK_SELECTOR = lxml.cssselect.CSSSelector("a")
-
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, 
base_domain=urllib.parse.urlsplit(__class__.BASE_URL).netloc, **kwargs)
-
-    def getSearchUrl(self, album, artist):
-        """See CoverSource.getSearchUrl."""
-        url = f"{__class__.BASE_URL}/s"
-        params = collections.OrderedDict()
-        params["k"] = " ".join((artist, album))
-        params["i"] = "digital-music"
-        params["s"] = "relevancerank"
-        return __class__.assembleUrl(url, params)
-
-    async def parseResults(self, api_data):
-        """See CoverSource.parseResults."""
-        results = []
-
-        # parse page
-        parser = lxml.etree.HTMLParser()
-        html = lxml.etree.XML(api_data.decode("utf-8"), parser)
-        if self.isBlocked(html):
-            self.logger.warning("Source is sending a captcha")
-            return results
-
-        for page_struct_version, result_selector in 
enumerate(__class__.RESULTS_SELECTORS):
-            result_nodes = result_selector(html)
-            if result_nodes:
-                break
-
-        for rank, result_node in enumerate(result_nodes, 1):
-            # get thumbnail & full image url
-            img_node = 
__class__.IMG_SELECTORS[page_struct_version](result_node)[0]
-            thumbnail_url = img_node.get("src")
-            thumbnail_url = thumbnail_url.replace("Stripe-Prime-Only", "")
-            url_parts = thumbnail_url.rsplit(".", 2)
-            img_url = ".".join((url_parts[0], url_parts[2]))
-
-            # assume size is fixed
-            size = (500, 500)
-
-            # try to get higher res image...
-            if self.target_size > size[0]:  # ...but only if needed
-                self.logger.debug("Looking for optimal subimages 
configuration...")
-                product_url = 
__class__.LINK_SELECTOR(result_node)[0].get("href")
-                product_url = urllib.parse.urlsplit(product_url)
-                product_id = product_url.path.split("/")[3]
-
-                # TODO don't pick up highest res image if user asked less?
-                for amazon_img_format in AMAZON_DIGITAL_IMAGE_FORMATS:
-                    # TODO review this, it seem to always fail now
-                    self.logger.debug("Trying %u subimages..." % 
(amazon_img_format.slice_count**2))
-                    urls = tuple(
-                        self.generateImgUrls(
-                            product_id, __class__.DYNAPI_KEY, 
amazon_img_format.id, amazon_img_format.slice_count
-                        )
-                    )
-                    url_ok = await self.probeUrl(urls[-1])
-                    if not url_ok:
-                        # images at this size are not available
-                        continue
-
-                    # images at this size are available
-                    img_url = urls
-                    size = (amazon_img_format.total_res,) * 2
-                    break
-
-            # assume format is always jpg
-            format = CoverImageFormat.JPEG
-
-            # add result
-            results.append(
-                AmazonDigitalCoverSourceResult(
-                    img_url,
-                    size,
-                    format,
-                    thumbnail_url=thumbnail_url,
-                    source=self,
-                    rank=rank,
-                    check_metadata=CoverImageMetadata.SIZE,
-                )
-            )
-
-        return results
-
-    def generateImgUrls(self, product_id, dynapi_key, format_id, slice_count):
-        """Generate URLs for slice_count^2 subimages of a product."""
-        for x in range(slice_count):
-            for y in range(slice_count):
-                yield (
-                    "http://z2-ec2.images-amazon.com/R/1/a=";
-                    + product_id
-                    + "+c="
-                    + dynapi_key
-                    + "+d=_SCR%28"
-                    + str(format_id)
-                    + ","
-                    + str(x)
-                    + ","
-                    + str(y)
-                    + "%29_=.jpg"
-                )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/sources/base.py 
new/sacad-2.8.0/sacad/sources/base.py
--- old/sacad-2.7.5/sacad/sources/base.py       2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/sources/base.py       2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" Common code for all cover sources cover sources. """
+"""Common code for all cover sources cover sources."""
 
 import abc
 import asyncio
@@ -21,7 +21,6 @@
 
 
 class CoverSource(metaclass=abc.ABCMeta):
-
     """Base class for all cover sources."""
 
     def __init__(
@@ -95,7 +94,7 @@
             post_data = None
         try:
             store_in_cache_callback, api_data = await self.fetchResults(url, 
post_data)
-            results = await self.parseResults(api_data)
+            results = await self.parseResults(api_data, search_album=album, 
search_artist=artist)
         except Exception as e:
             # raise
             self.logger.warning(
@@ -239,6 +238,6 @@
         pass
 
     @abc.abstractmethod
-    async def parseResults(self, api_data):
+    async def parseResults(self, api_data, *, search_album, search_artist):
         """Parse API data and return an iterable of results."""
         pass
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/sources/deezer.py 
new/sacad-2.8.0/sacad/sources/deezer.py
--- old/sacad-2.7.5/sacad/sources/deezer.py     2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/sources/deezer.py     2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" Deezer cover source. """
+"""Deezer cover source."""
 
 import collections
 import json
@@ -9,7 +9,6 @@
 
 
 class DeezerCoverSourceResult(CoverSourceResult):
-
     """Deezer search cover result."""
 
     def __init__(self, *args, **kwargs):
@@ -21,7 +20,6 @@
 
 
 class DeezerCoverSource(CoverSource):
-
     """
     Cover source using the official Deezer API.
 
@@ -57,19 +55,25 @@
         # API search is fuzzy, not need to alter query
         return s
 
-    async def parseResults(self, api_data):
+    async def parseResults(self, api_data, *, search_album, search_artist):
         """See CoverSource.parseResults."""
         results = []
 
         # get unique albums
         json_data = json.loads(api_data)
         albums = []
+        index_exact_match = 0
         for e in json_data["data"]:
             album = e["album"]
             album_id = album["id"]
             if album_id in map(operator.itemgetter("id"), albums):
                 continue
-            albums.append(album)
+            if album["title"].lower() == search_album.lower():
+                # override default sorting by putting exact matches first
+                albums.insert(index_exact_match, album)
+                index_exact_match += 1
+            else:
+                albums.append(album)
 
         for rank, album in enumerate(albums, 1):
             for key, size in __class__.COVER_SIZES.items():
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/sources/discogs.py 
new/sacad-2.8.0/sacad/sources/discogs.py
--- old/sacad-2.7.5/sacad/sources/discogs.py    2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/sources/discogs.py    2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" Discogs cover source. """
+"""Discogs cover source."""
 
 import collections
 import json
@@ -11,7 +11,6 @@
 
 
 class DiscogsCoverSourceResult(CoverSourceResult):
-
     """Discogs search cover result."""
 
     def __init__(self, *args, **kwargs):
@@ -24,7 +23,6 @@
 
 
 class DiscogsCoverSource(CoverSource):
-
     """
     Cover source using the official API.
 
@@ -56,7 +54,7 @@
         headers["Accept"] = "application/vnd.discogs.v2.discogs+json"
         headers["Authorization"] = f"Discogs key={__class__.API_KEY}, 
secret={__class__.API_SECRET}"
 
-    async def parseResults(self, api_data):
+    async def parseResults(self, api_data, *, search_album, search_artist):
         """See CoverSource.parseResults."""
         json_data = json.loads(api_data)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/sources/itunes.py 
new/sacad-2.8.0/sacad/sources/itunes.py
--- old/sacad-2.7.5/sacad/sources/itunes.py     2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/sources/itunes.py     2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" Itunes cover source. """
+"""Itunes cover source."""
 
 import collections
 import json
@@ -9,19 +9,17 @@
 
 
 class ItunesCoverSourceResult(CoverSourceResult):
-
     """Itunes search cover result."""
 
     def __init__(self, *args, **kwargs):
         super().__init__(
             *args,
-            source_quality=CoverSourceQuality.NO_UNRELATED_RESULT_RISK | 
CoverSourceQuality.EXACT_SEARCH,
+            source_quality=CoverSourceQuality.EXACT_SEARCH | 
CoverSourceQuality.NO_UNRELATED_RESULT_RISK,
             **kwargs,
         )
 
 
 class ItunesCoverSource(CoverSource):
-
     """Itunes cover source."""
 
     SEARCH_URL = "https://itunes.apple.com/search";
@@ -38,12 +36,16 @@
         url_params["term"] = f"{artist} {album}"
         return __class__.assembleUrl(__class__.SEARCH_URL, url_params)
 
-    async def parseResults(self, api_data):
+    async def parseResults(self, api_data, *, search_album, search_artist):
         """See CoverSource.parseResults."""
         json_data = json.loads(api_data)
 
         results = []
         for rank, result in enumerate(json_data["results"], 1):
+            if (search_album != 
self.processAlbumString(result["collectionName"])) or (
+                search_artist != self.processArtistString(result["artistName"])
+            ):
+                continue
             thumbnail_url = result["artworkUrl60"]
             base_img_url = result["artworkUrl60"].rsplit("/", 1)[0]
             url_found = False
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/sources/lastfm.py 
new/sacad-2.8.0/sacad/sources/lastfm.py
--- old/sacad-2.7.5/sacad/sources/lastfm.py     2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/sources/lastfm.py     2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" LastFM cover source. """
+"""LastFM cover source."""
 
 import collections
 import os.path
@@ -10,7 +10,6 @@
 
 
 class LastFmCoverSourceResult(CoverSourceResult):
-
     """LastFM cover search result."""
 
     def __init__(self, *args, **kwargs):
@@ -22,7 +21,6 @@
 
 
 class LastFmCoverSource(CoverSource):
-
     """
     Cover source using the official LastFM API.
 
@@ -62,7 +60,7 @@
         char_blacklist = frozenset(char_blacklist)
         return __class__.unpunctuate(s.lower(), char_blacklist=char_blacklist)
 
-    async def parseResults(self, api_data):
+    async def parseResults(self, api_data, *, search_album, search_artist):
         """See CoverSource.parseResults."""
         results = []
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/sacad/tqdm_logging.py 
new/sacad-2.8.0/sacad/tqdm_logging.py
--- old/sacad-2.7.5/sacad/tqdm_logging.py       2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/sacad/tqdm_logging.py       2024-07-28 22:16:52.000000000 
+0200
@@ -1,4 +1,4 @@
-""" Code to help using the logging module with tqdm progress bars. """
+"""Code to help using the logging module with tqdm progress bars."""
 
 import contextlib
 import logging
@@ -8,7 +8,6 @@
 
 
 class TqdmLoggingHandler(logging.Handler):
-
     """Logging handler sending messages to the tqdm write method (avoids 
overlap)."""
 
     def __init__(self, tqdm, *args, **kwargs):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/setup.py new/sacad-2.8.0/setup.py
--- old/sacad-2.7.5/setup.py    2023-06-13 22:55:01.000000000 +0200
+++ new/sacad-2.8.0/setup.py    2024-07-28 22:16:52.000000000 +0200
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 
-""" Package setup. """
+"""Package setup."""
 
 import os
 import re
@@ -8,8 +8,8 @@
 
 from setuptools import find_packages, setup
 
-if sys.hexversion < 0x3060000:
-    print("Python version %s is unsupported, >= 3.7.0 is needed" % 
(".".join(map(str, sys.version_info[:3]))))
+if sys.hexversion < 0x3080000:
+    print("Python version %s is unsupported, >= 3.8.0 is needed" % 
(".".join(map(str, sys.version_info[:3]))))
     exit(1)
 
 with open(os.path.join("sacad", "__init__.py"), "rt") as f:
@@ -53,10 +53,11 @@
         "Programming Language :: Python",
         "Programming Language :: Python :: 3",
         "Programming Language :: Python :: 3 :: Only",
-        "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
         "Topic :: Internet :: WWW/HTTP",
         "Topic :: Multimedia :: Graphics",
         "Topic :: Utilities",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/tests/__init__.py 
new/sacad-2.8.0/tests/__init__.py
--- old/sacad-2.7.5/tests/__init__.py   2023-06-13 22:55:01.000000000 +0200
+++ new/sacad-2.8.0/tests/__init__.py   2024-07-28 22:16:52.000000000 +0200
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 
-""" Unit tests for main module. """
+"""Unit tests for main module."""
 
 import asyncio
 import contextlib
@@ -9,7 +9,6 @@
 import socket
 import unittest
 import unittest.mock
-import urllib.parse
 import warnings
 
 import PIL.Image
@@ -74,7 +73,6 @@
 
 @unittest.skipUnless(is_internet_reachable(), "Need Internet access")
 class TestSacad(unittest.TestCase):
-
     """Test suite for main module."""
 
     @staticmethod
@@ -105,12 +103,15 @@
                                 size_tolerance_prct=size_tolerance,
                             )
                             sched_and_run(coroutine, delay=0.5)
-                            out_format, out_width, out_height = 
__class__.getImgInfo(tmp_filepath)
-                            self.assertEqual(out_format, format)
-                            self.assertLessEqual(out_width, size * (100 + 
size_tolerance) / 100)
-                            self.assertGreaterEqual(out_width, size * (100 - 
size_tolerance) / 100)
-                            self.assertLessEqual(out_height, size * (100 + 
size_tolerance) / 100)
-                            self.assertGreaterEqual(out_height, size * (100 - 
size_tolerance) / 100)
+                            if os.path.getsize(tmp_filepath):
+                                out_format, out_width, out_height = 
__class__.getImgInfo(tmp_filepath)
+                                self.assertEqual(out_format, format)
+                                self.assertLessEqual(out_width, size * (100 + 
size_tolerance) / 100)
+                                self.assertGreaterEqual(out_width, size * (100 
- size_tolerance) / 100)
+                                self.assertLessEqual(out_height, size * (100 + 
size_tolerance) / 100)
+                                self.assertGreaterEqual(out_height, size * 
(100 - size_tolerance) / 100)
+                            elif size < 1200:
+                                self.fail("No result")
 
     @unittest.skipIf(os.getenv("CI") is not None, "Test is not reliable on CI 
servers")
     def test_getImageUrlMetadata(self):
@@ -194,12 +195,7 @@
                 sacad.sources.LastFmCoverSource(*source_args),
                 sacad.sources.DeezerCoverSource(*source_args),
                 sacad.sources.DiscogsCoverSource(*source_args),
-                sacad.sources.AmazonDigitalCoverSource(*source_args),
             ]
-            sources.extend(
-                sacad.sources.AmazonCdCoverSource(*source_args, tld=tld)
-                for tld in sacad.sources.AmazonCdCoverSource.TLDS
-            )
             for artist, album in zip(("Michael Jackson", "Björk"), 
("Thriller", "Vespertine")):
                 for source in sources:
                     with self.subTest(size=size, source=source, artist=artist, 
album=album):
@@ -215,10 +211,6 @@
                                 )
                             )
                             or ((size > 1000) and isinstance(source, 
sacad.sources.DeezerCoverSource))
-                            or (
-                                isinstance(source, 
sacad.sources.AmazonCdCoverSource)
-                                and 
(urllib.parse.urlsplit(source.base_url).netloc.rsplit(".", 1)[-1] in ("cn", 
"jp"))
-                            )
                         ):
                             self.assertGreaterEqual(len(results), 1)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/tests/rate_watcher_test.py 
new/sacad-2.8.0/tests/rate_watcher_test.py
--- old/sacad-2.7.5/tests/rate_watcher_test.py  2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/tests/rate_watcher_test.py  2024-07-28 22:16:52.000000000 
+0200
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 
-""" Unit tests for rate watcher. """
+"""Unit tests for rate watcher."""
 
 import os
 import tempfile
@@ -15,7 +15,6 @@
 
 
 class TestRateWatcher(unittest.TestCase):
-
     """Test suite for rate watcher."""
 
     def test_minDelayBetweenAccesses(self):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/tests/recurse_test.py 
new/sacad-2.8.0/tests/recurse_test.py
--- old/sacad-2.7.5/tests/recurse_test.py       2023-06-13 22:55:01.000000000 
+0200
+++ new/sacad-2.8.0/tests/recurse_test.py       2024-07-28 22:16:52.000000000 
+0200
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 
-""" Unit tests for recurse module. """
+"""Unit tests for recurse module."""
 
 import collections
 import contextlib
@@ -39,7 +39,6 @@
 
 
 class TestRecursive(unittest.TestCase):
-
     """Test suite for recurse module."""
 
     def __init__(self, *args, **kwargs):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/sacad-2.7.5/win/Makefile new/sacad-2.8.0/win/Makefile
--- old/sacad-2.7.5/win/Makefile        2023-06-13 22:55:01.000000000 +0200
+++ new/sacad-2.8.0/win/Makefile        2024-07-28 22:16:52.000000000 +0200
@@ -4,7 +4,7 @@
 PYTHON_VERSION       := 3.8.10
 PYTHON_VERSION_MAJOR := $(word 1,$(subst ., ,${PYTHON_VERSION})).$(word 
2,$(subst ., ,${PYTHON_VERSION}))
 PYTHON_VERSION_SHORT := $(subst .,,${PYTHON_VERSION_MAJOR})
-CXFREEZE_VERSION     := 6.11.1
+CXFREEZE_VERSION     := 7.2.0
 LXML_VERSION         := 4.7.1
 BITARRAY_VERSION     := 2.3.5
 
@@ -135,10 +135,8 @@
        rm $(dir $@)python${PYTHON_VERSION_SHORT}.zip
 
 ${PIP_INSTALLED}: ${PYTHON_INSTALLED} ${GET_PIP}
-       ${WINE_PYTHON} ${GET_PIP} install -qq
-       mv $(dir ${PYTHON_INSTALLED})Lib/site-packages/* $(dir 
${PYTHON_INSTALLED})
-       ${WINE_PIP} -qq install setuptools==60.10.0
-       mv $(dir ${PYTHON_INSTALLED})Lib/site-packages/* $(dir 
${PYTHON_INSTALLED})
+       ${WINE_PYTHON} ${GET_PIP}
+       mv -f $(dir ${PYTHON_INSTALLED})Lib/site-packages/* $(dir 
${PYTHON_INSTALLED})
        touch $@
 
 ${CXFREEZE_INSTALLED}: ${CXFREEZE_WHEEL} ${PIP_INSTALLED}
@@ -168,15 +166,15 @@
 
 ${GET_PIP}:
        mkdir -p $(dir $@)
-       ${CURL} https://bootstrap.pypa.io/pip/3.6/$(notdir $@) > $@
+       ${CURL} https://bootstrap.pypa.io/pip/3.7/$(notdir $@) > $@
 
 ${CXFREEZE_WHEEL-win32}:
        mkdir -p $(dir $@)
-       ${CURL} 
https://files.pythonhosted.org/packages/9e/5f/7872323430985758cea081315f66b015093edbbcea6da3561cc321c8128c/$(notdir
 $@) > $@
+       ${CURL} 
https://files.pythonhosted.org/packages/2e/46/0afe229acba3d0976bddc0f03b09c43602391abf23f10e5bf14e03be9b56/$(notdir
 $@) > $@
 
 ${CXFREEZE_WHEEL-win64}:
        mkdir -p $(dir $@)
-       ${CURL} 
https://files.pythonhosted.org/packages/ce/96/0af2b244d91cc118c762a6510af403a590a3b3949aa6e6673b19f7bd3977/$(notdir
 $@) > $@
+       ${CURL} 
https://files.pythonhosted.org/packages/17/86/6242fdb636b88a16a2a0924111fe84d9b8df14f4d9ecba40e299f3488ba1/$(notdir
 $@) > $@
 
 ${LXML_WHEEL-win32}:
        mkdir -p $(dir $@)
@@ -201,7 +199,8 @@
 
 ${DOWNLOAD_DIR}/sample.ogg:
        mkdir -p $(dir $@)
-       ${CURL} 
https://www.dropbox.com/s/zg80jd35ie3aury/Hydrate-Kenny_Beltrey.ogg > $@
+       # https://en.wikipedia.org/wiki/File:Opeth_-_Deliverance.ogg
+       ${CURL} 
'https://www.dropbox.com/scl/fi/jmxbozi9210svk7jhqaju/Opeth_-_Deliverance.ogg?rlkey=9vzbakz3hu7sru07tub3kxuoc&st=dogd1iwk'
 > $@
 
 
 #

Reply via email to