Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-unearth for openSUSE:Factory 
checked in at 2022-12-16 17:52:02
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-unearth (Old)
 and      /work/SRC/openSUSE:Factory/.python-unearth.new.1835 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-unearth"

Fri Dec 16 17:52:02 2022 rev:3 rq:1043285 version:0.7.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-unearth/python-unearth.changes    
2022-12-07 17:36:28.353034405 +0100
+++ /work/SRC/openSUSE:Factory/.python-unearth.new.1835/python-unearth.changes  
2022-12-16 17:52:09.324184002 +0100
@@ -1,0 +2,15 @@
+Thu Dec 15 23:14:43 UTC 2022 - Torsten Gruner <simmpho...@opensuse.org>
+
+- Update to version 0.7.0
+  * Features
+    + Suppress warnings for insecure requests  -  by @frostming (419df)
+    + Support PEP 691: JSON simple API and PEP 658: static metadata in API  -  
by @frostming in #28 (6dadc)
+  * Bug Fixes
+    + Fix the logger name to be zipapp friendly  -  by @frostming in #29 
(5d930)
+- Version 0.6.3
+  * Bug Fixes
+    + Skip invalid versions rejected by packaging 22.0  -  by @pdm-project in 
#26 (8fc2a)
+  * Documentation
+    + Update the version in docs  -  by @pdm-project (f2a02)
+
+-------------------------------------------------------------------

Old:
----
  unearth-0.6.2.tar.gz

New:
----
  unearth-0.7.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-unearth.spec ++++++
--- /var/tmp/diff_new_pack.135Eip/_old  2022-12-16 17:52:09.848186887 +0100
+++ /var/tmp/diff_new_pack.135Eip/_new  2022-12-16 17:52:09.856186931 +0100
@@ -17,7 +17,7 @@
 
 
 Name:           python-unearth
-Version:        0.6.2
+Version:        0.7.0
 Release:        0
 Summary:        A utility to fetch and download python packages
 License:        MIT

++++++ unearth-0.6.2.tar.gz -> unearth-0.7.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/PKG-INFO new/unearth-0.7.0/PKG-INFO
--- old/unearth-0.6.2/PKG-INFO  2022-11-23 03:47:28.657159600 +0100
+++ new/unearth-0.7.0/PKG-INFO  2022-12-12 06:10:18.483614400 +0100
@@ -1,22 +1,23 @@
 Metadata-Version: 2.1
 Name: unearth
-Version: 0.6.2
+Version: 0.7.0
 Summary: A utility to fetch and download python packages
 License: MIT
-Author-email: Frost Ming <miangh...@gmail.com>
+Author-email: Frost Ming <m...@frostming.com>
 Requires-Python: >=3.7
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Provides-Extra: keyring
 Project-URL: Changelog, https://github.com/frostming/unearth/releases
+Project-URL: Documentation, https://unearth.readthedocs.io
 Project-URL: Homepage, https://github.com/frostming/unearth
-Project-URL: Repository, https://github.com/frostming/unearth
 Description-Content-Type: text/markdown
 
 # unearth
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/pyproject.toml 
new/unearth-0.7.0/pyproject.toml
--- old/unearth-0.6.2/pyproject.toml    2022-11-23 03:47:14.640849600 +0100
+++ new/unearth-0.7.0/pyproject.toml    2022-12-12 06:10:02.955712000 +0100
@@ -8,7 +8,7 @@
 name = "unearth"
 description = "A utility to fetch and download python packages"
 authors = [
-    { name = "Frost Ming", email = "miangh...@gmail.com" },
+    { name = "Frost Ming", email = "m...@frostming.com" },
 ]
 readme = "README.md"
 requires-python = ">=3.7"
@@ -26,16 +26,17 @@
     "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3 :: Only",
 ]
-version = "0.6.2"
+version = "0.7.0"
 
 [project.license]
 text = "MIT"
 
 [project.urls]
-Repository = "https://github.com/frostming/unearth";
 Homepage = "https://github.com/frostming/unearth";
+Documentation = "https://unearth.readthedocs.io";
 Changelog = "https://github.com/frostming/unearth/releases";
 
 [project.optional-dependencies]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/__main__.py 
new/unearth-0.7.0/src/unearth/__main__.py
--- old/unearth-0.6.2/src/unearth/__main__.py   2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/__main__.py   2022-12-12 06:10:02.955712000 
+0100
@@ -33,7 +33,7 @@
 
 
 def _setup_logger(verbosity: bool) -> None:
-    logger = logging.getLogger(__package__)
+    logger = logging.getLogger("unearth")
     logger.setLevel(logging.DEBUG if verbosity else logging.WARNING)
     handler = logging.StreamHandler()
     handler.setLevel(logging.DEBUG)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/auth.py 
new/unearth-0.7.0/src/unearth/auth.py
--- old/unearth-0.6.2/src/unearth/auth.py       2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/auth.py       2022-12-12 06:10:02.955712000 
+0100
@@ -19,7 +19,7 @@
 
 AuthInfo = Tuple[str, str]
 MaybeAuth = Optional[Tuple[str, Optional[str]]]
-logger = logging.getLogger(__package__)
+logger = logging.getLogger(__name__)
 
 
 def get_keyring_auth(url: str | None, username: str | None) -> AuthInfo | None:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/collector.py 
new/unearth-0.7.0/src/unearth/collector.py
--- old/unearth-0.6.2/src/unearth/collector.py  2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/collector.py  2022-12-12 06:10:02.955712000 
+0100
@@ -2,6 +2,7 @@
 from __future__ import annotations
 
 import functools
+import json
 import logging
 import mimetypes
 from html.parser import HTMLParser
@@ -14,16 +15,23 @@
 from unearth.session import PyPISession
 from unearth.utils import is_archive_file, path_to_url
 
-logger = logging.getLogger(__package__)
+SUPPORTED_CONTENT_TYPES = (
+    "text/html",
+    "application/vnd.pypi.simple.v1+html",
+    "application/vnd.pypi.simple.v1+json",
+)
+logger = logging.getLogger(__name__)
 
 
 class LinkCollectError(Exception):
     pass
 
 
-class HTMLPage(NamedTuple):
+class IndexPage(NamedTuple):
     link: Link
-    html: str
+    content: bytes
+    encoding: str | None
+    content_type: str
 
 
 class IndexHTMLParser(HTMLParser):
@@ -41,9 +49,10 @@
             self.anchors.append(dict(attrs))
 
 
-def parse_html_page(page: HTMLPage) -> Iterable[Link]:
+def parse_html_page(page: IndexPage) -> Iterable[Link]:
+    """PEP 503 simple index API"""
     parser = IndexHTMLParser()
-    parser.feed(page.html)
+    parser.feed(page.content.decode(page.encoding or "utf-8"))
     base_url = parser.base_url or page.link.url_without_fragment
     for anchor in parser.anchors:
         href = anchor.get("href")
@@ -52,8 +61,45 @@
         url = parse.urljoin(base_url, href)
         requires_python = anchor.get("data-requires-python")
         yank_reason = anchor.get("data-yanked")
+        data_dist_info_metadata = anchor.get("data-dist-info-metadata")
+        dist_info_metadata: bool | dict[str, str] | None = None
+        if data_dist_info_metadata:
+            hash_name, has_hash, hash_value = 
data_dist_info_metadata.partition("=")
+            if has_hash:
+                dist_info_metadata = {hash_name: hash_value}
+            else:
+                dist_info_metadata = True
+        yield Link(
+            url,
+            base_url,
+            yank_reason=yank_reason,
+            requires_python=requires_python,
+            dist_info_metadata=dist_info_metadata,
+        )
+
+
+def parse_json_response(page: IndexPage) -> Iterable[Link]:
+    """PEP 691 JSON simple API"""
+    data = json.loads(page.content)
+    base_url = page.link.url_without_fragment
+    for file in data.get("files", []):
+        url = file.get("url")
+        if not url:
+            continue
+        url = parse.urljoin(base_url, url)
+        requires_python: str | None = file.get("requires-python")
+        yank_reason: str | None = file.get("yanked") or None
+        dist_info_metadata: bool | dict[str, str] | None = file.get(
+            "dist-info-metadata"
+        )
+        hashes: dict[str, str] | None = file.get("hashes")
         yield Link(
-            url, base_url, yank_reason=yank_reason, 
requires_python=requires_python
+            url,
+            base_url,
+            yank_reason=yank_reason,
+            requires_python=requires_python,
+            dist_info_metadata=dist_info_metadata,
+            hashes=hashes,
         )
 
 
@@ -74,31 +120,33 @@
                 for child in path.iterdir():
                     file_url = path_to_url(str(child))
                     if _is_html_file(file_url):
-                        yield from _collect_links_from_html(session, 
Link(file_url))
+                        yield from _collect_links_from_index(session, 
Link(file_url))
                     else:
                         yield Link(file_url)
             else:
                 index_html = 
Link(path_to_url(path.joinpath("index.html").as_posix()))
-                yield from _collect_links_from_html(session, index_html)
+                yield from _collect_links_from_index(session, index_html)
         else:
-            yield from _collect_links_from_html(session, location)
+            yield from _collect_links_from_index(session, location)
 
     else:
-        yield from _collect_links_from_html(session, location)
+        yield from _collect_links_from_index(session, location)
 
 
 @functools.lru_cache(maxsize=None)
-def fetch_page(session: PyPISession, location: Link) -> HTMLPage:
+def fetch_page(session: PyPISession, location: Link) -> IndexPage:
     if location.is_vcs:
         raise LinkCollectError("It is a VCS link.")
     resp = _get_html_response(session, location)
     from_cache = getattr(resp, "from_cache", False)
     cache_text = " (from cache)" if from_cache else ""
     logger.debug("Fetching HTML page %s%s", location.redacted, cache_text)
-    return HTMLPage(Link(resp.url), resp.text)
+    return IndexPage(
+        Link(resp.url), resp.content, resp.encoding, 
resp.headers["Content-Type"]
+    )
 
 
-def _collect_links_from_html(session: PyPISession, location: Link) -> 
Iterable[Link]:
+def _collect_links_from_index(session: PyPISession, location: Link) -> 
Iterable[Link]:
     if not session.is_secure_origin(location):
         return []
     try:
@@ -107,7 +155,11 @@
         logger.warning("Failed to collect links from %s: %s", 
location.redacted, e)
         return []
     else:
-        return parse_html_page(page)
+        content_type_l = page.content_type.lower()
+        if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
+            return parse_json_response(page)
+        else:
+            return parse_html_page(page)
 
 
 def _is_html_file(file_url: str) -> bool:
@@ -116,20 +168,31 @@
 
 def _get_html_response(session: PyPISession, location: Link) -> Response:
     if is_archive_file(location.filename):
-        # Send a HEAD request to ensure the file is an HTML file to avoid 
downloading
-        # a large file.
-        _ensure_html_response(session, location)
+        # If the URL looks like a file, send a HEAD request to ensure
+        # the link is an HTML page to avoid downloading a large file.
+        _ensure_index_response(session, location)
 
     resp = session.get(
         location.normalized,
-        headers={"Accept": "text/html", "Cache-Control": "max-age=0"},
+        headers={
+            "Accept": ", ".join(
+                [
+                    "application/vnd.pypi.simple.v1+json",
+                    "application/vnd.pypi.simple.v1+html; q=0.1",
+                    "text/html; q=0.01",
+                ]
+            ),
+            # Don't cache the /simple/{package} page, to ensure it gets updated
+            # immediately when a new release is uploaded.
+            "Cache-Control": "max-age=0",
+        },
     )
     _check_for_status(resp)
-    _ensure_html_type(resp)
+    _ensure_index_content_type(resp)
     return resp
 
 
-def _ensure_html_response(session: PyPISession, location: Link) -> None:
+def _ensure_index_response(session: PyPISession, location: Link) -> None:
     if location.parsed.scheme not in {"http", "https"}:
         raise LinkCollectError(
             "NotHTTP: the file looks like an archive but its content-type "
@@ -138,7 +201,7 @@
 
     resp = session.head(location.url)
     _check_for_status(resp)
-    _ensure_html_type(resp)
+    _ensure_index_content_type(resp)
 
 
 def _check_for_status(resp: Response) -> None:
@@ -156,10 +219,14 @@
         raise LinkCollectError(f"Server Error({resp.status_code}): {reason}")
 
 
-def _ensure_html_type(resp: Response) -> None:
-    content_type = resp.headers.get("content-type", "").lower()
-    if not content_type.startswith("text/html"):
-        raise LinkCollectError(
-            f"NotHTML: only HTML is supported but its content-type "
-            f"is {content_type}."
-        )
+def _ensure_index_content_type(resp: Response) -> None:
+    content_type = resp.headers.get("Content-Type", "Unknown")
+
+    content_type_l = content_type.lower()
+    if content_type_l.startswith(SUPPORTED_CONTENT_TYPES):
+        return
+
+    raise LinkCollectError(
+        f"Content-Type unsupported: {content_type}. "
+        f"The only supported are {', '.join(SUPPORTED_CONTENT_TYPES)}."
+    )
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/evaluator.py 
new/unearth-0.7.0/src/unearth/evaluator.py
--- old/unearth-0.6.2/src/unearth/evaluator.py  2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/evaluator.py  2022-12-12 06:10:02.955712000 
+0100
@@ -5,7 +5,7 @@
 import hashlib
 import logging
 import sys
-from typing import Any, cast
+from typing import Any
 from urllib.parse import urlencode
 
 import packaging.requirements
@@ -16,14 +16,14 @@
     canonicalize_name,
     parse_wheel_filename,
 )
-from packaging.version import InvalidVersion
+from packaging.version import InvalidVersion, Version
 from requests import Session
 
 from unearth.link import Link
 from unearth.pep425tags import get_supported
 from unearth.utils import ARCHIVE_EXTENSIONS, splitext, strip_extras
 
-logger = logging.getLogger(__package__)
+logger = logging.getLogger(__name__)
 
 
 def is_equality_specifier(specifier: SpecifierSet) -> bool:
@@ -169,11 +169,14 @@
 
         if not self.hashes:
             return
-        if link.hash_name and link.hash_name in self.hashes:
-            if link.hash not in self.hashes[link.hash_name]:
-                hash_mismatch(
-                    link.hash_name, cast(str, link.hash), 
self.hashes[link.hash_name]
-                )
+        link_hashes = link.hash_option
+        if link_hashes:
+            for hash_name, allowed_hashes in self.hashes.items():
+                if hash_name in link_hashes:
+                    given_hash = link_hashes[hash_name][0]
+                    if given_hash not in allowed_hashes:
+                        hash_mismatch(hash_name, given_hash, allowed_hashes)
+                    return
 
         hash_name, allowed_hashes = next(iter(self.hashes.items()))
         given_hash = self._get_hash(link, hash_name)
@@ -181,8 +184,6 @@
             hash_mismatch(hash_name, given_hash, allowed_hashes)
 
     def _get_hash(self, link: Link, hash_name: str) -> str:
-        if link.hash_name == hash_name:
-            return cast(str, link.hash)
         resp = self.session.get(link.normalized, stream=True)
         hasher = hashlib.new(hash_name)
         for chunk in resp.iter_content(chunk_size=1024 * 8):
@@ -240,6 +241,12 @@
                     raise LinkMismatchError(
                         f"Missing version in the filename {egg_info}"
                     )
+                try:
+                    Version(version)
+                except InvalidVersion:
+                    raise LinkMismatchError(
+                        f"Invalid version in the filename {egg_info}: 
{version}"
+                    )
             self._check_hashes(link)
         except LinkMismatchError as e:
             logger.debug("Skip link %s: %s", link, e)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/finder.py 
new/unearth-0.7.0/src/unearth/finder.py
--- old/unearth-0.6.2/src/unearth/finder.py     2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/finder.py     2022-12-12 06:10:02.955712000 
+0100
@@ -6,7 +6,7 @@
 import os
 import pathlib
 from tempfile import TemporaryDirectory
-from typing import Iterable, NamedTuple, cast
+from typing import Iterable, NamedTuple
 from urllib.parse import urljoin
 
 import packaging.requirements
@@ -349,8 +349,8 @@
             The path to the installable file or directory.
         """
         # Strip the rev part for VCS links
-        if hashes is None and link.hash_name:
-            hashes = {link.hash_name: [cast(str, link.hash)]}
+        if hashes is None:
+            hashes = link.hash_option
         if download_dir is None:
             download_dir = TemporaryDirectory(prefix="unearth-download-").name
         file = unpack_link(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/link.py 
new/unearth-0.7.0/src/unearth/link.py
--- old/unearth-0.6.2/src/unearth/link.py       2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/link.py       2022-12-12 06:10:02.955712000 
+0100
@@ -5,7 +5,7 @@
 import os
 import pathlib
 import sys
-from typing import Any
+from typing import Any, cast
 from urllib.parse import ParseResult, unquote, urlparse
 
 from unearth.utils import (
@@ -34,6 +34,10 @@
         comes_from (str|None): The index page that contains this link
         yank_reason (str|None): The reason why this link is yanked
         requires_python (str|None): The data-python-requires attribute of this 
link
+        dist_info_metadata (str|None): (PEP 658) The hash name and value of the
+            dist-info metadata, or true if hash is not available
+        hashes (dict[str, str]|None): The hash name and value of the link from
+            JSON simple API
         vcs (str|None): The vcs type of this link(git/hg/svn/bzr)
     """
 
@@ -41,6 +45,8 @@
     comes_from: str | None = None
     yank_reason: str | None = None
     requires_python: str | None = None
+    dist_info_metadata: bool | dict[str, str] | None = None
+    hashes: dict[str, str] | None = None
     vcs: str | None = dc.field(init=False, default=None)
 
     def __post_init__(self) -> None:
@@ -58,9 +64,12 @@
             "comes_from": self.comes_from,
             "yank_reason": self.yank_reason,
             "requires_python": self.requires_python,
+            "metadata": self.dist_info_link.url_without_fragment
+            if self.dist_info_link
+            else None,
         }
 
-    def _ident(self) -> tuple:
+    def __ident(self) -> tuple:
         return (self.normalized, self.yank_reason, self.requires_python)
 
     @cached_property
@@ -71,10 +80,10 @@
         return f"<Link {self.redacted} (from {self.comes_from})>"
 
     def __hash__(self) -> int:
-        return hash(self._ident())
+        return hash(self.__ident())
 
     def __eq__(self, __o: object) -> bool:
-        return isinstance(__o, Link) and self._ident() == __o._ident()
+        return isinstance(__o, Link) and self.__ident() == __o.__ident()
 
     @classmethod
     def from_path(cls, file_path: str | pathlib.Path) -> Link:
@@ -99,6 +108,14 @@
         return os.path.basename(unquote(self.parsed.path))
 
     @property
+    def dist_info_link(self) -> Link | None:
+        return (
+            type(self)(f"{self.url_without_fragment}.metadata", 
self.comes_from)
+            if self.dist_info_metadata is not None
+            else None
+        )
+
+    @property
     def is_wheel(self) -> bool:
         return self.filename.endswith(".whl")
 
@@ -145,3 +162,12 @@
     @property
     def is_yanked(self) -> bool:
         return self.yank_reason is not None
+
+    @property
+    def hash_option(self) -> dict[str, list[str]] | None:
+        """Return the hash option for the downloader to use"""
+        if self.hashes:
+            return {name: [value] for name, value in self.hashes.items()}
+        if self.hash_name:
+            return {self.hash_name: [cast(str, self.hash)]}
+        return None
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/preparer.py 
new/unearth-0.7.0/src/unearth/preparer.py
--- old/unearth-0.6.2/src/unearth/preparer.py   2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/preparer.py   2022-12-12 06:10:02.955712000 
+0100
@@ -27,7 +27,7 @@
 from unearth.vcs import vcs_support
 
 READ_CHUNK_SIZE = 8192
-logger = logging.getLogger(__package__)
+logger = logging.getLogger(__name__)
 
 
 def set_extracted_file_to_default_mode_plus_executable(path: str) -> None:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/session.py 
new/unearth-0.7.0/src/unearth/session.py
--- old/unearth-0.6.2/src/unearth/session.py    2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/session.py    2022-12-12 06:10:02.955712000 
+0100
@@ -6,6 +6,7 @@
 import logging
 import mimetypes
 import os
+import warnings
 from pathlib import Path
 from typing import Any, Iterable, cast
 
@@ -18,7 +19,7 @@
 from unearth.link import Link
 from unearth.utils import build_url_from_netloc, parse_netloc
 
-logger = logging.getLogger(__package__)
+logger = logging.getLogger(__name__)
 
 DEFAULT_MAX_RETRIES = 5
 DEFAULT_SECURE_ORIGINS = [
@@ -39,6 +40,11 @@
     def cert_verify(self, conn, url, verify, cert):
         return super().cert_verify(conn, url, verify=False, cert=cert)
 
+    def send(self, request, *args, **kwargs):
+        with warnings.catch_warnings():
+            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+            return super().send(request, *args, **kwargs)
+
 
 class InsecureHTTPAdapter(InsecureMixin, requests.adapters.HTTPAdapter):
     pass
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/vcs/base.py 
new/unearth-0.7.0/src/unearth/vcs/base.py
--- old/unearth-0.6.2/src/unearth/vcs/base.py   2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/vcs/base.py   2022-12-12 06:10:02.955712000 
+0100
@@ -12,7 +12,7 @@
 from unearth.link import Link
 from unearth.utils import compare_urls
 
-logger = logging.getLogger(__package__.split(".")[0])
+logger = logging.getLogger(__name__)
 
 
 class HiddenText:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/vcs/bazaar.py 
new/unearth-0.7.0/src/unearth/vcs/bazaar.py
--- old/unearth-0.6.2/src/unearth/vcs/bazaar.py 2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/vcs/bazaar.py 2022-12-12 06:10:02.955712000 
+0100
@@ -8,7 +8,7 @@
 from unearth.utils import display_path, path_to_url
 from unearth.vcs.base import HiddenText, VersionControl, vcs_support
 
-logger = logging.getLogger(__package__.split(".")[0])
+logger = logging.getLogger(__name__)
 
 
 @vcs_support.register
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/vcs/git.py 
new/unearth-0.7.0/src/unearth/vcs/git.py
--- old/unearth-0.6.2/src/unearth/vcs/git.py    2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/vcs/git.py    2022-12-12 06:10:02.955712000 
+0100
@@ -10,7 +10,7 @@
 from unearth.utils import add_ssh_scheme_to_git_uri, display_path, path_to_url
 from unearth.vcs.base import HiddenText, VersionControl, vcs_support
 
-logger = logging.getLogger(__package__.split(".")[0])
+logger = logging.getLogger(__name__)
 
 
 @vcs_support.register
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/vcs/hg.py 
new/unearth-0.7.0/src/unearth/vcs/hg.py
--- old/unearth-0.6.2/src/unearth/vcs/hg.py     2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/vcs/hg.py     2022-12-12 06:10:02.955712000 
+0100
@@ -6,7 +6,7 @@
 from unearth.utils import display_path, path_to_url
 from unearth.vcs.base import HiddenText, VersionControl, vcs_support
 
-logger = logging.getLogger(__package__.split(".")[0])
+logger = logging.getLogger(__name__)
 
 
 @vcs_support.register
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/src/unearth/vcs/svn.py 
new/unearth-0.7.0/src/unearth/vcs/svn.py
--- old/unearth-0.6.2/src/unearth/vcs/svn.py    2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/src/unearth/vcs/svn.py    2022-12-12 06:10:02.955712000 
+0100
@@ -9,7 +9,7 @@
 from unearth.utils import display_path, split_auth_from_netloc
 from unearth.vcs.base import HiddenText, VersionControl, vcs_support
 
-logger = logging.getLogger(__package__.split(".")[0])
+logger = logging.getLogger(__name__)
 
 _svn_xml_url_re = re.compile('url="([^"]+)"')
 _svn_rev_re = re.compile(r'committed-rev="(\d+)"')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/tests/conftest.py 
new/unearth-0.7.0/tests/conftest.py
--- old/unearth-0.6.2/tests/conftest.py 2022-11-23 03:47:14.640849600 +0100
+++ new/unearth-0.7.0/tests/conftest.py 2022-12-12 06:10:02.955712000 +0100
@@ -58,12 +58,12 @@
 def pypi():
     wsgi_app = create_app()
     with mock.patch.object(
-        PyPISession, "insecure_adapter_cls", return_value=WSGIAdapter(wsgi_app)
+        PyPISession, "insecure_adapter_cls", 
return_value=InsecureWSGIAdapter(wsgi_app)
     ):
         with mock.patch.object(
             PyPISession,
             "secure_adapter_cls",
-            return_value=InsecureWSGIAdapter(wsgi_app),
+            return_value=WSGIAdapter(wsgi_app),
         ):
             yield wsgi_app
 
@@ -95,3 +95,9 @@
         yield s
     finally:
         s.close()
+
+
+@pytest.fixture(params=["html", "json"])
+def content_type(request, monkeypatch):
+    monkeypatch.setenv("INDEX_RETURN_TYPE", request.param)
+    return request.param
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/tests/fixtures/app.py 
new/unearth-0.7.0/tests/fixtures/app.py
--- old/unearth-0.6.2/tests/fixtures/app.py     2022-11-23 03:47:14.640849600 
+0100
+++ new/unearth-0.7.0/tests/fixtures/app.py     2022-12-12 06:10:02.955712000 
+0100
@@ -1,3 +1,5 @@
+import os
+import random
 from pathlib import Path
 
 import flask
@@ -33,12 +35,29 @@
     canonical_name = canonicalize_name(package)
     if package != canonical_name:
         return flask.redirect(flask.url_for(".package_index", 
package=canonical_name))
-    return flask.send_from_directory(BASE_DIR / "index", package + ".html")
+    if os.getenv("INDEX_RETURN_TYPE", "html") == "json":
+        return flask.send_from_directory(BASE_DIR / "json", package + 
".json"), {
+            "Content-Type": "application/vnd.pypi.simple.v1+json"
+        }
+    else:
+        content_type = random.choice(
+            ["text/html", "application/vnd.pypi.simple.v1+html"]
+        )
+        return flask.send_from_directory(BASE_DIR / "index", package + 
".html"), {
+            "Content-Type": content_type
+        }
 
 
 @bp.route("/simple")
 def package_index_root():
-    packages = [p.stem for p in (BASE_DIR / "index").glob("*.html")]
+    packages = sorted(p.stem for p in (BASE_DIR / "index").glob("*.html"))
+    if os.getenv("INDEX_RETURN_TYPE", "html") == "json":
+        return flask.jsonify(
+            {
+                "meta": {"api-version": "1.0"},
+                "projects": [{"name": p} for p in packages],
+            }
+        ), {"Content-Type": "application/vnd.pypi.simple.v1+html"}
     return flask.render_template_string(INDEX_TEMPLATE, packages=packages)
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/tests/fixtures/json/black.json 
new/unearth-0.7.0/tests/fixtures/json/black.json
--- old/unearth-0.6.2/tests/fixtures/json/black.json    1970-01-01 
01:00:00.000000000 +0100
+++ new/unearth-0.7.0/tests/fixtures/json/black.json    2022-12-12 
06:10:02.955712000 +0100
@@ -0,0 +1,72 @@
+{
+  "meta": {
+    "api-version": "1.0"
+  },
+  "name": "black",
+  "files": [
+    {
+      "filename": "black-21.12b0-py3-none-any.whl",
+      "url": "/files/black-21.12b0-py3-none-any.whl",
+      "hashes": {
+        "sha256": 
"a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"
+      },
+      "requires-python": ">=3.6.2"
+    },
+    {
+      "filename": "black-21.12b0.tar.gz",
+      "url": "/files/black-21.12b0.tar.gz",
+      "hashes": {
+        "sha256": 
"77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"
+      },
+      "requires-python": ">=3.6.2"
+    },
+    {
+      "filename": 
"black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+      "url": 
"/files/black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+      "hashes": {
+        "sha256": 
"10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"
+      },
+      "requires-python": ">=3.6.2"
+    },
+    {
+      "filename": "black-22.3.0-cp38-cp38-win_amd64.whl",
+      "url": "/files/black-22.3.0-cp38-cp38-win_amd64.whl",
+      "hashes": {
+        "sha256": 
"cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"
+      },
+      "requires-python": ">=3.6.2"
+    },
+    {
+      "filename": 
"black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+      "url": 
"/files/black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+      "hashes": {
+        "sha256": 
"6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"
+      },
+      "requires-python": ">=3.6.2"
+    },
+    {
+      "filename": "black-22.3.0-cp39-cp39-win_amd64.whl",
+      "url": "/files/black-22.3.0-cp39-cp39-win_amd64.whl",
+      "hashes": {
+        "sha256": 
"9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"
+      },
+      "requires-python": ">=3.6.2"
+    },
+    {
+      "filename": "black-22.3.0-py3-none-any.whl",
+      "url": "/files/black-22.3.0-py3-none-any.whl",
+      "hashes": {
+        "sha256": 
"bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"
+      },
+      "requires-python": ">=3.6.2"
+    },
+    {
+      "filename": "black-22.3.0.tar.gz",
+      "url": "/files/black-22.3.0.tar.gz",
+      "hashes": {
+        "sha256": 
"35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"
+      },
+      "requires-python": ">=3.6.2"
+    }
+  ]
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/tests/fixtures/json/click.json 
new/unearth-0.7.0/tests/fixtures/json/click.json
--- old/unearth-0.6.2/tests/fixtures/json/click.json    1970-01-01 
01:00:00.000000000 +0100
+++ new/unearth-0.7.0/tests/fixtures/json/click.json    2022-12-12 
06:10:02.955712000 +0100
@@ -0,0 +1,40 @@
+{
+  "meta": {
+    "api-version": "1.0"
+  },
+  "name": "click",
+  "files": [
+    {
+      "filename": "click-8.1.2-py3-none-any.whl",
+      "url": "/files/click-8.1.2-py3-none-any.whl",
+      "hashes": {
+        "sha256": 
"24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"
+      },
+      "requires-python": ">=3.7"
+    },
+    {
+      "filename": "click-8.1.2.tar.gz",
+      "url": "/files/click-8.1.2.tar.gz",
+      "hashes": {
+        "sha256": 
"479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"
+      },
+      "requires-python": ">=3.7"
+    },
+    {
+      "filename": "click-8.1.3-py3-none-any.whl",
+      "url": "/files/click-8.1.3-py3-none-any.whl",
+      "hashes": {
+        "sha256": 
"bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"
+      },
+      "requires-python": ">=3.7"
+    },
+    {
+      "filename": "click-8.1.3.tar.gz",
+      "url": "/files/click-8.1.3.tar.gz",
+      "hashes": {
+        "sha256": 
"7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"
+      },
+      "requires-python": ">=3.7"
+    }
+  ]
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/tests/fixtures/json/first.json 
new/unearth-0.7.0/tests/fixtures/json/first.json
--- old/unearth-0.6.2/tests/fixtures/json/first.json    1970-01-01 
01:00:00.000000000 +0100
+++ new/unearth-0.7.0/tests/fixtures/json/first.json    2022-12-12 
06:10:02.955712000 +0100
@@ -0,0 +1,22 @@
+{
+  "meta": {
+    "api-version": "1.0"
+  },
+  "name": "first",
+  "files": [
+    {
+      "filename": "first-2.0.1-py2.py3-none-any.whl",
+      "url": "/files/first-2.0.1-py2.py3-none-any.whl",
+      "hashes": {
+        "sha256": 
"41d5b64e70507d0c3ca742d68010a76060eea8a3d863e9b5130ab11a4a91aa0e"
+      }
+    },
+    {
+      "filename": "first-2.0.2.tar.gz",
+      "url": "/files/first-2.0.2.tar.gz",
+      "hashes": {
+        "sha256": 
"ff285b08c55f8c97ce4ea7012743af2495c9f1291785f163722bd36f6af6d3bf"
+      }
+    }
+  ]
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/tests/fixtures/json/pipenv.json 
new/unearth-0.7.0/tests/fixtures/json/pipenv.json
--- old/unearth-0.6.2/tests/fixtures/json/pipenv.json   1970-01-01 
01:00:00.000000000 +0100
+++ new/unearth-0.7.0/tests/fixtures/json/pipenv.json   2022-12-12 
06:10:02.955712000 +0100
@@ -0,0 +1,42 @@
+{
+  "meta": {
+    "api-version": "1.0"
+  },
+  "name": "pipenv",
+  "files": [
+    {
+      "filename": "pipenv-2022.4.20-py2.py3-none-any.whl",
+      "url": "/files/pipenv-2022.4.20-py2.py3-none-any.whl",
+      "hashes": {
+        "sha256": 
"e59387e904ed7642e2c01026545fe57f5df9cdb9468a7d32f21947748742b602"
+      },
+      "requires-python": ">=3.6",
+      "yanked": ""
+    },
+    {
+      "filename": "pipenv-2022.4.20.tar.gz",
+      "url": "/files/pipenv-2022.4.20.tar.gz",
+      "hashes": {
+        "sha256": 
"f9101d3f9a38e9296a33630a5ed29dee4d5f2661d199dd9a60071f0c49f2e432"
+      },
+      "requires-python": ">=3.6",
+      "yanked": ""
+    },
+    {
+      "filename": "pipenv-2022.4.30-py2.py3-none-any.whl",
+      "url": "/files/pipenv-2022.4.30-py2.py3-none-any.whl",
+      "hashes": {
+        "sha256": 
"30568d90a566148a630ce3382843d59beaede28d1c4e9045278972036ebd178c"
+      },
+      "requires-python": ">=3.7"
+    },
+    {
+      "filename": "pipenv-2022.4.30.tar.gz",
+      "url": "/files/pipenv-2022.4.30.tar.gz",
+      "hashes": {
+        "sha256": 
"e26ded6ab90a7900676a1db9955d5ee714115f443aecc072b09497153ed237c7"
+      },
+      "requires-python": ">=3.7"
+    }
+  ]
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/tests/test_collector.py 
new/unearth-0.7.0/tests/test_collector.py
--- old/unearth-0.6.2/tests/test_collector.py   2022-11-23 03:47:14.644849800 
+0100
+++ new/unearth-0.7.0/tests/test_collector.py   2022-12-12 06:10:02.955712000 
+0100
@@ -38,9 +38,10 @@
         )
     )
     assert not collected
-    assert "NotHTML: only HTML is supported" in caplog.records[0].message
+    assert "Content-Type unsupported" in caplog.records[0].message
 
 
+@pytest.mark.usefixtures("content_type")
 def test_collect_links_from_index_page(pypi, session):
     collected = sorted(
         collect_links_from_location(
@@ -49,6 +50,7 @@
         key=lambda link: link.filename,
     )
     assert len(collected) == 4
+    assert all(link.url.startswith("https://test.pypi.org") for link in 
collected)
 
 
 @pytest.mark.parametrize("filename", ["findlinks", "findlinks/index.html"])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/unearth-0.6.2/tests/test_finder.py 
new/unearth-0.7.0/tests/test_finder.py
--- old/unearth-0.6.2/tests/test_finder.py      2022-11-23 03:47:14.644849800 
+0100
+++ new/unearth-0.7.0/tests/test_finder.py      2022-12-12 06:10:02.955712000 
+0100
@@ -4,7 +4,7 @@
 from unearth.evaluator import TargetPython
 from unearth.finder import PackageFinder
 
-pytestmark = pytest.mark.usefixtures("pypi")
+pytestmark = pytest.mark.usefixtures("pypi", "content_type")
 
 
 @pytest.mark.parametrize(

Reply via email to