Script 'mail_helper' called by obssrc

Hello community,

here is the log from the commit of package python-tokenize-rt for openSUSE:Factory checked in at 2023-08-18 19:29:05
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-tokenize-rt (Old)
 and      /work/SRC/openSUSE:Factory/.python-tokenize-rt.new.1766 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-tokenize-rt" Fri Aug 18 19:29:05 2023 rev:3 rq:1104641 version:5.1.0 Changes: -------- --- /work/SRC/openSUSE:Factory/python-tokenize-rt/python-tokenize-rt.changes 2021-12-27 16:07:17.433695913 +0100 +++ /work/SRC/openSUSE:Factory/.python-tokenize-rt.new.1766/python-tokenize-rt.changes 2023-08-18 19:29:39.339455406 +0200 @@ -1,0 +2,20 @@ +Fri Aug 11 07:54:56 UTC 2023 - Sebastian Wagner <se...@sebix.at> + +- update to version 5.1.0: + - drop support for python 3.7 + - remove no_implicit_optional + this is the default in mypy 0.990 +- update to version 5.0.0: + - remove python 2 compatible parsing + - convert pip install comand to a block + github will produce a copy-paste button + - remove unused type: ignore + - Update default branch to main + - upgrade flake8-typing-imports + - drop python3.6 support + python 3.6 reached end of life on 2021-12-23 + - remove --fail-under from tox (covdefaults handles this) + - improve coverage pragmas with covdefaults 2.1 + - replace exit(main()) with raise SystemExit(main()) + +------------------------------------------------------------------- Old: ---- tokenize-rt-4.2.1.tar.gz New: ---- tokenize-rt-5.1.0.tar.gz ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Other differences: ------------------ ++++++ python-tokenize-rt.spec ++++++ --- /var/tmp/diff_new_pack.kLioJ6/_old 2023-08-18 19:29:40.023456632 +0200 +++ /var/tmp/diff_new_pack.kLioJ6/_new 2023-08-18 19:29:40.031456647 +0200 @@ -1,7 +1,7 @@ # # spec file for package python-tokenize-rt # -# Copyright (c) 2021 SUSE LLC +# Copyright (c) 2023 SUSE LLC # # All modifications and additions to the file contributed by third parties # remain the property of their copyright owners, unless otherwise agreed @@ -16,21 +16,21 @@ # -%{?!python_module:%define python_module() python-%{**} python3-%{**}} Name: python-tokenize-rt -Version: 4.2.1 +Version: 5.1.0 Release: 0 Summary: A wrapper around the stdlib `tokenize` which roundtrips License: MIT Group: Development/Languages/Python URL: https://github.com/asottile/tokenize-rt Source: https://github.com/asottile/tokenize-rt/archive/refs/tags/v{%{version}}.tar.gz#/tokenize-rt-%{version}.tar.gz +BuildRequires: %{python_module base >= 3.8} BuildRequires: %{python_module pytest} BuildRequires: %{python_module setuptools} BuildRequires: fdupes BuildRequires: python-rpm-macros Requires(post): update-alternatives -Requires(postun): update-alternatives +Requires(postun):update-alternatives BuildArch: noarch %python_subpackages @@ -61,6 +61,8 @@ %license LICENSE %doc README.md %python_alternative %{_bindir}/tokenize-rt -%{python_sitelib}/* +%{python_sitelib}/tokenize_rt* +%{python_sitelib}/__pycache__/tokenize_rt* +%{python_sitelib}/tokenize_rt-%{version}*-info %changelog ++++++ tokenize-rt-4.2.1.tar.gz -> tokenize-rt-5.1.0.tar.gz ++++++ diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/.github/FUNDING.yml new/tokenize-rt-5.1.0/.github/FUNDING.yml --- old/tokenize-rt-4.2.1/.github/FUNDING.yml 2021-10-21 17:19:55.000000000 +0200 +++ new/tokenize-rt-5.1.0/.github/FUNDING.yml 1970-01-01 01:00:00.000000000 +0100 @@ -1 +0,0 @@ -github: asottile diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/.github/workflows/main.yml new/tokenize-rt-5.1.0/.github/workflows/main.yml --- old/tokenize-rt-4.2.1/.github/workflows/main.yml 1970-01-01 01:00:00.000000000 +0100 +++ new/tokenize-rt-5.1.0/.github/workflows/main.yml 2023-06-10 
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-tokenize-rt.spec ++++++
--- /var/tmp/diff_new_pack.kLioJ6/_old  2023-08-18 19:29:40.023456632 +0200
+++ /var/tmp/diff_new_pack.kLioJ6/_new  2023-08-18 19:29:40.031456647 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-tokenize-rt
 #
-# Copyright (c) 2021 SUSE LLC
+# Copyright (c) 2023 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -16,21 +16,21 @@
 #


-%{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-tokenize-rt
-Version:        4.2.1
+Version:        5.1.0
 Release:        0
 Summary:        A wrapper around the stdlib `tokenize` which roundtrips
 License:        MIT
 Group:          Development/Languages/Python
 URL:            https://github.com/asottile/tokenize-rt
 Source:         https://github.com/asottile/tokenize-rt/archive/refs/tags/v{%{version}}.tar.gz#/tokenize-rt-%{version}.tar.gz
+BuildRequires:  %{python_module base >= 3.8}
 BuildRequires:  %{python_module pytest}
 BuildRequires:  %{python_module setuptools}
 BuildRequires:  fdupes
 BuildRequires:  python-rpm-macros
 Requires(post): update-alternatives
-Requires(postun): update-alternatives
+Requires(postun):update-alternatives
 BuildArch:      noarch

 %python_subpackages
@@ -61,6 +61,8 @@
 %license LICENSE
 %doc README.md
 %python_alternative %{_bindir}/tokenize-rt
-%{python_sitelib}/*
+%{python_sitelib}/tokenize_rt*
+%{python_sitelib}/__pycache__/tokenize_rt*
+%{python_sitelib}/tokenize_rt-%{version}*-info

 %changelog
++++++ tokenize-rt-4.2.1.tar.gz -> tokenize-rt-5.1.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/.github/FUNDING.yml new/tokenize-rt-5.1.0/.github/FUNDING.yml
--- old/tokenize-rt-4.2.1/.github/FUNDING.yml   2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/.github/FUNDING.yml   1970-01-01 01:00:00.000000000 +0100
@@ -1 +0,0 @@
-github: asottile
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/.github/workflows/main.yml new/tokenize-rt-5.1.0/.github/workflows/main.yml
--- old/tokenize-rt-4.2.1/.github/workflows/main.yml    1970-01-01 01:00:00.000000000 +0100
+++ new/tokenize-rt-5.1.0/.github/workflows/main.yml    2023-06-10 22:35:47.000000000 +0200
@@ -0,0 +1,19 @@
+name: main
+
+on:
+  push:
+    branches: [main, test-me-*]
+    tags: '*'
+  pull_request:
+
+jobs:
+  main-windows:
+    uses: asottile/workflows/.github/workflows/tox.yml@v1.5.0
+    with:
+      env: '["py38"]'
+      os: windows-latest
+  main-linux:
+    uses: asottile/workflows/.github/workflows/tox.yml@v1.5.0
+    with:
+      env: '["py38", "py39"]'
+      os: ubuntu-latest
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/.gitignore new/tokenize-rt-5.1.0/.gitignore
--- old/tokenize-rt-4.2.1/.gitignore    2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/.gitignore    2023-06-10 22:35:47.000000000 +0200
@@ -1,7 +1,4 @@
 *.egg-info
 *.pyc
-/.pytest_cache
 /.coverage
 /.tox
-/venv*
-/.mypy_cache
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/.pre-commit-config.yaml new/tokenize-rt-5.1.0/.pre-commit-config.yaml
--- old/tokenize-rt-4.2.1/.pre-commit-config.yaml       2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/.pre-commit-config.yaml       2023-06-10 22:35:47.000000000 +0200
@@ -1,44 +1,42 @@
 repos:
 -   repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.0.1
+    rev: v4.4.0
     hooks:
-    -   id: check-docstring-first
+    -   id: trailing-whitespace
+    -   id: end-of-file-fixer
     -   id: check-yaml
     -   id: debug-statements
     -   id: double-quote-string-fixer
-    -   id: end-of-file-fixer
     -   id: name-tests-test
     -   id: requirements-txt-fixer
-    -   id: trailing-whitespace
 -   repo: https://github.com/asottile/setup-cfg-fmt
-    rev: v1.18.0
+    rev: v2.3.0
     hooks:
     -   id: setup-cfg-fmt
--   repo: https://github.com/PyCQA/flake8
-    rev: 4.0.1
-    hooks:
-    -   id: flake8
-        additional_dependencies: [flake8-typing-imports==1.7.0]
--   repo: https://github.com/pre-commit/mirrors-autopep8
-    rev: v1.5.7
-    hooks:
-    -   id: autopep8
--   repo: https://github.com/asottile/reorder_python_imports
-    rev: v2.6.0
+-   repo: https://github.com/asottile/reorder-python-imports
+    rev: v3.9.0
     hooks:
     -   id: reorder-python-imports
-        args: [--py3-plus]
+        args: [--py37-plus, --add-import, 'from __future__ import annotations']
 -   repo: https://github.com/asottile/add-trailing-comma
-    rev: v2.1.0
+    rev: v2.4.0
     hooks:
     -   id: add-trailing-comma
         args: [--py36-plus]
 -   repo: https://github.com/asottile/pyupgrade
-    rev: v2.29.0
+    rev: v3.4.0
     hooks:
     -   id: pyupgrade
-        args: [--py36-plus]
+        args: [--py37-plus]
+-   repo: https://github.com/pre-commit/mirrors-autopep8
+    rev: v2.0.2
+    hooks:
+    -   id: autopep8
+-   repo: https://github.com/PyCQA/flake8
+    rev: 6.0.0
+    hooks:
+    -   id: flake8
 -   repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.910-1
+    rev: v1.3.0
     hooks:
     -   id: mypy
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/README.md new/tokenize-rt-5.1.0/README.md
--- old/tokenize-rt-4.2.1/README.md     2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/README.md     2023-06-10 22:35:47.000000000 +0200
@@ -1,6 +1,5 @@
-[](https://dev.azure.com/asottile/asottile/_build/latest?definitionId=25&branchName=master)
-[](https://dev.azure.com/asottile/asottile/_build/latest?definitionId=25&branchName=master)
-[](https://results.pre-commit.ci/latest/github/asottile/tokenize-rt/master)
+[](https://github.com/asottile/tokenize-rt/actions/workflows/main.yml)
+[](https://results.pre-commit.ci/latest/github/asottile/tokenize-rt/main)

 tokenize-rt
 ===========
@@ -15,7 +14,9 @@

 ## Installation

-`pip install tokenize-rt`
+```bash
+pip install tokenize-rt
+```

 ## Usage

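One hook change above is worth spelling out: reorder-python-imports now runs with --add-import 'from __future__ import annotations', which is why the .py files later in this diff all gain that import. A small sketch (the function and values are made up for illustration) of what the future import buys on the 3.8+ interpreters the package now targets:

    from __future__ import annotations


    # PEP 563 turns annotations into lazily evaluated strings, so PEP 604
    # unions and builtin generics can be written even on Python 3.8 --
    # matching the Optional[...] -> "| None" and Tuple/List -> tuple/list
    # rewrites in tokenize_rt.py further down.
    def find(values: list[int], target: int) -> int | None:
        for i, value in enumerate(values):
            if value == target:
                return i
        return None


    print(find([3, 1, 4], 4))  # -> 2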
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/azure-pipelines.yml new/tokenize-rt-5.1.0/azure-pipelines.yml
--- old/tokenize-rt-4.2.1/azure-pipelines.yml   2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/azure-pipelines.yml   1970-01-01 01:00:00.000000000 +0100
@@ -1,23 +0,0 @@
-trigger:
-  branches:
-    include: [master, test-me-*]
-  tags:
-    include: ['*']
-
-resources:
-  repositories:
-    - repository: asottile
-      type: github
-      endpoint: github
-      name: asottile/azure-pipeline-templates
-      ref: refs/tags/v2.1.0
-
-jobs:
-- template: job--python-tox.yml@asottile
-  parameters:
-    toxenvs: [py37]
-    os: windows
-- template: job--python-tox.yml@asottile
-  parameters:
-    toxenvs: [pypy3, py36, py37, py38]
-    os: linux
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/requirements-dev.txt new/tokenize-rt-5.1.0/requirements-dev.txt
--- old/tokenize-rt-4.2.1/requirements-dev.txt  2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/requirements-dev.txt  2023-06-10 22:35:47.000000000 +0200
@@ -1,3 +1,3 @@
-covdefaults
+covdefaults>=2.1
 coverage
 pytest
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/setup.cfg new/tokenize-rt-5.1.0/setup.cfg
--- old/tokenize-rt-4.2.1/setup.cfg     2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/setup.cfg     2023-06-10 22:35:47.000000000 +0200
@@ -1,6 +1,6 @@
 [metadata]
 name = tokenize_rt
-version = 4.2.1
+version = 5.1.0
 description = A wrapper around the stdlib `tokenize` which roundtrips.
 long_description = file: README.md
 long_description_content_type = text/markdown
@@ -8,22 +8,17 @@
 author = Anthony Sottile
 author_email = asott...@umich.edu
 license = MIT
-license_file = LICENSE
+license_files = LICENSE
 classifiers =
     License :: OSI Approved :: MIT License
     Programming Language :: Python :: 3
     Programming Language :: Python :: 3 :: Only
-    Programming Language :: Python :: 3.6
-    Programming Language :: Python :: 3.7
-    Programming Language :: Python :: 3.8
-    Programming Language :: Python :: 3.9
-    Programming Language :: Python :: 3.10
     Programming Language :: Python :: Implementation :: CPython
     Programming Language :: Python :: Implementation :: PyPy

 [options]
 py_modules = tokenize_rt
-python_requires = >=3.6.1
+python_requires = >=3.8

 [options.entry_points]
 console_scripts =
@@ -40,7 +35,6 @@
 disallow_any_generics = true
 disallow_incomplete_defs = true
 disallow_untyped_defs = true
-no_implicit_optional = true
 warn_redundant_casts = true
 warn_unused_ignores = true

diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/setup.py new/tokenize-rt-5.1.0/setup.py
--- old/tokenize-rt-4.2.1/setup.py      2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/setup.py      2023-06-10 22:35:47.000000000 +0200
@@ -1,2 +1,4 @@
+from __future__ import annotations
+
 from setuptools import setup
 setup()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/testing/resources/backslash_continuation.py new/tokenize-rt-5.1.0/testing/resources/backslash_continuation.py
--- old/tokenize-rt-4.2.1/testing/resources/backslash_continuation.py  2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/testing/resources/backslash_continuation.py  2023-06-10 22:35:47.000000000 +0200
@@ -1,3 +1,4 @@
+from __future__ import annotations
 x = \
     5

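The setup.cfg hunk above also drops no_implicit_optional from the [mypy] section; per the changelog this is simply the default since mypy 0.990. A hedged illustration (the function names are made up) of what that default means for annotations:

    from __future__ import annotations


    # With implicit Optional gone, a None default no longer silently turns
    # the declared type into Optional[str]: the first signature is rejected
    # under mypy's defaults, the second is the required spelling.  Both run
    # fine at runtime; only the type checker cares.
    def greet_implicit(name: str = None) -> str:  # mypy: error by default
        return f'hello, {name}'


    def greet_explicit(name: str | None = None) -> str:
        return f'hello, {name or "world"}'


    print(greet_explicit())  # -> hello, world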
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/testing/resources/simple.py new/tokenize-rt-5.1.0/testing/resources/simple.py
--- old/tokenize-rt-4.2.1/testing/resources/simple.py   2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/testing/resources/simple.py   1970-01-01 01:00:00.000000000 +0100
@@ -1 +0,0 @@
-x = 5
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/testing/resources/unicode_snowman.py new/tokenize-rt-5.1.0/testing/resources/unicode_snowman.py
--- old/tokenize-rt-4.2.1/testing/resources/unicode_snowman.py  2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/testing/resources/unicode_snowman.py  2023-06-10 22:35:47.000000000 +0200
@@ -1 +1,2 @@
+from __future__ import annotations
 x = '☃'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/tests/tokenize_rt_test.py new/tokenize-rt-5.1.0/tests/tokenize_rt_test.py
--- old/tokenize-rt-4.2.1/tests/tokenize_rt_test.py     2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/tests/tokenize_rt_test.py     2023-06-10 22:35:47.000000000 +0200
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import re

 import pytest
@@ -165,37 +167,6 @@
 ]


-@pytest.mark.parametrize('prefix', ('f', 'ur', 'rb', 'F', 'UR', 'RB'))
-def test_src_to_tokens_string_prefix_normalization(prefix):
-    src = f"{prefix}'foo'\n"
-    ret = src_to_tokens(src)
-    assert ret == [
-        Token('STRING', f"{prefix}'foo'", line=1, utf8_byte_offset=0),
-        Token('NEWLINE', '\n', line=1, utf8_byte_offset=5 + len(prefix)),
-        Token('ENDMARKER', '', line=2, utf8_byte_offset=0),
-    ]
-
-
-def test_src_to_tokens_octal_literal_normalization():
-    ret = src_to_tokens('0755\n')
-    assert ret == [
-        Token('NUMBER', '0755', line=1, utf8_byte_offset=0),
-        Token('NEWLINE', '\n', line=1, utf8_byte_offset=4),
-        Token('ENDMARKER', '', line=2, utf8_byte_offset=0),
-    ]
-
-
-@pytest.mark.parametrize('postfix', ('l', 'L'))
-def test_src_to_tokens_long_literal_normalization(postfix):
-    src = f'123{postfix}\n'
-    ret = src_to_tokens(src)
-    assert ret == [
-        Token('NUMBER', f'123{postfix}', line=1, utf8_byte_offset=0),
-        Token('NEWLINE', '\n', line=1, utf8_byte_offset=4),
-        Token('ENDMARKER', '', line=2, utf8_byte_offset=0),
-    ]
-
-
 @pytest.mark.parametrize(
     'filename',
     (
@@ -315,8 +286,10 @@
     assert rfind_string_parts(tokens, n) == (expected_i,)


-def test_main(capsys):
-    main(('testing/resources/simple.py',))
+def test_main(capsys, tmp_path):
+    f = tmp_path.joinpath('simple.py')
+    f.write_text('x = 5\n')
+    main((str(f),))
     out, _ = capsys.readouterr()
     assert out == (
         "1:0 NAME 'x'\n"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/tokenize_rt.py new/tokenize-rt-5.1.0/tokenize_rt.py
--- old/tokenize-rt-4.2.1/tokenize_rt.py        2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/tokenize_rt.py        2023-06-10 22:35:47.000000000 +0200
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import argparse
 import io
 import keyword
@@ -6,20 +8,17 @@
 import tokenize
 from typing import Generator
 from typing import Iterable
-from typing import List
 from typing import NamedTuple
-from typing import Optional
 from typing import Pattern
 from typing import Sequence
-from typing import Tuple

 # this is a performance hack. see https://bugs.python.org/issue43014
-if (
+if (  # pragma: no branch
     sys.version_info < (3, 10) and
     callable(getattr(tokenize, '_compile', None))
-):  # pragma: no cover (<py310)
+):  # pragma: <3.10 cover
     from functools import lru_cache
-    tokenize._compile = lru_cache()(tokenize._compile)  # type: ignore
+    tokenize._compile = lru_cache()(tokenize._compile)

 ESCAPED_NL = 'ESCAPED_NL'
 UNIMPORTANT_WS = 'UNIMPORTANT_WS'
@@ -27,15 +26,15 @@


 class Offset(NamedTuple):
-    line: Optional[int] = None
-    utf8_byte_offset: Optional[int] = None
+    line: int | None = None
+    utf8_byte_offset: int | None = None


 class Token(NamedTuple):
     name: str
     src: str
-    line: Optional[int] = None
-    utf8_byte_offset: Optional[int] = None
+    line: int | None = None
+    utf8_byte_offset: int | None = None

     @property
     def offset(self) -> Offset:
@@ -43,11 +42,10 @@


 _string_re = re.compile('^([^\'"]*)(.*)$', re.DOTALL)
-_string_prefixes = frozenset('bfru')
 _escaped_nl_re = re.compile(r'\\(\n|\r\n|\r)')


-def _re_partition(regex: Pattern[str], s: str) -> Tuple[str, str, str]:
+def _re_partition(regex: Pattern[str], s: str) -> tuple[str, str, str]:
     match = regex.search(s)
     if match:
         return s[:match.start()], s[slice(*match.span())], s[match.end():]
@@ -55,7 +53,7 @@
         return (s, '', '')


-def src_to_tokens(src: str) -> List[Token]:
+def src_to_tokens(src: str) -> list[Token]:
     tokenize_target = io.StringIO(src)
     lines = ('',) + tuple(tokenize_target)

@@ -98,33 +96,7 @@
             end_offset += len(newtok.encode())

         tok_name = tokenize.tok_name[tok_type]
-        # when a string prefix is not recognized, the tokenizer produces a
-        # NAME token followed by a STRING token
-        if (
-                tok_name == 'STRING' and
-                tokens and
-                tokens[-1].name == 'NAME' and
-                frozenset(tokens[-1].src.lower()) <= _string_prefixes
-        ):
-            newsrc = tokens[-1].src + tok_text
-            tokens[-1] = tokens[-1]._replace(src=newsrc, name=tok_name)
-        # produce octal literals as a single token in python 3 as well
-        elif (
-                tok_name == 'NUMBER' and
-                tokens and
-                tokens[-1].name == 'NUMBER'
-        ):
-            tokens[-1] = tokens[-1]._replace(src=tokens[-1].src + tok_text)
-        # produce long literals as a single token in python 3 as well
-        elif (
-                tok_name == 'NAME' and
-                tok_text.lower() == 'l' and
-                tokens and
-                tokens[-1].name == 'NUMBER'
-        ):
-            tokens[-1] = tokens[-1]._replace(src=tokens[-1].src + tok_text)
-        else:
-            tokens.append(Token(tok_name, tok_text, sline, end_offset))
+        tokens.append(Token(tok_name, tok_text, sline, end_offset))
         last_line, last_col = eline, ecol
         if sline != eline:
             end_offset = len(lines[last_line][:last_col].encode())
@@ -140,19 +112,19 @@


 def reversed_enumerate(
         tokens: Sequence[Token],
-) -> Generator[Tuple[int, Token], None, None]:
+) -> Generator[tuple[int, Token], None, None]:
     for i in reversed(range(len(tokens))):
         yield i, tokens[i]


-def parse_string_literal(src: str) -> Tuple[str, str]:
+def parse_string_literal(src: str) -> tuple[str, str]:
     """parse a string literal's source into (prefix, string)"""
     match = _string_re.match(src)
     assert match is not None
     return match.group(1), match.group(2)


-def rfind_string_parts(tokens: Sequence[Token], i: int) -> Tuple[int, ...]:
+def rfind_string_parts(tokens: Sequence[Token], i: int) -> tuple[int, ...]:
     """find the indicies of the string parts of a (joined) string literal

     - `i` should start at the end of the string literal
@@ -195,7 +167,7 @@
     return tuple(reversed(ret))


-def main(argv: Optional[Sequence[str]] = None) -> int:
+def main(argv: Sequence[str] | None = None) -> int:
     parser = argparse.ArgumentParser()
     parser.add_argument('filename')
     args = parser.parse_args(argv)
@@ -210,4 +182,4 @@


 if __name__ == '__main__':
-    exit(main())
+    raise SystemExit(main())
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tokenize-rt-4.2.1/tox.ini new/tokenize-rt-5.1.0/tox.ini
--- old/tokenize-rt-4.2.1/tox.ini       2021-10-21 17:19:55.000000000 +0200
+++ new/tokenize-rt-5.1.0/tox.ini       2023-06-10 22:35:47.000000000 +0200
@@ -1,12 +1,12 @@
 [tox]
-envlist = py36,py37,py38,pypy3,pre-commit
+envlist = py,pre-commit

 [testenv]
 deps = -rrequirements-dev.txt
 commands =
     coverage erase
     coverage run -m pytest {posargs:tests}
-    coverage report --fail-under 100
+    coverage report

 [testenv:pre-commit]
 skip_install = true
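As background for the library changes above, a minimal round-trip sketch against the public tokenize-rt API (src_to_tokens / tokens_to_src); the exact token stream printed depends on the Python version, so treat the output as illustrative only:

    from tokenize_rt import src_to_tokens, tokens_to_src

    src = 'x = 5\n'
    tokens = src_to_tokens(src)

    # tokenize-rt keeps every byte of the input, adding synthetic tokens such
    # as UNIMPORTANT_WS for whitespace the stdlib tokenizer would discard.
    for token in tokens:
        print(token.name, repr(token.src), token.offset)

    # The defining property: the token list converts back to the original
    # source unchanged -- the "roundtrips" in the package summary.
    assert tokens_to_src(tokens) == src

With the python 2 compatible parsing removed in 5.0.0, string prefixes, octal literals and long literals are no longer merged into single tokens; whatever the stdlib tokenizer emits is kept as-is.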