Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-pytokens for openSUSE:Factory checked in at 2025-10-18 14:34:58
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-pytokens (Old)
 and      /work/SRC/openSUSE:Factory/.python-pytokens.new.18484 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-pytokens"

Sat Oct 18 14:34:58 2025 rev:2 rq:1311577 version:0.2.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-pytokens/python-pytokens.changes  2025-09-23 16:06:03.296527668 +0200
+++ /work/SRC/openSUSE:Factory/.python-pytokens.new.18484/python-pytokens.changes  2025-10-18 14:35:10.283783755 +0200
@@ -1,0 +2,7 @@
+Thu Oct 16 04:05:18 UTC 2025 - Steve Kowalik <[email protected]>
+
+- Update to 0.2.0:
+  * Update tests and fix packaging
+- Run the testsuite.
+
+-------------------------------------------------------------------

Old:
----
  pytokens-0.1.10.tar.gz

New:
----
  pytokens-0.2.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-pytokens.spec ++++++
--- /var/tmp/diff_new_pack.tyCiRA/_old  2025-10-18 14:35:10.947811538 +0200
+++ /var/tmp/diff_new_pack.tyCiRA/_new  2025-10-18 14:35:10.951811706 +0200
@@ -17,13 +17,15 @@
 
 
 Name:           python-pytokens
-Version:        0.1.10
+Version:        0.2.0
 Release:        0
 Summary:        A Fast, spec compliant Python 3.12+ tokenizer that runs on older Pythons
 License:        MIT
 URL:            https://github.com/tusharsadhwani/pytokens
 Source:         https://files.pythonhosted.org/packages/source/p/pytokens/pytokens-%{version}.tar.gz
 BuildRequires:  %{python_module pip}
+BuildRequires:  %{python_module pytest-cov}
+BuildRequires:  %{python_module pytest}
 BuildRequires:  %{python_module setuptools}
 BuildRequires:  %{python_module wheel}
 BuildRequires:  fdupes
@@ -45,8 +47,7 @@
 %python_expand %fdupes %{buildroot}%{$python_sitelib}
 
 %check
-# GH has no tags and pypi tarball no tests
-# pytest
+%pytest
 
 %files %{python_files}
 %doc README.md

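The %check change above swaps the commented-out pytest call for the %pytest macro, which runs the testsuite now shipped in the sdist for each built Python flavor. A minimal sketch of the roughly equivalent manual invocation (an approximation; the actual macro from python-rpm-macros also selects the per-flavor interpreter and points PYTHONPATH at the buildroot's site-packages):

```python
# Rough manual equivalent of the %pytest call in %check (a sketch;
# the real macro handles per-flavor interpreters and PYTHONPATH).
import subprocess
import sys

# Run the tests that the 0.2.0 sdist now ships under tests/.
subprocess.run([sys.executable, "-m", "pytest", "tests/"], check=True)
```
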
++++++ pytokens-0.1.10.tar.gz -> pytokens-0.2.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.1.10/MANIFEST.in new/pytokens-0.2.0/MANIFEST.in
--- old/pytokens-0.1.10/MANIFEST.in     1970-01-01 01:00:00.000000000 +0100
+++ new/pytokens-0.2.0/MANIFEST.in      2025-10-15 10:02:14.000000000 +0200
@@ -0,0 +1 @@
+recursive-include tests *
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.1.10/PKG-INFO new/pytokens-0.2.0/PKG-INFO
--- old/pytokens-0.1.10/PKG-INFO        2025-02-19 15:50:36.232754200 +0100
+++ new/pytokens-0.2.0/PKG-INFO 2025-10-15 10:02:34.391293300 +0200
@@ -1,7 +1,7 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.1
 Name: pytokens
-Version: 0.1.10
-Summary: A Fast, spec compliant Python 3.12+ tokenizer that runs on older Pythons.
+Version: 0.2.0
+Summary: A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
 Home-page: https://github.com/tusharsadhwani/pytokens
 Author: Tushar Sadhwani
 Author-email: [email protected]
@@ -33,7 +33,7 @@
 
 # pytokens
 
-A Fast, spec compliant Python 3.12+ tokenizer that runs on older Pythons.
+A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
 
 ## Installation
 
@@ -44,7 +44,7 @@
 ## Usage
 
 ```bash
-pytokens path/to/file.py
+python -m pytokens path/to/file.py
 ```
 
 ## Local Development / Testing
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.1.10/README.md new/pytokens-0.2.0/README.md
--- old/pytokens-0.1.10/README.md       2024-12-19 23:23:20.000000000 +0100
+++ new/pytokens-0.2.0/README.md        2025-10-15 10:02:14.000000000 +0200
@@ -1,6 +1,6 @@
 # pytokens
 
-A Fast, spec compliant Python 3.12+ tokenizer that runs on older Pythons.
+A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
 
 ## Installation
 
@@ -11,7 +11,7 @@
 ## Usage
 
 ```bash
-pytokens path/to/file.py
+python -m pytokens path/to/file.py
 ```
 
 ## Local Development / Testing
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.1.10/setup.cfg new/pytokens-0.2.0/setup.cfg
--- old/pytokens-0.1.10/setup.cfg       2025-02-19 15:50:36.233758400 +0100
+++ new/pytokens-0.2.0/setup.cfg        2025-10-15 10:02:34.391669000 +0200
@@ -1,7 +1,7 @@
 [metadata]
 name = pytokens
-version = 0.1.10
-description = A Fast, spec compliant Python 3.12+ tokenizer that runs on older Pythons.
+version = 0.2.0
+description = A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
 long_description = file: README.md
 long_description_content_type = text/markdown
 url = https://github.com/tusharsadhwani/pytokens
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.1.10/src/pytokens.egg-info/PKG-INFO new/pytokens-0.2.0/src/pytokens.egg-info/PKG-INFO
--- old/pytokens-0.1.10/src/pytokens.egg-info/PKG-INFO  2025-02-19 15:50:36.000000000 +0100
+++ new/pytokens-0.2.0/src/pytokens.egg-info/PKG-INFO   2025-10-15 10:02:34.000000000 +0200
@@ -1,7 +1,7 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.1
 Name: pytokens
-Version: 0.1.10
-Summary: A Fast, spec compliant Python 3.12+ tokenizer that runs on older Pythons.
+Version: 0.2.0
+Summary: A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
 Home-page: https://github.com/tusharsadhwani/pytokens
 Author: Tushar Sadhwani
 Author-email: [email protected]
@@ -33,7 +33,7 @@
 
 # pytokens
 
-A Fast, spec compliant Python 3.12+ tokenizer that runs on older Pythons.
+A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons.
 
 ## Installation
 
@@ -44,7 +44,7 @@
 ## Usage
 
 ```bash
-pytokens path/to/file.py
+python -m pytokens path/to/file.py
 ```
 
 ## Local Development / Testing
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.1.10/src/pytokens.egg-info/SOURCES.txt new/pytokens-0.2.0/src/pytokens.egg-info/SOURCES.txt
--- old/pytokens-0.1.10/src/pytokens.egg-info/SOURCES.txt       2025-02-19 15:50:36.000000000 +0100
+++ new/pytokens-0.2.0/src/pytokens.egg-info/SOURCES.txt        2025-10-15 10:02:34.000000000 +0200
@@ -1,4 +1,5 @@
 LICENSE
+MANIFEST.in
 README.md
 setup.cfg
 setup.py
@@ -10,4 +11,6 @@
 src/pytokens.egg-info/SOURCES.txt
 src/pytokens.egg-info/dependency_links.txt
 src/pytokens.egg-info/requires.txt
-src/pytokens.egg-info/top_level.txt
\ No newline at end of file
+src/pytokens.egg-info/top_level.txt
+tests/pytokens_test.py
+tests/__pycache__/pytokens_test.cpython-312-pytest-8.3.4.pyc
\ No newline at end of file
Binary files old/pytokens-0.1.10/tests/__pycache__/pytokens_test.cpython-312-pytest-8.3.4.pyc and new/pytokens-0.2.0/tests/__pycache__/pytokens_test.cpython-312-pytest-8.3.4.pyc differ
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/pytokens-0.1.10/tests/pytokens_test.py new/pytokens-0.2.0/tests/pytokens_test.py
--- old/pytokens-0.1.10/tests/pytokens_test.py  1970-01-01 01:00:00.000000000 +0100
+++ new/pytokens-0.2.0/tests/pytokens_test.py   2025-10-15 10:02:14.000000000 +0200
@@ -0,0 +1,83 @@
+from pytokens import tokenize, Token, TokenType as T
+
+
+def test_tokenize() -> None:
+    source = "def foo():\n    7.e1\n"
+    tokens = list(tokenize(source))
+    assert tokens == [
+        Token(T.identifier, 0, 3, start_line=1, start_col=0, end_line=1, end_col=3),
+        Token(T.whitespace, 3, 4, start_line=1, start_col=3, end_line=1, end_col=4),
+        Token(T.identifier, 4, 7, start_line=1, start_col=4, end_line=1, end_col=7),
+        Token(T.lparen, 7, 8, start_line=1, start_col=7, end_line=1, end_col=8),
+        Token(T.rparen, 8, 9, start_line=1, start_col=8, end_line=1, end_col=9),
+        Token(T.op, 9, 10, start_line=1, start_col=9, end_line=1, end_col=10),
+        Token(T.newline, 10, 11, start_line=1, start_col=10, end_line=1, end_col=11),
+        Token(T.indent, 11, 15, start_line=2, start_col=0, end_line=2, end_col=4),
+        Token(T.number, 15, 19, start_line=2, start_col=4, end_line=2, end_col=8),
+        Token(T.newline, 19, 20, start_line=2, start_col=8, end_line=2, end_col=9),
+        Token(T.dedent, 20, 20, start_line=3, start_col=0, end_line=3, end_col=0),
+        Token(T.endmarker, 20, 20, start_line=3, start_col=0, end_line=3, end_col=0),
+    ]
+
+    # https://github.com/psf/black/issues/3700
+    source = "{\r}"
+    tokens = list(tokenize(source))
+    assert tokens == [
+        Token(T.lbrace, 0, 1, start_line=1, start_col=0, end_line=1, end_col=1),
+        Token(T.whitespace, 1, 2, start_line=1, start_col=1, end_line=1, end_col=2),
+        Token(T.rbrace, 2, 3, start_line=1, start_col=2, end_line=1, end_col=3),
+        Token(T.newline, 3, 4, start_line=1, start_col=3, end_line=1, end_col=4),
+        Token(T.endmarker, 4, 4, start_line=2, start_col=0, end_line=2, end_col=0),
+    ]
+
+    source = "€€, x🐍y = 1, 2"
+    tokens = list(tokenize(source))
+    assert tokens == [
+        Token(T.identifier, 0, 2, start_line=1, start_col=0, end_line=1, end_col=2),
+        Token(T.op, 2, 3, start_line=1, start_col=2, end_line=1, end_col=3),
+        Token(T.whitespace, 3, 4, start_line=1, start_col=3, end_line=1, end_col=4),
+        Token(T.identifier, 4, 7, start_line=1, start_col=4, end_line=1, end_col=7),
+        Token(T.whitespace, 7, 8, start_line=1, start_col=7, end_line=1, end_col=8),
+        Token(T.op, 8, 9, start_line=1, start_col=8, end_line=1, end_col=9),
+        Token(T.whitespace, 9, 10, start_line=1, start_col=9, end_line=1, end_col=10),
+        Token(T.number, 10, 11, start_line=1, start_col=10, end_line=1, end_col=11),
+        Token(T.op, 11, 12, start_line=1, start_col=11, end_line=1, end_col=12),
+        Token(T.whitespace, 12, 13, start_line=1, start_col=12, end_line=1, end_col=13),
+        Token(T.number, 13, 14, start_line=1, start_col=13, end_line=1, end_col=14),
+        Token(T.newline, 14, 15, start_line=1, start_col=14, end_line=1, end_col=15),
+        Token(T.endmarker, 15, 15, start_line=2, start_col=0, end_line=2, end_col=0),
+    ]
+
+    source = r'''rf"\N{42}"'''
+    tokens = list(tokenize(source))
+    assert tokens == [
+        Token(T.fstring_start, 0, 3, start_line=1, start_col=0, end_line=1, end_col=3),
+        Token(T.fstring_middle, 3, 5, start_line=1, start_col=3, end_line=1, end_col=5),
+        Token(T.lbrace, 5, 6, start_line=1, start_col=5, end_line=1, end_col=6),
+        Token(T.number, 6, 8, start_line=1, start_col=6, end_line=1, end_col=8),
+        Token(T.rbrace, 8, 9, start_line=1, start_col=8, end_line=1, end_col=9),
+        Token(T.fstring_end, 9, 10, start_line=1, start_col=9, end_line=1, end_col=10),
+        Token(T.newline, 10, 11, start_line=1, start_col=10, end_line=1, end_col=11),
+        Token(T.endmarker, 11, 11, start_line=2, start_col=0, end_line=2, end_col=0),
+    ]
+
+
+def test_weird_op_case() -> None:
+    source = "\n#\r0"
+    tokens = list(tokenize(source))
+    assert tokens == [
+        Token(T.nl, 0, 1, start_line=1, start_col=0, end_line=1, end_col=1),
+        Token(T.comment, 1, 4, start_line=2, start_col=0, end_line=2, end_col=3),
+        Token(T.nl, 4, 5, start_line=2, start_col=3, end_line=2, end_col=4),
+        Token(T.endmarker, 5, 5, start_line=3, start_col=0, end_line=3, end_col=0),
+    ]
+
+    source = "\n\r0"
+    tokens = list(tokenize(source))
+    assert tokens == [
+        Token(T.nl, 0, 1, start_line=1, start_col=0, end_line=1, end_col=1),
+        Token(T.whitespace, 1, 2, start_line=2, start_col=0, end_line=2, end_col=1),
+        Token(T.number, 2, 3, start_line=2, start_col=1, end_line=2, end_col=2),
+        Token(T.newline, 3, 4, start_line=2, start_col=2, end_line=2, end_col=3),
+        Token(T.endmarker, 4, 4, start_line=3, start_col=0, end_line=3, end_col=0),
+    ]

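For readers skimming the diff: the new tests exercise pytokens' public API, where tokenize() yields Token values carrying start/end offsets plus line and column positions (1-based lines, 0-based columns, as the assertions above show). A minimal usage sketch:

```python
# Minimal sketch of the API the bundled tests exercise.
from pytokens import tokenize

source = "def foo():\n    7.e1\n"
for token in tokenize(source):
    # Each Token carries two offsets plus start/end line-column
    # positions; see the expected values asserted in the tests above.
    print(token)
```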