https://github.com/python/cpython/commit/ce00de4c8cd39816f992e749c1074487d93abe9d
commit: ce00de4c8cd39816f992e749c1074487d93abe9d
branch: main
author: Hugo van Kemenade <[email protected]>
committer: hugovk <[email protected]>
date: 2024-03-27T16:46:35+02:00
summary:
gh-117225: doctest: only print "and X failed" when non-zero, don't pluralise "1 items" (#117228)
files:
A Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst
M Doc/library/doctest.rst
M Lib/doctest.py
M Lib/test/test_doctest/test_doctest.py
diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst
index 835a3a76806148..135758187894ec 100644
--- a/Doc/library/doctest.rst
+++ b/Doc/library/doctest.rst
@@ -123,10 +123,10 @@ And so on, eventually ending with:
OverflowError: n too large
ok
2 items passed all tests:
- 1 tests in __main__
- 8 tests in __main__.factorial
- 9 tests in 2 items.
- 9 passed and 0 failed.
+ 1 test in __main__
+ 6 tests in __main__.factorial
+ 7 tests in 2 items.
+ 7 passed.
Test passed.
$
@@ -1933,7 +1933,7 @@ such a test runner::
optionflags=flags)
else:
fail, total = doctest.testmod(optionflags=flags)
- print("{} failures out of {} tests".format(fail, total))
+ print(f"{fail} failures out of {total} tests")
.. rubric:: Footnotes
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 6049423b5147a5..7a9f4e40d814d6 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -1191,9 +1191,9 @@ class DocTestRunner:
2 tests in _TestClass
2 tests in _TestClass.__init__
2 tests in _TestClass.get
- 1 tests in _TestClass.square
+ 1 test in _TestClass.square
7 tests in 4 items.
- 7 passed and 0 failed.
+ 7 passed.
Test passed.
TestResults(failed=0, attempted=7)
@@ -1568,49 +1568,59 @@ def summarize(self, verbose=None):
"""
if verbose is None:
verbose = self._verbose
- notests = []
- passed = []
- failed = []
+
+ notests, passed, failed = [], [], []
total_tries = total_failures = total_skips = 0
- for item in self._stats.items():
- name, (failures, tries, skips) = item
+
+ for name, (failures, tries, skips) in self._stats.items():
assert failures <= tries
total_tries += tries
total_failures += failures
total_skips += skips
+
if tries == 0:
notests.append(name)
elif failures == 0:
passed.append((name, tries))
else:
- failed.append(item)
+ failed.append((name, (failures, tries, skips)))
+
if verbose:
if notests:
- print(f"{len(notests)} items had no tests:")
+ print(f"{_n_items(notests)} had no tests:")
notests.sort()
for name in notests:
print(f" {name}")
+
if passed:
- print(f"{len(passed)} items passed all tests:")
- passed.sort()
- for name, count in passed:
- print(f" {count:3d} tests in {name}")
+ print(f"{_n_items(passed)} passed all tests:")
+ for name, count in sorted(passed):
+ s = "" if count == 1 else "s"
+ print(f" {count:3d} test{s} in {name}")
+
if failed:
print(self.DIVIDER)
- print(f"{len(failed)} items had failures:")
- failed.sort()
- for name, (failures, tries, skips) in failed:
+ print(f"{_n_items(failed)} had failures:")
+ for name, (failures, tries, skips) in sorted(failed):
print(f" {failures:3d} of {tries:3d} in {name}")
+
if verbose:
- print(f"{total_tries} tests in {len(self._stats)} items.")
- print(f"{total_tries - total_failures} passed and {total_failures} failed.")
+ s = "" if total_tries == 1 else "s"
+ print(f"{total_tries} test{s} in {_n_items(self._stats)}.")
+
+ and_f = f" and {total_failures} failed" if total_failures else ""
+ print(f"{total_tries - total_failures} passed{and_f}.")
+
if total_failures:
- msg = f"***Test Failed*** {total_failures} failures"
+ s = "" if total_failures == 1 else "s"
+ msg = f"***Test Failed*** {total_failures} failure{s}"
if total_skips:
- msg = f"{msg} and {total_skips} skipped tests"
+ s = "" if total_skips == 1 else "s"
+ msg = f"{msg} and {total_skips} skipped test{s}"
print(f"{msg}.")
elif verbose:
print("Test passed.")
+
return TestResults(total_failures, total_tries, skipped=total_skips)
#/////////////////////////////////////////////////////////////////
@@ -1627,6 +1637,15 @@ def merge(self, other):
d[name] = (failures, tries, skips)
+def _n_items(items: list) -> str:
+ """
+ Helper to pluralise the number of items in a list.
+ """
+ n = len(items)
+ s = "" if n == 1 else "s"
+ return f"{n} item{s}"
+
+
class OutputChecker:
"""
A class used to check the whether the actual output from a doctest
diff --git a/Lib/test/test_doctest/test_doctest.py b/Lib/test/test_doctest/test_doctest.py
index 43be200b983227..3e883c56f6c766 100644
--- a/Lib/test/test_doctest/test_doctest.py
+++ b/Lib/test/test_doctest/test_doctest.py
@@ -2628,9 +2628,9 @@ def test_testfile(): r"""
...
NameError: name 'favorite_color' is not defined
**********************************************************************
- 1 items had failures:
+ 1 item had failures:
1 of 2 in test_doctest.txt
- ***Test Failed*** 1 failures.
+ ***Test Failed*** 1 failure.
TestResults(failed=1, attempted=2)
>>> doctest.master = None # Reset master.
@@ -2657,9 +2657,9 @@ def test_testfile(): r"""
Got:
'red'
**********************************************************************
- 1 items had failures:
+ 1 item had failures:
1 of 2 in test_doctest.txt
- ***Test Failed*** 1 failures.
+ ***Test Failed*** 1 failure.
TestResults(failed=1, attempted=2)
>>> doctest.master = None # Reset master.
@@ -2689,10 +2689,10 @@ def test_testfile(): r"""
<BLANKLINE>
b
ok
- 1 items passed all tests:
+ 1 item passed all tests:
2 tests in test_doctest.txt
- 2 tests in 1 items.
- 2 passed and 0 failed.
+ 2 tests in 1 item.
+ 2 passed.
Test passed.
TestResults(failed=0, attempted=2)
>>> doctest.master = None # Reset master.
@@ -2749,7 +2749,7 @@ def test_testfile(): r"""
**********************************************************************
...
**********************************************************************
- 1 items had failures:
+ 1 item had failures:
2 of 2 in test_doctest4.txt
***Test Failed*** 2 failures.
TestResults(failed=2, attempted=2)
@@ -2772,10 +2772,10 @@ def test_testfile(): r"""
Expecting:
'b\u0105r'
ok
- 1 items passed all tests:
+ 1 item passed all tests:
2 tests in test_doctest4.txt
- 2 tests in 1 items.
- 2 passed and 0 failed.
+ 2 tests in 1 item.
+ 2 passed.
Test passed.
TestResults(failed=0, attempted=2)
>>> doctest.master = None # Reset master.
@@ -2997,10 +2997,10 @@ def test_CLI(): r"""
Expecting:
'a'
ok
- 1 items passed all tests:
+ 1 item passed all tests:
2 tests in myfile.doc
- 2 tests in 1 items.
- 2 passed and 0 failed.
+ 2 tests in 1 item.
+ 2 passed.
Test passed.
Now we'll write a couple files, one with three tests, the other a python module
@@ -3074,7 +3074,7 @@ def test_CLI(): r"""
Got:
'ajkml'
**********************************************************************
- 1 items had failures:
+ 1 item had failures:
2 of 3 in myfile.doc
***Test Failed*** 2 failures.
@@ -3101,9 +3101,9 @@ def test_CLI(): r"""
Got:
'abcdef'
**********************************************************************
- 1 items had failures:
+ 1 item had failures:
1 of 2 in myfile.doc
- ***Test Failed*** 1 failures.
+ ***Test Failed*** 1 failure.
The fifth test uses verbose with the two options, so we should get verbose
success output for the tests in both files:
@@ -3126,10 +3126,10 @@ def test_CLI(): r"""
Expecting:
'a...l'
ok
- 1 items passed all tests:
+ 1 item passed all tests:
3 tests in myfile.doc
- 3 tests in 1 items.
- 3 passed and 0 failed.
+ 3 tests in 1 item.
+ 3 passed.
Test passed.
Trying:
1 + 1
@@ -3141,12 +3141,12 @@ def test_CLI(): r"""
Expecting:
'abc def'
ok
- 1 items had no tests:
+ 1 item had no tests:
myfile2
- 1 items passed all tests:
+ 1 item passed all tests:
2 tests in myfile2.test_func
2 tests in 2 items.
- 2 passed and 0 failed.
+ 2 passed.
Test passed.
We should also check some typical error cases.
diff --git a/Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst b/Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst
new file mode 100644
index 00000000000000..b6c4850f608c2a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst
@@ -0,0 +1,2 @@
+doctest: only print "and X failed" when non-zero, don't pluralise "1 items".
+Patch by Hugo van Kemenade.
_______________________________________________
Python-checkins mailing list -- [email protected]
To unsubscribe send an email to [email protected]
https://mail.python.org/mailman3/lists/python-checkins.python.org/
Member address: [email protected]