diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7f9d0f4da09be7..28ebc1643bd694 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -182,7 +182,7 @@ jobs: - name: Display build info run: .\python.bat -m test.pythoninfo - name: Tests - run: .\PCbuild\rt.bat -p Win32 -d -q -uall -u-cpu -rwW --slowest --timeout=1200 -j0 + run: .\PCbuild\rt.bat -p Win32 -d -q --fast-ci build_win_amd64: name: 'Windows (x64)' @@ -201,7 +201,7 @@ jobs: - name: Display build info run: .\python.bat -m test.pythoninfo - name: Tests - run: .\PCbuild\rt.bat -p x64 -d -q -uall -u-cpu -rwW --slowest --timeout=1200 -j0 + run: .\PCbuild\rt.bat -p x64 -d -q --fast-ci build_win_arm64: name: 'Windows (arm64)' @@ -252,7 +252,7 @@ jobs: - name: Display build info run: make pythoninfo - name: Tests - run: make buildbottest TESTOPTS="-j4 -uall,-cpu" + run: make test build_ubuntu: name: 'Ubuntu' @@ -319,7 +319,7 @@ jobs: run: sudo mount $CPYTHON_RO_SRCDIR -oremount,rw - name: Tests working-directory: ${{ env.CPYTHON_BUILDDIR }} - run: xvfb-run make buildbottest TESTOPTS="-j4 -uall,-cpu" + run: xvfb-run make test build_ubuntu_ssltests: name: 'Ubuntu SSL tests with OpenSSL' @@ -535,7 +535,7 @@ jobs: - name: Display build info run: make pythoninfo - name: Tests - run: xvfb-run make buildbottest TESTOPTS="-j4 -uall,-cpu" + run: xvfb-run make test all-required-green: # This job does nothing and is only used for the branch protection name: All required checks pass diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 22360b22fd924b..48d6176d26bb8f 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -1381,15 +1381,19 @@ call fails (for example because the path doesn't exist). >>> p.resolve() PosixPath('/home/antoine/pathlib/setup.py') - If the path doesn't exist and *strict* is ``True``, :exc:`FileNotFoundError` - is raised. If *strict* is ``False``, the path is resolved as far as possible - and any remainder is appended without checking whether it exists. If an - infinite loop is encountered along the resolution path, :exc:`RuntimeError` - is raised. + If a path doesn't exist or a symlink loop is encountered, and *strict* is + ``True``, :exc:`OSError` is raised. If *strict* is ``False``, the path is + resolved as far as possible and any remainder is appended without checking + whether it exists. .. versionchanged:: 3.6 The *strict* parameter was added (pre-3.6 behavior is strict). + .. versionchanged:: 3.13 + Symlink loops are treated like other errors: :exc:`OSError` is raised in + strict mode, and no exception is raised in non-strict mode. In previous + versions, :exc:`RuntimeError` is raised no matter the value of *strict*. + .. method:: Path.rglob(pattern, *, case_sensitive=None, follow_symlinks=None) Glob the given relative *pattern* recursively. This is like calling diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index 1406b663c6a8e2..d6e062df945c64 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -111,7 +111,7 @@ See :ref:`__slots__ documentation ` for details. Exceptions raised by the callback will be noted on the standard error output, but cannot be propagated; they are handled in exactly the same way as exceptions - raised from an object's :meth:`__del__` method. + raised from an object's :meth:`~object.__del__` method. Weak references are :term:`hashable` if the *object* is hashable. They will maintain their hash value even after the *object* was deleted. If @@ -221,8 +221,7 @@ than needed. 
Added support for ``|`` and ``|=`` operators, as specified in :pep:`584`. :class:`WeakValueDictionary` objects have an additional method that has the -same issues as the :meth:`keyrefs` method of :class:`WeakKeyDictionary` -objects. +same issues as the :meth:`WeakKeyDictionary.keyrefs` method. .. method:: WeakValueDictionary.valuerefs() @@ -281,7 +280,7 @@ objects. Exceptions raised by finalizer callbacks during garbage collection will be shown on the standard error output, but cannot be propagated. They are handled in the same way as exceptions raised - from an object's :meth:`__del__` method or a weak reference's + from an object's :meth:`~object.__del__` method or a weak reference's callback. When the program exits, each remaining live finalizer is called @@ -523,18 +522,18 @@ is still alive. For instance obj dead or exiting -Comparing finalizers with :meth:`__del__` methods -------------------------------------------------- +Comparing finalizers with :meth:`~object.__del__` methods +--------------------------------------------------------- Suppose we want to create a class whose instances represent temporary directories. The directories should be deleted with their contents when the first of the following events occurs: * the object is garbage collected, -* the object's :meth:`remove` method is called, or +* the object's :meth:`!remove` method is called, or * the program exits. -We might try to implement the class using a :meth:`__del__` method as +We might try to implement the class using a :meth:`~object.__del__` method as follows:: class TempDir: @@ -553,12 +552,12 @@ follows:: def __del__(self): self.remove() -Starting with Python 3.4, :meth:`__del__` methods no longer prevent +Starting with Python 3.4, :meth:`~object.__del__` methods no longer prevent reference cycles from being garbage collected, and module globals are no longer forced to :const:`None` during :term:`interpreter shutdown`. So this code should work without any issues on CPython. -However, handling of :meth:`__del__` methods is notoriously implementation +However, handling of :meth:`~object.__del__` methods is notoriously implementation specific, since it depends on internal details of the interpreter's garbage collector implementation. diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index 4188edc18f9036..f260a571661b76 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -133,7 +133,6 @@ Doc/library/unittest.mock.rst Doc/library/unittest.rst Doc/library/urllib.parse.rst Doc/library/urllib.request.rst -Doc/library/weakref.rst Doc/library/wsgiref.rst Doc/library/xml.dom.minidom.rst Doc/library/xml.dom.pulldom.rst diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index 763f9778776990..82074750ec59a8 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -964,9 +964,18 @@ Main Makefile targets You can use the configure :option:`--enable-optimizations` option to make this the default target of the ``make`` command (``make all`` or just ``make``). -* ``make buildbottest``: Build Python and run the Python test suite, the same - way than buildbots test Python. Set ``TESTTIMEOUT`` variable (in seconds) - to change the test timeout (1200 by default: 20 minutes). + +* ``make test``: Build Python and run the Python test suite with ``--fast-ci`` + option. Variables: + + * ``TESTOPTS``: additional regrtest command line options. + * ``TESTPYTHONOPTS``: additional Python command line options. + * ``TESTTIMEOUT``: timeout in seconds (default: 10 minutes).
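As a rough illustration of what the new CI modes expand to, the parsed options can be inspected with regrtest's private ``_parse_args`` helper from ``Lib/test/libregrtest/cmdline.py`` (changed later in this patch). This is a minimal sketch, not part of the patch; the helper is private and may change, and the values in the comments mirror the defaults added by this change::

    from test.libregrtest import cmdline

    # --fast-ci: the mode used by GitHub Actions
    ns = cmdline._parse_args(['--fast-ci'])
    print(ns.use_mp)      # 0 -> equivalent to -j0, use all CPU cores
    print(ns.randomize)   # True
    print(ns.rerun, ns.fail_env_changed, ns.fail_rerun)  # True True True
    print(ns.use)         # [['all', '-cpu']]
    print(ns.timeout)     # 600 (10 minutes)

    # --slow-ci: the mode used by buildbot workers; only the resources
    # and the default timeout differ from --fast-ci
    ns = cmdline._parse_args(['--slow-ci'])
    print(ns.use)         # [['all']]
    print(ns.timeout)     # 1200 (20 minutes)

Per the documentation above and the Windows scripts changed in this patch, ``make test`` and ``PCbuild\rt.bat`` in the GitHub workflow pass ``--fast-ci``, while ``make buildbottest`` and ``Tools/buildbot/test.bat`` pass ``--slow-ci``.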
+ +* ``make buildbottest``: Similar to ``make test``, but use ``--slow-ci`` + option and default timeout of 20 minutes, instead of ``--fast-ci`` option + and a default timeout of 10 minutes. + * ``make install``: Build and install Python. * ``make regen-all``: Regenerate (almost) all generated files; ``make regen-stdlib-module-names`` and ``autoconf`` must be run separately diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 76f1f00dbd34dd..4874a6c77faa99 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -752,7 +752,7 @@ random * Add :func:`random.binomialvariate`. (Contributed by Raymond Hettinger in :gh:`81620`.) -* Add a default of ``lamb=1.0`` to :func:`random.expovariate`. +* Add a default of ``lambd=1.0`` to :func:`random.expovariate`. (Contributed by Raymond Hettinger in :gh:`100234`.) shutil diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py index 48d8db3ed423a5..011e79a5e73d5a 100644 --- a/Lib/concurrent/futures/process.py +++ b/Lib/concurrent/futures/process.py @@ -521,7 +521,8 @@ def terminate_broken(self, cause): # gh-107219: Close the connection writer which can unblock # Queue._feed() if it was stuck in send_bytes(). - self.call_queue._writer.close() + if sys.platform == 'win32': + self.call_queue._writer.close() # clean up resources self.join_executor_internals() diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py index 7c425a2d8e7034..dbbf106f680964 100644 --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -42,7 +42,6 @@ BUFSIZE = 8192 # A very generous timeout when it comes to local connections... CONNECTION_TIMEOUT = 20. -WSA_OPERATION_ABORTED = 995 _mmap_counter = itertools.count() @@ -300,7 +299,7 @@ def _send_bytes(self, buf): finally: self._send_ov = None nwritten, err = ov.GetOverlappedResult(True) - if err == WSA_OPERATION_ABORTED: + if err == _winapi.ERROR_OPERATION_ABORTED: # close() was called by another thread while # WaitForMultipleObjects() was waiting for the overlapped # operation. diff --git a/Lib/multiprocessing/resource_tracker.py b/Lib/multiprocessing/resource_tracker.py index 3783c1ffc6e4a9..8e41f461cc934e 100644 --- a/Lib/multiprocessing/resource_tracker.py +++ b/Lib/multiprocessing/resource_tracker.py @@ -51,15 +51,31 @@ }) +class ReentrantCallError(RuntimeError): + pass + + class ResourceTracker(object): def __init__(self): - self._lock = threading.Lock() + self._lock = threading.RLock() self._fd = None self._pid = None + def _reentrant_call_error(self): + # gh-109629: this happens if an explicit call to the ResourceTracker + # gets interrupted by a garbage collection, invoking a finalizer (*) + # that itself calls back into ResourceTracker. + # (*) for example the SemLock finalizer + raise ReentrantCallError( + "Reentrant call into the multiprocessing resource tracker") + def _stop(self): with self._lock: + # This should not happen (_stop() isn't called by a finalizer) + # but we check for it anyway. + if self._lock._recursion_count() > 1: + return self._reentrant_call_error() if self._fd is None: # not running return @@ -81,6 +97,9 @@ def ensure_running(self): This can be run from any process. Usually a child process will use the resource created by its parent.''' with self._lock: + if self._lock._recursion_count() > 1: + # The code below is certainly not reentrant-safe, so bail out + return self._reentrant_call_error() if self._fd is not None: # resource tracker was launched before, is it still running? 
if self._check_alive(): @@ -159,7 +178,17 @@ def unregister(self, name, rtype): self._send('UNREGISTER', name, rtype) def _send(self, cmd, name, rtype): - self.ensure_running() + try: + self.ensure_running() + except ReentrantCallError: + # The code below might or might not work, depending on whether + # the resource tracker was already running and still alive. + # Better warn the user. + # (XXX is warnings.warn itself reentrant-safe? :-) + warnings.warn( + f"ResourceTracker called reentrantly for resource cleanup, " + f"which is unsupported. " + f"The {rtype} object {name!r} might leak.") msg = '{0}:{1}:{2}\n'.format(cmd, name, rtype).encode('ascii') if len(msg) > 512: # posix guarantees that writes to a pipe of less than PIPE_BUF @@ -176,6 +205,7 @@ def _send(self, cmd, name, rtype): unregister = _resource_tracker.unregister getfd = _resource_tracker.getfd + def main(fd): '''Run resource tracker.''' # protect the process from ^C and "killall python" etc diff --git a/Lib/pathlib.py b/Lib/pathlib.py index f4ec315da6b4fa..bd5f61b0b7c878 100644 --- a/Lib/pathlib.py +++ b/Lib/pathlib.py @@ -1230,26 +1230,7 @@ def resolve(self, strict=False): normalizing it. """ - def check_eloop(e): - winerror = getattr(e, 'winerror', 0) - if e.errno == ELOOP or winerror == _WINERROR_CANT_RESOLVE_FILENAME: - raise RuntimeError("Symlink loop from %r" % e.filename) - - try: - s = os.path.realpath(self, strict=strict) - except OSError as e: - check_eloop(e) - raise - p = self.with_segments(s) - - # In non-strict mode, realpath() doesn't raise on symlink loops. - # Ensure we get an exception by calling stat() - if not strict: - try: - p.stat() - except OSError as e: - check_eloop(e) - return p + return self.with_segments(os.path.realpath(self, strict=strict)) def owner(self): """ diff --git a/Lib/random.py b/Lib/random.py index 84bbfc5df1bf23..1d789b107904fb 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -492,7 +492,14 @@ def choices(self, population, weights=None, *, cum_weights=None, k=1): ## -------------------- real-valued distributions ------------------- def uniform(self, a, b): - "Get a random number in the range [a, b) or [a, b] depending on rounding." + """Get a random number in the range [a, b) or [a, b] depending on rounding. + + The mean (expected value) and variance of the random variable are: + + E[X] = (a + b) / 2 + Var[X] = (b - a) ** 2 / 12 + + """ return a + (b - a) * self.random() def triangular(self, low=0.0, high=1.0, mode=None): @@ -503,6 +510,11 @@ def triangular(self, low=0.0, high=1.0, mode=None): http://en.wikipedia.org/wiki/Triangular_distribution + The mean (expected value) and variance of the random variable are: + + E[X] = (low + high + mode) / 3 + Var[X] = (low**2 + high**2 + mode**2 - low*high - low*mode - high*mode) / 18 + """ u = self.random() try: @@ -593,12 +605,15 @@ def expovariate(self, lambd=1.0): positive infinity if lambd is positive, and from negative infinity to 0 if lambd is negative. - """ - # lambd: rate lambd = 1/mean - # ('lambda' is a Python reserved word) + The mean (expected value) and variance of the random variable are: + + E[X] = 1 / lambd + Var[X] = 1 / lambd ** 2 + """ # we use 1-random() instead of random() to preclude the # possibility of taking the log of zero. 
+ return -_log(1.0 - self.random()) / lambd def vonmisesvariate(self, mu, kappa): @@ -654,8 +669,12 @@ def gammavariate(self, alpha, beta): pdf(x) = -------------------------------------- math.gamma(alpha) * beta ** alpha + The mean (expected value) and variance of the random variable are: + + E[X] = alpha * beta + Var[X] = alpha * beta ** 2 + """ - # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2 # Warning: a few older sources define the gamma distribution in terms # of alpha > -1.0 @@ -714,6 +733,11 @@ def betavariate(self, alpha, beta): Conditions on the parameters are alpha > 0 and beta > 0. Returned values range between 0 and 1. + The mean (expected value) and variance of the random variable are: + + E[X] = alpha / (alpha + beta) + Var[X] = alpha * beta / ((alpha + beta)**2 * (alpha + beta + 1)) + """ ## See ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html @@ -766,6 +790,11 @@ def binomialvariate(self, n=1, p=0.5): Returns an integer in the range: 0 <= X <= n + The mean (expected value) and variance of the random variable are: + + E[X] = n * p + Var[x] = n * p * (1 - p) + """ # Error check inputs and handle edge cases if n < 0: diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py index 99f28152f1a1c7..a0a8504fe8f606 100644 --- a/Lib/test/libregrtest/cmdline.py +++ b/Lib/test/libregrtest/cmdline.py @@ -4,6 +4,8 @@ import sys from test.support import os_helper +from .utils import MS_WINDOWS + USAGE = """\ python -m test [options] [test_name1 [test_name2 ...]] @@ -145,6 +147,7 @@ class Namespace(argparse.Namespace): def __init__(self, **kwargs) -> None: + self.ci = False self.testdir = None self.verbose = 0 self.quiet = False @@ -209,7 +212,13 @@ def _create_parser(): # We add help explicitly to control what argument group it renders under. group.add_argument('-h', '--help', action='help', help='show this help message and exit') - group.add_argument('--timeout', metavar='TIMEOUT', type=float, + group.add_argument('--fast-ci', action='store_true', + help='Fast Continuous Integration (CI) mode used by ' + 'GitHub Actions') + group.add_argument('--slow-ci', action='store_true', + help='Slow Continuous Integration (CI) mode used by ' + 'buildbot workers') + group.add_argument('--timeout', metavar='TIMEOUT', help='dump the traceback and exit if a test takes ' 'more than TIMEOUT seconds; disabled if TIMEOUT ' 'is negative or equals to zero') @@ -384,7 +393,49 @@ def _parse_args(args, **kwargs): for arg in ns.args: if arg.startswith('-'): parser.error("unrecognized arguments: %s" % arg) - sys.exit(1) + + if ns.timeout is not None: + # Support "--timeout=" (no value) so Makefile.pre.pre TESTTIMEOUT + # can be used by "make buildbottest" and "make test". 
+ if ns.timeout != "": + try: + ns.timeout = float(ns.timeout) + except ValueError: + parser.error(f"invalid timeout value: {ns.timeout!r}") + else: + ns.timeout = None + + # Continuous Integration (CI): common options for fast/slow CI modes + if ns.slow_ci or ns.fast_ci: + # Similar to options: + # + # -j0 --randomize --fail-env-changed --fail-rerun --rerun + # --slowest --verbose3 --nowindows + if ns.use_mp is None: + ns.use_mp = 0 + ns.randomize = True + ns.fail_env_changed = True + ns.fail_rerun = True + ns.rerun = True + ns.print_slow = True + ns.verbose3 = True + if MS_WINDOWS: + ns.nowindows = True # Silence alerts under Windows + + # When both --slow-ci and --fast-ci options are present, + # --slow-ci has the priority + if ns.slow_ci: + # Similar to: -u "all" --timeout=1200 + if not ns.use: + ns.use = [['all']] + if ns.timeout is None: + ns.timeout = 1200 # 20 minutes + elif ns.fast_ci: + # Similar to: -u "all,-cpu" --timeout=600 + if not ns.use: + ns.use = [['all', '-cpu']] + if ns.timeout is None: + ns.timeout = 600 # 10 minutes if ns.single and ns.fromfile: parser.error("-s and -f don't go together!") diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index a9dd08702deb59..2cd79a1eae5c91 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -20,7 +20,8 @@ StrPath, StrJSON, TestName, TestList, TestTuple, FilterTuple, strip_py_suffix, count, format_duration, printlist, get_temp_dir, get_work_dir, exit_timeout, - display_header, cleanup_temp_dir) + display_header, cleanup_temp_dir, + MS_WINDOWS) class Regrtest: @@ -424,7 +425,7 @@ def _run_tests(self, selected: TestTuple, tests: TestList | None) -> int: if (self.want_header or not(self.pgo or self.quiet or self.single_test_run or tests or self.cmdline_args)): - display_header() + display_header(self.use_resources) if self.randomize: print("Using random seed", self.random_seed) @@ -435,7 +436,15 @@ def _run_tests(self, selected: TestTuple, tests: TestList | None) -> int: setup_process() - self.logger.start_load_tracker() + if self.hunt_refleak and not self.num_workers: + # gh-109739: WindowsLoadTracker thread interfers with refleak check + use_load_tracker = False + else: + # WindowsLoadTracker is only needed on Windows + use_load_tracker = MS_WINDOWS + + if use_load_tracker: + self.logger.start_load_tracker() try: if self.num_workers: self._run_tests_mp(runtests, self.num_workers) @@ -448,7 +457,8 @@ def _run_tests(self, selected: TestTuple, tests: TestList | None) -> int: if self.want_rerun and self.results.need_rerun(): self.rerun_failed_tests(runtests) finally: - self.logger.stop_load_tracker() + if use_load_tracker: + self.logger.stop_load_tracker() self.display_summary() self.finalize_tests(tracer) diff --git a/Lib/test/libregrtest/results.py b/Lib/test/libregrtest/results.py index 1a8619fb62be2a..35df50d581ff6a 100644 --- a/Lib/test/libregrtest/results.py +++ b/Lib/test/libregrtest/results.py @@ -8,11 +8,13 @@ printlist, count, format_duration) +# Python uses exit code 1 when an exception is not catched +# argparse.ArgumentParser.error() uses exit code 2 EXITCODE_BAD_TEST = 2 EXITCODE_ENV_CHANGED = 3 EXITCODE_NO_TESTS_RAN = 4 EXITCODE_RERUN_FAIL = 5 -EXITCODE_INTERRUPTED = 130 +EXITCODE_INTERRUPTED = 130 # 128 + signal.SIGINT=2 class TestResults: diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index 6af949cea9c926..f3f0eb53b32100 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -547,7 +547,7 @@ def 
adjust_rlimit_nofile(): f"{new_fd_limit}: {err}.") -def display_header(): +def display_header(use_resources: tuple[str, ...]): encoding = sys.stdout.encoding # Print basic platform information @@ -569,6 +569,13 @@ def display_header(): print("== encodings: locale=%s, FS=%s" % (locale.getencoding(), sys.getfilesystemencoding())) + + if use_resources: + print(f"== resources ({len(use_resources)}): " + f"{', '.join(sorted(use_resources))}") + else: + print(f"== resources: (all disabled, use -u option)") + # This makes it easier to remember what to set in your local # environment when trying to reproduce a sanitizer failure. asan = support.check_sanitizer(address=True) diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py index a4f52cb20ad301..e53e24b18f2760 100644 --- a/Lib/test/lock_tests.py +++ b/Lib/test/lock_tests.py @@ -330,6 +330,42 @@ def test_release_save_unacquired(self): lock.release() self.assertRaises(RuntimeError, lock._release_save) + def test_recursion_count(self): + lock = self.locktype() + self.assertEqual(0, lock._recursion_count()) + lock.acquire() + self.assertEqual(1, lock._recursion_count()) + lock.acquire() + lock.acquire() + self.assertEqual(3, lock._recursion_count()) + lock.release() + self.assertEqual(2, lock._recursion_count()) + lock.release() + lock.release() + self.assertEqual(0, lock._recursion_count()) + + phase = [] + + def f(): + lock.acquire() + phase.append(None) + while len(phase) == 1: + _wait() + lock.release() + phase.append(None) + + with threading_helper.wait_threads_exit(): + start_new_thread(f, ()) + while len(phase) == 0: + _wait() + self.assertEqual(len(phase), 1) + self.assertEqual(0, lock._recursion_count()) + phase.append(None) + while len(phase) == 2: + _wait() + self.assertEqual(len(phase), 3) + self.assertEqual(0, lock._recursion_count()) + def test_different_thread(self): # Cannot release from a different thread lock = self.locktype() @@ -1014,13 +1050,15 @@ def test_default_timeout(self): """ Test the barrier's default timeout """ - # create a barrier with a low default timeout - barrier = self.barriertype(self.N, timeout=0.3) + # gh-109401: Barrier timeout should be long enough + # to create 4 threads on a slow CI. + timeout = 1.0 + barrier = self.barriertype(self.N, timeout=timeout) def f(): i = barrier.wait() if i == self.N // 2: - # One thread is later than the default timeout of 0.3s. - time.sleep(1.0) + # One thread is later than the default timeout. 
+ time.sleep(timeout * 2) self.assertRaises(threading.BrokenBarrierError, barrier.wait) self.run_threads(f) diff --git a/Lib/test/test_concurrent_futures/test_deadlock.py b/Lib/test/test_concurrent_futures/test_deadlock.py index a76e075c3be180..af702542081ad9 100644 --- a/Lib/test/test_concurrent_futures/test_deadlock.py +++ b/Lib/test/test_concurrent_futures/test_deadlock.py @@ -145,6 +145,9 @@ def test_exit_at_task_unpickle(self): self._check_crash(BrokenProcessPool, id, ExitAtUnpickle()) def test_error_at_task_unpickle(self): + # gh-109832: Restore stderr overriden by _raise_error_ignore_stderr() + self.addCleanup(setattr, sys, 'stderr', sys.stderr) + # Check problem occurring while unpickling a task on workers self._check_crash(BrokenProcessPool, id, ErrorAtUnpickle()) @@ -180,6 +183,9 @@ def test_error_during_result_pickle_on_worker(self): self._check_crash(PicklingError, _return_instance, ErrorAtPickle) def test_error_during_result_unpickle_in_result_handler(self): + # gh-109832: Restore stderr overriden by _raise_error_ignore_stderr() + self.addCleanup(setattr, sys, 'stderr', sys.stderr) + # Check problem occurring while unpickling a task in # the result_handler thread self._check_crash(BrokenProcessPool, diff --git a/Lib/test/test_importlib/test_locks.py b/Lib/test/test_importlib/test_locks.py index ba9cf51c261d52..7091c36aaaf761 100644 --- a/Lib/test/test_importlib/test_locks.py +++ b/Lib/test/test_importlib/test_locks.py @@ -29,6 +29,8 @@ class ModuleLockAsRLockTests: test_timeout = None # _release_save() unsupported test_release_save_unacquired = None + # _recursion_count() unsupported + test_recursion_count = None # lock status in repr unsupported test_repr = None test_locked_repr = None diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py index 09df3fe471fc3e..484a5e6c3bd64d 100644 --- a/Lib/test/test_pathlib.py +++ b/Lib/test/test_pathlib.py @@ -3178,10 +3178,11 @@ def test_absolute(self): self.assertEqual(str(P('//a').absolute()), '//a') self.assertEqual(str(P('//a/b').absolute()), '//a/b') - def _check_symlink_loop(self, *args, strict=True): + def _check_symlink_loop(self, *args): path = self.cls(*args) - with self.assertRaises(RuntimeError): - print(path.resolve(strict)) + with self.assertRaises(OSError) as cm: + path.resolve(strict=True) + self.assertEqual(cm.exception.errno, errno.ELOOP) @unittest.skipIf( is_emscripten or is_wasi, @@ -3240,7 +3241,8 @@ def test_resolve_loop(self): os.symlink('linkZ/../linkZ', join('linkZ')) self._check_symlink_loop(BASE, 'linkZ') # Non-strict - self._check_symlink_loop(BASE, 'linkZ', 'foo', strict=False) + p = self.cls(BASE, 'linkZ', 'foo') + self.assertEqual(p.resolve(strict=False), p) # Loops with absolute symlinks. 
os.symlink(join('linkU/inside'), join('linkU')) self._check_symlink_loop(BASE, 'linkU') @@ -3249,7 +3251,8 @@ def test_resolve_loop(self): os.symlink(join('linkW/../linkW'), join('linkW')) self._check_symlink_loop(BASE, 'linkW') # Non-strict - self._check_symlink_loop(BASE, 'linkW', 'foo', strict=False) + q = self.cls(BASE, 'linkW', 'foo') + self.assertEqual(q.resolve(strict=False), q) def test_glob(self): P = self.cls diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index 4b819cbbb8dfc3..15aab609ed1ba7 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -23,8 +23,9 @@ from test import support from test.support import os_helper, TestStats, without_optimizer from test.libregrtest import cmdline -from test.libregrtest import utils +from test.libregrtest import main from test.libregrtest import setup +from test.libregrtest import utils from test.libregrtest.utils import normalize_test_name if not support.has_subprocess_support: @@ -75,8 +76,15 @@ def test_help(self): def test_timeout(self): ns = self.parse_args(['--timeout', '4.2']) self.assertEqual(ns.timeout, 4.2) + + # negative, zero and empty string are treated as "no timeout" + for value in ('-1', '0', ''): + with self.subTest(value=value): + ns = self.parse_args([f'--timeout={value}']) + self.assertEqual(ns.timeout, None) + self.checkError(['--timeout'], 'expected one argument') - self.checkError(['--timeout', 'foo'], 'invalid float value') + self.checkError(['--timeout', 'foo'], 'invalid timeout value:') def test_wait(self): ns = self.parse_args(['--wait']) @@ -366,6 +374,44 @@ def test_unknown_option(self): self.checkError(['--unknown-option'], 'unrecognized arguments: --unknown-option') + def check_ci_mode(self, args, use_resources): + ns = cmdline._parse_args(args) + if utils.MS_WINDOWS: + self.assertTrue(ns.nowindows) + + # Check Regrtest attributes which are more reliable than Namespace + # which has an unclear API + regrtest = main.Regrtest(ns) + self.assertNotEqual(regrtest.num_workers, 0) + self.assertTrue(regrtest.want_rerun) + self.assertTrue(regrtest.randomize) + self.assertIsNone(regrtest.random_seed) + self.assertTrue(regrtest.fail_env_changed) + self.assertTrue(regrtest.fail_rerun) + self.assertTrue(regrtest.print_slowest) + self.assertTrue(regrtest.output_on_failure) + self.assertEqual(sorted(regrtest.use_resources), sorted(use_resources)) + return regrtest + + def test_fast_ci(self): + args = ['--fast-ci'] + use_resources = sorted(cmdline.ALL_RESOURCES) + use_resources.remove('cpu') + regrtest = self.check_ci_mode(args, use_resources) + self.assertEqual(regrtest.timeout, 10 * 60) + + def test_fast_ci_resource(self): + # it should be possible to override resources + args = ['--fast-ci', '-u', 'network'] + use_resources = ['network'] + self.check_ci_mode(args, use_resources) + + def test_slow_ci(self): + args = ['--slow-ci'] + use_resources = sorted(cmdline.ALL_RESOURCES) + regrtest = self.check_ci_mode(args, use_resources) + self.assertEqual(regrtest.timeout, 20 * 60) + @dataclasses.dataclass(slots=True) class Rerun: diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index 9c16c4044f66a8..71fcad268b8036 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -1783,6 +1783,9 @@ class ConditionAsRLockTests(lock_tests.RLockTests): # Condition uses an RLock by default and exports its API. 
locktype = staticmethod(threading.Condition) + def test_recursion_count(self): + self.skipTest("Condition does not expose _recursion_count()") + class ConditionTests(lock_tests.ConditionTests): condtype = staticmethod(threading.Condition) diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index a894bb10bd04da..eb83aa39425515 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -559,6 +559,13 @@ def test_zippath_from_non_installed_posix(self): platlibdir, stdlib_zip) additional_pythonpath_for_non_installed = [] + + # gh-109748: Don't copy __pycache__/ sub-directories, because they can + # be modified by other Python tests running in parallel. + ignored_names = {'__pycache__'} + def ignore_pycache(src, names): + return ignored_names + # Copy stdlib files to the non-installed python so venv can # correctly calculate the prefix. for eachpath in sys.path: @@ -575,7 +582,8 @@ def test_zippath_from_non_installed_posix(self): if os.path.isfile(fn): shutil.copy(fn, libdir) elif os.path.isdir(fn): - shutil.copytree(fn, os.path.join(libdir, name)) + shutil.copytree(fn, os.path.join(libdir, name), + ignore=ignore_pycache) else: additional_pythonpath_for_non_installed.append( eachpath) diff --git a/Lib/test/test_zipfile/test_core.py b/Lib/test/test_zipfile/test_core.py index 9960259c4cde0c..0f6c0f2107ce6b 100644 --- a/Lib/test/test_zipfile/test_core.py +++ b/Lib/test/test_zipfile/test_core.py @@ -3203,14 +3203,14 @@ def test_no_data(self): b = s.pack(2, 0) c = s.pack(3, 0) - self.assertEqual(b'', zipfile._strip_extra(a, (self.ZIP64_EXTRA,))) - self.assertEqual(b, zipfile._strip_extra(b, (self.ZIP64_EXTRA,))) + self.assertEqual(b'', zipfile._Extra.strip(a, (self.ZIP64_EXTRA,))) + self.assertEqual(b, zipfile._Extra.strip(b, (self.ZIP64_EXTRA,))) self.assertEqual( - b+b"z", zipfile._strip_extra(b+b"z", (self.ZIP64_EXTRA,))) + b+b"z", zipfile._Extra.strip(b+b"z", (self.ZIP64_EXTRA,))) - self.assertEqual(b+c, zipfile._strip_extra(a+b+c, (self.ZIP64_EXTRA,))) - self.assertEqual(b+c, zipfile._strip_extra(b+a+c, (self.ZIP64_EXTRA,))) - self.assertEqual(b+c, zipfile._strip_extra(b+c+a, (self.ZIP64_EXTRA,))) + self.assertEqual(b+c, zipfile._Extra.strip(a+b+c, (self.ZIP64_EXTRA,))) + self.assertEqual(b+c, zipfile._Extra.strip(b+a+c, (self.ZIP64_EXTRA,))) + self.assertEqual(b+c, zipfile._Extra.strip(b+c+a, (self.ZIP64_EXTRA,))) def test_with_data(self): s = struct.Struct("/dev/null 2>&1; then \ - pybuildbot.identify "CC='$(CC)'" "CXX='$(CXX)'"; \ - fi - $(TESTRUNNER) -j 1 -u all -W --slowest --fail-env-changed --fail-rerun --timeout=$(TESTTIMEOUT) $(TESTOPTS) + -@if which pybuildbot.identify >/dev/null 2>&1; then \ + pybuildbot.identify "CC='$(CC)'" "CXX='$(CXX)'"; \ + fi + $(TESTRUNNER) --slow-ci --timeout=$(TESTTIMEOUT) $(TESTOPTS) # Like testall, but run Python tests with HOSTRUNNER directly. 
.PHONY: hostrunnertest hostrunnertest: all - $(RUNSHARED) $(HOSTRUNNER) ./$(BUILDPYTHON) -m test -u all $(TESTOPTS) + $(RUNSHARED) $(HOSTRUNNER) ./$(BUILDPYTHON) -m test --slow-ci --timeout=$(TESTTIMEOUT) $(TESTOPTS) .PHONY: pythoninfo pythoninfo: all $(RUNSHARED) $(HOSTRUNNER) ./$(BUILDPYTHON) -m test.pythoninfo -QUICKTESTOPTS= $(TESTOPTS) -x test_subprocess test_io \ +QUICKTESTOPTS= -x test_subprocess test_io \ test_multibytecodec test_urllib2_localnet test_itertools \ test_multiprocessing_fork test_multiprocessing_spawn \ test_multiprocessing_forkserver \ @@ -1912,7 +1913,7 @@ QUICKTESTOPTS= $(TESTOPTS) -x test_subprocess test_io \ .PHONY: quicktest quicktest: all - $(TESTRUNNER) $(QUICKTESTOPTS) + $(TESTRUNNER) --fast-ci --timeout=$(TESTTIMEOUT) $(TESTOPTS) $(QUICKTESTOPTS) # SSL tests .PHONY: multisslcompile diff --git a/Misc/NEWS.d/next/Library/2023-02-20-12-00-11.gh-issue-88233.o5Zb0t.rst b/Misc/NEWS.d/next/Library/2023-02-20-12-00-11.gh-issue-88233.o5Zb0t.rst new file mode 100644 index 00000000000000..945f92d3dfa93b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-20-12-00-11.gh-issue-88233.o5Zb0t.rst @@ -0,0 +1,2 @@ +Refactored ``zipfile._strip_extra`` to use higher level abstractions for +extras instead of a heavy-state loop. diff --git a/Misc/NEWS.d/next/Library/2023-09-09-17-09-54.gh-issue-109187.dIayNW.rst b/Misc/NEWS.d/next/Library/2023-09-09-17-09-54.gh-issue-109187.dIayNW.rst new file mode 100644 index 00000000000000..31b3ef77807cde --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-09-17-09-54.gh-issue-109187.dIayNW.rst @@ -0,0 +1,3 @@ +:meth:`pathlib.Path.resolve` now treats symlink loops like other errors: in +strict mode, :exc:`OSError` is raised, and in non-strict mode, no exception +is raised. diff --git a/Misc/NEWS.d/next/Library/2023-09-22-20-16-44.gh-issue-109593.LboaNM.rst b/Misc/NEWS.d/next/Library/2023-09-22-20-16-44.gh-issue-109593.LboaNM.rst new file mode 100644 index 00000000000000..292aea0be24dfb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-22-20-16-44.gh-issue-109593.LboaNM.rst @@ -0,0 +1 @@ +Avoid deadlocking on a reentrant call to the multiprocessing resource tracker. Such a reentrant call, though unlikely, can happen if a GC pass invokes the finalizer for a multiprocessing object such as SemLock. diff --git a/Misc/NEWS.d/next/Library/2023-09-25-23-00-37.gh-issue-109631.eWSqpO.rst b/Misc/NEWS.d/next/Library/2023-09-25-23-00-37.gh-issue-109631.eWSqpO.rst new file mode 100644 index 00000000000000..58af2e57068267 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-25-23-00-37.gh-issue-109631.eWSqpO.rst @@ -0,0 +1,3 @@ +:mod:`re` functions such as :func:`re.findall`, :func:`re.split`, +:func:`re.search` and :func:`re.sub` which perform short repeated matches +can now be interrupted by the user. diff --git a/Misc/NEWS.d/next/Tests/2023-09-19-13-33-20.gh-issue-109566.aX0g9o.rst b/Misc/NEWS.d/next/Tests/2023-09-19-13-33-20.gh-issue-109566.aX0g9o.rst new file mode 100644 index 00000000000000..10f90132c37ec9 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-09-19-13-33-20.gh-issue-109566.aX0g9o.rst @@ -0,0 +1,4 @@ +regrtest: Add ``--fast-ci`` and ``--slow-ci`` options. ``--fast-ci`` uses a +default timeout of 10 minutes and ``-u all,-cpu`` (skip slowest tests). +``--slow-ci`` uses a default timeout of 20 minutes and ``-u all`` (run all +tests). Patch by Victor Stinner.
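The resource tracker fix above (gh-109593) is built on the new private ``RLock._recursion_count()`` helper added in ``Modules/_threadmodule.c`` later in this patch. A minimal sketch of the same reentrancy-detection pattern, assuming an interpreter that includes this change; ``cleanup`` is a hypothetical name, while ``ReentrantCallError`` mirrors the exception added in ``resource_tracker.py``::

    import threading

    class ReentrantCallError(RuntimeError):
        pass

    _lock = threading.RLock()

    def cleanup():
        # The lock is reentrant, so a nested call from the same thread
        # (for example a finalizer run by a garbage collection pass that
        # interrupts the body below) re-acquires it instead of deadlocking.
        with _lock:
            if _lock._recursion_count() > 1:
                # Recursion count above 1 means we re-entered cleanup()
                # while it was already running in this thread: bail out,
                # the work below is not reentrant-safe.
                raise ReentrantCallError("reentrant call into cleanup()")
            ...  # non-reentrant-safe cleanup work goes here

    cleanup()  # top-level call: recursion count is 1, the work runs

In the patch itself, ``ResourceTracker._send()`` catches ``ReentrantCallError`` and emits a warning about a possible leak rather than deadlocking.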
diff --git a/Misc/NEWS.d/next/Tests/2023-09-25-23-59-37.gh-issue-109739.MUn7K5.rst b/Misc/NEWS.d/next/Tests/2023-09-25-23-59-37.gh-issue-109739.MUn7K5.rst new file mode 100644 index 00000000000000..291524c758ca68 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-09-25-23-59-37.gh-issue-109739.MUn7K5.rst @@ -0,0 +1,3 @@ +regrtest: Fix reference leak check on Windows. Disable the load tracker on +Windows in the reference leak check mode (-R option). Patch by Victor +Stinner. diff --git a/Misc/NEWS.d/next/Tests/2023-09-26-00-49-18.gh-issue-109748.nxlT1i.rst b/Misc/NEWS.d/next/Tests/2023-09-26-00-49-18.gh-issue-109748.nxlT1i.rst new file mode 100644 index 00000000000000..840366ba8d1611 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-09-26-00-49-18.gh-issue-109748.nxlT1i.rst @@ -0,0 +1,3 @@ +Fix ``test_zippath_from_non_installed_posix()`` of test_venv: don't copy +``__pycache__/`` sub-directories, because they can be modified by other Python +tests running in parallel. Patch by Victor Stinner. diff --git a/Modules/_sre/sre.h b/Modules/_sre/sre.h index f60078d6bb999b..83d89d57b11199 100644 --- a/Modules/_sre/sre.h +++ b/Modules/_sre/sre.h @@ -95,6 +95,7 @@ typedef struct { size_t data_stack_base; /* current repeat context */ SRE_REPEAT *repeat; + unsigned int sigcount; } SRE_STATE; typedef struct { diff --git a/Modules/_sre/sre_lib.h b/Modules/_sre/sre_lib.h index ae80009fd63bbe..3c805aeeca0974 100644 --- a/Modules/_sre/sre_lib.h +++ b/Modules/_sre/sre_lib.h @@ -564,7 +564,7 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) Py_ssize_t alloc_pos, ctx_pos = -1; Py_ssize_t ret = 0; int jump; - unsigned int sigcount=0; + unsigned int sigcount = state->sigcount; SRE(match_context)* ctx; SRE(match_context)* nextctx; @@ -1567,8 +1567,10 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) ctx_pos = ctx->last_ctx_pos; jump = ctx->jump; DATA_POP_DISCARD(ctx); - if (ctx_pos == -1) + if (ctx_pos == -1) { + state->sigcount = sigcount; return ret; + } DATA_LOOKUP_AT(SRE(match_context), ctx, ctx_pos); switch (jump) { diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index fa98df516b8b10..e77e30dfe5e821 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -490,6 +490,18 @@ PyDoc_STRVAR(rlock_release_save_doc, \n\ For internal use by `threading.Condition`."); +static PyObject * +rlock_recursion_count(rlockobject *self, PyObject *Py_UNUSED(ignored)) +{ + unsigned long tid = PyThread_get_thread_ident(); + return PyLong_FromUnsignedLong( + self->rlock_owner == tid ? 
self->rlock_count : 0UL); +} + +PyDoc_STRVAR(rlock_recursion_count_doc, +"_recursion_count() -> int\n\ +\n\ +For internal use by reentrancy checks."); static PyObject * rlock_is_owned(rlockobject *self, PyObject *Py_UNUSED(ignored)) @@ -565,6 +577,8 @@ static PyMethodDef rlock_methods[] = { METH_VARARGS, rlock_acquire_restore_doc}, {"_release_save", (PyCFunction)rlock_release_save, METH_NOARGS, rlock_release_save_doc}, + {"_recursion_count", (PyCFunction)rlock_recursion_count, + METH_NOARGS, rlock_recursion_count_doc}, {"__enter__", _PyCFunction_CAST(rlock_acquire), METH_VARARGS | METH_KEYWORDS, rlock_acquire_doc}, {"__exit__", (PyCFunction)rlock_release, diff --git a/Tools/buildbot/test.bat b/Tools/buildbot/test.bat index c1b2605a4b2c7e..781f9a4c8206c8 100644 --- a/Tools/buildbot/test.bat +++ b/Tools/buildbot/test.bat @@ -5,7 +5,7 @@ setlocal set PATH=%PATH%;%SystemRoot%\SysNative\OpenSSH;%SystemRoot%\System32\OpenSSH set here=%~dp0 set rt_opts=-q -d -set regrtest_args=-j1 +set regrtest_args= set arm32_ssh= :CheckOpts @@ -23,7 +23,7 @@ if "%PROCESSOR_ARCHITECTURE%"=="ARM" if "%arm32_ssh%"=="true" goto NativeExecuti if "%arm32_ssh%"=="true" goto :Arm32Ssh :NativeExecution -call "%here%..\..\PCbuild\rt.bat" %rt_opts% -uall -rwW --slowest --timeout=1200 %regrtest_args% +call "%here%..\..\PCbuild\rt.bat" %rt_opts% --slow-ci %regrtest_args% exit /b %ERRORLEVEL% :Arm32Ssh @@ -35,7 +35,7 @@ if NOT "%REMOTE_PYTHON_DIR:~-1,1%"=="\" (set REMOTE_PYTHON_DIR=%REMOTE_PYTHON_DI set TEMP_ARGS=--temp %REMOTE_PYTHON_DIR%temp -set rt_args=%rt_opts% %dashU% -rwW --slowest --timeout=1200 %regrtest_args% %TEMP_ARGS% +set rt_args=%rt_opts% --slow-ci %dashU% %regrtest_args% %TEMP_ARGS% ssh %SSH_SERVER% "set TEMP=%REMOTE_PYTHON_DIR%temp& cd %REMOTE_PYTHON_DIR% & %REMOTE_PYTHON_DIR%PCbuild\rt.bat" %rt_args% set ERR=%ERRORLEVEL% scp %SSH_SERVER%:"%REMOTE_PYTHON_DIR%test-results.xml" "%PYTHON_SOURCE%\test-results.xml" diff --git a/Tools/scripts/run_tests.py b/Tools/scripts/run_tests.py index 445a34ae3e8eee..c62ae82dd788d5 100644 --- a/Tools/scripts/run_tests.py +++ b/Tools/scripts/run_tests.py @@ -18,9 +18,6 @@ def is_multiprocess_flag(arg): return arg.startswith('-j') or arg.startswith('--multiprocess') -def is_resource_use_flag(arg): - return arg.startswith('-u') or arg.startswith('--use') - def is_python_flag(arg): return arg.startswith('-p') or arg.startswith('--python') @@ -56,20 +53,13 @@ def main(regrtest_args): args.extend(test.support.args_from_interpreter_flags()) args.extend(['-m', 'test', # Run the test suite - '-r', # Randomize test order - '-w', # Re-run failed tests in verbose mode + '--fast-ci', # Fast Continuous Integration mode ]) - if sys.platform == 'win32': - args.append('-n') # Silence alerts under Windows if not any(is_multiprocess_flag(arg) for arg in regrtest_args): if cross_compile and hostrunner: # For now use only two cores for cross-compiled builds; # hostrunner can be expensive. args.extend(['-j', '2']) - else: - args.extend(['-j', '0']) # Use all CPU cores - if not any(is_resource_use_flag(arg) for arg in regrtest_args): - args.extend(['-u', 'all,-largefile,-audio,-gui']) if cross_compile and hostrunner: # If HOSTRUNNER is set and -p/--python option is not given, then