diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 6f8fe005621c88..98ab4008bed7cf 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -6,7 +6,7 @@ ENV WASI_SDK_VERSION=21 ENV WASI_SDK_PATH=/opt/wasi-sdk ENV WASMTIME_HOME=/opt/wasmtime -ENV WASMTIME_VERSION=18.0.3 +ENV WASMTIME_VERSION=22.0.0 ENV WASMTIME_CPU_ARCH=x86_64 RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \ diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e8f4a4693a814c..95e30ac3001c9c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -72,6 +72,7 @@ Include/internal/pycore_freelist.h @ericsnowcurrently Include/internal/pycore_global_objects.h @ericsnowcurrently Include/internal/pycore_obmalloc.h @ericsnowcurrently Include/internal/pycore_pymem.h @ericsnowcurrently +Include/internal/pycore_stackref.h @Fidget-Spinner Modules/main.c @ericsnowcurrently Programs/_bootstrap_python.c @ericsnowcurrently Programs/python.c @ericsnowcurrently diff --git a/.github/workflows/reusable-wasi.yml b/.github/workflows/reusable-wasi.yml index c389fe9e173b38..db6c04ec2ac1c5 100644 --- a/.github/workflows/reusable-wasi.yml +++ b/.github/workflows/reusable-wasi.yml @@ -11,7 +11,7 @@ jobs: timeout-minutes: 60 runs-on: ubuntu-22.04 env: - WASMTIME_VERSION: 18.0.3 + WASMTIME_VERSION: 22.0.0 WASI_SDK_VERSION: 21 WASI_SDK_PATH: /opt/wasi-sdk CROSS_BUILD_PYTHON: cross-build/build @@ -20,9 +20,9 @@ jobs: - uses: actions/checkout@v4 # No problem resolver registered as one doesn't currently exist for Clang. - name: "Install wasmtime" - uses: jcbhmr/setup-wasmtime@v2 + uses: bytecodealliance/actions/wasmtime/setup@v1 with: - wasmtime-version: ${{ env.WASMTIME_VERSION }} + version: ${{ env.WASMTIME_VERSION }} - name: "Restore WASI SDK" id: cache-wasi-sdk uses: actions/cache@v4 @@ -50,8 +50,10 @@ jobs: uses: actions/cache@v4 with: path: ${{ env.CROSS_BUILD_PYTHON }}/config.cache - # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }} + # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python. + # Include the hash of `Tools/wasm/wasi.py` as it may change the environment variables. + # (Make sure to keep the key in sync with the other config.cache step below.) + key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }} - name: "Configure build Python" run: python3 Tools/wasm/wasi.py configure-build-python -- --config-cache --with-pydebug - name: "Make build Python" @@ -60,8 +62,8 @@ jobs: uses: actions/cache@v4 with: path: ${{ env.CROSS_BUILD_WASI }}/config.cache - # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-wasi-sdk-${{ env.WASI_SDK_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }} + # Should be kept in sync with the other config.cache step above. 
+ key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }} - name: "Configure host" # `--with-pydebug` inferred from configure-build-python run: python3 Tools/wasm/wasi.py configure-host -- --config-cache diff --git a/Doc/c-api/cell.rst b/Doc/c-api/cell.rst index f8cd0344fdd1c0..61eb994c370946 100644 --- a/Doc/c-api/cell.rst +++ b/Doc/c-api/cell.rst @@ -39,7 +39,8 @@ Cell objects are not likely to be useful elsewhere. .. c:function:: PyObject* PyCell_Get(PyObject *cell) - Return the contents of the cell *cell*. + Return the contents of the cell *cell*, which can be ``NULL``. + If *cell* is not a cell object, returns ``NULL`` with an exception set. .. c:function:: PyObject* PyCell_GET(PyObject *cell) @@ -52,8 +53,10 @@ Cell objects are not likely to be useful elsewhere. Set the contents of the cell object *cell* to *value*. This releases the reference to any current content of the cell. *value* may be ``NULL``. *cell* - must be non-``NULL``; if it is not a cell object, ``-1`` will be returned. On - success, ``0`` will be returned. + must be non-``NULL``. + + On success, return ``0``. + If *cell* is not a cell object, set an exception and return ``-1``. .. c:function:: void PyCell_SET(PyObject *cell, PyObject *value) diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst index 63e3bed6727987..ce9d5a0f758b29 100644 --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -43,6 +43,8 @@ Module Objects to ``None``); the caller is responsible for providing a :attr:`__file__` attribute. + Return ``NULL`` with an exception set on error. + .. versionadded:: 3.3 .. versionchanged:: 3.4 @@ -265,6 +267,8 @@ of the following two module creation functions: API version *module_api_version*. If that version does not match the version of the running interpreter, a :exc:`RuntimeWarning` is emitted. + Return ``NULL`` with an exception set on error. + .. note:: Most uses of this function should be using :c:func:`PyModule_Create` @@ -461,6 +465,8 @@ objects dynamically. Note that both ``PyModule_FromDefAndSpec`` and If that version does not match the version of the running interpreter, a :exc:`RuntimeWarning` is emitted. + Return ``NULL`` with an exception set on error. + .. note:: Most uses of this function should be using :c:func:`PyModule_FromDefAndSpec` @@ -601,15 +607,16 @@ state: .. c:function:: int PyModule_AddIntConstant(PyObject *module, const char *name, long value) Add an integer constant to *module* as *name*. This convenience function can be - used from the module's initialization function. Return ``-1`` on error, ``0`` on - success. + used from the module's initialization function. + Return ``-1`` with an exception set on error, ``0`` on success. .. c:function:: int PyModule_AddStringConstant(PyObject *module, const char *name, const char *value) Add a string constant to *module* as *name*. This convenience function can be used from the module's initialization function. The string *value* must be - ``NULL``-terminated. Return ``-1`` on error, ``0`` on success. + ``NULL``-terminated. + Return ``-1`` with an exception set on error, ``0`` on success. .. c:macro:: PyModule_AddIntMacro(module, macro) @@ -617,7 +624,7 @@ state: Add an int constant to *module*. The name and the value are taken from *macro*. For example ``PyModule_AddIntMacro(module, AF_INET)`` adds the int constant *AF_INET* with the value of *AF_INET* to *module*. 
- Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. c:macro:: PyModule_AddStringMacro(module, macro) @@ -630,7 +637,7 @@ state: The type object is finalized by calling internally :c:func:`PyType_Ready`. The name of the type object is taken from the last component of :c:member:`~PyTypeObject.tp_name` after dot. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.9 @@ -643,7 +650,7 @@ state: import machinery assumes the module does not support running without the GIL. This function is only available in Python builds configured with :option:`--disable-gil`. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.13 @@ -682,14 +689,14 @@ since multiple such modules can be created from a single definition. The caller must hold the GIL. - Return 0 on success or -1 on failure. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.3 .. c:function:: int PyState_RemoveModule(PyModuleDef *def) Removes the module object created from *def* from the interpreter state. - Return 0 on success or -1 on failure. + Return ``-1`` with an exception set on error, ``0`` on success. The caller must hold the GIL. diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 8eeac3fc8a1e58..2103a64d8ffbb7 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -52,6 +52,7 @@ Object Protocol The reference is borrowed from the interpreter, and is valid until the interpreter finalization. + .. versionadded:: 3.13 diff --git a/Doc/c-api/slice.rst b/Doc/c-api/slice.rst index 27a1757c745d8b..8adf6a961378a3 100644 --- a/Doc/c-api/slice.rst +++ b/Doc/c-api/slice.rst @@ -23,7 +23,9 @@ Slice Objects Return a new slice object with the given values. The *start*, *stop*, and *step* parameters are used as the values of the slice object attributes of the same names. Any of the values may be ``NULL``, in which case the - ``None`` will be used for the corresponding attribute. Return ``NULL`` if + ``None`` will be used for the corresponding attribute. + + Return ``NULL`` with an exception set if the new object could not be allocated. @@ -52,7 +54,7 @@ Slice Objects of bounds indices are clipped in a manner consistent with the handling of normal slices. - Returns ``0`` on success and ``-1`` on error with exception set. + Return ``0`` on success and ``-1`` on error with an exception set. .. note:: This function is considered not safe for resizable sequences. @@ -95,7 +97,7 @@ Slice Objects ``PY_SSIZE_T_MIN`` to ``PY_SSIZE_T_MIN``, and silently boost the step values less than ``-PY_SSIZE_T_MAX`` to ``-PY_SSIZE_T_MAX``. - Return ``-1`` on error, ``0`` on success. + Return ``-1`` with an exception set on error, ``0`` on success. .. versionadded:: 3.6.1 diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index c9ef076c78c66a..0091e084308245 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -1592,7 +1592,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) weak references to the type object itself. It is an error to set both the :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` bit and - :c:member:`~PyTypeObject.tp_weaklist`. + :c:member:`~PyTypeObject.tp_weaklistoffset`. **Inheritance:** @@ -1604,7 +1604,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) 
**Default:** If the :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` bit is set in the - :c:member:`~PyTypeObject.tp_dict` field, then + :c:member:`~PyTypeObject.tp_flags` field, then :c:member:`~PyTypeObject.tp_weaklistoffset` will be set to a negative value, to indicate that it is unsafe to use this field. diff --git a/Doc/conf.py b/Doc/conf.py index 8a14646801ebac..29b1b2db32718b 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -272,6 +272,9 @@ ('c:data', 'PyExc_UnicodeWarning'), ('c:data', 'PyExc_UserWarning'), ('c:data', 'PyExc_Warning'), + # Undocumented public C macros + ('c:macro', 'Py_BUILD_ASSERT'), + ('c:macro', 'Py_BUILD_ASSERT_EXPR'), # Do not error nit-picky mode builds when _SubParsersAction.add_parser cannot # be resolved, as the method is currently undocumented. For context, see # https://github.com/python/cpython/pull/103289. diff --git a/Doc/howto/logging.rst b/Doc/howto/logging.rst index 316b16aa796af4..cbfe93319ddaa4 100644 --- a/Doc/howto/logging.rst +++ b/Doc/howto/logging.rst @@ -385,6 +385,46 @@ following diagram. .. raw:: html :file: logging_flow.svg +.. raw:: html + + + Loggers ^^^^^^^ diff --git a/Doc/howto/logging_flow.png b/Doc/howto/logging_flow.png index c2d0befe27326c..d60ed7c031585a 100644 Binary files a/Doc/howto/logging_flow.png and b/Doc/howto/logging_flow.png differ diff --git a/Doc/howto/logging_flow.svg b/Doc/howto/logging_flow.svg index a5f656b1df0b42..4974994ac6b400 100644 --- a/Doc/howto/logging_flow.svg +++ b/Doc/howto/logging_flow.svg @@ -1,9 +1,9 @@ @@ -57,7 +79,7 @@ Create - LogRecord + LogRecord @@ -100,7 +122,7 @@ - Pass to + Pass record to handlers of current logger @@ -135,16 +157,17 @@ - - Use lastResort - handler + + Use + lastResort + handler Handler enabled for - level of LogRecord? + level of record? @@ -292,7 +315,7 @@ Yes - LogRecord passed + Record passed to handler diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst index 6232e173d9537d..647ff9da04d10d 100644 --- a/Doc/library/__main__.rst +++ b/Doc/library/__main__.rst @@ -251,9 +251,9 @@ attribute will include the package's path if imported:: >>> asyncio.__main__.__name__ 'asyncio.__main__' -This won't work for ``__main__.py`` files in the root directory of a .zip file -though. Hence, for consistency, minimal ``__main__.py`` like the :mod:`venv` -one mentioned below are preferred. +This won't work for ``__main__.py`` files in the root directory of a +``.zip`` file though. Hence, for consistency, a minimal ``__main__.py`` +without a ``__name__`` check is preferred. .. seealso:: diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 1d79f78e8e1b67..70bdd154d6c406 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -1262,6 +1262,9 @@ Executing code in thread or process pools The *executor* argument should be an :class:`concurrent.futures.Executor` instance. The default executor is used if *executor* is ``None``. + The default executor can be set by :meth:`loop.set_default_executor`, + otherwise, a :class:`concurrent.futures.ThreadPoolExecutor` will be + lazy-initialized and used by :func:`run_in_executor` if needed. 
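   The added note above can be exercised with a short, self-contained sketch; the ``blocking_io`` helper below is hypothetical and only stands in for any blocking call::

      import asyncio
      import concurrent.futures
      import time

      def blocking_io():
          time.sleep(0.1)   # placeholder for real blocking work
          return "done"

      async def main():
          loop = asyncio.get_running_loop()
          # Optional: install an explicit default executor; without this call,
          # a ThreadPoolExecutor is lazily created on first use.
          loop.set_default_executor(
              concurrent.futures.ThreadPoolExecutor(max_workers=2))
          # Passing None as *executor* selects the default executor.
          result = await loop.run_in_executor(None, blocking_io)
          print(result)

      asyncio.run(main())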
Example:: diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index a56e0eef5d11b1..e3d74d7dc0d91c 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -272,8 +272,12 @@ complex types are available: +----------------------------------+---------------------------------+-----------------+ | ctypes type | C type | Python type | +==================================+=================================+=================+ +| :class:`c_float_complex` | :c:expr:`float complex` | complex | ++----------------------------------+---------------------------------+-----------------+ | :class:`c_double_complex` | :c:expr:`double complex` | complex | +----------------------------------+---------------------------------+-----------------+ +| :class:`c_longdouble_complex` | :c:expr:`long double complex` | complex | ++----------------------------------+---------------------------------+-----------------+ All these types can be created by calling them with an optional initializer of @@ -2302,6 +2306,22 @@ These are the fundamental ctypes data types: .. versionadded:: 3.14 +.. class:: c_float_complex + + Represents the C :c:expr:`float complex` datatype, if available. The + constructor accepts an optional :class:`complex` initializer. + + .. versionadded:: 3.14 + + +.. class:: c_longdouble_complex + + Represents the C :c:expr:`long double complex` datatype, if available. The + constructor accepts an optional :class:`complex` initializer. + + .. versionadded:: 3.14 + + .. class:: c_int Represents the C :c:expr:`signed int` datatype. The constructor accepts an diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 1d82f92ea67857..17348dd907bf67 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -1934,6 +1934,10 @@ are always available. They are listed here in alphabetical order. .. versionchanged:: 3.12 Summation of floats switched to an algorithm that gives higher accuracy and better commutativity on most builds. + .. versionchanged:: 3.14 + Added specialization for summation of complexes, + using same algorithm as for summation of floats. + .. class:: super() super(type, object_or_type=None) diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index b582321515db56..52487b4737ae2f 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -389,7 +389,7 @@ the :mod:`glob` module.) that contains symbolic links. On Windows, it converts forward slashes to backward slashes. To normalize case, use :func:`normcase`. - .. note:: + .. note:: On POSIX systems, in accordance with `IEEE Std 1003.1 2013 Edition; 4.13 Pathname Resolution `_, if a pathname begins with exactly two slashes, the first component diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 8d95d01fe55ed9..2878d425310d75 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -1724,10 +1724,27 @@ or `the MSDN `_ on Windo Added support for pipes on Windows. -.. function:: splice(src, dst, count, offset_src=None, offset_dst=None) +.. function:: splice(src, dst, count, offset_src=None, offset_dst=None, flags=0) Transfer *count* bytes from file descriptor *src*, starting from offset *offset_src*, to file descriptor *dst*, starting from offset *offset_dst*. + + The splicing behaviour can be modified by specifying a *flags* value. 
+ Any of the following variables may be used, combined using bitwise OR + (the ``|`` operator): + + * If :const:`SPLICE_F_MOVE` is specified, + the kernel is asked to move pages instead of copying, + but pages may still be copied if the kernel cannot move the pages from the pipe. + + * If :const:`SPLICE_F_NONBLOCK` is specified, + the kernel is asked to not block on I/O. + This makes the splice pipe operations nonblocking, + but splice may nevertheless block because the spliced file descriptors may block. + + * If :const:`SPLICE_F_MORE` is specified, + it hints to the kernel that more data will be coming in a subsequent splice. + At least one of the file descriptors must refer to a pipe. If *offset_src* is ``None``, then *src* is read from the current position; respectively for *offset_dst*. The offset associated to the file descriptor that refers to a @@ -1746,6 +1763,8 @@ or `the MSDN `_ on Windo make sense to block because there are no writers connected to the write end of the pipe. + .. seealso:: The :manpage:`splice(2)` man page. + .. availability:: Linux >= 2.6.17 with glibc >= 2.5 .. versionadded:: 3.10 diff --git a/Doc/library/pathlib.rst index 0918bbb47e9ea6..f139abd2454d69 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -1539,7 +1539,7 @@ Creating files and directories Copying, renaming and deleting ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. method:: Path.copy(target, *, follow_symlinks=True) +.. method:: Path.copy(target, *, follow_symlinks=True, preserve_metadata=False) Copy the contents of this file to the *target* file. If *target* specifies a file that already exists, it will be replaced. @@ -1548,16 +1548,11 @@ Copying, renaming and deleting will be created as a symbolic link. If *follow_symlinks* is true and this file is a symbolic link, *target* will be a copy of the symlink target. - .. note:: - This method uses operating system functionality to copy file content - efficiently. The OS might also copy some metadata, such as file - permissions. After the copy is complete, users may wish to call - :meth:`Path.chmod` to set the permissions of the target file. - - .. warning:: - On old builds of Windows (before Windows 10 build 19041), this method - raises :exc:`OSError` when a symlink to a directory is encountered and - *follow_symlinks* is false. + If *preserve_metadata* is false (the default), only the file data is + guaranteed to be copied. Set *preserve_metadata* to true to ensure that the + file mode (permissions), flags, last access and modification times, and + extended attributes are copied where supported. This argument has no effect + on Windows, where metadata is always preserved when copying. .. versionadded:: 3.14 diff --git a/Doc/library/profile.rst index 9721da7220d54d..d7940b3040bbdb 100644 --- a/Doc/library/profile.rst +++ b/Doc/library/profile.rst @@ -699,7 +699,7 @@ you are using :class:`profile.Profile` or :class:`cProfile.Profile`, As the :class:`cProfile.Profile` class cannot be calibrated, custom timer functions should be used with care and should be as fast as possible. For the best results with a custom timer, it might be necessary to hard-code it - in the C source of the internal :mod:`_lsprof` module. + in the C source of the internal :mod:`!_lsprof` module. Python 3.3 adds several new functions in :mod:`time` that can be used to make precise measurements of process or wall-clock time.
For example, see diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst index f1f87ea975ca42..69f06e6cf4d923 100644 --- a/Doc/library/socketserver.rst +++ b/Doc/library/socketserver.rst @@ -126,6 +126,12 @@ server is the address family. waits until all non-daemon threads complete, except if :attr:`block_on_close` attribute is ``False``. + .. attribute:: max_children + + Specify how many child processes will exist to handle requests at a time + for :class:`ForkingMixIn`. If the limit is reached, + new requests will wait until one child process has finished. + .. attribute:: daemon_threads For :class:`ThreadingMixIn` use daemonic threads by setting diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 54cc7d1333d34e..d3f7cfb01d3c21 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2095,8 +2095,9 @@ expression support in the :mod:`re` module). If *sep* is given, consecutive delimiters are not grouped together and are deemed to delimit empty strings (for example, ``'1,,2'.split(',')`` returns ``['1', '', '2']``). The *sep* argument may consist of multiple characters - (for example, ``'1<>2<>3'.split('<>')`` returns ``['1', '2', '3']``). - Splitting an empty string with a specified separator returns ``['']``. + as a single delimiter (to split with multiple delimiters, use + :func:`re.split`). Splitting an empty string with a specified separator + returns ``['']``. For example:: @@ -2106,6 +2107,8 @@ expression support in the :mod:`re` module). ['1', '2,3'] >>> '1,2,,3,'.split(',') ['1', '2', '', '3', ''] + >>> '1<>2<>3<4'.split('<>') + ['1', '2', '3<4'] If *sep* is not specified or is ``None``, a different splitting algorithm is applied: runs of consecutive whitespace are regarded as a single separator, @@ -3149,10 +3152,9 @@ produce new objects. If *sep* is given, consecutive delimiters are not grouped together and are deemed to delimit empty subsequences (for example, ``b'1,,2'.split(b',')`` returns ``[b'1', b'', b'2']``). The *sep* argument may consist of a - multibyte sequence (for example, ``b'1<>2<>3'.split(b'<>')`` returns - ``[b'1', b'2', b'3']``). Splitting an empty sequence with a specified - separator returns ``[b'']`` or ``[bytearray(b'')]`` depending on the type - of object being split. The *sep* argument may be any + multibyte sequence as a single delimiter. Splitting an empty sequence with + a specified separator returns ``[b'']`` or ``[bytearray(b'')]`` depending + on the type of object being split. The *sep* argument may be any :term:`bytes-like object`. For example:: @@ -3163,6 +3165,8 @@ produce new objects. [b'1', b'2,3'] >>> b'1,2,,3,'.split(b',') [b'1', b'2', b'', b'3', b''] + >>> b'1<>2<>3<4'.split(b'<>') + [b'1', b'2', b'3<4'] If *sep* is not specified or is ``None``, a different splitting algorithm is applied: runs of consecutive ASCII whitespace are regarded as a single diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index 4f6c0c63ae42be..618664b23f0680 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -293,7 +293,7 @@ statements, cannot be an unpacking) and the expression list, performs the binary operation specific to the type of assignment on the two operands, and assigns the result to the original target. The target is only evaluated once. 
-An augmented assignment expression like ``x += 1`` can be rewritten as ``x = x + +An augmented assignment statement like ``x += 1`` can be rewritten as ``x = x + 1`` to achieve a similar, but not exactly equal effect. In the augmented version, ``x`` is only evaluated once. Also, when possible, the actual operation is performed *in-place*, meaning that rather than creating a new object and diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt index 4e49ba1a8ededd..068fe0cb426ecd 100644 --- a/Doc/requirements-oldest-sphinx.txt +++ b/Doc/requirements-oldest-sphinx.txt @@ -14,7 +14,7 @@ python-docs-theme>=2022.1 alabaster==0.7.16 Babel==2.15.0 -certifi==2024.6.2 +certifi==2024.7.4 charset-normalizer==3.3.2 docutils==0.19 idna==3.7 diff --git a/Doc/tools/check-warnings.py b/Doc/tools/check-warnings.py index c50b00636c36ce..67623b83d3a67d 100644 --- a/Doc/tools/check-warnings.py +++ b/Doc/tools/check-warnings.py @@ -14,7 +14,7 @@ from typing import TextIO # Fail if NEWS nit found before this line number -NEWS_NIT_THRESHOLD = 200 +NEWS_NIT_THRESHOLD = 1700 # Exclude these whether they're dirty or clean, # because they trigger a rebuild of dirty files. diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index 2a1f06e2d286ff..2c73c224e4e8a1 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -427,7 +427,7 @@ Options for third-party dependencies .. option:: PANEL_CFLAGS .. option:: PANEL_LIBS - C compiler and Linker flags for PANEL, overriding ``pkg-config``. + C compiler and linker flags for PANEL, overriding ``pkg-config``. C compiler and linker flags for ``libpanel`` or ``libpanelw``, used by :mod:`curses.panel` module, overriding ``pkg-config``. @@ -615,7 +615,7 @@ also be used to improve performance. .. option:: --without-mimalloc - Disable the fast mimalloc allocator :ref:`mimalloc ` + Disable the fast :ref:`mimalloc ` allocator (enabled by default). See also :envvar:`PYTHONMALLOC` environment variable. diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst index 6ebadd75092fac..da9b45cd8e58b3 100644 --- a/Doc/whatsnew/3.14.rst +++ b/Doc/whatsnew/3.14.rst @@ -89,8 +89,12 @@ Improved Modules ast --- -Added :func:`ast.compare` for comparing two ASTs. -(Contributed by Batuhan Taskaya and Jeremy Hylton in :issue:`15987`.) +* Added :func:`ast.compare` for comparing two ASTs. + (Contributed by Batuhan Taskaya and Jeremy Hylton in :issue:`15987`.) + +* Add support for :func:`copy.replace` for AST nodes. + + (Contributed by Bénédikt Tran in :gh:`121141`.) os -- @@ -110,6 +114,16 @@ pathlib another. (Contributed by Barney Gale in :gh:`73991`.) +pdb +--- + +* Hard-coded breakpoints (:func:`breakpoint` and :func:`pdb.set_trace()`) now + reuse the most recent :class:`~pdb.Pdb` instance that calls + :meth:`~pdb.Pdb.set_trace()`, instead of creating a new one each time. + As a result, all the instance specific data like :pdbcmd:`display` and + :pdbcmd:`commands` are preserved across hard-coded breakpoints. + (Contributed by Tian Gao in :gh:`121450`.) + symtable -------- @@ -305,6 +319,12 @@ Porting to Python 3.14 This section lists previously described changes and other bugfixes that may require changes to your code. +Changes in the Python API +------------------------- + +* :class:`functools.partial` is now a method descriptor. + Wrap it in :func:`staticmethod` if you want to preserve the old behavior. + (Contributed by Serhiy Storchaka and Dominykas Grigonis in :gh:`121027`.) 
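  As a rough illustration of this porting note, consider the hypothetical ``log`` function and ``Greeter`` class below; only the ``staticmethod`` wrapping keeps the pre-3.14 behaviour::

      import functools

      def log(prefix, message):
          return f"{prefix}: {message}"

      class Greeter:
          # On Python 3.13 and earlier, Greeter().info returned the partial
          # unchanged.  On 3.14 the partial binds the instance like a plain
          # function, so Greeter().info() would receive the instance as
          # 'message' instead of raising the usual missing-argument error.
          info = functools.partial(log, "INFO")

          # Wrapping in staticmethod() preserves the old behaviour.
          info_compat = staticmethod(functools.partial(log, "INFO"))

      print(Greeter().info_compat("hello"))   # INFO: hello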
Build Changes ============= diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 8aef0f5ac26728..938dd273e7e102 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -1495,7 +1495,7 @@ The dictionary returned by :meth:`.SSLSocket.getpeercert` contains additional stat ---- -The :mod:`stat` module is now backed by a C implementation in :mod:`_stat`. A C +The :mod:`stat` module is now backed by a C implementation in :mod:`!_stat`. A C implementation is required as most of the values aren't standardized and are platform-dependent. (Contributed by Christian Heimes in :issue:`11016`.) diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst index cd8a903327cc2f..75654f3e78eb16 100644 --- a/Doc/whatsnew/3.5.rst +++ b/Doc/whatsnew/3.5.rst @@ -1935,8 +1935,8 @@ specifying the namespace in which the code will be running. tkinter ------- -The :mod:`tkinter._fix` module used for setting up the Tcl/Tk environment -on Windows has been replaced by a private function in the :mod:`_tkinter` +The :mod:`!tkinter._fix` module used for setting up the Tcl/Tk environment +on Windows has been replaced by a private function in the :mod:`!_tkinter` module which makes no permanent changes to environment variables. (Contributed by Zachary Ware in :issue:`20035`.) diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst index 69d043bcf7efd5..ae750cb9bba696 100644 --- a/Doc/whatsnew/3.7.rst +++ b/Doc/whatsnew/3.7.rst @@ -2048,7 +2048,7 @@ The :mod:`macpath` is now deprecated and will be removed in Python 3.8. threading --------- -:mod:`dummy_threading` and :mod:`_dummy_thread` have been deprecated. It is +:mod:`!dummy_threading` and :mod:`!_dummy_thread` have been deprecated. It is no longer possible to build Python with threading disabled. Use :mod:`threading` instead. (Contributed by Antoine Pitrou in :issue:`31370`.) @@ -2184,7 +2184,7 @@ The following features and APIs have been removed from Python 3.7: ``socket.socketpair`` on Python 3.5 and newer. * :mod:`asyncio` no longer exports the :mod:`selectors` and - :mod:`_overlapped` modules as ``asyncio.selectors`` and + :mod:`!_overlapped` modules as ``asyncio.selectors`` and ``asyncio._overlapped``. Replace ``from asyncio import selectors`` with ``import selectors``. diff --git a/Include/cpython/modsupport.h b/Include/cpython/modsupport.h new file mode 100644 index 00000000000000..d3b88f58c82ca3 --- /dev/null +++ b/Include/cpython/modsupport.h @@ -0,0 +1,26 @@ +#ifndef Py_CPYTHON_MODSUPPORT_H +# error "this header file must not be included directly" +#endif + +// A data structure that can be used to run initialization code once in a +// thread-safe manner. The C++11 equivalent is std::call_once. +typedef struct { + uint8_t v; +} _PyOnceFlag; + +typedef struct _PyArg_Parser { + const char *format; + const char * const *keywords; + const char *fname; + const char *custom_msg; + _PyOnceFlag once; /* atomic one-time initialization flag */ + int is_kwtuple_owned; /* does this parser own the kwtuple object? 
*/ + int pos; /* number of positional-only arguments */ + int min; /* minimal number of arguments */ + int max; /* maximal number of positional arguments */ + PyObject *kwtuple; /* tuple of keyword parameter names */ + struct _PyArg_Parser *next; +} _PyArg_Parser; + +PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywordsFast(PyObject *, PyObject *, + struct _PyArg_Parser *, ...); diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 55a139bb9158db..4ecef4f56edf42 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -510,6 +510,9 @@ _Py_atomic_load_ssize_acquire(const Py_ssize_t *obj); // See https://en.cppreference.com/w/cpp/atomic/atomic_thread_fence static inline void _Py_atomic_fence_seq_cst(void); +// Acquire fence +static inline void _Py_atomic_fence_acquire(void); + // Release fence static inline void _Py_atomic_fence_release(void); diff --git a/Include/cpython/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h index f2ebdeeb5524e4..ef09954d53ac1d 100644 --- a/Include/cpython/pyatomic_gcc.h +++ b/Include/cpython/pyatomic_gcc.h @@ -542,6 +542,10 @@ static inline void _Py_atomic_fence_seq_cst(void) { __atomic_thread_fence(__ATOMIC_SEQ_CST); } + static inline void +_Py_atomic_fence_acquire(void) +{ __atomic_thread_fence(__ATOMIC_ACQUIRE); } + static inline void _Py_atomic_fence_release(void) { __atomic_thread_fence(__ATOMIC_RELEASE); } diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index f32995c1f578ac..84da21bdcbff4f 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -1066,6 +1066,18 @@ _Py_atomic_fence_seq_cst(void) #else # error "no implementation of _Py_atomic_fence_seq_cst" #endif +} + + static inline void +_Py_atomic_fence_acquire(void) +{ +#if defined(_M_ARM64) + __dmb(_ARM64_BARRIER_ISHLD); +#elif defined(_M_X64) || defined(_M_IX86) + _ReadBarrier(); +#else +# error "no implementation of _Py_atomic_fence_acquire" +#endif } static inline void diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index 0cdce4e6dd39f0..7c71e94c68f8e6 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -961,6 +961,13 @@ _Py_atomic_fence_seq_cst(void) atomic_thread_fence(memory_order_seq_cst); } + static inline void +_Py_atomic_fence_acquire(void) +{ + _Py_USING_STD; + atomic_thread_fence(memory_order_acquire); +} + static inline void _Py_atomic_fence_release(void) { diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index 42b4b03b10ca20..b36b4681f5dddb 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -100,7 +100,7 @@ PyAPI_FUNC(PyObject*) PyUnstable_Exc_PrepReraiseStar( /* In signalmodule.c */ -int PySignal_SetWakeupFd(int fd); +PyAPI_FUNC(int) PySignal_SetWakeupFd(int fd); /* Support for adding program text to SyntaxErrors */ diff --git a/Include/internal/mimalloc/mimalloc/atomic.h b/Include/internal/mimalloc/mimalloc/atomic.h index 52f82487685cdb..cdd9c372beafd5 100644 --- a/Include/internal/mimalloc/mimalloc/atomic.h +++ b/Include/internal/mimalloc/mimalloc/atomic.h @@ -23,7 +23,9 @@ terms of the MIT license. 
A copy of the license can be found in the file #define _Atomic(tp) std::atomic #define mi_atomic(name) std::atomic_##name #define mi_memory_order(name) std::memory_order_##name -#if !defined(ATOMIC_VAR_INIT) || (__cplusplus >= 202002L) // c++20, see issue #571 +#if (__cplusplus >= 202002L) // c++20, see issue #571 + #define MI_ATOMIC_VAR_INIT(x) x +#elif !defined(ATOMIC_VAR_INIT) #define MI_ATOMIC_VAR_INIT(x) x #else #define MI_ATOMIC_VAR_INIT(x) ATOMIC_VAR_INIT(x) @@ -39,7 +41,9 @@ terms of the MIT license. A copy of the license can be found in the file #include #define mi_atomic(name) atomic_##name #define mi_memory_order(name) memory_order_##name -#if !defined(ATOMIC_VAR_INIT) || (__STDC_VERSION__ >= 201710L) // c17, see issue #735 +#if (__STDC_VERSION__ >= 201710L) // c17, see issue #735 + #define MI_ATOMIC_VAR_INIT(x) x +#elif !defined(ATOMIC_VAR_INIT) #define MI_ATOMIC_VAR_INIT(x) x #else #define MI_ATOMIC_VAR_INIT(x) ATOMIC_VAR_INIT(x) diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index a1ac034e3e44af..325243e6a64e1f 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -76,15 +76,6 @@ int _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj); // Export for '_opcode' extension module -PyAPI_FUNC(int) _PyCompile_OpcodeIsValid(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasArg(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasConst(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasName(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasJump(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasFree(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasLocal(int opcode); -PyAPI_FUNC(int) _PyCompile_OpcodeHasExc(int opcode); - PyAPI_FUNC(PyObject*) _PyCompile_GetUnaryIntrinsicName(int index); PyAPI_FUNC(PyObject*) _PyCompile_GetBinaryIntrinsicName(int index); diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index 9e0e1237915e82..56cc49432cc61e 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -322,6 +322,8 @@ _PyInlineValuesSize(PyTypeObject *tp) int _PyDict_DetachFromObject(PyDictObject *dict, PyObject *obj); +PyDictObject *_PyObject_MaterializeManagedDict_LockHeld(PyObject *); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 1e0368faa5b510..506c20ca1950bd 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -13,7 +13,7 @@ extern "C" { #include "pycore_code.h" // STATS #include "pycore_stackref.h" // _PyStackRef -/* See Objects/frame_layout.md for an explanation of the frame stack +/* See InternalDocs/frames.md for an explanation of the frame stack * including explanation of the PyFrameObject and _PyInterpreterFrame * structs. */ diff --git a/Include/internal/pycore_lock.h b/Include/internal/pycore_lock.h index 8aa73946e2c645..e6da083b807ce5 100644 --- a/Include/internal/pycore_lock.h +++ b/Include/internal/pycore_lock.h @@ -128,12 +128,6 @@ _PyRawMutex_Unlock(_PyRawMutex *m) _PyRawMutex_UnlockSlow(m); } -// A data structure that can be used to run initialization code once in a -// thread-safe manner. The C++11 equivalent is std::call_once. -typedef struct { - uint8_t v; -} _PyOnceFlag; - // Type signature for one-time initialization functions. The function should // return 0 on success and -1 on failure. 
typedef int _Py_once_fn_t(void *arg); @@ -234,12 +228,12 @@ PyAPI_FUNC(void) _PySeqLock_AbandonWrite(_PySeqLock *seqlock); PyAPI_FUNC(uint32_t) _PySeqLock_BeginRead(_PySeqLock *seqlock); // End the read operation and confirm that the sequence number has not changed. -// Returns 1 if the read was successful or 0 if the read should be re-tried. -PyAPI_FUNC(uint32_t) _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous); +// Returns 1 if the read was successful or 0 if the read should be retried. +PyAPI_FUNC(int) _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous); // Check if the lock was held during a fork and clear the lock. Returns 1 -// if the lock was held and any associated datat should be cleared. -PyAPI_FUNC(uint32_t) _PySeqLock_AfterFork(_PySeqLock *seqlock); +// if the lock was held and any associated data should be cleared. +PyAPI_FUNC(int) _PySeqLock_AfterFork(_PySeqLock *seqlock); #ifdef __cplusplus } diff --git a/Include/internal/pycore_modsupport.h b/Include/internal/pycore_modsupport.h index 3d3cd6722528e9..11fde814875938 100644 --- a/Include/internal/pycore_modsupport.h +++ b/Include/internal/pycore_modsupport.h @@ -67,24 +67,6 @@ PyAPI_FUNC(void) _PyArg_BadArgument( // --- _PyArg_Parser API --------------------------------------------------- -typedef struct _PyArg_Parser { - const char *format; - const char * const *keywords; - const char *fname; - const char *custom_msg; - _PyOnceFlag once; /* atomic one-time initialization flag */ - int is_kwtuple_owned; /* does this parser own the kwtuple object? */ - int pos; /* number of positional-only arguments */ - int min; /* minimal number of arguments */ - int max; /* maximal number of positional arguments */ - PyObject *kwtuple; /* tuple of keyword parameter names */ - struct _PyArg_Parser *next; -} _PyArg_Parser; - -// Export for '_testclinic' shared extension -PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywordsFast(PyObject *, PyObject *, - struct _PyArg_Parser *, ...); - // Export for '_dbm' shared extension PyAPI_FUNC(int) _PyArg_ParseStackAndKeywords( PyObject *const *args, diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index f58eccf729cb2a..d4ffd977940a02 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -55,12 +55,15 @@ typedef struct _Py_DebugOffsets { uint64_t version; // Runtime state offset; struct _runtime_state { + uint64_t size; uint64_t finalizing; uint64_t interpreters_head; } runtime_state; // Interpreter state offset; struct _interpreter_state { + uint64_t size; + uint64_t id; uint64_t next; uint64_t threads_head; uint64_t gc; @@ -74,16 +77,20 @@ typedef struct _Py_DebugOffsets { // Thread state offset; struct _thread_state{ + uint64_t size; uint64_t prev; uint64_t next; uint64_t interp; uint64_t current_frame; uint64_t thread_id; uint64_t native_thread_id; + uint64_t datastack_chunk; + uint64_t status; } thread_state; // InterpreterFrame offset; struct _interpreter_frame { + uint64_t size; uint64_t previous; uint64_t executable; uint64_t instr_ptr; @@ -91,16 +98,12 @@ typedef struct _Py_DebugOffsets { uint64_t owner; } interpreter_frame; - // CFrame offset; - struct _cframe { - uint64_t current_frame; - uint64_t previous; - } cframe; - // Code object offset; struct _code_object { + uint64_t size; uint64_t filename; uint64_t name; + uint64_t qualname; uint64_t linetable; uint64_t firstlineno; uint64_t argcount; @@ -111,25 +114,35 @@ typedef struct _Py_DebugOffsets { // PyObject offset; struct _pyobject { + uint64_t size; uint64_t 
ob_type; } pyobject; // PyTypeObject object offset; struct _type_object { + uint64_t size; uint64_t tp_name; } type_object; // PyTuple object offset; struct _tuple_object { + uint64_t size; uint64_t ob_item; } tuple_object; // Unicode object offset; struct _unicode_object { + uint64_t size; uint64_t state; uint64_t length; size_t asciiobject_size; } unicode_object; + + // GC runtime state offset; + struct _gc { + uint64_t size; + uint64_t collecting; + } gc; } _Py_DebugOffsets; /* Reference tracer state */ diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index 98920dbb7c7a92..da2b8d5570de62 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -35,10 +35,13 @@ extern PyTypeObject _PyExc_MemoryError; .cookie = "xdebugpy", \ .version = PY_VERSION_HEX, \ .runtime_state = { \ + .size = sizeof(_PyRuntimeState), \ .finalizing = offsetof(_PyRuntimeState, _finalizing), \ .interpreters_head = offsetof(_PyRuntimeState, interpreters.head), \ }, \ .interpreter_state = { \ + .size = sizeof(PyInterpreterState), \ + .id = offsetof(PyInterpreterState, id), \ .next = offsetof(PyInterpreterState, next), \ .threads_head = offsetof(PyInterpreterState, threads.head), \ .gc = offsetof(PyInterpreterState, gc), \ @@ -50,14 +53,18 @@ extern PyTypeObject _PyExc_MemoryError; .gil_runtime_state_holder = offsetof(PyInterpreterState, _gil.last_holder), \ }, \ .thread_state = { \ + .size = sizeof(PyThreadState), \ .prev = offsetof(PyThreadState, prev), \ .next = offsetof(PyThreadState, next), \ .interp = offsetof(PyThreadState, interp), \ .current_frame = offsetof(PyThreadState, current_frame), \ .thread_id = offsetof(PyThreadState, thread_id), \ .native_thread_id = offsetof(PyThreadState, native_thread_id), \ + .datastack_chunk = offsetof(PyThreadState, datastack_chunk), \ + .status = offsetof(PyThreadState, _status), \ }, \ .interpreter_frame = { \ + .size = sizeof(_PyInterpreterFrame), \ .previous = offsetof(_PyInterpreterFrame, previous), \ .executable = offsetof(_PyInterpreterFrame, f_executable), \ .instr_ptr = offsetof(_PyInterpreterFrame, instr_ptr), \ @@ -65,8 +72,10 @@ extern PyTypeObject _PyExc_MemoryError; .owner = offsetof(_PyInterpreterFrame, owner), \ }, \ .code_object = { \ + .size = sizeof(PyCodeObject), \ .filename = offsetof(PyCodeObject, co_filename), \ .name = offsetof(PyCodeObject, co_name), \ + .qualname = offsetof(PyCodeObject, co_qualname), \ .linetable = offsetof(PyCodeObject, co_linetable), \ .firstlineno = offsetof(PyCodeObject, co_firstlineno), \ .argcount = offsetof(PyCodeObject, co_argcount), \ @@ -75,19 +84,27 @@ extern PyTypeObject _PyExc_MemoryError; .co_code_adaptive = offsetof(PyCodeObject, co_code_adaptive), \ }, \ .pyobject = { \ + .size = sizeof(PyObject), \ .ob_type = offsetof(PyObject, ob_type), \ }, \ .type_object = { \ + .size = sizeof(PyTypeObject), \ .tp_name = offsetof(PyTypeObject, tp_name), \ }, \ .tuple_object = { \ + .size = sizeof(PyTupleObject), \ .ob_item = offsetof(PyTupleObject, ob_item), \ }, \ .unicode_object = { \ + .size = sizeof(PyUnicodeObject), \ .state = offsetof(PyUnicodeObject, _base._base.state), \ .length = offsetof(PyUnicodeObject, _base._base.length), \ .asciiobject_size = sizeof(PyASCIIObject), \ }, \ + .gc = { \ + .size = sizeof(struct _gc_runtime_state), \ + .collecting = offsetof(struct _gc_runtime_state, collecting), \ + }, \ }, \ .allocators = { \ .standard = _pymem_allocators_standard_INIT(runtime), \ diff --git a/Include/internal/pycore_stackref.h 
b/Include/internal/pycore_stackref.h index 32e445dd96f9a1..8d3d559814bfd9 100644 --- a/Include/internal/pycore_stackref.h +++ b/Include/internal/pycore_stackref.h @@ -48,7 +48,7 @@ extern "C" { CPython refcounting operations on it! */ -typedef union { +typedef union _PyStackRef { uintptr_t bits; } _PyStackRef; @@ -85,81 +85,67 @@ typedef union { # define PyStackRef_None ((_PyStackRef){.bits = ((uintptr_t)&_Py_NoneStruct) }) #endif +// Note: the following are all macros because MSVC (Windows) has trouble inlining them. -static inline int -PyStackRef_Is(_PyStackRef a, _PyStackRef b) { - return a.bits == b.bits; -} +#define PyStackRef_Is(a, b) ((a).bits == (b).bits) + +#define PyStackRef_IsDeferred(ref) (((ref).bits & Py_TAG_BITS) == Py_TAG_DEFERRED) -static inline int -PyStackRef_IsDeferred(_PyStackRef ref) -{ - return ((ref.bits & Py_TAG_BITS) == Py_TAG_DEFERRED); -} +#ifdef Py_GIL_DISABLED // Gets a PyObject * from a _PyStackRef static inline PyObject * PyStackRef_AsPyObjectBorrow(_PyStackRef stackref) { -#ifdef Py_GIL_DISABLED PyObject *cleared = ((PyObject *)((stackref).bits & (~Py_TAG_BITS))); return cleared; +} #else - return ((PyObject *)(stackref).bits); +# define PyStackRef_AsPyObjectBorrow(stackref) ((PyObject *)(stackref).bits) #endif -} // Converts a PyStackRef back to a PyObject *, stealing the // PyStackRef. +#ifdef Py_GIL_DISABLED static inline PyObject * PyStackRef_AsPyObjectSteal(_PyStackRef stackref) { -#ifdef Py_GIL_DISABLED if (!PyStackRef_IsNull(stackref) && PyStackRef_IsDeferred(stackref)) { return Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref)); } return PyStackRef_AsPyObjectBorrow(stackref); +} #else - return PyStackRef_AsPyObjectBorrow(stackref); +# define PyStackRef_AsPyObjectSteal(stackref) PyStackRef_AsPyObjectBorrow(stackref) #endif -} // Converts a PyStackRef back to a PyObject *, converting the // stackref to a new reference. -static inline PyObject * -PyStackRef_AsPyObjectNew(_PyStackRef stackref) -{ - return Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref)); -} +#define PyStackRef_AsPyObjectNew(stackref) Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref)) -static inline PyTypeObject * -PyStackRef_TYPE(_PyStackRef stackref) -{ - return Py_TYPE(PyStackRef_AsPyObjectBorrow(stackref)); -} +#define PyStackRef_TYPE(stackref) Py_TYPE(PyStackRef_AsPyObjectBorrow(stackref)) // Converts a PyObject * to a PyStackRef, stealing the reference +#ifdef Py_GIL_DISABLED static inline _PyStackRef _PyStackRef_FromPyObjectSteal(PyObject *obj) { -#ifdef Py_GIL_DISABLED // Make sure we don't take an already tagged value. assert(((uintptr_t)obj & Py_TAG_BITS) == 0); int tag = (obj == NULL || _Py_IsImmortal(obj)) ? (Py_TAG_DEFERRED) : Py_TAG_PTR; return ((_PyStackRef){.bits = ((uintptr_t)(obj)) | tag}); +} +# define PyStackRef_FromPyObjectSteal(obj) _PyStackRef_FromPyObjectSteal(_PyObject_CAST(obj)) #else - return ((_PyStackRef){.bits = ((uintptr_t)(obj))}); +# define PyStackRef_FromPyObjectSteal(obj) ((_PyStackRef){.bits = ((uintptr_t)(obj))}) #endif -} - -#define PyStackRef_FromPyObjectSteal(obj) _PyStackRef_FromPyObjectSteal(_PyObject_CAST(obj)) // Converts a PyObject * to a PyStackRef, with a new reference +#ifdef Py_GIL_DISABLED static inline _PyStackRef PyStackRef_FromPyObjectNew(PyObject *obj) { -#ifdef Py_GIL_DISABLED // Make sure we don't take an already tagged value. 
assert(((uintptr_t)obj & Py_TAG_BITS) == 0); assert(obj != NULL); @@ -170,30 +156,27 @@ PyStackRef_FromPyObjectNew(PyObject *obj) else { return (_PyStackRef){ .bits = (uintptr_t)(Py_NewRef(obj)) | Py_TAG_PTR }; } +} +# define PyStackRef_FromPyObjectNew(obj) PyStackRef_FromPyObjectNew(_PyObject_CAST(obj)) #else - return ((_PyStackRef){ .bits = (uintptr_t)(Py_NewRef(obj)) }); +# define PyStackRef_FromPyObjectNew(obj) ((_PyStackRef){ .bits = (uintptr_t)(Py_NewRef(obj)) }) #endif -} - -#define PyStackRef_FromPyObjectNew(obj) PyStackRef_FromPyObjectNew(_PyObject_CAST(obj)) +#ifdef Py_GIL_DISABLED // Same as PyStackRef_FromPyObjectNew but only for immortal objects. static inline _PyStackRef PyStackRef_FromPyObjectImmortal(PyObject *obj) { -#ifdef Py_GIL_DISABLED // Make sure we don't take an already tagged value. assert(((uintptr_t)obj & Py_TAG_BITS) == 0); assert(obj != NULL); assert(_Py_IsImmortal(obj)); return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_DEFERRED }; +} +# define PyStackRef_FromPyObjectImmortal(obj) PyStackRef_FromPyObjectImmortal(_PyObject_CAST(obj)) #else - assert(_Py_IsImmortal(obj)); - return ((_PyStackRef){ .bits = (uintptr_t)(obj) }); +# define PyStackRef_FromPyObjectImmortal(obj) ((_PyStackRef){ .bits = (uintptr_t)(obj) }) #endif -} - -#define PyStackRef_FromPyObjectImmortal(obj) PyStackRef_FromPyObjectImmortal(_PyObject_CAST(obj)) #define PyStackRef_CLEAR(op) \ @@ -206,20 +189,20 @@ PyStackRef_FromPyObjectImmortal(PyObject *obj) } \ } while (0) +#ifdef Py_GIL_DISABLED static inline void PyStackRef_CLOSE(_PyStackRef stackref) { -#ifdef Py_GIL_DISABLED if (PyStackRef_IsDeferred(stackref)) { // No assert for being immortal or deferred here. // The GC unsets deferred objects right before clearing. return; } Py_DECREF(PyStackRef_AsPyObjectBorrow(stackref)); +} #else - Py_DECREF(PyStackRef_AsPyObjectBorrow(stackref)); +# define PyStackRef_CLOSE(stackref) Py_DECREF(PyStackRef_AsPyObjectBorrow(stackref)); #endif -} #define PyStackRef_XCLOSE(stackref) \ do { \ @@ -230,10 +213,10 @@ PyStackRef_CLOSE(_PyStackRef stackref) } while (0); +#ifdef Py_GIL_DISABLED static inline _PyStackRef PyStackRef_DUP(_PyStackRef stackref) { -#ifdef Py_GIL_DISABLED if (PyStackRef_IsDeferred(stackref)) { assert(PyStackRef_IsNull(stackref) || _Py_IsImmortal(PyStackRef_AsPyObjectBorrow(stackref))); @@ -241,21 +224,10 @@ PyStackRef_DUP(_PyStackRef stackref) } Py_INCREF(PyStackRef_AsPyObjectBorrow(stackref)); return stackref; +} #else - Py_INCREF(PyStackRef_AsPyObjectBorrow(stackref)); - return stackref; +# define PyStackRef_DUP(stackref) PyStackRef_FromPyObjectSteal(Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref))); #endif -} - -static inline _PyStackRef -PyStackRef_XDUP(_PyStackRef stackref) -{ - if (!PyStackRef_IsNull(stackref)) { - return PyStackRef_DUP(stackref); - } - return stackref; -} - static inline void _PyObjectStack_FromStackRefStack(PyObject **dst, const _PyStackRef *src, size_t length) diff --git a/Include/internal/pycore_symtable.h b/Include/internal/pycore_symtable.h index 4cfdf92459c70a..d9ed16a3d2321f 100644 --- a/Include/internal/pycore_symtable.h +++ b/Include/internal/pycore_symtable.h @@ -127,12 +127,7 @@ typedef struct _symtable_entry { unsigned ste_can_see_class_scope : 1; /* true if this block can see names bound in an enclosing class scope */ int ste_comp_iter_expr; /* non-zero if visiting a comprehension range expression */ - int ste_lineno; /* first line of block */ - int ste_col_offset; /* offset of first line of block */ - int ste_end_lineno; /* end line of block */ - int 
ste_end_col_offset; /* end offset of first line of block */ - int ste_opt_lineno; /* lineno of last exec or import * */ - int ste_opt_col_offset; /* offset of last exec or import * */ + _Py_SourceLocation ste_loc; /* source location of block */ struct _symtable_entry *ste_annotation_block; /* symbol table entry for this entry's annotations */ struct symtable *ste_table; } PySTEntryObject; diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h index 14a9e42c3a324c..dfbbd6fd0c7de5 100644 --- a/Include/internal/pycore_tuple.h +++ b/Include/internal/pycore_tuple.h @@ -21,6 +21,7 @@ extern PyStatus _PyTuple_InitGlobalObjects(PyInterpreterState *); #define _PyTuple_ITEMS(op) _Py_RVALUE(_PyTuple_CAST(op)->ob_item) extern PyObject *_PyTuple_FromArray(PyObject *const *, Py_ssize_t); +PyAPI_FUNC(PyObject *)_PyTuple_FromStackRefSteal(const union _PyStackRef *, Py_ssize_t); PyAPI_FUNC(PyObject *)_PyTuple_FromArraySteal(PyObject *const *, Py_ssize_t); typedef struct { diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index 32bd19d968b917..df6bfef715dd34 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -33,6 +33,7 @@ struct _types_runtime_state { struct { struct { PyTypeObject *type; + PyTypeObject def; int64_t interp_count; } types[_Py_MAX_MANAGED_STATIC_TYPES]; } managed_static; diff --git a/Include/modsupport.h b/Include/modsupport.h index ea4c0fce9f4562..af995f567b004c 100644 --- a/Include/modsupport.h +++ b/Include/modsupport.h @@ -134,6 +134,12 @@ PyAPI_FUNC(PyObject *) PyModule_FromDefAndSpec2(PyModuleDef *def, #endif /* New in 3.5 */ +#ifndef Py_LIMITED_API +# define Py_CPYTHON_MODSUPPORT_H +# include "cpython/modsupport.h" +# undef Py_CPYTHON_MODSUPPORT_H +#endif + #ifdef __cplusplus } #endif diff --git a/Include/object.h b/Include/object.h index a1e5b33b0fdaae..abfdb6ce24df21 100644 --- a/Include/object.h +++ b/Include/object.h @@ -249,11 +249,7 @@ PyAPI_FUNC(PyTypeObject*) Py_TYPE(PyObject *ob); #else static inline PyTypeObject* _Py_TYPE(PyObject *ob) { - #if defined(Py_GIL_DISABLED) - return (PyTypeObject *)_Py_atomic_load_ptr_relaxed(&ob->ob_type); - #else return ob->ob_type; - #endif } #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 # define Py_TYPE(ob) _Py_TYPE(_PyObject_CAST(ob)) @@ -284,11 +280,7 @@ static inline int Py_IS_TYPE(PyObject *ob, PyTypeObject *type) { static inline void Py_SET_TYPE(PyObject *ob, PyTypeObject *type) { -#ifdef Py_GIL_DISABLED - _Py_atomic_store_ptr(&ob->ob_type, type); -#else ob->ob_type = type; -#endif } #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000 # define Py_SET_TYPE(ob, type) Py_SET_TYPE(_PyObject_CAST(ob), type) diff --git a/InternalDocs/compiler.md b/InternalDocs/compiler.md index 17fe0df6e1db10..b3dc0a48069969 100644 --- a/InternalDocs/compiler.md +++ b/InternalDocs/compiler.md @@ -623,8 +623,8 @@ Important files Objects ======= -* [Objects/locations.md](https://github.com/python/cpython/blob/main/Objects/locations.md): Describes the location table -* [Objects/frame_layout.md](https://github.com/python/cpython/blob/main/Objects/frame_layout.md): Describes the frame stack +* [Locations](locations.md): Describes the location table +* [Frames](frames.md): Describes frames and the frame stack * [Objects/object_layout.md](https://github.com/python/cpython/blob/main/Objects/object_layout.md): Describes object layout for 3.11 and later * [Exception Handling](exception_handling.md): Describes the exception table diff --git 
a/Lib/_pyio.py b/Lib/_pyio.py index 7d298e1674b49a..75b5ad1b1a47d2 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -1577,6 +1577,7 @@ def __init__(self, file, mode='r', closefd=True, opener=None): self._blksize = getattr(fdfstat, 'st_blksize', 0) if self._blksize <= 1: self._blksize = DEFAULT_BUFFER_SIZE + self._estimated_size = fdfstat.st_size if _setmode: # don't translate newlines (\r\n <=> \n) @@ -1654,14 +1655,18 @@ def readall(self): """ self._checkClosed() self._checkReadable() - bufsize = DEFAULT_BUFFER_SIZE - try: - pos = os.lseek(self._fd, 0, SEEK_CUR) - end = os.fstat(self._fd).st_size - if end >= pos: - bufsize = end - pos + 1 - except OSError: - pass + if self._estimated_size <= 0: + bufsize = DEFAULT_BUFFER_SIZE + else: + bufsize = self._estimated_size + 1 + + if self._estimated_size > 65536: + try: + pos = os.lseek(self._fd, 0, SEEK_CUR) + if self._estimated_size >= pos: + bufsize = self._estimated_size - pos + 1 + except OSError: + pass result = bytearray() while True: @@ -1737,6 +1742,7 @@ def truncate(self, size=None): if size is None: size = self.tell() os.ftruncate(self._fd, size) + self._estimated_size = size return size def close(self): diff --git a/Lib/ast.py b/Lib/ast.py index fb4d21b87d8bd0..a954d4a97d3c22 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -422,6 +422,8 @@ def compare( might differ in whitespace or similar details. """ + sentinel = object() # handle the possibility of a missing attribute/field + def _compare(a, b): # Compare two fields on an AST object, which may themselves be # AST objects, lists of AST objects, or primitive ASDL types @@ -449,8 +451,14 @@ def _compare_fields(a, b): if a._fields != b._fields: return False for field in a._fields: - a_field = getattr(a, field) - b_field = getattr(b, field) + a_field = getattr(a, field, sentinel) + b_field = getattr(b, field, sentinel) + if a_field is sentinel and b_field is sentinel: + # both nodes are missing a field at runtime + continue + if a_field is sentinel or b_field is sentinel: + # one of the node is missing a field + return False if not _compare(a_field, b_field): return False else: @@ -461,8 +469,11 @@ def _compare_attributes(a, b): return False # Attributes are always ints. for attr in a._attributes: - a_attr = getattr(a, attr) - b_attr = getattr(b, attr) + a_attr = getattr(a, attr, sentinel) + b_attr = getattr(b, attr, sentinel) + if a_attr is sentinel and b_attr is sentinel: + # both nodes are missing an attribute at runtime + continue if a_attr != b_attr: return False else: diff --git a/Lib/asyncio/__main__.py b/Lib/asyncio/__main__.py index 91fff9aaee337b..3e2fe93943d4ed 100644 --- a/Lib/asyncio/__main__.py +++ b/Lib/asyncio/__main__.py @@ -116,7 +116,7 @@ def run(self): if err := check(): raise RuntimeError(err) except Exception as e: - console.interact(banner="", exitmsg=exit_message) + console.interact(banner="", exitmsg="") else: try: run_multiline_interactive_console(console=console) diff --git a/Lib/bdb.py b/Lib/bdb.py index aa621053cfb4bc..d7543017940d4f 100644 --- a/Lib/bdb.py +++ b/Lib/bdb.py @@ -369,6 +369,7 @@ def set_trace(self, frame=None): If frame is not specified, debugging starts from caller's frame. 
""" + sys.settrace(None) if frame is None: frame = sys._getframe().f_back self.reset() diff --git a/Lib/copy.py b/Lib/copy.py index 7a1907d75494d7..a79976d3a658f0 100644 --- a/Lib/copy.py +++ b/Lib/copy.py @@ -4,8 +4,9 @@ import copy - x = copy.copy(y) # make a shallow copy of y - x = copy.deepcopy(y) # make a deep copy of y + x = copy.copy(y) # make a shallow copy of y + x = copy.deepcopy(y) # make a deep copy of y + x = copy.replace(y, a=1, b=2) # new object with fields replaced, as defined by `__replace__` For module specific errors, copy.Error is raised. @@ -56,7 +57,7 @@ class Error(Exception): pass error = Error # backward compatibility -__all__ = ["Error", "copy", "deepcopy"] +__all__ = ["Error", "copy", "deepcopy", "replace"] def copy(x): """Shallow copy operation on arbitrary Python objects. diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py index d2e6a8bfc8c9d4..721522caeeac92 100644 --- a/Lib/ctypes/__init__.py +++ b/Lib/ctypes/__init__.py @@ -208,6 +208,10 @@ class c_longdouble(_SimpleCData): try: class c_double_complex(_SimpleCData): _type_ = "C" + class c_float_complex(_SimpleCData): + _type_ = "E" + class c_longdouble_complex(_SimpleCData): + _type_ = "F" except AttributeError: pass diff --git a/Lib/functools.py b/Lib/functools.py index d04957c555295e..a10493f0e25360 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -18,6 +18,7 @@ from collections import namedtuple # import types, weakref # Deferred to single_dispatch() from reprlib import recursive_repr +from types import MethodType from _thread import RLock # Avoid importing types, so we can speedup import time @@ -314,12 +315,7 @@ def __repr__(self): def __get__(self, obj, objtype=None): if obj is None: return self - import warnings - warnings.warn('functools.partial will be a method descriptor in ' - 'future Python versions; wrap it in staticmethod() ' - 'if you want to preserve the old behavior', - FutureWarning, 2) - return self + return MethodType(self, obj) def __reduce__(self): return type(self), (self.func,), (self.func, self.args, @@ -402,7 +398,7 @@ def _method(cls_or_self, /, *args, **keywords): def __get__(self, obj, cls=None): get = getattr(self.func, "__get__", None) result = None - if get is not None and not isinstance(self.func, partial): + if get is not None: new_func = get(obj, cls) if new_func is not self.func: # Assume __get__ returning something new indicates the diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py index 7243d052cc27f3..8403ef9b44ad1a 100644 --- a/Lib/importlib/util.py +++ b/Lib/importlib/util.py @@ -13,7 +13,6 @@ import _imp import sys -import threading import types @@ -257,6 +256,9 @@ def create_module(self, spec): def exec_module(self, module): """Make the module load lazily.""" + # Threading is only needed for lazy loading, and importlib.util can + # be pulled in at interpreter startup, so defer until needed. 
+ import threading module.__spec__.loader = self.loader module.__loader__ = self.loader # Don't need to worry about deep-copying as trying to set an attribute diff --git a/Lib/multiprocessing/shared_memory.py b/Lib/multiprocessing/shared_memory.py index 67e70fdc27cf31..99a8ce3320ad4e 100644 --- a/Lib/multiprocessing/shared_memory.py +++ b/Lib/multiprocessing/shared_memory.py @@ -539,6 +539,6 @@ def index(self, value): if value == entry: return position else: - raise ValueError(f"{value!r} not in this container") + raise ValueError("ShareableList.index(x): x not in list") __class_getitem__ = classmethod(types.GenericAlias) diff --git a/Lib/os.py b/Lib/os.py index 4b48afb040e565..aaa758d955fe4c 100644 --- a/Lib/os.py +++ b/Lib/os.py @@ -373,61 +373,45 @@ def walk(top, topdown=True, onerror=None, followlinks=False): # minor reason when (say) a thousand readable directories are still # left to visit. try: - scandir_it = scandir(top) + with scandir(top) as entries: + for entry in entries: + try: + if followlinks is _walk_symlinks_as_files: + is_dir = entry.is_dir(follow_symlinks=False) and not entry.is_junction() + else: + is_dir = entry.is_dir() + except OSError: + # If is_dir() raises an OSError, consider the entry not to + # be a directory, same behaviour as os.path.isdir(). + is_dir = False + + if is_dir: + dirs.append(entry.name) + else: + nondirs.append(entry.name) + + if not topdown and is_dir: + # Bottom-up: traverse into sub-directory, but exclude + # symlinks to directories if followlinks is False + if followlinks: + walk_into = True + else: + try: + is_symlink = entry.is_symlink() + except OSError: + # If is_symlink() raises an OSError, consider the + # entry not to be a symbolic link, same behaviour + # as os.path.islink(). + is_symlink = False + walk_into = not is_symlink + + if walk_into: + walk_dirs.append(entry.path) except OSError as error: if onerror is not None: onerror(error) continue - cont = False - with scandir_it: - while True: - try: - try: - entry = next(scandir_it) - except StopIteration: - break - except OSError as error: - if onerror is not None: - onerror(error) - cont = True - break - - try: - if followlinks is _walk_symlinks_as_files: - is_dir = entry.is_dir(follow_symlinks=False) and not entry.is_junction() - else: - is_dir = entry.is_dir() - except OSError: - # If is_dir() raises an OSError, consider the entry not to - # be a directory, same behaviour as os.path.isdir(). - is_dir = False - - if is_dir: - dirs.append(entry.name) - else: - nondirs.append(entry.name) - - if not topdown and is_dir: - # Bottom-up: traverse into sub-directory, but exclude - # symlinks to directories if followlinks is False - if followlinks: - walk_into = True - else: - try: - is_symlink = entry.is_symlink() - except OSError: - # If is_symlink() raises an OSError, consider the - # entry not to be a symbolic link, same behaviour - # as os.path.islink(). - is_symlink = False - walk_into = not is_symlink - - if walk_into: - walk_dirs.append(entry.path) - if cont: - continue - if topdown: # Yield before sub-directory traversal if going top down yield top, dirs, nondirs diff --git a/Lib/pathlib/__init__.py b/Lib/pathlib/__init__.py index 4b3edf535a61aa..2298a249529460 100644 --- a/Lib/pathlib/__init__.py +++ b/Lib/pathlib/__init__.py @@ -5,8 +5,8 @@ operating systems. 
""" -from ._abc import * +from ._os import * from ._local import * -__all__ = (_abc.__all__ + +__all__ = (_os.__all__ + _local.__all__) diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py index 71973913921169..05f55badd77c58 100644 --- a/Lib/pathlib/_abc.py +++ b/Lib/pathlib/_abc.py @@ -16,10 +16,7 @@ import posixpath from glob import _GlobberBase, _no_recurse_symlinks from stat import S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO -from ._os import copyfileobj - - -__all__ = ["UnsupportedOperation"] +from ._os import UnsupportedOperation, copyfileobj @functools.cache @@ -27,12 +24,6 @@ def _is_case_sensitive(parser): return parser.normcase('Aa') == 'Aa' -class UnsupportedOperation(NotImplementedError): - """An exception that is raised when an unsupported operation is called on - a path object. - """ - pass - class ParserBase: """Base class for path parsers, which do low-level path manipulation. @@ -790,7 +781,32 @@ def mkdir(self, mode=0o777, parents=False, exist_ok=False): """ raise UnsupportedOperation(self._unsupported_msg('mkdir()')) - def copy(self, target, follow_symlinks=True): + # Metadata keys supported by this path type. + _readable_metadata = _writable_metadata = frozenset() + + def _read_metadata(self, keys=None, *, follow_symlinks=True): + """ + Returns path metadata as a dict with string keys. + """ + raise UnsupportedOperation(self._unsupported_msg('_read_metadata()')) + + def _write_metadata(self, metadata, *, follow_symlinks=True): + """ + Sets path metadata from the given dict with string keys. + """ + raise UnsupportedOperation(self._unsupported_msg('_write_metadata()')) + + def _copy_metadata(self, target, *, follow_symlinks=True): + """ + Copies metadata (permissions, timestamps, etc) from this path to target. + """ + # Metadata types supported by both source and target. + keys = self._readable_metadata & target._writable_metadata + if keys: + metadata = self._read_metadata(keys, follow_symlinks=follow_symlinks) + target._write_metadata(metadata, follow_symlinks=follow_symlinks) + + def copy(self, target, *, follow_symlinks=True, preserve_metadata=False): """ Copy the contents of this file to the given target. 
If this file is a symlink and follow_symlinks is false, a symlink will be created at the @@ -802,6 +818,8 @@ def copy(self, target, follow_symlinks=True): raise OSError(f"{self!r} and {target!r} are the same file") if not follow_symlinks and self.is_symlink(): target.symlink_to(self.readlink()) + if preserve_metadata: + self._copy_metadata(target, follow_symlinks=False) return with self.open('rb') as source_f: try: @@ -814,6 +832,8 @@ def copy(self, target, follow_symlinks=True): f'Directory does not exist: {target}') from e else: raise + if preserve_metadata: + self._copy_metadata(target) def copytree(self, target, *, follow_symlinks=True, dirs_exist_ok=False, ignore=None, on_error=None): diff --git a/Lib/pathlib/_local.py b/Lib/pathlib/_local.py index 0105ea3042422e..eae8a30c876f19 100644 --- a/Lib/pathlib/_local.py +++ b/Lib/pathlib/_local.py @@ -17,8 +17,9 @@ except ImportError: grp = None -from ._abc import UnsupportedOperation, PurePathBase, PathBase -from ._os import copyfile +from ._os import (UnsupportedOperation, copyfile, file_metadata_keys, + read_file_metadata, write_file_metadata) +from ._abc import PurePathBase, PathBase __all__ = [ @@ -781,8 +782,12 @@ def mkdir(self, mode=0o777, parents=False, exist_ok=False): if not exist_ok or not self.is_dir(): raise + _readable_metadata = _writable_metadata = file_metadata_keys + _read_metadata = read_file_metadata + _write_metadata = write_file_metadata + if copyfile: - def copy(self, target, follow_symlinks=True): + def copy(self, target, *, follow_symlinks=True, preserve_metadata=False): """ Copy the contents of this file to the given target. If this file is a symlink and follow_symlinks is false, a symlink will be created at the @@ -791,12 +796,16 @@ def copy(self, target, follow_symlinks=True): try: target = os.fspath(target) except TypeError: - if isinstance(target, PathBase): - # Target is an instance of PathBase but not os.PathLike. - # Use generic implementation from PathBase. - return PathBase.copy(self, target, follow_symlinks=follow_symlinks) - raise - copyfile(os.fspath(self), target, follow_symlinks) + if not isinstance(target, PathBase): + raise + else: + try: + copyfile(os.fspath(self), target, follow_symlinks) + return + except UnsupportedOperation: + pass # Fall through to generic code. + PathBase.copy(self, target, follow_symlinks=follow_symlinks, + preserve_metadata=preserve_metadata) def chmod(self, mode, *, follow_symlinks=True): """ diff --git a/Lib/pathlib/_os.py b/Lib/pathlib/_os.py index bbb019b6534503..164ee8e9034427 100644 --- a/Lib/pathlib/_os.py +++ b/Lib/pathlib/_os.py @@ -2,7 +2,7 @@ Low-level OS functionality wrappers used by pathlib. """ -from errno import EBADF, EOPNOTSUPP, ETXTBSY, EXDEV +from errno import * import os import stat import sys @@ -20,6 +20,15 @@ _winapi = None +__all__ = ["UnsupportedOperation"] + + +class UnsupportedOperation(NotImplementedError): + """An exception that is raised when an unsupported operation is attempted. + """ + pass + + def get_copy_blocksize(infd): """Determine blocksize for fastcopying on Linux. Hopefully the whole file will be copied in a single call. @@ -106,18 +115,30 @@ def copyfile(source, target, follow_symlinks): Copy from one file to another using CopyFile2 (Windows only). """ if follow_symlinks: - flags = 0 + _winapi.CopyFile2(source, target, 0) else: + # Use COPY_FILE_COPY_SYMLINK to copy a file symlink. 
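A usage sketch of the copy() signature shown above, runnable only on an interpreter that includes this pathlib change; the file names are hypothetical, and preserve_metadata asks for permissions, timestamps and (where both sides support them) flags and extended attributes to be copied along with the data:

    from pathlib import Path

    src = Path('data/report.txt')     # hypothetical source file
    dst = Path('backup/report.txt')   # hypothetical destination

    src.copy(dst)                           # contents only
    src.copy(dst, preserve_metadata=True)   # contents plus supported metadata
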
flags = _winapi.COPY_FILE_COPY_SYMLINK try: _winapi.CopyFile2(source, target, flags) return except OSError as err: # Check for ERROR_ACCESS_DENIED - if err.winerror != 5 or not _is_dirlink(source): + if err.winerror == 5 and _is_dirlink(source): + pass + else: raise + + # Add COPY_FILE_DIRECTORY to copy a directory symlink. flags |= _winapi.COPY_FILE_DIRECTORY - _winapi.CopyFile2(source, target, flags) + try: + _winapi.CopyFile2(source, target, flags) + except OSError as err: + # Check for ERROR_INVALID_PARAMETER + if err.winerror == 87: + raise UnsupportedOperation(err) from None + else: + raise else: copyfile = None @@ -157,3 +178,100 @@ def copyfileobj(source_f, target_f): write_target = target_f.write while buf := read_source(1024 * 1024): write_target(buf) + + +# Kinds of metadata supported by the operating system. +file_metadata_keys = {'mode', 'times_ns'} +if hasattr(os.stat_result, 'st_flags'): + file_metadata_keys.add('flags') +if hasattr(os, 'listxattr'): + file_metadata_keys.add('xattrs') +file_metadata_keys = frozenset(file_metadata_keys) + + +def read_file_metadata(path, keys=None, *, follow_symlinks=True): + """ + Returns local path metadata as a dict with string keys. + """ + if keys is None: + keys = file_metadata_keys + assert keys.issubset(file_metadata_keys) + result = {} + for key in keys: + if key == 'xattrs': + try: + result['xattrs'] = [ + (attr, os.getxattr(path, attr, follow_symlinks=follow_symlinks)) + for attr in os.listxattr(path, follow_symlinks=follow_symlinks)] + except OSError as err: + if err.errno not in (EPERM, ENOTSUP, ENODATA, EINVAL, EACCES): + raise + continue + st = os.stat(path, follow_symlinks=follow_symlinks) + if key == 'mode': + result['mode'] = stat.S_IMODE(st.st_mode) + elif key == 'times_ns': + result['times_ns'] = st.st_atime_ns, st.st_mtime_ns + elif key == 'flags': + result['flags'] = st.st_flags + return result + + +def write_file_metadata(path, metadata, *, follow_symlinks=True): + """ + Sets local path metadata from the given dict with string keys. + """ + assert frozenset(metadata.keys()).issubset(file_metadata_keys) + + def _nop(*args, ns=None, follow_symlinks=None): + pass + + if follow_symlinks: + # use the real function if it exists + def lookup(name): + return getattr(os, name, _nop) + else: + # use the real function only if it exists + # *and* it supports follow_symlinks + def lookup(name): + fn = getattr(os, name, _nop) + if fn in os.supports_follow_symlinks: + return fn + return _nop + + times_ns = metadata.get('times_ns') + if times_ns is not None: + lookup("utime")(path, ns=times_ns, follow_symlinks=follow_symlinks) + # We must copy extended attributes before the file is (potentially) + # chmod()'ed read-only, otherwise setxattr() will error with -EACCES. + xattrs = metadata.get('xattrs') + if xattrs is not None: + for attr, value in xattrs: + try: + os.setxattr(path, attr, value, follow_symlinks=follow_symlinks) + except OSError as e: + if e.errno not in (EPERM, ENOTSUP, ENODATA, EINVAL, EACCES): + raise + mode = metadata.get('mode') + if mode is not None: + try: + lookup("chmod")(path, mode, follow_symlinks=follow_symlinks) + except NotImplementedError: + # if we got a NotImplementedError, it's because + # * follow_symlinks=False, + # * lchown() is unavailable, and + # * either + # * fchownat() is unavailable or + # * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW. + # (it returned ENOSUP.) + # therefore we're out of options--we simply cannot chown the + # symlink. give up, suppress the error. 
+ # (which is what shutil always did in this circumstance.) + pass + flags = metadata.get('flags') + if flags is not None: + try: + lookup("chflags")(path, flags, follow_symlinks=follow_symlinks) + except OSError as why: + if why.errno not in (EOPNOTSUPP, ENOTSUP): + raise diff --git a/Lib/pdb.py b/Lib/pdb.py index 4af16d0a087c8c..7ff973149b167b 100644 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -306,6 +306,8 @@ class Pdb(bdb.Bdb, cmd.Cmd): _file_mtime_table = {} + _last_pdb_instance = None + def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None, nosigint=False, readrc=True): bdb.Bdb.__init__(self, skip=skip) @@ -359,6 +361,12 @@ def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None, self._chained_exceptions = tuple() self._chained_exception_index = 0 + def set_trace(self, frame=None): + Pdb._last_pdb_instance = self + if frame is None: + frame = sys._getframe().f_back + super().set_trace(frame) + def sigint_handler(self, signum, frame): if self.allow_kbdint: raise KeyboardInterrupt @@ -2350,7 +2358,10 @@ def set_trace(*, header=None): an assertion fails). If given, *header* is printed to the console just before debugging begins. """ - pdb = Pdb() + if Pdb._last_pdb_instance is not None: + pdb = Pdb._last_pdb_instance + else: + pdb = Pdb() if header is not None: pdb.message(header) pdb.set_trace(sys._getframe().f_back) @@ -2481,9 +2492,12 @@ def main(): traceback.print_exception(e, colorize=_colorize.can_colorize()) print("Uncaught exception. Entering post mortem debugging") print("Running 'cont' or 'step' will restart the program") - pdb.interaction(None, e) - print(f"Post mortem debugger finished. The {target} will " - "be restarted") + try: + pdb.interaction(None, e) + except Restart: + print("Restarting", target, "with arguments:") + print("\t" + " ".join(sys.argv[1:])) + continue if pdb._user_requested_quit: break print("The program finished and will be restarted") diff --git a/Lib/site.py b/Lib/site.py index 9381f6f510eb46..460269433f021c 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -312,6 +312,10 @@ def joinuser(*args): # Same to sysconfig.get_path('purelib', os.name+'_user') def _get_path(userbase): version = sys.version_info + if hasattr(sys, 'abiflags') and 't' in sys.abiflags: + abi_thread = 't' + else: + abi_thread = '' implementation = _get_implementation() implementation_lower = implementation.lower() @@ -322,7 +326,7 @@ def _get_path(userbase): if sys.platform == 'darwin' and sys._framework: return f'{userbase}/lib/{implementation_lower}/site-packages' - return f'{userbase}/lib/python{version[0]}.{version[1]}/site-packages' + return f'{userbase}/lib/python{version[0]}.{version[1]}{abi_thread}/site-packages' def getuserbase(): @@ -390,6 +394,10 @@ def getsitepackages(prefixes=None): implementation = _get_implementation().lower() ver = sys.version_info + if hasattr(sys, 'abiflags') and 't' in sys.abiflags: + abi_thread = 't' + else: + abi_thread = '' if os.sep == '/': libdirs = [sys.platlibdir] if sys.platlibdir != "lib": @@ -397,7 +405,7 @@ def getsitepackages(prefixes=None): for libdir in libdirs: path = os.path.join(prefix, libdir, - f"{implementation}{ver[0]}.{ver[1]}", + f"{implementation}{ver[0]}.{ver[1]}{abi_thread}", "site-packages") sitepackages.append(path) else: @@ -526,8 +534,7 @@ def register_readline(): def write_history(): try: - # _pyrepl.__main__ is executed as the __main__ module - from __main__ import CAN_USE_PYREPL + from _pyrepl.main import CAN_USE_PYREPL except ImportError: CAN_USE_PYREPL = False diff --git 
a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py index 98a14e5d3a3187..83e057c177f8c0 100644 --- a/Lib/sysconfig/__init__.py +++ b/Lib/sysconfig/__init__.py @@ -27,10 +27,10 @@ _INSTALL_SCHEMES = { 'posix_prefix': { - 'stdlib': '{installed_base}/{platlibdir}/{implementation_lower}{py_version_short}', - 'platstdlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}', - 'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages', - 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages', + 'stdlib': '{installed_base}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'platstdlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'purelib': '{base}/lib/{implementation_lower}{py_version_short}{abi_thread}/site-packages', + 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}/site-packages', 'include': '{installed_base}/include/{implementation_lower}{py_version_short}{abiflags}', 'platinclude': @@ -77,10 +77,10 @@ # Downstream distributors who patch posix_prefix/nt scheme are encouraged to # leave the following schemes unchanged 'posix_venv': { - 'stdlib': '{installed_base}/{platlibdir}/{implementation_lower}{py_version_short}', - 'platstdlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}', - 'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages', - 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages', + 'stdlib': '{installed_base}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'platstdlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'purelib': '{base}/lib/{implementation_lower}{py_version_short}{abi_thread}/site-packages', + 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}/site-packages', 'include': '{installed_base}/include/{implementation_lower}{py_version_short}{abiflags}', 'platinclude': @@ -148,11 +148,11 @@ def joinuser(*args): 'data': '{userbase}', }, 'posix_user': { - 'stdlib': '{userbase}/{platlibdir}/{implementation_lower}{py_version_short}', - 'platstdlib': '{userbase}/{platlibdir}/{implementation_lower}{py_version_short}', - 'purelib': '{userbase}/lib/{implementation_lower}{py_version_short}/site-packages', - 'platlib': '{userbase}/lib/{implementation_lower}{py_version_short}/site-packages', - 'include': '{userbase}/include/{implementation_lower}{py_version_short}', + 'stdlib': '{userbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'platstdlib': '{userbase}/{platlibdir}/{implementation_lower}{py_version_short}{abi_thread}', + 'purelib': '{userbase}/lib/{implementation_lower}{py_version_short}{abi_thread}/site-packages', + 'platlib': '{userbase}/lib/{implementation_lower}{py_version_short}{abi_thread}/site-packages', + 'include': '{userbase}/include/{implementation_lower}{py_version_short}{abi_thread}', 'scripts': '{userbase}/bin', 'data': '{userbase}', }, @@ -487,6 +487,9 @@ def _init_config_vars(): # the init-function. 
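To make the {abi_thread} placeholder added to the schemes above concrete, here is how the posix_prefix purelib template expands for a default build versus a free-threaded build; the prefix and version below are example values, not taken from any particular installation:

    purelib = '{base}/lib/{implementation_lower}{py_version_short}{abi_thread}/site-packages'

    # Default build: abi_thread is ''.
    print(purelib.format(base='/usr', implementation_lower='python',
                         py_version_short='3.14', abi_thread=''))
    # -> /usr/lib/python3.14/site-packages

    # Free-threaded build (Py_GIL_DISABLED): abi_thread is 't'.
    print(purelib.format(base='/usr', implementation_lower='python',
                         py_version_short='3.14', abi_thread='t'))
    # -> /usr/lib/python3.14t/site-packages
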
_CONFIG_VARS['userbase'] = _getuserbase() + # e.g., 't' for free-threaded or '' for default build + _CONFIG_VARS['abi_thread'] = 't' if _CONFIG_VARS.get('Py_GIL_DISABLED') else '' + # Always convert srcdir to an absolute path srcdir = _CONFIG_VARS.get('srcdir', _PROJECT_BASE) if os.name == 'posix': @@ -655,6 +658,10 @@ def get_python_version(): return _PY_VERSION_SHORT +def _get_python_version_abi(): + return _PY_VERSION_SHORT + get_config_var("abi_thread") + + def expand_makefile_vars(s, vars): """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in 'string' according to 'vars' (a dictionary mapping variable names to diff --git a/Lib/test/_test_eintr.py b/Lib/test/_test_eintr.py index 15586f15dfab30..493932d6c6d441 100644 --- a/Lib/test/_test_eintr.py +++ b/Lib/test/_test_eintr.py @@ -18,6 +18,7 @@ import socket import subprocess import sys +import textwrap import time import unittest @@ -492,29 +493,31 @@ def test_devpoll(self): self.check_elapsed_time(dt) -class FNTLEINTRTest(EINTRBaseTest): +class FCNTLEINTRTest(EINTRBaseTest): def _lock(self, lock_func, lock_name): self.addCleanup(os_helper.unlink, os_helper.TESTFN) - code = '\n'.join(( - "import fcntl, time", - "with open('%s', 'wb') as f:" % os_helper.TESTFN, - " fcntl.%s(f, fcntl.LOCK_EX)" % lock_name, - " time.sleep(%s)" % self.sleep_time)) - start_time = time.monotonic() - proc = self.subprocess(code) + rd1, wr1 = os.pipe() + rd2, wr2 = os.pipe() + for fd in (rd1, wr1, rd2, wr2): + self.addCleanup(os.close, fd) + code = textwrap.dedent(f""" + import fcntl, os, time + with open('{os_helper.TESTFN}', 'wb') as f: + fcntl.{lock_name}(f, fcntl.LOCK_EX) + os.write({wr1}, b"ok") + _ = os.read({rd2}, 2) # wait for parent process + time.sleep({self.sleep_time}) + """) + proc = self.subprocess(code, pass_fds=[wr1, rd2]) with kill_on_error(proc): with open(os_helper.TESTFN, 'wb') as f: # synchronize the subprocess + ok = os.read(rd1, 2) + self.assertEqual(ok, b"ok") + + # notify the child that the parent is ready start_time = time.monotonic() - for _ in support.sleeping_retry(support.LONG_TIMEOUT, error=False): - try: - lock_func(f, fcntl.LOCK_EX | fcntl.LOCK_NB) - lock_func(f, fcntl.LOCK_UN) - except BlockingIOError: - break - else: - dt = time.monotonic() - start_time - raise Exception("failed to sync child in %.1f sec" % dt) + os.write(wr2, b"go") # the child locked the file just a moment ago for 'sleep_time' seconds # that means that the lock below will block for 'sleep_time' minus some diff --git a/Lib/test/crashers/README b/Lib/test/crashers/README index d844385113eb45..7111946b93b280 100644 --- a/Lib/test/crashers/README +++ b/Lib/test/crashers/README @@ -15,7 +15,3 @@ what the variables are. Once the crash is fixed, the test case should be moved into an appropriate test (even if it was originally from the test suite). This ensures the regression doesn't happen again. And if it does, it should be easier to track down. - -Also see Lib/test_crashers.py which exercises the crashers in this directory. -In particular, make sure to add any new infinite loop crashers to the black -list so it doesn't try to run them. 
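The reworked FCNTLEINTRTest above drops the lock-polling loop in favour of a two-pipe handshake between parent and child. A standalone, POSIX-only sketch of that synchronisation pattern, without the fcntl locking itself (names and messages are illustrative):

    import os
    import subprocess
    import sys
    import textwrap

    rd1, wr1 = os.pipe()   # child -> parent: "I am ready"
    rd2, wr2 = os.pipe()   # parent -> child: "go ahead"

    child_code = textwrap.dedent(f"""
        import os
        os.write({wr1}, b"ok")              # signal readiness to the parent
        assert os.read({rd2}, 2) == b"go"   # block until the parent replies
    """)
    proc = subprocess.Popen([sys.executable, '-c', child_code],
                            pass_fds=[wr1, rd2])

    assert os.read(rd1, 2) == b"ok"   # wait until the child is ready
    os.write(wr2, b"go")              # release the child
    proc.wait()
    for fd in (rd1, wr1, rd2, wr2):
        os.close(fd)
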
diff --git a/Lib/test/libregrtest/run_workers.py b/Lib/test/libregrtest/run_workers.py index a71050e66db3bd..387ddf9614cf79 100644 --- a/Lib/test/libregrtest/run_workers.py +++ b/Lib/test/libregrtest/run_workers.py @@ -22,7 +22,7 @@ from .single import PROGRESS_MIN_TIME from .utils import ( StrPath, TestName, - format_duration, print_warning, count, plural, get_signal_name) + format_duration, print_warning, count, plural) from .worker import create_worker_process, USE_PROCESS_GROUP if MS_WINDOWS: @@ -366,7 +366,7 @@ def _runtest(self, test_name: TestName) -> MultiprocessResult: err_msg=None, state=State.TIMEOUT) if retcode != 0: - name = get_signal_name(retcode) + name = support.get_signal_name(retcode) if name: retcode = f"{retcode} ({name})" raise WorkerError(self.test_name, f"Exit code {retcode}", stdout, diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index 0167742d388a2c..2a3449016fe951 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -264,6 +264,12 @@ def clear_caches(): for f in typing._cleanups: f() + import inspect + abs_classes = filter(inspect.isabstract, typing.__dict__.values()) + for abc in abs_classes: + for obj in abc.__subclasses__() + [abc]: + obj._abc_caches_clear() + try: fractions = sys.modules['fractions'] except KeyError: @@ -685,35 +691,6 @@ def cleanup_temp_dir(tmp_dir: StrPath): print("Remove file: %s" % name) os_helper.unlink(name) -WINDOWS_STATUS = { - 0xC0000005: "STATUS_ACCESS_VIOLATION", - 0xC00000FD: "STATUS_STACK_OVERFLOW", - 0xC000013A: "STATUS_CONTROL_C_EXIT", -} - -def get_signal_name(exitcode): - if exitcode < 0: - signum = -exitcode - try: - return signal.Signals(signum).name - except ValueError: - pass - - # Shell exit code (ex: WASI build) - if 128 < exitcode < 256: - signum = exitcode - 128 - try: - return signal.Signals(signum).name - except ValueError: - pass - - try: - return WINDOWS_STATUS[exitcode] - except KeyError: - pass - - return None - ILLEGAL_XML_CHARS_RE = re.compile( '[' diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index dbea070929be9b..7f6579319589b4 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -387,7 +387,7 @@ def skip_if_buildbot(reason=None): reason = 'not suitable for buildbots' try: isbuildbot = getpass.getuser().lower() == 'buildbot' - except (KeyError, EnvironmentError) as err: + except (KeyError, OSError) as err: warnings.warn(f'getpass.getuser() failed {err}.', RuntimeWarning) isbuildbot = False return unittest.skipIf(isbuildbot, reason) @@ -2632,3 +2632,35 @@ def initialized_with_pyrepl(): """Detect whether PyREPL was used during Python initialization.""" # If the main module has a __file__ attribute it's a Python module, which means PyREPL. 
return hasattr(sys.modules["__main__"], "__file__") + + +WINDOWS_STATUS = { + 0xC0000005: "STATUS_ACCESS_VIOLATION", + 0xC00000FD: "STATUS_STACK_OVERFLOW", + 0xC000013A: "STATUS_CONTROL_C_EXIT", +} + +def get_signal_name(exitcode): + import signal + + if exitcode < 0: + signum = -exitcode + try: + return signal.Signals(signum).name + except ValueError: + pass + + # Shell exit code (ex: WASI build) + if 128 < exitcode < 256: + signum = exitcode - 128 + try: + return signal.Signals(signum).name + except ValueError: + pass + + try: + return WINDOWS_STATUS[exitcode] + except KeyError: + pass + + return None diff --git a/Lib/test/support/script_helper.py b/Lib/test/support/script_helper.py index 65e0bc199e7f0b..d0be3179b0efa3 100644 --- a/Lib/test/support/script_helper.py +++ b/Lib/test/support/script_helper.py @@ -70,23 +70,25 @@ def fail(self, cmd_line): out = b'(... truncated stdout ...)' + out[-maxlen:] if len(err) > maxlen: err = b'(... truncated stderr ...)' + err[-maxlen:] - out = out.decode('ascii', 'replace').rstrip() - err = err.decode('ascii', 'replace').rstrip() - raise AssertionError("Process return code is %d\n" - "command line: %r\n" - "\n" - "stdout:\n" - "---\n" - "%s\n" - "---\n" - "\n" - "stderr:\n" - "---\n" - "%s\n" - "---" - % (self.rc, cmd_line, - out, - err)) + out = out.decode('utf8', 'replace').rstrip() + err = err.decode('utf8', 'replace').rstrip() + + exitcode = self.rc + signame = support.get_signal_name(exitcode) + if signame: + exitcode = f"{exitcode} ({signame})" + raise AssertionError(f"Process return code is {exitcode}\n" + f"command line: {cmd_line!r}\n" + f"\n" + f"stdout:\n" + f"---\n" + f"{out}\n" + f"---\n" + f"\n" + f"stderr:\n" + f"---\n" + f"{err}\n" + f"---") # Executing the interpreter in a subprocess diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 5d71d524516df2..497c3f261a1fca 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -948,6 +948,15 @@ def test_compare_fieldless(self): self.assertTrue(ast.compare(ast.Add(), ast.Add())) self.assertFalse(ast.compare(ast.Sub(), ast.Add())) + # test that missing runtime fields is handled in ast.compare() + a1, a2 = ast.Name('a'), ast.Name('a') + self.assertTrue(ast.compare(a1, a2)) + self.assertTrue(ast.compare(a1, a2)) + del a1.id + self.assertFalse(ast.compare(a1, a2)) + del a2.id + self.assertTrue(ast.compare(a1, a2)) + def test_compare_modes(self): for mode, sources in ( ("exec", exec_tests), @@ -970,6 +979,16 @@ def parse(a, b): self.assertTrue(ast.compare(a, b, compare_attributes=False)) self.assertFalse(ast.compare(a, b, compare_attributes=True)) + def test_compare_attributes_option_missing_attribute(self): + # test that missing runtime attributes is handled in ast.compare() + a1, a2 = ast.Name('a', lineno=1), ast.Name('a', lineno=1) + self.assertTrue(ast.compare(a1, a2)) + self.assertTrue(ast.compare(a1, a2, compare_attributes=True)) + del a1.lineno + self.assertFalse(ast.compare(a1, a2, compare_attributes=True)) + del a2.lineno + self.assertTrue(ast.compare(a1, a2, compare_attributes=True)) + def test_positional_only_feature_version(self): ast.parse('def foo(x, /): ...', feature_version=(3, 8)) ast.parse('def bar(x=1, /): ...', feature_version=(3, 8)) @@ -1130,6 +1149,25 @@ def test_none_checks(self) -> None: class CopyTests(unittest.TestCase): """Test copying and pickling AST nodes.""" + @staticmethod + def iter_ast_classes(): + """Iterate over the (native) subclasses of ast.AST recursively. 
+ + This excludes the special class ast.Index since its constructor + returns an integer. + """ + def do(cls): + if cls.__module__ != 'ast': + return + if cls is ast.Index: + return + + yield cls + for sub in cls.__subclasses__(): + yield from do(sub) + + yield from do(ast.AST) + def test_pickling(self): import pickle @@ -1199,6 +1237,251 @@ def test_copy_with_parents(self): )): self.assertEqual(to_tuple(child.parent), to_tuple(node)) + def test_replace_interface(self): + for klass in self.iter_ast_classes(): + with self.subTest(klass=klass): + self.assertTrue(hasattr(klass, '__replace__')) + + fields = set(klass._fields) + with self.subTest(klass=klass, fields=fields): + node = klass(**dict.fromkeys(fields)) + # forbid positional arguments in replace() + self.assertRaises(TypeError, copy.replace, node, 1) + self.assertRaises(TypeError, node.__replace__, 1) + + def test_replace_native(self): + for klass in self.iter_ast_classes(): + fields = set(klass._fields) + attributes = set(klass._attributes) + + with self.subTest(klass=klass, fields=fields, attributes=attributes): + # use of object() to ensure that '==' and 'is' + # behave similarly in ast.compare(node, repl) + old_fields = {field: object() for field in fields} + old_attrs = {attr: object() for attr in attributes} + + # check shallow copy + node = klass(**old_fields) + repl = copy.replace(node) + self.assertTrue(ast.compare(node, repl, compare_attributes=True)) + # check when passing using attributes (they may be optional!) + node = klass(**old_fields, **old_attrs) + repl = copy.replace(node) + self.assertTrue(ast.compare(node, repl, compare_attributes=True)) + + for field in fields: + # check when we sometimes have attributes and sometimes not + for init_attrs in [{}, old_attrs]: + node = klass(**old_fields, **init_attrs) + # only change a single field (do not change attributes) + new_value = object() + repl = copy.replace(node, **{field: new_value}) + for f in fields: + old_value = old_fields[f] + # assert that there is no side-effect + self.assertIs(getattr(node, f), old_value) + # check the changes + if f != field: + self.assertIs(getattr(repl, f), old_value) + else: + self.assertIs(getattr(repl, f), new_value) + self.assertFalse(ast.compare(node, repl, compare_attributes=True)) + + for attribute in attributes: + node = klass(**old_fields, **old_attrs) + # only change a single attribute (do not change fields) + new_attr = object() + repl = copy.replace(node, **{attribute: new_attr}) + for a in attributes: + old_attr = old_attrs[a] + # assert that there is no side-effect + self.assertIs(getattr(node, a), old_attr) + # check the changes + if a != attribute: + self.assertIs(getattr(repl, a), old_attr) + else: + self.assertIs(getattr(repl, a), new_attr) + self.assertFalse(ast.compare(node, repl, compare_attributes=True)) + + def test_replace_accept_known_class_fields(self): + nid, ctx = object(), object() + + node = ast.Name(id=nid, ctx=ctx) + self.assertIs(node.id, nid) + self.assertIs(node.ctx, ctx) + + new_nid = object() + repl = copy.replace(node, id=new_nid) + # assert that there is no side-effect + self.assertIs(node.id, nid) + self.assertIs(node.ctx, ctx) + # check the changes + self.assertIs(repl.id, new_nid) + self.assertIs(repl.ctx, node.ctx) # no changes + + def test_replace_accept_known_class_attributes(self): + node = ast.parse('x').body[0].value + self.assertEqual(node.id, 'x') + self.assertEqual(node.lineno, 1) + + # constructor allows any type so replace() should do the same + lineno = object() + repl = 
copy.replace(node, lineno=lineno) + # assert that there is no side-effect + self.assertEqual(node.lineno, 1) + # check the changes + self.assertEqual(repl.id, node.id) + self.assertEqual(repl.ctx, node.ctx) + self.assertEqual(repl.lineno, lineno) + + _, _, state = node.__reduce__() + self.assertEqual(state['id'], 'x') + self.assertEqual(state['ctx'], node.ctx) + self.assertEqual(state['lineno'], 1) + + _, _, state = repl.__reduce__() + self.assertEqual(state['id'], 'x') + self.assertEqual(state['ctx'], node.ctx) + self.assertEqual(state['lineno'], lineno) + + def test_replace_accept_known_custom_class_fields(self): + class MyNode(ast.AST): + _fields = ('name', 'data') + __annotations__ = {'name': str, 'data': object} + __match_args__ = ('name', 'data') + + name, data = 'name', object() + + node = MyNode(name, data) + self.assertIs(node.name, name) + self.assertIs(node.data, data) + # check shallow copy + repl = copy.replace(node) + # assert that there is no side-effect + self.assertIs(node.name, name) + self.assertIs(node.data, data) + # check the shallow copy + self.assertIs(repl.name, name) + self.assertIs(repl.data, data) + + node = MyNode(name, data) + repl_data = object() + # replace custom but known field + repl = copy.replace(node, data=repl_data) + # assert that there is no side-effect + self.assertIs(node.name, name) + self.assertIs(node.data, data) + # check the changes + self.assertIs(repl.name, node.name) + self.assertIs(repl.data, repl_data) + + def test_replace_accept_known_custom_class_attributes(self): + class MyNode(ast.AST): + x = 0 + y = 1 + _attributes = ('x', 'y') + + node = MyNode() + self.assertEqual(node.x, 0) + self.assertEqual(node.y, 1) + + y = object() + repl = copy.replace(node, y=y) + # assert that there is no side-effect + self.assertEqual(node.x, 0) + self.assertEqual(node.y, 1) + # check the changes + self.assertEqual(repl.x, 0) + self.assertEqual(repl.y, y) + + def test_replace_ignore_known_custom_instance_fields(self): + node = ast.parse('x').body[0].value + node.extra = extra = object() # add instance 'extra' field + context = node.ctx + + # assert initial values + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertIs(node.extra, extra) + # shallow copy, but drops extra fields + repl = copy.replace(node) + # assert that there is no side-effect + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertIs(node.extra, extra) + # verify that the 'extra' field is not kept + self.assertIs(repl.id, 'x') + self.assertIs(repl.ctx, context) + self.assertRaises(AttributeError, getattr, repl, 'extra') + + # change known native field + repl = copy.replace(node, id='y') + # assert that there is no side-effect + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertIs(node.extra, extra) + # verify that the 'extra' field is not kept + self.assertIs(repl.id, 'y') + self.assertIs(repl.ctx, context) + self.assertRaises(AttributeError, getattr, repl, 'extra') + + def test_replace_reject_missing_field(self): + # case: warn if deleted field is not replaced + node = ast.parse('x').body[0].value + context = node.ctx + del node.id + + self.assertRaises(AttributeError, getattr, node, 'id') + self.assertIs(node.ctx, context) + msg = "Name.__replace__ missing 1 keyword argument: 'id'." 
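A condensed sketch of the copy.replace() behaviour these tests pin down, runnable only on an interpreter with the AST __replace__ support added in this change: replacing a known field produces a shallow copy, and a field deleted at runtime must be supplied explicitly.

    import ast, copy

    node = ast.parse('x').body[0].value      # an ast.Name with id='x'
    repl = copy.replace(node, id='y')        # shallow copy with one field swapped
    print(node.id, repl.id)                  # x y

    del node.id
    try:
        copy.replace(node)                   # missing 'id' is rejected...
    except TypeError as exc:
        print(exc)                           # Name.__replace__ missing 1 keyword argument: 'id'.
    print(copy.replace(node, id='z').id)     # ...unless it is supplied: z
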
+ with self.assertRaisesRegex(TypeError, re.escape(msg)): + copy.replace(node) + # assert that there is no side-effect + self.assertRaises(AttributeError, getattr, node, 'id') + self.assertIs(node.ctx, context) + + # case: do not raise if deleted field is replaced + node = ast.parse('x').body[0].value + context = node.ctx + del node.id + + self.assertRaises(AttributeError, getattr, node, 'id') + self.assertIs(node.ctx, context) + repl = copy.replace(node, id='y') + # assert that there is no side-effect + self.assertRaises(AttributeError, getattr, node, 'id') + self.assertIs(node.ctx, context) + self.assertIs(repl.id, 'y') + self.assertIs(repl.ctx, context) + + def test_replace_reject_known_custom_instance_fields_commits(self): + node = ast.parse('x').body[0].value + node.extra = extra = object() # add instance 'extra' field + context = node.ctx + + # explicit rejection of known instance fields + self.assertTrue(hasattr(node, 'extra')) + msg = "Name.__replace__ got an unexpected keyword argument 'extra'." + with self.assertRaisesRegex(TypeError, re.escape(msg)): + copy.replace(node, extra=1) + # assert that there is no side-effect + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertIs(node.extra, extra) + + def test_replace_reject_unknown_instance_fields(self): + node = ast.parse('x').body[0].value + context = node.ctx + + # explicit rejection of unknown extra fields + self.assertRaises(AttributeError, getattr, node, 'unknown') + msg = "Name.__replace__ got an unexpected keyword argument 'unknown'." + with self.assertRaisesRegex(TypeError, re.escape(msg)): + copy.replace(node, unknown=1) + # assert that there is no side-effect + self.assertIs(node.id, 'x') + self.assertIs(node.ctx, context) + self.assertRaises(AttributeError, getattr, node, 'unknown') class ASTHelpers_Test(unittest.TestCase): maxDiff = None @@ -2959,6 +3242,18 @@ class FieldsAndTypes(ast.AST): obj = FieldsAndTypes(a=1) self.assertEqual(obj.a, 1) + def test_custom_attributes(self): + class MyAttrs(ast.AST): + _attributes = ("a", "b") + + obj = MyAttrs(a=1, b=2) + self.assertEqual(obj.a, 1) + self.assertEqual(obj.b, 2) + + with self.assertWarnsRegex(DeprecationWarning, + r"MyAttrs.__init__ got an unexpected keyword argument 'c'."): + obj = MyAttrs(c=3) + def test_fields_and_types_no_default(self): class FieldsAndTypesNoDefault(ast.AST): _fields = ('a',) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 9ff0f488dc4fa9..5818e96d61f480 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -1768,6 +1768,11 @@ def __getitem__(self, index): sum(([x] for x in range(10)), empty) self.assertEqual(empty, []) + xs = [complex(random.random() - .5, random.random() - .5) + for _ in range(10000)] + self.assertEqual(sum(xs), complex(sum(z.real for z in xs), + sum(z.imag for z in xs))) + @requires_IEEE_754 @unittest.skipIf(HAVE_DOUBLE_ROUNDING, "sum accuracy not guaranteed on machines with double rounding") @@ -1775,6 +1780,10 @@ def __getitem__(self, index): def test_sum_accuracy(self): self.assertEqual(sum([0.1] * 10), 1.0) self.assertEqual(sum([1.0, 10E100, 1.0, -10E100]), 2.0) + self.assertEqual(sum([1.0, 10E100, 1.0, -10E100, 2j]), 2+2j) + self.assertEqual(sum([2+1j, 10E100j, 1j, -10E100j]), 2+2j) + self.assertEqual(sum([1j, 1, 10E100j, 1j, 1.0, -10E100j]), 2+2j) + self.assertEqual(sum([0.1j]*10 + [fractions.Fraction(1, 10)]), 0.1+1j) def test_type(self): self.assertEqual(type(''), type('123')) diff --git a/Lib/test/test_concurrent_futures/test_init.py 
b/Lib/test/test_concurrent_futures/test_init.py index a36f592b79b7cf..df640929309318 100644 --- a/Lib/test/test_concurrent_futures/test_init.py +++ b/Lib/test/test_concurrent_futures/test_init.py @@ -139,6 +139,7 @@ def _test(self, test_class): def test_spawn(self): self._test(ProcessPoolSpawnFailingInitializerTest) + @support.skip_if_sanitizer("TSAN doesn't support threads after fork", thread=True) def test_forkserver(self): self._test(ProcessPoolForkserverFailingInitializerTest) diff --git a/Lib/test/test_copy.py b/Lib/test/test_copy.py index 89102373759ca0..3dec64cc9a2414 100644 --- a/Lib/test/test_copy.py +++ b/Lib/test/test_copy.py @@ -972,6 +972,10 @@ class C: copy.replace(c, x=1, error=2) +class MiscTestCase(unittest.TestCase): + def test__all__(self): + support.check__all__(self, copy, not_exported={"dispatch_table", "error"}) + def global_foo(x, y): return x+y diff --git a/Lib/test/test_ctypes/test_libc.py b/Lib/test/test_ctypes/test_libc.py index dec0afff4b38fd..cab3cc9f46003a 100644 --- a/Lib/test/test_ctypes/test_libc.py +++ b/Lib/test/test_ctypes/test_libc.py @@ -33,6 +33,20 @@ def test_csqrt(self): self.assertAlmostEqual(lib.my_csqrt(-1-0.01j), 0.004999937502734214-1.0000124996093955j) + lib.my_csqrtf.argtypes = ctypes.c_float_complex, + lib.my_csqrtf.restype = ctypes.c_float_complex + self.assertAlmostEqual(lib.my_csqrtf(-1+0.01j), + 0.004999937502734214+1.0000124996093955j) + self.assertAlmostEqual(lib.my_csqrtf(-1-0.01j), + 0.004999937502734214-1.0000124996093955j) + + lib.my_csqrtl.argtypes = ctypes.c_longdouble_complex, + lib.my_csqrtl.restype = ctypes.c_longdouble_complex + self.assertAlmostEqual(lib.my_csqrtl(-1+0.01j), + 0.004999937502734214+1.0000124996093955j) + self.assertAlmostEqual(lib.my_csqrtl(-1-0.01j), + 0.004999937502734214-1.0000124996093955j) + def test_qsort(self): comparefunc = CFUNCTYPE(c_int, POINTER(c_char), POINTER(c_char)) lib.my_qsort.argtypes = c_void_p, c_size_t, c_size_t, comparefunc diff --git a/Lib/test/test_ctypes/test_numbers.py b/Lib/test/test_ctypes/test_numbers.py index b3816f61a6e7aa..1dd3f2a234b1ee 100644 --- a/Lib/test/test_ctypes/test_numbers.py +++ b/Lib/test/test_ctypes/test_numbers.py @@ -146,7 +146,8 @@ def test_floats(self): @unittest.skipUnless(hasattr(ctypes, "c_double_complex"), "requires C11 complex type") def test_complex(self): - for t in [ctypes.c_double_complex]: + for t in [ctypes.c_double_complex, ctypes.c_float_complex, + ctypes.c_longdouble_complex]: self.assertEqual(t(1).value, 1+0j) self.assertEqual(t(1.0).value, 1+0j) self.assertEqual(t(1+0.125j).value, 1+0.125j) @@ -162,9 +163,10 @@ def test_complex_round_trip(self): values = [complex(*_) for _ in combinations([1, -1, 0.0, -0.0, 2, -3, INF, -INF, NAN], 2)] for z in values: - with self.subTest(z=z): - z2 = ctypes.c_double_complex(z).value - self.assertComplexesAreIdentical(z, z2) + for t in [ctypes.c_double_complex, ctypes.c_float_complex, + ctypes.c_longdouble_complex]: + with self.subTest(z=z, type=t): + self.assertComplexesAreIdentical(z, t(z).value) def test_integers(self): f = FloatLike() diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index 634513ec7a5812..30dab1fbaa48b2 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -48,6 +48,8 @@ INIT_LOOPS = 4 MAX_HASH_SEED = 4294967295 +ABI_THREAD = 't' if sysconfig.get_config_var('Py_GIL_DISABLED') else '' + # If we are running from a build dir, but the stdlib has been installed, # some tests need to expect different results. 
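The ctypes tests above exercise the new c_float_complex and c_longdouble_complex simple types alongside the existing c_double_complex; all three round-trip Python complex values. A short sketch, guarded the same way the tests are, and only meaningful on a build that has C complex support and includes this ctypes change:

    import ctypes

    if hasattr(ctypes, "c_double_complex"):   # requires C11 complex support
        for ctype in (ctypes.c_double_complex,
                      ctypes.c_float_complex,
                      ctypes.c_longdouble_complex):
            z = ctype(1 + 0.125j)             # 0.125 is exact even as a float
            print(ctype.__name__, z.value)    # -> (1+0.125j) for each type
    else:
        print("this build lacks C complex support")
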
@@ -1285,11 +1287,11 @@ def module_search_paths(self, prefix=None, exec_prefix=None): ver = sys.version_info return [ os.path.join(prefix, sys.platlibdir, - f'python{ver.major}{ver.minor}.zip'), + f'python{ver.major}{ver.minor}{ABI_THREAD}.zip'), os.path.join(prefix, sys.platlibdir, - f'python{ver.major}.{ver.minor}'), + f'python{ver.major}.{ver.minor}{ABI_THREAD}'), os.path.join(exec_prefix, sys.platlibdir, - f'python{ver.major}.{ver.minor}', 'lib-dynload'), + f'python{ver.major}.{ver.minor}{ABI_THREAD}', 'lib-dynload'), ] @contextlib.contextmanager @@ -1343,7 +1345,7 @@ def test_init_setpythonhome(self): expected_paths = [paths[0], os.path.join(home, 'DLLs'), stdlib] else: version = f'{sys.version_info.major}.{sys.version_info.minor}' - stdlib = os.path.join(home, sys.platlibdir, f'python{version}') + stdlib = os.path.join(home, sys.platlibdir, f'python{version}{ABI_THREAD}') expected_paths = self.module_search_paths(prefix=home, exec_prefix=home) config = { @@ -1384,7 +1386,7 @@ def test_init_is_python_build_with_home(self): expected_paths = [paths[0], os.path.join(home, 'DLLs'), stdlib] else: version = f'{sys.version_info.major}.{sys.version_info.minor}' - stdlib = os.path.join(home, sys.platlibdir, f'python{version}') + stdlib = os.path.join(home, sys.platlibdir, f'python{version}{ABI_THREAD}') expected_paths = self.module_search_paths(prefix=home, exec_prefix=home) config = { @@ -1515,7 +1517,7 @@ def test_init_pyvenv_cfg(self): if not MS_WINDOWS: lib_dynload = os.path.join(pyvenv_home, sys.platlibdir, - f'python{ver.major}.{ver.minor}', + f'python{ver.major}.{ver.minor}{ABI_THREAD}', 'lib-dynload') os.makedirs(lib_dynload) else: diff --git a/Lib/test/test_free_threading/test_type.py b/Lib/test/test_free_threading/test_type.py index 75259795e81bcb..649676db9c08a5 100644 --- a/Lib/test/test_free_threading/test_type.py +++ b/Lib/test/test_free_threading/test_type.py @@ -106,7 +106,7 @@ class Bar: thing = Foo() def work(): foo = thing - for _ in range(10000): + for _ in range(5000): foo.__class__ = Bar type(foo) foo.__class__ = Foo diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 1ce0f4d0aea6ee..492a16a8c7ff45 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -405,9 +405,7 @@ class A: self.assertEqual(A.meth(3, b=4), ((1, 3), {'a': 2, 'b': 4})) self.assertEqual(A.cmeth(3, b=4), ((1, A, 3), {'a': 2, 'b': 4})) self.assertEqual(A.smeth(3, b=4), ((1, 3), {'a': 2, 'b': 4})) - with self.assertWarns(FutureWarning) as w: - self.assertEqual(a.meth(3, b=4), ((1, 3), {'a': 2, 'b': 4})) - self.assertEqual(w.filename, __file__) + self.assertEqual(a.meth(3, b=4), ((1, a, 3), {'a': 2, 'b': 4})) self.assertEqual(a.cmeth(3, b=4), ((1, A, 3), {'a': 2, 'b': 4})) self.assertEqual(a.smeth(3, b=4), ((1, 3), {'a': 2, 'b': 4})) diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index 30e39e7720e6d1..00def509a219c3 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -815,7 +815,6 @@ def test_annotated_op(self): """ self.run_cases_test(input, output) - def test_deopt_and_exit(self): input = """ pure op(OP, (arg1 -- out)) { @@ -827,6 +826,49 @@ def test_deopt_and_exit(self): with self.assertRaises(Exception): self.run_cases_test(input, output) + def test_array_of_one(self): + input = """ + inst(OP, (arg[1] -- out[1])) { + out[0] = arg[0]; + } + """ + output = """ + TARGET(OP) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(OP); + _PyStackRef *arg; + _PyStackRef *out; + 
arg = &stack_pointer[-1]; + out = &stack_pointer[-1]; + out[0] = arg[0]; + DISPATCH(); + } + """ + self.run_cases_test(input, output) + + def test_pointer_to_stackref(self): + input = """ + inst(OP, (arg: _PyStackRef * -- out)) { + out = *arg; + } + """ + output = """ + TARGET(OP) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(OP); + _PyStackRef *arg; + _PyStackRef out; + arg = (_PyStackRef *)stack_pointer[-1].bits; + out = *arg; + stack_pointer[-1] = out; + DISPATCH(); + } + """ + self.run_cases_test(input, output) + + class TestGeneratedAbstractCases(unittest.TestCase): def setUp(self) -> None: super().setUp() diff --git a/Lib/test/test_getpath.py b/Lib/test/test_getpath.py index 2f7aa69efc184a..6c86c3d1c8c57e 100644 --- a/Lib/test/test_getpath.py +++ b/Lib/test/test_getpath.py @@ -844,6 +844,7 @@ def test_explicitly_set_stdlib_dir(self): PYDEBUGEXT="", VERSION_MAJOR=9, # fixed version number for ease VERSION_MINOR=8, # of testing + ABI_THREAD="", PYWINVER=None, EXE_SUFFIX=None, diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index c10f689c4ea34b..e29097baaf53ae 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -3034,13 +3034,6 @@ def test_basic_multiple_interpreters_deleted_no_reset(self): def test_basic_multiple_interpreters_reset_each(self): # resetting between each interpreter - if Py_TRACE_REFS: - # It's a Py_TRACE_REFS build. - # This test breaks interpreter isolation a little, - # which causes problems on Py_TRACE_REF builds. - # See gh-121110. - raise unittest.SkipTest('crashes on Py_TRACE_REFS builds') - # At this point: # * alive in 0 interpreters # * module def may or may not be loaded already diff --git a/Lib/test/test_importlib/test_namespace_pkgs.py b/Lib/test/test_importlib/test_namespace_pkgs.py index 072e198795d394..cbbdada3b010a7 100644 --- a/Lib/test/test_importlib/test_namespace_pkgs.py +++ b/Lib/test/test_importlib/test_namespace_pkgs.py @@ -286,25 +286,24 @@ def test_project3_succeeds(self): class ZipWithMissingDirectory(NamespacePackageTest): paths = ['missing_directory.zip'] + # missing_directory.zip contains: + # Length Date Time Name + # --------- ---------- ----- ---- + # 29 2012-05-03 18:13 foo/one.py + # 0 2012-05-03 20:57 bar/ + # 38 2012-05-03 20:57 bar/two.py + # --------- ------- + # 67 3 files - @unittest.expectedFailure def test_missing_directory(self): - # This will fail because missing_directory.zip contains: - # Length Date Time Name - # --------- ---------- ----- ---- - # 29 2012-05-03 18:13 foo/one.py - # 0 2012-05-03 20:57 bar/ - # 38 2012-05-03 20:57 bar/two.py - # --------- ------- - # 67 3 files - - # Because there is no 'foo/', the zipimporter currently doesn't - # know that foo is a namespace package - import foo.one + self.assertEqual(foo.one.attr, 'portion1 foo one') + + def test_missing_directory2(self): + import foo + self.assertFalse(hasattr(foo, 'one')) def test_present_directory(self): - # This succeeds because there is a "bar/" in the zip file import bar.two self.assertEqual(bar.two.attr, 'missing_directory foo two') diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py index 308c09874fe2ac..d39c3ccdc847bd 100644 --- a/Lib/test/test_inspect/test_inspect.py +++ b/Lib/test/test_inspect/test_inspect.py @@ -3868,17 +3868,15 @@ def __init__(self, b): with self.subTest('partial'): class CM(type): - __call__ = functools.partial(lambda x, a: (x, a), 2) + __call__ = functools.partial(lambda x, a, b: (x, a, b), 
2) class C(metaclass=CM): - def __init__(self, b): + def __init__(self, c): pass - with self.assertWarns(FutureWarning): - self.assertEqual(C(1), (2, 1)) - with self.assertWarns(FutureWarning): - self.assertEqual(self.signature(C), - ((('a', ..., ..., "positional_or_keyword"),), - ...)) + self.assertEqual(C(1), (2, C, 1)) + self.assertEqual(self.signature(C), + ((('b', ..., ..., "positional_or_keyword"),), + ...)) with self.subTest('partialmethod'): class CM(type): @@ -4024,14 +4022,12 @@ class C: with self.subTest('partial'): class C: - __init__ = functools.partial(lambda x, a: None, 2) + __init__ = functools.partial(lambda x, a, b: None, 2) - with self.assertWarns(FutureWarning): - C(1) # does not raise - with self.assertWarns(FutureWarning): - self.assertEqual(self.signature(C), - ((('a', ..., ..., "positional_or_keyword"),), - ...)) + C(1) # does not raise + self.assertEqual(self.signature(C), + ((('b', ..., ..., "positional_or_keyword"),), + ...)) with self.subTest('partialmethod'): class C: @@ -4284,15 +4280,13 @@ class C: with self.subTest('partial'): class C: - __call__ = functools.partial(lambda x, a: (x, a), 2) + __call__ = functools.partial(lambda x, a, b: (x, a, b), 2) c = C() - with self.assertWarns(FutureWarning): - self.assertEqual(c(1), (2, 1)) - with self.assertWarns(FutureWarning): - self.assertEqual(self.signature(c), - ((('a', ..., ..., "positional_or_keyword"),), - ...)) + self.assertEqual(c(1), (2, c, 1)) + self.assertEqual(self.signature(C()), + ((('b', ..., ..., "positional_or_keyword"),), + ...)) with self.subTest('partialmethod'): class C: diff --git a/Lib/test/test_interpreters/test_channels.py b/Lib/test/test_interpreters/test_channels.py index 68cc45d1a5e09f..6c37754142e361 100644 --- a/Lib/test/test_interpreters/test_channels.py +++ b/Lib/test/test_interpreters/test_channels.py @@ -48,6 +48,7 @@ def test_list_all(self): self.assertEqual(after, created) def test_shareable(self): + interp = interpreters.create() rch, sch = channels.create() self.assertTrue( @@ -60,8 +61,25 @@ def test_shareable(self): rch2 = rch.recv() sch2 = rch.recv() + interp.prepare_main(rch=rch, sch=sch) + sch.send_nowait(rch) + sch.send_nowait(sch) + interp.exec(dedent(""" + rch2 = rch.recv() + sch2 = rch.recv() + assert rch2 == rch + assert sch2 == sch + + sch.send_nowait(rch2) + sch.send_nowait(sch2) + """)) + rch3 = rch.recv() + sch3 = rch.recv() + self.assertEqual(rch2, rch) self.assertEqual(sch2, sch) + self.assertEqual(rch3, rch) + self.assertEqual(sch3, sch) def test_is_closed(self): rch, sch = channels.create() diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py index 4d2d54705fc894..ad7accf2099f43 100644 --- a/Lib/test/test_list.py +++ b/Lib/test/test_list.py @@ -299,6 +299,15 @@ def __eq__(self, other): lst = [X(), X()] X() in lst + def test_tier2_invalidates_iterator(self): + # GH-121012 + for _ in range(100): + a = [1, 2, 3] + it = iter(a) + for _ in it: + pass + a.append(4) + self.assertEqual(list(it), []) if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_multiprocessing_fork/__init__.py b/Lib/test/test_multiprocessing_fork/__init__.py index aa1fff50b28f5f..b35e82879d7fe2 100644 --- a/Lib/test/test_multiprocessing_fork/__init__.py +++ b/Lib/test/test_multiprocessing_fork/__init__.py @@ -12,5 +12,8 @@ if sys.platform == 'darwin': raise unittest.SkipTest("test may crash on macOS (bpo-33725)") +if support.check_sanitizer(thread=True): + raise unittest.SkipTest("TSAN doesn't support threads after fork") + def load_tests(*args): return 
support.load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py index da6d82465d29cf..1328a8695b0cca 100644 --- a/Lib/test/test_pathlib/test_pathlib.py +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -653,6 +653,55 @@ def test_open_unbuffered(self): self.assertIsInstance(f, io.RawIOBase) self.assertEqual(f.read().strip(), b"this is file A") + def test_copy_file_preserve_metadata(self): + base = self.cls(self.base) + source = base / 'fileA' + if hasattr(os, 'chmod'): + os.chmod(source, stat.S_IRWXU | stat.S_IRWXO) + if hasattr(os, 'chflags') and hasattr(stat, 'UF_NODUMP'): + os.chflags(source, stat.UF_NODUMP) + source_st = source.stat() + target = base / 'copyA' + source.copy(target, preserve_metadata=True) + self.assertTrue(target.exists()) + self.assertEqual(source.read_text(), target.read_text()) + target_st = target.stat() + self.assertLessEqual(source_st.st_atime, target_st.st_atime) + self.assertLessEqual(source_st.st_mtime, target_st.st_mtime) + self.assertEqual(source_st.st_mode, target_st.st_mode) + if hasattr(source_st, 'st_flags'): + self.assertEqual(source_st.st_flags, target_st.st_flags) + + @os_helper.skip_unless_xattr + def test_copy_file_preserve_metadata_xattrs(self): + base = self.cls(self.base) + source = base / 'fileA' + os.setxattr(source, b'user.foo', b'42') + target = base / 'copyA' + source.copy(target, preserve_metadata=True) + self.assertEqual(os.getxattr(target, b'user.foo'), b'42') + + @needs_symlinks + def test_copy_link_preserve_metadata(self): + base = self.cls(self.base) + source = base / 'linkA' + if hasattr(os, 'lchmod'): + os.lchmod(source, stat.S_IRWXU | stat.S_IRWXO) + if hasattr(os, 'lchflags') and hasattr(stat, 'UF_NODUMP'): + os.lchflags(source, stat.UF_NODUMP) + source_st = source.lstat() + target = base / 'copyA' + source.copy(target, follow_symlinks=False, preserve_metadata=True) + self.assertTrue(target.exists()) + self.assertTrue(target.is_symlink()) + self.assertEqual(source.readlink(), target.readlink()) + target_st = target.lstat() + self.assertLessEqual(source_st.st_atime, target_st.st_atime) + self.assertLessEqual(source_st.st_mtime, target_st.st_mtime) + self.assertEqual(source_st.st_mode, target_st.st_mode) + if hasattr(source_st, 'st_flags'): + self.assertEqual(source_st.st_flags, target_st.st_flags) + @unittest.skipIf(sys.platform == "win32" or sys.platform == "wasi", "directories are always readable on Windows and WASI") @unittest.skipIf(root_in_posix, "test fails with root privilege") def test_copytree_no_read_permission(self): diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py index ad692e872ede0b..28c9664cc90fe1 100644 --- a/Lib/test/test_pathlib/test_pathlib_abc.py +++ b/Lib/test/test_pathlib/test_pathlib_abc.py @@ -5,7 +5,8 @@ import stat import unittest -from pathlib._abc import UnsupportedOperation, ParserBase, PurePathBase, PathBase +from pathlib._os import UnsupportedOperation +from pathlib._abc import ParserBase, PurePathBase, PathBase import posixpath from test.support import is_wasi diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index 71240157e324a1..343e15a4edc14c 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -2448,6 +2448,49 @@ def test_pdb_show_attribute_and_item(): (Pdb) c """ +# doctest will modify pdb.set_trace during the test, so we need to backup +# the original function to use it in the test +original_pdb_settrace = pdb.set_trace + +def 
test_pdb_with_inline_breakpoint(): + """Hard-coded breakpoint() calls should invoke the same debugger instance + + >>> def test_function(): + ... x = 1 + ... import pdb; pdb.Pdb().set_trace() + ... original_pdb_settrace() + ... x = 2 + + >>> with PdbTestInput(['display x', + ... 'n', + ... 'n', + ... 'n', + ... 'n', + ... 'undisplay', + ... 'c']): + ... test_function() + > (3)test_function() + -> import pdb; pdb.Pdb().set_trace() + (Pdb) display x + display x: 1 + (Pdb) n + > (4)test_function() + -> original_pdb_settrace() + (Pdb) n + > (4)test_function() + -> original_pdb_settrace() + (Pdb) n + > (5)test_function() + -> x = 2 + (Pdb) n + --Return-- + > (5)test_function()->None + -> x = 2 + display x: 2 [old: 1] + (Pdb) undisplay + (Pdb) c + """ + def test_pdb_issue_20766(): """Test for reference leaks when the SIGINT handler is set. @@ -3545,6 +3588,23 @@ def change_file(content, filename): # the file as up to date self.assertNotIn("WARNING:", stdout) + def test_post_mortem_restart(self): + script = """ + def foo(): + raise ValueError("foo") + foo() + """ + + commands = """ + continue + restart + continue + quit + """ + + stdout, stderr = self.run_pdb_script(script, commands) + self.assertIn("Restarting", stdout) + def test_relative_imports(self): self.module_name = 't_main' os_helper.rmtree(self.module_name) diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py index b189d3291e8181..015b690566223d 100644 --- a/Lib/test/test_pyrepl/test_pyrepl.py +++ b/Lib/test/test_pyrepl/test_pyrepl.py @@ -1,14 +1,18 @@ import io import itertools import os +import pathlib import rlcompleter import select import subprocess import sys +import tempfile from unittest import TestCase, skipUnless from unittest.mock import patch from test.support import force_not_colorized from test.support import SHORT_TIMEOUT +from test.support.import_helper import import_module +from test.support.os_helper import unlink from .support import ( FakeConsole, @@ -898,10 +902,40 @@ def test_python_basic_repl(self): self.assertNotIn("Exception", output) self.assertNotIn("Traceback", output) + def test_not_wiping_history_file(self): + # skip, if readline module is not available + import_module('readline') + + hfile = tempfile.NamedTemporaryFile(delete=False) + self.addCleanup(unlink, hfile.name) + env = os.environ.copy() + env["PYTHON_HISTORY"] = hfile.name + commands = "123\nspam\nexit()\n" + + env.pop("PYTHON_BASIC_REPL", None) + output, exit_code = self.run_repl(commands, env=env) + self.assertEqual(exit_code, 0) + self.assertIn("123", output) + self.assertIn("spam", output) + self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0) + + hfile.file.truncate() + hfile.close() + + env["PYTHON_BASIC_REPL"] = "1" + output, exit_code = self.run_repl(commands, env=env) + self.assertEqual(exit_code, 0) + self.assertIn("123", output) + self.assertIn("spam", output) + self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0) + def run_repl(self, repl_input: str | list[str], env: dict | None = None) -> tuple[str, int]: master_fd, slave_fd = pty.openpty() + cmd = [sys.executable, "-i", "-u"] + if env is None: + cmd.append("-I") process = subprocess.Popen( - [sys.executable, "-i", "-u"], + cmd, stdin=slave_fd, stdout=slave_fd, stderr=slave_fd, diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index 44fd11bfdc3fcb..d4f4a69a7a38c1 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -2329,16 +2329,6 @@ def test_normalize_test_name(self): 
self.assertIsNone(normalize('setUpModule (test.test_x)', is_error=True)) self.assertIsNone(normalize('tearDownModule (test.test_module)', is_error=True)) - def test_get_signal_name(self): - for exitcode, expected in ( - (-int(signal.SIGINT), 'SIGINT'), - (-int(signal.SIGSEGV), 'SIGSEGV'), - (128 + int(signal.SIGABRT), 'SIGABRT'), - (3221225477, "STATUS_ACCESS_VIOLATION"), - (0xC00000FD, "STATUS_STACK_OVERFLOW"), - ): - self.assertEqual(utils.get_signal_name(exitcode), expected, exitcode) - def test_format_resources(self): format_resources = utils.format_resources ALL_RESOURCES = utils.ALL_RESOURCES diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py index bcdc232c712071..035913cdd05f34 100644 --- a/Lib/test/test_site.py +++ b/Lib/test/test_site.py @@ -328,13 +328,13 @@ def test_getsitepackages(self): if sys.platlibdir != "lib": self.assertEqual(len(dirs), 2) wanted = os.path.join('xoxo', sys.platlibdir, - 'python%d.%d' % sys.version_info[:2], + f'python{sysconfig._get_python_version_abi()}', 'site-packages') self.assertEqual(dirs[0], wanted) else: self.assertEqual(len(dirs), 1) wanted = os.path.join('xoxo', 'lib', - 'python%d.%d' % sys.version_info[:2], + f'python{sysconfig._get_python_version_abi()}', 'site-packages') self.assertEqual(dirs[-1], wanted) else: diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 8b69cd03ba7f24..9412a2d737bb2e 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -1407,7 +1407,7 @@ def open_fds(): t = threading.Thread(target=open_fds) t.start() try: - with self.assertRaises(EnvironmentError): + with self.assertRaises(OSError): subprocess.Popen(NONEXISTING_CMD, stdin=subprocess.PIPE, stdout=subprocess.PIPE, diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py index d6f024a476920c..e60e5477d32e1f 100644 --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -3,6 +3,7 @@ import io import os import shutil +import signal import socket import stat import subprocess @@ -732,6 +733,17 @@ def test_copy_python_src_ignore(self): self.assertEqual(support.copy_python_src_ignore(path, os.listdir(path)), ignored) + def test_get_signal_name(self): + for exitcode, expected in ( + (-int(signal.SIGINT), 'SIGINT'), + (-int(signal.SIGSEGV), 'SIGSEGV'), + (128 + int(signal.SIGABRT), 'SIGABRT'), + (3221225477, "STATUS_ACCESS_VIOLATION"), + (0xC00000FD, "STATUS_STACK_OVERFLOW"), + ): + self.assertEqual(support.get_signal_name(exitcode), expected, + exitcode) + # XXX -follows a list of untested API # make_legacy_pyc # is_resource_enabled diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index 9233304c6a5327..37cee927686ba3 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -157,7 +157,7 @@ def test_posix_venv_scheme(self): binpath = 'bin' incpath = 'include' libpath = os.path.join('lib', - 'python%d.%d' % sys.version_info[:2], + f'python{sysconfig._get_python_version_abi()}', 'site-packages') # Resolve the paths in an imaginary venv/ directory diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index fbca198aab5180..38a98828085e2f 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -10,6 +10,7 @@ import pickle import locale import sys +import textwrap import types import unittest.mock import weakref @@ -2345,5 +2346,40 @@ def ex(a, /, b, *, c): ) +class SubinterpreterTests(unittest.TestCase): + + @classmethod + def setUpClass(cls): + global interpreters + try: + from test.support import interpreters + except ModuleNotFoundError: + raise 
unittest.SkipTest('subinterpreters required') + import test.support.interpreters.channels + + @cpython_only + def test_slot_wrappers(self): + rch, sch = interpreters.channels.create() + + # For now it's sufficient to check int.__str__. + # See https://github.com/python/cpython/issues/117482 + # and https://github.com/python/cpython/pull/117660. + script = textwrap.dedent(''' + text = repr(int.__str__) + sch.send_nowait(text) + ''') + + exec(script) + expected = rch.recv() + + interp = interpreters.create() + interp.exec('from test.support import interpreters') + interp.prepare_main(sch=sch) + interp.exec(script) + results = rch.recv() + + self.assertEqual(results, expected) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 1769ed61b94075..2b7d297f011741 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -75,7 +75,7 @@ def setUp(self): self.include = 'Include' else: self.bindir = 'bin' - self.lib = ('lib', 'python%d.%d' % sys.version_info[:2]) + self.lib = ('lib', f'python{sysconfig._get_python_version_abi()}') self.include = 'include' executable = sys._base_executable self.exe = os.path.split(executable)[-1] @@ -593,7 +593,8 @@ def test_zippath_from_non_installed_posix(self): libdir = os.path.join(non_installed_dir, platlibdir, self.lib[1]) os.makedirs(libdir) landmark = os.path.join(libdir, "os.py") - stdlib_zip = "python%d%d.zip" % sys.version_info[:2] + abi_thread = "t" if sysconfig.get_config_var("Py_GIL_DISABLED") else "" + stdlib_zip = f"python{sys.version_info.major}{sys.version_info.minor}{abi_thread}" zip_landmark = os.path.join(non_installed_dir, platlibdir, stdlib_zip) diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py index 0bae54d26c64f1..1861616d5ec3bf 100644 --- a/Lib/test/test_zipimport.py +++ b/Lib/test/test_zipimport.py @@ -52,8 +52,11 @@ def module_path_to_dotted_name(path): TESTMOD = "ziptestmodule" +TESTMOD2 = "ziptestmodule2" +TESTMOD3 = "ziptestmodule3" TESTPACK = "ziptestpackage" TESTPACK2 = "ziptestpackage2" +TESTPACK3 = "ziptestpackage3" TEMP_DIR = os.path.abspath("junk95142") TEMP_ZIP = os.path.abspath("junk95142.zip") TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "zipimport_data") @@ -95,8 +98,10 @@ def makeTree(self, files, dirName=TEMP_DIR): # defined by files under the directory dirName. self.addCleanup(os_helper.rmtree, dirName) - for name, (mtime, data) in files.items(): - path = os.path.join(dirName, name) + for name, data in files.items(): + if isinstance(data, tuple): + mtime, data = data + path = os.path.join(dirName, *name.split('/')) if path[-1] == os.sep: if not os.path.isdir(path): os.makedirs(path) @@ -107,22 +112,18 @@ def makeTree(self, files, dirName=TEMP_DIR): with open(path, 'wb') as fp: fp.write(data) - def makeZip(self, files, zipName=TEMP_ZIP, **kw): + def makeZip(self, files, zipName=TEMP_ZIP, *, + comment=None, file_comment=None, stuff=None, prefix='', **kw): # Create a zip archive based set of modules/packages - # defined by files in the zip file zipName. If the - # key 'stuff' exists in kw it is prepended to the archive. + # defined by files in the zip file zipName. + # If stuff is not None, it is prepended to the archive. 
self.addCleanup(os_helper.unlink, zipName) - with ZipFile(zipName, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - z.writestr(zinfo, data) - comment = kw.get("comment", None) + with ZipFile(zipName, "w", compression=self.compression) as z: + self.writeZip(z, files, file_comment=file_comment, prefix=prefix) if comment is not None: z.comment = comment - stuff = kw.get("stuff", None) if stuff is not None: # Prepend 'stuff' to the start of the zipfile with open(zipName, "rb") as f: @@ -131,26 +132,47 @@ def makeZip(self, files, zipName=TEMP_ZIP, **kw): f.write(stuff) f.write(data) + def writeZip(self, z, files, *, file_comment=None, prefix=''): + for name, data in files.items(): + if isinstance(data, tuple): + mtime, data = data + else: + mtime = NOW + name = name.replace(os.sep, '/') + zinfo = ZipInfo(prefix + name, time.localtime(mtime)) + zinfo.compress_type = self.compression + if file_comment is not None: + zinfo.comment = file_comment + if data is None: + zinfo.CRC = 0 + z.mkdir(zinfo) + else: + assert name[-1] != '/' + z.writestr(zinfo, data) + def getZip64Files(self): # This is the simplest way to make zipfile generate the zip64 EOCD block - return {f"f{n}.py": (NOW, test_src) for n in range(65537)} + return {f"f{n}.py": test_src for n in range(65537)} def doTest(self, expected_ext, files, *modules, **kw): + if 'prefix' not in kw: + kw['prefix'] = 'pre/fix/' self.makeZip(files, **kw) self.doTestWithPreBuiltZip(expected_ext, *modules, **kw) - def doTestWithPreBuiltZip(self, expected_ext, *modules, **kw): - sys.path.insert(0, TEMP_ZIP) + def doTestWithPreBuiltZip(self, expected_ext, *modules, + call=None, prefix='', **kw): + zip_path = os.path.join(TEMP_ZIP, *prefix.split('/')[:-1]) + sys.path.insert(0, zip_path) mod = importlib.import_module(".".join(modules)) - call = kw.get('call') if call is not None: call(mod) if expected_ext: file = mod.get_file() - self.assertEqual(file, os.path.join(TEMP_ZIP, + self.assertEqual(file, os.path.join(zip_path, *modules) + expected_ext) def testAFakeZlib(self): @@ -176,7 +198,7 @@ def testAFakeZlib(self): self.skipTest('zlib is a builtin module') if "zlib" in sys.modules: del sys.modules["zlib"] - files = {"zlib.py": (NOW, test_src)} + files = {"zlib.py": test_src} try: self.doTest(".py", files, "zlib") except ImportError: @@ -187,16 +209,16 @@ def testAFakeZlib(self): self.fail("expected test to raise ImportError") def testPy(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD) def testPyc(self): - files = {TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTMOD) def testBoth(self): - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTMOD) def testUncheckedHashBasedPyc(self): @@ -229,22 +251,22 @@ def check(mod): self.doTest(None, files, TESTMOD, call=check) def testEmptyPy(self): - files = {TESTMOD + ".py": (NOW, "")} + files = {TESTMOD + ".py": ""} self.doTest(None, files, TESTMOD) def testBadMagic(self): # make pyc magic word invalid, forcing loading from .py badmagic_pyc = bytearray(test_pyc) badmagic_pyc[0] ^= 0x04 # flip an arbitrary bit - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, badmagic_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: badmagic_pyc} 
self.doTest(".py", files, TESTMOD) def testBadMagic2(self): # make pyc magic word invalid, causing an ImportError badmagic_pyc = bytearray(test_pyc) badmagic_pyc[0] ^= 0x04 # flip an arbitrary bit - files = {TESTMOD + pyc_ext: (NOW, badmagic_pyc)} + files = {TESTMOD + pyc_ext: badmagic_pyc} try: self.doTest(".py", files, TESTMOD) self.fail("This should not be reached") @@ -257,22 +279,22 @@ def testBadMTime(self): # flip the second bit -- not the first as that one isn't stored in the # .py's mtime in the zip archive. badtime_pyc[11] ^= 0x02 - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, badtime_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: badtime_pyc} self.doTest(".py", files, TESTMOD) def test2038MTime(self): # Make sure we can handle mtimes larger than what a 32-bit signed number # can hold. twenty_thirty_eight_pyc = make_pyc(test_co, 2**32 - 1, len(test_src)) - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, twenty_thirty_eight_pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: twenty_thirty_eight_pyc} self.doTest(".py", files, TESTMOD) def testPackage(self): packdir = TESTPACK + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir + TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTPACK, TESTMOD) def testSubPackage(self): @@ -280,9 +302,9 @@ def testSubPackage(self): # archives. packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTPACK, TESTPACK2, TESTMOD) def testSubNamespacePackage(self): @@ -291,29 +313,104 @@ def testSubNamespacePackage(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep # The first two files are just directory entries (so have no data). - files = {packdir: (NOW, ""), - packdir2: (NOW, ""), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + files = {packdir: None, + packdir2: None, + packdir2 + TESTMOD + pyc_ext: test_pyc} self.doTest(pyc_ext, files, TESTPACK, TESTPACK2, TESTMOD) + def testPackageExplicitDirectories(self): + # Test explicit namespace packages with explicit directory entries. + self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.mkdir('a') + z.writestr('a/__init__.py', test_src) + z.mkdir('a/b') + z.writestr('a/b/__init__.py', test_src) + z.mkdir('a/b/c') + z.writestr('a/b/c/__init__.py', test_src) + z.writestr('a/b/c/d.py', test_src) + self._testPackage(initfile='__init__.py') + + def testPackageImplicitDirectories(self): + # Test explicit namespace packages without explicit directory entries. + self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.writestr('a/__init__.py', test_src) + z.writestr('a/b/__init__.py', test_src) + z.writestr('a/b/c/__init__.py', test_src) + z.writestr('a/b/c/d.py', test_src) + self._testPackage(initfile='__init__.py') + + def testNamespacePackageExplicitDirectories(self): + # Test implicit namespace packages with explicit directory entries. 
+ self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.mkdir('a') + z.mkdir('a/b') + z.mkdir('a/b/c') + z.writestr('a/b/c/d.py', test_src) + self._testPackage(initfile=None) + + def testNamespacePackageImplicitDirectories(self): + # Test implicit namespace packages without explicit directory entries. + self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.writestr('a/b/c/d.py', test_src) + self._testPackage(initfile=None) + + def _testPackage(self, initfile): + zi = zipimport.zipimporter(os.path.join(TEMP_ZIP, 'a')) + if initfile is None: + # XXX Should it work? + self.assertRaises(zipimport.ZipImportError, zi.is_package, 'b') + self.assertRaises(zipimport.ZipImportError, zi.get_source, 'b') + self.assertRaises(zipimport.ZipImportError, zi.get_code, 'b') + else: + self.assertTrue(zi.is_package('b')) + self.assertEqual(zi.get_source('b'), test_src) + self.assertEqual(zi.get_code('b').co_filename, + os.path.join(TEMP_ZIP, 'a', 'b', initfile)) + + sys.path.insert(0, TEMP_ZIP) + self.assertNotIn('a', sys.modules) + + mod = importlib.import_module(f'a.b') + self.assertIn('a', sys.modules) + self.assertIs(sys.modules['a.b'], mod) + if initfile is None: + self.assertIsNone(mod.__file__) + else: + self.assertEqual(mod.__file__, + os.path.join(TEMP_ZIP, 'a', 'b', initfile)) + self.assertEqual(len(mod.__path__), 1, mod.__path__) + self.assertEqual(mod.__path__[0], os.path.join(TEMP_ZIP, 'a', 'b')) + + mod2 = importlib.import_module(f'a.b.c.d') + self.assertIn('a.b.c', sys.modules) + self.assertIn('a.b.c.d', sys.modules) + self.assertIs(sys.modules['a.b.c.d'], mod2) + self.assertIs(mod.c.d, mod2) + self.assertEqual(mod2.__file__, + os.path.join(TEMP_ZIP, 'a', 'b', 'c', 'd.py')) + def testMixedNamespacePackage(self): # Test implicit namespace packages spread between a # real filesystem and a zip archive. packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - packdir3 = packdir2 + TESTPACK + '3' + os.sep - files1 = {packdir: (NOW, ""), - packdir + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir3: (NOW, ""), - packdir3 + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + '3' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} - files2 = {packdir: (NOW, ""), - packdir + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir2 + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + packdir3 = packdir2 + TESTPACK3 + os.sep + files1 = {packdir: None, + packdir + TESTMOD + pyc_ext: test_pyc, + packdir2: None, + packdir3: None, + packdir3 + TESTMOD + pyc_ext: test_pyc, + packdir2 + TESTMOD3 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} + files2 = {packdir: None, + packdir + TESTMOD2 + pyc_ext: test_pyc, + packdir2: None, + packdir2 + TESTMOD2 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} zip1 = os.path.abspath("path1.zip") self.makeZip(files1, zip1) @@ -346,8 +443,8 @@ def testMixedNamespacePackage(self): mod = importlib.import_module('.'.join((TESTPACK, TESTMOD))) self.assertEqual("path1.zip", mod.__file__.split(os.sep)[-3]) - # And TESTPACK/(TESTMOD + '2') only exists in path2. - mod = importlib.import_module('.'.join((TESTPACK, TESTMOD + '2'))) + # And TESTPACK/(TESTMOD2) only exists in path2. 
+ mod = importlib.import_module('.'.join((TESTPACK, TESTMOD2))) self.assertEqual(os.path.basename(TEMP_DIR), mod.__file__.split(os.sep)[-3]) @@ -364,13 +461,13 @@ def testMixedNamespacePackage(self): self.assertEqual(os.path.basename(TEMP_DIR), mod.__file__.split(os.sep)[-4]) - # subpkg.TESTMOD + '2' only exists in zip2. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '2'))) + # subpkg.TESTMOD2 only exists in zip2. + mod = importlib.import_module('.'.join((subpkg, TESTMOD2))) self.assertEqual(os.path.basename(TEMP_DIR), mod.__file__.split(os.sep)[-4]) - # Finally subpkg.TESTMOD + '3' only exists in zip1. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '3'))) + # Finally subpkg.TESTMOD3 only exists in zip1. + mod = importlib.import_module('.'.join((subpkg, TESTMOD3))) self.assertEqual('path1.zip', mod.__file__.split(os.sep)[-4]) def testNamespacePackage(self): @@ -378,22 +475,22 @@ def testNamespacePackage(self): # archives. packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - packdir3 = packdir2 + TESTPACK + '3' + os.sep - files1 = {packdir: (NOW, ""), - packdir + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir3: (NOW, ""), - packdir3 + TESTMOD + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + '3' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + packdir3 = packdir2 + TESTPACK3 + os.sep + files1 = {packdir: None, + packdir + TESTMOD + pyc_ext: test_pyc, + packdir2: None, + packdir3: None, + packdir3 + TESTMOD + pyc_ext: test_pyc, + packdir2 + TESTMOD3 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} zip1 = os.path.abspath("path1.zip") self.makeZip(files1, zip1) - files2 = {packdir: (NOW, ""), - packdir + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2: (NOW, ""), - packdir2 + TESTMOD + '2' + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} + files2 = {packdir: None, + packdir + TESTMOD2 + pyc_ext: test_pyc, + packdir2: None, + packdir2 + TESTMOD2 + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} zip2 = os.path.abspath("path2.zip") self.makeZip(files2, zip2) @@ -422,8 +519,8 @@ def testNamespacePackage(self): mod = importlib.import_module('.'.join((TESTPACK, TESTMOD))) self.assertEqual("path1.zip", mod.__file__.split(os.sep)[-3]) - # And TESTPACK/(TESTMOD + '2') only exists in path2. - mod = importlib.import_module('.'.join((TESTPACK, TESTMOD + '2'))) + # And TESTPACK/(TESTMOD2) only exists in path2. + mod = importlib.import_module('.'.join((TESTPACK, TESTMOD2))) self.assertEqual("path2.zip", mod.__file__.split(os.sep)[-3]) # One level deeper... @@ -438,29 +535,22 @@ def testNamespacePackage(self): mod = importlib.import_module('.'.join((subpkg, TESTMOD))) self.assertEqual('path2.zip', mod.__file__.split(os.sep)[-4]) - # subpkg.TESTMOD + '2' only exists in zip2. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '2'))) + # subpkg.TESTMOD2 only exists in zip2. + mod = importlib.import_module('.'.join((subpkg, TESTMOD2))) self.assertEqual('path2.zip', mod.__file__.split(os.sep)[-4]) - # Finally subpkg.TESTMOD + '3' only exists in zip1. - mod = importlib.import_module('.'.join((subpkg, TESTMOD + '3'))) + # Finally subpkg.TESTMOD3 only exists in zip1. 
+ mod = importlib.import_module('.'.join((subpkg, TESTMOD3))) self.assertEqual('path1.zip', mod.__file__.split(os.sep)[-4]) def testZipImporterMethods(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), - "spam" + pyc_ext: (NOW, test_pyc)} - - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc, + "spam" + pyc_ext: test_pyc} + self.makeZip(files, file_comment=b"spam") zi = zipimport.zipimporter(TEMP_ZIP) self.assertEqual(zi.archive, TEMP_ZIP) @@ -516,35 +606,26 @@ def testZipImporterMethods(self): def testInvalidateCaches(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), - "spam" + pyc_ext: (NOW, test_pyc)} - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc, + "spam" + pyc_ext: test_pyc} + extra_files = [packdir, packdir2] + self.makeZip(files, file_comment=b"spam") zi = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(zi._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi._get_files()), sorted([*files, *extra_files])) # Check that the file information remains accurate after reloading zi.invalidate_caches() - self.assertEqual(zi._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi._get_files()), sorted([*files, *extra_files])) # Add a new file to the ZIP archive - newfile = {"spam2" + pyc_ext: (NOW, test_pyc)} + newfile = {"spam2" + pyc_ext: test_pyc} files.update(newfile) - with ZipFile(TEMP_ZIP, "a") as z: - for name, (mtime, data) in newfile.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + with ZipFile(TEMP_ZIP, "a", compression=self.compression) as z: + self.writeZip(z, newfile, file_comment=b"spam") # Check that we can detect the new file after invalidating the cache zi.invalidate_caches() - self.assertEqual(zi._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi._get_files()), sorted([*files, *extra_files])) spec = zi.find_spec('spam2') self.assertIsNotNone(spec) self.assertIsInstance(spec.loader, zipimport.zipimporter) @@ -558,36 +639,27 @@ def testInvalidateCaches(self): def testInvalidateCachesWithMultipleZipimports(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc), - "spam" + pyc_ext: (NOW, test_pyc)} - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) 
in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + files = {packdir + "__init__" + pyc_ext: test_pyc, + packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc, + "spam" + pyc_ext: test_pyc} + extra_files = [packdir, packdir2] + self.makeZip(files, file_comment=b"spam") zi = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(zi._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi._get_files()), sorted([*files, *extra_files])) # Zipimporter for the same path. zi2 = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(zi2._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi2._get_files()), sorted([*files, *extra_files])) # Add a new file to the ZIP archive to make the cache wrong. - newfile = {"spam2" + pyc_ext: (NOW, test_pyc)} + newfile = {"spam2" + pyc_ext: test_pyc} files.update(newfile) - with ZipFile(TEMP_ZIP, "a") as z: - for name, (mtime, data) in newfile.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"spam" - z.writestr(zinfo, data) + with ZipFile(TEMP_ZIP, "a", compression=self.compression) as z: + self.writeZip(z, newfile, file_comment=b"spam") # Invalidate the cache of the first zipimporter. zi.invalidate_caches() # Check that the second zipimporter detects the new file and isn't using a stale cache. - self.assertEqual(zi2._get_files().keys(), files.keys()) + self.assertEqual(sorted(zi2._get_files()), sorted([*files, *extra_files])) spec = zi2.find_spec('spam2') self.assertIsNotNone(spec) self.assertIsInstance(spec.loader, zipimport.zipimporter) @@ -595,16 +667,9 @@ def testInvalidateCachesWithMultipleZipimports(self): def testZipImporterMethodsInSubDirectory(self): packdir = TESTPACK + os.sep packdir2 = packdir + TESTPACK2 + os.sep - files = {packdir2 + "__init__" + pyc_ext: (NOW, test_pyc), - packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)} - - self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - for name, (mtime, data) in files.items(): - zinfo = ZipInfo(name, time.localtime(mtime)) - zinfo.compress_type = self.compression - zinfo.comment = b"eggs" - z.writestr(zinfo, data) + files = {packdir2 + "__init__" + pyc_ext: test_pyc, + packdir2 + TESTMOD + pyc_ext: test_pyc} + self.makeZip(files, file_comment=b"eggs") zi = zipimport.zipimporter(TEMP_ZIP + os.sep + packdir) self.assertEqual(zi.archive, TEMP_ZIP) @@ -650,17 +715,33 @@ def testZipImporterMethodsInSubDirectory(self): self.assertIsNone(loader.get_source(mod_name)) self.assertEqual(loader.get_filename(mod_name), mod.__file__) - def testGetData(self): + def testGetDataExplicitDirectories(self): self.addCleanup(os_helper.unlink, TEMP_ZIP) - with ZipFile(TEMP_ZIP, "w") as z: - z.compression = self.compression - name = "testdata.dat" - data = bytes(x for x in range(256)) - z.writestr(name, data) - - zi = zipimport.zipimporter(TEMP_ZIP) - self.assertEqual(data, zi.get_data(name)) - self.assertIn('zipimporter object', repr(zi)) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + z.mkdir('a') + z.mkdir('a/b') + z.mkdir('a/b/c') + data = bytes(range(256)) + z.writestr('a/b/c/testdata.dat', data) + self._testGetData() + + def testGetDataImplicitDirectories(self): + self.addCleanup(os_helper.unlink, TEMP_ZIP) + with ZipFile(TEMP_ZIP, 'w', compression=self.compression) as z: + data = bytes(range(256)) + z.writestr('a/b/c/testdata.dat', data) + self._testGetData() + + 
def _testGetData(self): + zi = zipimport.zipimporter(os.path.join(TEMP_ZIP, 'ignored')) + pathname = os.path.join('a', 'b', 'c', 'testdata.dat') + data = bytes(range(256)) + self.assertEqual(zi.get_data(pathname), data) + self.assertEqual(zi.get_data(os.path.join(TEMP_ZIP, pathname)), data) + self.assertEqual(zi.get_data(os.path.join('a', 'b', '')), b'') + self.assertEqual(zi.get_data(os.path.join(TEMP_ZIP, 'a', 'b', '')), b'') + self.assertRaises(OSError, zi.get_data, os.path.join('a', 'b')) + self.assertRaises(OSError, zi.get_data, os.path.join(TEMP_ZIP, 'a', 'b')) def testImporterAttr(self): src = """if 1: # indent hack @@ -669,9 +750,9 @@ def get_file(): if __loader__.get_data("some.data") != b"some data": raise AssertionError("bad data")\n""" pyc = make_pyc(compile(src, "", "exec"), NOW, len(src)) - files = {TESTMOD + pyc_ext: (NOW, pyc), - "some.data": (NOW, "some data")} - self.doTest(pyc_ext, files, TESTMOD) + files = {TESTMOD + pyc_ext: pyc, + "some.data": "some data"} + self.doTest(pyc_ext, files, TESTMOD, prefix='') def testDefaultOptimizationLevel(self): # zipimport should use the default optimization level (#28131) @@ -679,7 +760,7 @@ def testDefaultOptimizationLevel(self): def test(val): assert(val) return val\n""" - files = {TESTMOD + '.py': (NOW, src)} + files = {TESTMOD + '.py': src} self.makeZip(files) sys.path.insert(0, TEMP_ZIP) mod = importlib.import_module(TESTMOD) @@ -692,7 +773,7 @@ def test(val): def testImport_WithStuff(self): # try importing from a zipfile which contains additional # stuff at the beginning of the file - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, stuff=b"Some Stuff"*31) @@ -700,18 +781,18 @@ def assertModuleSource(self, module): self.assertEqual(inspect.getsource(module), test_src) def testGetSource(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, call=self.assertModuleSource) def testGetCompiledSource(self): pyc = make_pyc(compile(test_src, "", "exec"), NOW, len(test_src)) - files = {TESTMOD + ".py": (NOW, test_src), - TESTMOD + pyc_ext: (NOW, pyc)} + files = {TESTMOD + ".py": test_src, + TESTMOD + pyc_ext: pyc} self.doTest(pyc_ext, files, TESTMOD, call=self.assertModuleSource) def runDoctest(self, callback): - files = {TESTMOD + ".py": (NOW, test_src), - "xyz.txt": (NOW, ">>> log.append(True)\n")} + files = {TESTMOD + ".py": test_src, + "xyz.txt": ">>> log.append(True)\n"} self.doTest(".py", files, TESTMOD, call=callback) def doDoctestFile(self, module): @@ -763,29 +844,21 @@ def doTraceback(self, module): raise AssertionError("This ought to be impossible") def testTraceback(self): - files = {TESTMOD + ".py": (NOW, raise_src)} + files = {TESTMOD + ".py": raise_src} self.doTest(None, files, TESTMOD, call=self.doTraceback) @unittest.skipIf(os_helper.TESTFN_UNENCODABLE is None, "need an unencodable filename") def testUnencodable(self): filename = os_helper.TESTFN_UNENCODABLE + ".zip" - self.addCleanup(os_helper.unlink, filename) - with ZipFile(filename, "w") as z: - zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW)) - zinfo.compress_type = self.compression - z.writestr(zinfo, test_src) + self.makeZip({TESTMOD + ".py": test_src}, filename) spec = zipimport.zipimporter(filename).find_spec(TESTMOD) mod = importlib.util.module_from_spec(spec) spec.loader.exec_module(mod) def testBytesPath(self): filename = os_helper.TESTFN + ".zip" - self.addCleanup(os_helper.unlink, filename) - with ZipFile(filename, 
"w") as z: - zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW)) - zinfo.compress_type = self.compression - z.writestr(zinfo, test_src) + self.makeZip({TESTMOD + ".py": test_src}, filename) zipimport.zipimporter(filename) with self.assertRaises(TypeError): @@ -796,15 +869,15 @@ def testBytesPath(self): zipimport.zipimporter(memoryview(os.fsencode(filename))) def testComment(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, comment=b"comment") def testBeginningCruftAndComment(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, stuff=b"cruft" * 64, comment=b"hi") def testLargestPossibleComment(self): - files = {TESTMOD + ".py": (NOW, test_src)} + files = {TESTMOD + ".py": test_src} self.doTest(".py", files, TESTMOD, comment=b"c" * ((1 << 16) - 1)) @support.requires_resource('cpu') diff --git a/Lib/zipimport.py b/Lib/zipimport.py index a49a21f0799df2..68f031f89c9996 100644 --- a/Lib/zipimport.py +++ b/Lib/zipimport.py @@ -155,6 +155,8 @@ def get_data(self, pathname): toc_entry = self._get_files()[key] except KeyError: raise OSError(0, '', key) + if toc_entry is None: + return b'' return _get_data(self.archive, toc_entry) @@ -554,6 +556,22 @@ def _read_directory(archive): finally: fp.seek(start_offset) _bootstrap._verbose_message('zipimport: found {} names in {!r}', count, archive) + + # Add implicit directories. + count = 0 + for name in list(files): + while True: + i = name.rstrip(path_sep).rfind(path_sep) + if i < 0: + break + name = name[:i + 1] + if name in files: + break + files[name] = None + count += 1 + if count: + _bootstrap._verbose_message('zipimport: added {} implicit directories in {!r}', + count, archive) return files # During bootstrap, we may need to load the encodings diff --git a/Makefile.pre.in b/Makefile.pre.in index e1c793ce629b02..d380c422714a32 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -41,6 +41,7 @@ AR= @AR@ READELF= @READELF@ SOABI= @SOABI@ ABIFLAGS= @ABIFLAGS@ +ABI_THREAD= @ABI_THREAD@ LDVERSION= @LDVERSION@ MODULE_LDFLAGS=@MODULE_LDFLAGS@ GITVERSION= @GITVERSION@ @@ -158,7 +159,7 @@ WHEEL_PKG_DIR= @WHEEL_PKG_DIR@ # Detailed destination directories BINLIBDEST= @BINLIBDEST@ -LIBDEST= $(SCRIPTDIR)/python$(VERSION) +LIBDEST= $(SCRIPTDIR)/python$(VERSION)$(ABI_THREAD) INCLUDEPY= $(INCLUDEDIR)/python$(LDVERSION) CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(LDVERSION) @@ -1115,6 +1116,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/longobject.h \ $(srcdir)/Include/cpython/memoryobject.h \ $(srcdir)/Include/cpython/methodobject.h \ + $(srcdir)/Include/cpython/modsupport.h \ $(srcdir)/Include/cpython/monitoring.h \ $(srcdir)/Include/cpython/object.h \ $(srcdir)/Include/cpython/objimpl.h \ @@ -2651,7 +2653,7 @@ inclinstall: $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(INCLUDEPY)/internal; \ else true; \ fi - @if test "$(INSTALL_MIMALLOC)" == "yes"; then \ + @if test "$(INSTALL_MIMALLOC)" = "yes"; then \ if test ! 
-d $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc; then \ echo "Creating directory $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc"; \ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc; \ @@ -2672,7 +2674,7 @@ inclinstall: echo $(INSTALL_DATA) $$i $(INCLUDEPY)/internal; \ $(INSTALL_DATA) $$i $(DESTDIR)$(INCLUDEPY)/internal; \ done - @if test "$(INSTALL_MIMALLOC)" == "yes"; then \ + @if test "$(INSTALL_MIMALLOC)" = "yes"; then \ echo $(INSTALL_DATA) $(srcdir)/Include/internal/mimalloc/mimalloc.h $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc.h; \ $(INSTALL_DATA) $(srcdir)/Include/internal/mimalloc/mimalloc.h $(DESTDIR)$(INCLUDEPY)/internal/mimalloc/mimalloc.h; \ for i in $(srcdir)/Include/internal/mimalloc/mimalloc/*.h; \ diff --git a/Misc/HISTORY b/Misc/HISTORY index 8ca35e1af62c05..a74d7e06acd071 100644 --- a/Misc/HISTORY +++ b/Misc/HISTORY @@ -3952,7 +3952,7 @@ Library - Issue #18626: the inspect module now offers a basic command line introspection interface (Initial patch by Claudiu Popa) -- Issue #3015: Fixed tkinter with wantobject=False. Any Tcl command call +- Issue #3015: Fixed tkinter with ``wantobjects=False``. Any Tcl command call returned empty string. - Issue #19037: The mailbox module now makes all changes to maildir files diff --git a/Misc/NEWS.d/3.10.0a1.rst b/Misc/NEWS.d/3.10.0a1.rst index 9a729a45b160eb..f30ed548e7e033 100644 --- a/Misc/NEWS.d/3.10.0a1.rst +++ b/Misc/NEWS.d/3.10.0a1.rst @@ -97,7 +97,7 @@ convention. Patch by Donghee Na. .. nonce: aJS9B3 .. section: Core and Builtins -Port the :mod:`_bisect` module to the multi-phase initialization API +Port the :mod:`!_bisect` module to the multi-phase initialization API (:pep:`489`). .. @@ -128,7 +128,7 @@ Taskaya. .. nonce: lh335O .. section: Core and Builtins -Port the :mod:`_lsprof` extension module to multi-phase initialization +Port the :mod:`!_lsprof` extension module to multi-phase initialization (:pep:`489`). .. @@ -148,7 +148,7 @@ Port the :mod:`cmath` extension module to multi-phase initialization .. nonce: jiXmyT .. section: Core and Builtins -Port the :mod:`_scproxy` extension module to multi-phase initialization +Port the :mod:`!_scproxy` extension module to multi-phase initialization (:pep:`489`). .. @@ -168,7 +168,7 @@ Port the :mod:`termios` extension module to multi-phase initialization .. nonce: QuDIut .. section: Core and Builtins -Convert the :mod:`_sha256` extension module types to heap types. +Convert the :mod:`!_sha256` extension module types to heap types. .. @@ -187,7 +187,7 @@ classes with a huge amount of arguments. Patch by Pablo Galindo. .. nonce: CnRME3 .. section: Core and Builtins -Port the :mod:`_overlapped` extension module to multi-phase initialization +Port the :mod:`!_overlapped` extension module to multi-phase initialization (:pep:`489`). .. @@ -197,7 +197,7 @@ Port the :mod:`_overlapped` extension module to multi-phase initialization .. nonce: X9CZgo .. section: Core and Builtins -Port the :mod:`_curses_panel` extension module to multi-phase initialization +Port the :mod:`!_curses_panel` extension module to multi-phase initialization (:pep:`489`). .. @@ -207,7 +207,7 @@ Port the :mod:`_curses_panel` extension module to multi-phase initialization .. nonce: 5jZymK .. section: Core and Builtins -Port the :mod:`_opcode` extension module to multi-phase initialization +Port the :mod:`!_opcode` extension module to multi-phase initialization (:pep:`489`). .. @@ -282,7 +282,7 @@ initialized ``_ast`` module. .. nonce: vcxSUa .. 
section: Core and Builtins -Convert :mod:`_operator` to use :c:func:`PyType_FromSpec`. +Convert :mod:`!_operator` to use :c:func:`PyType_FromSpec`. .. @@ -291,7 +291,7 @@ Convert :mod:`_operator` to use :c:func:`PyType_FromSpec`. .. nonce: fubBkb .. section: Core and Builtins -Port :mod:`_sha3` to multi-phase init. Convert static types to heap types. +Port :mod:`!_sha3` to multi-phase init. Convert static types to heap types. .. @@ -300,7 +300,7 @@ Port :mod:`_sha3` to multi-phase init. Convert static types to heap types. .. nonce: FC13e7 .. section: Core and Builtins -Port the :mod:`_blake2` extension module to the multi-phase initialization +Port the :mod:`!_blake2` extension module to the multi-phase initialization API (:pep:`489`). .. @@ -339,7 +339,7 @@ The output of ``python --help`` contains now only ASCII characters. .. nonce: O0d3ym .. section: Core and Builtins -Port the :mod:`_sha1`, :mod:`_sha512`, and :mod:`_md5` extension modules to +Port the :mod:`!_sha1`, :mod:`!_sha512`, and :mod:`!_md5` extension modules to multi-phase initialization API (:pep:`489`). .. @@ -636,7 +636,7 @@ Remove the remaining files from the old parser and the :mod:`symbol` module. .. nonce: _yI-ax .. section: Core and Builtins -Convert :mod:`_bz2` to use :c:func:`PyType_FromSpec`. +Convert :mod:`!_bz2` to use :c:func:`PyType_FromSpec`. .. @@ -666,7 +666,7 @@ by Brandt Bucher. .. nonce: 61iyYh .. section: Core and Builtins -Port :mod:`_gdbm` to multiphase initialization. +Port :mod:`!_gdbm` to multiphase initialization. .. @@ -696,7 +696,7 @@ for emitting syntax errors. Patch by Pablo Galindo. .. nonce: mmlp3Q .. section: Core and Builtins -Port :mod:`_dbm` to multiphase initialization. +Port :mod:`!_dbm` to multiphase initialization. .. @@ -1010,7 +1010,7 @@ Port :mod:`mmap` to multiphase initialization. .. nonce: Kfe9fT .. section: Core and Builtins -Port :mod:`_lzma` to multiphase initialization. +Port :mod:`!_lzma` to multiphase initialization. .. diff --git a/Misc/NEWS.d/3.10.0a2.rst b/Misc/NEWS.d/3.10.0a2.rst index 79f570439b52b8..bdf9488c81bae1 100644 --- a/Misc/NEWS.d/3.10.0a2.rst +++ b/Misc/NEWS.d/3.10.0a2.rst @@ -362,7 +362,7 @@ plistlib: fix parsing XML plists with hexadecimal integer values .. nonce: 85BsRA .. section: Library -Fix an incorrectly formatted error from :meth:`_codecs.charmap_decode` when +Fix an incorrectly formatted error from :meth:`!_codecs.charmap_decode` when called with a mapped value outside the range of valid Unicode code points. PR by Max Bernstein. diff --git a/Misc/NEWS.d/3.10.0a3.rst b/Misc/NEWS.d/3.10.0a3.rst index 179cf3e9cfb08c..2aef87ab929aab 100644 --- a/Misc/NEWS.d/3.10.0a3.rst +++ b/Misc/NEWS.d/3.10.0a3.rst @@ -1386,7 +1386,7 @@ Python already implicitly installs signal handlers: see The ``Py_TRASHCAN_BEGIN`` macro no longer accesses PyTypeObject attributes, but now can get the condition by calling the new private -:c:func:`_PyTrash_cond()` function which hides implementation details. +:c:func:`!_PyTrash_cond()` function which hides implementation details. .. diff --git a/Misc/NEWS.d/3.10.0a4.rst b/Misc/NEWS.d/3.10.0a4.rst index ae667f2bffe192..5cea16c259d5ee 100644 --- a/Misc/NEWS.d/3.10.0a4.rst +++ b/Misc/NEWS.d/3.10.0a4.rst @@ -193,7 +193,7 @@ subinterpreters. Patch by Victor Stinner. .. nonce: j7nl6A .. section: Core and Builtins -Make :c:func:`_PyUnicode_FromId` function compatible with subinterpreters. +Make :c:func:`!_PyUnicode_FromId` function compatible with subinterpreters. 
Each interpreter now has an array of identifier objects (interned strings decoded from UTF-8). Patch by Victor Stinner. @@ -367,7 +367,7 @@ uses "options" instead. .. nonce: Quy3zn .. section: Library -Port the :mod:`_thread` extension module to the multiphase initialization +Port the :mod:`!_thread` extension module to the multiphase initialization API (:pep:`489`) and convert its static types to heap types. .. @@ -960,8 +960,8 @@ explicitly and so not exported. .. nonce: Je08Ny .. section: C API -Remove the private :c:func:`_Py_fopen` function which is no longer needed. -Use :c:func:`_Py_wfopen` or :c:func:`_Py_fopen_obj` instead. Patch by Victor +Remove the private :c:func:`!_Py_fopen` function which is no longer needed. +Use :c:func:`!_Py_wfopen` or :c:func:`!_Py_fopen_obj` instead. Patch by Victor Stinner. .. diff --git a/Misc/NEWS.d/3.10.0a5.rst b/Misc/NEWS.d/3.10.0a5.rst index dc95e8ce072fd9..a85ea1ff1c2817 100644 --- a/Misc/NEWS.d/3.10.0a5.rst +++ b/Misc/NEWS.d/3.10.0a5.rst @@ -108,7 +108,7 @@ a slice at the start of the ``bytearray`` to a shorter byte string). .. nonce: WfTdfg .. section: Core and Builtins -Fix the :c:func:`_PyUnicode_FromId` function (_Py_IDENTIFIER(var) API) when +Fix the :c:func:`!_PyUnicode_FromId` function (_Py_IDENTIFIER(var) API) when :c:func:`Py_Initialize` / :c:func:`Py_Finalize` is called multiple times: preserve ``_PyRuntime.unicode_ids.next_index`` value. diff --git a/Misc/NEWS.d/3.10.0a6.rst b/Misc/NEWS.d/3.10.0a6.rst index bad3528084897b..31b7df2c61158e 100644 --- a/Misc/NEWS.d/3.10.0a6.rst +++ b/Misc/NEWS.d/3.10.0a6.rst @@ -315,7 +315,7 @@ Adds :const:`resource.RLIMIT_KQUEUES` constant from FreeBSD to the .. section: Library Make the pure Python implementation of :mod:`xml.etree.ElementTree` behave -the same as the C implementation (:mod:`_elementree`) regarding default +the same as the C implementation (:mod:`!_elementree`) regarding default attribute values (by not setting ``specified_attributes=1``). .. diff --git a/Misc/NEWS.d/3.10.0a7.rst b/Misc/NEWS.d/3.10.0a7.rst index fe6213d95a88bb..32ee34d9a68910 100644 --- a/Misc/NEWS.d/3.10.0a7.rst +++ b/Misc/NEWS.d/3.10.0a7.rst @@ -83,7 +83,7 @@ instruction dispatch a bit. .. nonce: PhaT-B .. section: Core and Builtins -Fix reference leak in the :mod:`_hashopenssl` extension. Patch by Pablo +Fix reference leak in the :mod:`!_hashopenssl` extension. Patch by Pablo Galindo. .. diff --git a/Misc/NEWS.d/3.10.0b1.rst b/Misc/NEWS.d/3.10.0b1.rst index 640f3ee58adbae..306e987a41612e 100644 --- a/Misc/NEWS.d/3.10.0b1.rst +++ b/Misc/NEWS.d/3.10.0b1.rst @@ -182,7 +182,7 @@ normally be possible, but might occur in some unusual circumstances. .. nonce: u5Y6bS .. section: Core and Builtins -Importing the :mod:`_signal` module in a subinterpreter has no longer side +Importing the :mod:`!_signal` module in a subinterpreter has no longer side effects. .. @@ -776,11 +776,11 @@ builtins.open() is now io.open(). .. nonce: o1zEk_ .. section: Library -The Python :func:`_pyio.open` function becomes a static method to behave as +The Python :func:`!_pyio.open` function becomes a static method to behave as :func:`io.open` built-in function: don't become a bound method when stored as a class variable. It becomes possible since static methods are now -callable in Python 3.10. Moreover, :func:`_pyio.OpenWrapper` becomes a -simple alias to :func:`_pyio.open`. Patch by Victor Stinner. +callable in Python 3.10. Moreover, :func:`!_pyio.OpenWrapper` becomes a +simple alias to :func:`!_pyio.open`. Patch by Victor Stinner. .. 
diff --git a/Misc/NEWS.d/3.11.0a1.rst b/Misc/NEWS.d/3.11.0a1.rst index 40fbb9d42b7944..23b13c058f96bd 100644 --- a/Misc/NEWS.d/3.11.0a1.rst +++ b/Misc/NEWS.d/3.11.0a1.rst @@ -613,7 +613,7 @@ Rename ``types.Union`` to ``types.UnionType``. .. section: Core and Builtins Expose specialization stats in python via -:func:`_opcode.get_specialization_stats`. +:func:`!_opcode.get_specialization_stats`. .. @@ -1701,7 +1701,7 @@ Remove many old deprecated :mod:`unittest` features: .. nonce: y1kEfP .. section: Library -Remove the deprecated ``split()`` method of :class:`_tkinter.TkappType`. +Remove the deprecated ``split()`` method of :class:`!_tkinter.TkappType`. Patch by Erlend E. Aasland. .. @@ -2298,9 +2298,9 @@ Adopt *binacii.a2b_base64*'s strict mode in *base64.b64decode*. .. nonce: ThuDMI .. section: Library -Fixed a bug in the :mod:`_ssl` module that was throwing :exc:`OverflowError` -when using :meth:`_ssl._SSLSocket.write` and :meth:`_ssl._SSLSocket.read` -for a big value of the ``len`` parameter. Patch by Pablo Galindo +Fixed a bug in the :mod:`!_ssl` module that was throwing :exc:`OverflowError` +when using :meth:`!_ssl._SSLSocket.write` and :meth:`!_ssl._SSLSocket.read` +for a big value of the ``len`` parameter. Patch by Pablo Galindo. .. @@ -2398,7 +2398,7 @@ class in the interactive session. Instead of :exc:`TypeError`, it should be .. nonce: R3IcM1 .. section: Library -Fix memory leak in :func:`_tkinter._flatten` if it is called with a sequence +Fix memory leak in :func:`!_tkinter._flatten` if it is called with a sequence or set, but not list or tuple. .. @@ -4187,7 +4187,7 @@ Add calls of :func:`gc.collect` in tests to support PyPy. .. nonce: mQZdXU .. section: Tests -Made tests relying on the :mod:`_asyncio` C extension module optional to +Made tests relying on the :mod:`!_asyncio` C extension module optional to allow running on alternative Python implementations. Patch by Serhiy Storchaka. diff --git a/Misc/NEWS.d/3.11.0a2.rst b/Misc/NEWS.d/3.11.0a2.rst index 05644d0a4639b1..48cf2c1e428d87 100644 --- a/Misc/NEWS.d/3.11.0a2.rst +++ b/Misc/NEWS.d/3.11.0a2.rst @@ -15,7 +15,7 @@ Improve the :exc:`SyntaxError` message when using ``True``, ``None`` or .. section: Core and Builtins :data:`sys.stdlib_module_names` now contains the macOS-specific module -:mod:`_scproxy`. +:mod:`!_scproxy`. .. @@ -1023,7 +1023,7 @@ compile shared modules. .. nonce: 61gM2A .. section: Build -:mod:`pyexpat` and :mod:`_elementtree` no longer define obsolete macros +:mod:`pyexpat` and :mod:`!_elementtree` no longer define obsolete macros ``HAVE_EXPAT_CONFIG_H`` and ``USE_PYEXPAT_CAPI``. ``XML_POOR_ENTROPY`` is now defined in ``expat_config.h``. diff --git a/Misc/NEWS.d/3.11.0a3.rst b/Misc/NEWS.d/3.11.0a3.rst index 2842aad0e163d6..6a0ae20d1fb5ed 100644 --- a/Misc/NEWS.d/3.11.0a3.rst +++ b/Misc/NEWS.d/3.11.0a3.rst @@ -27,7 +27,7 @@ invalid targets. Patch by Pablo Galindo .. nonce: 3TmTSw .. section: Core and Builtins -:c:func:`_PyErr_ChainStackItem` no longer normalizes ``exc_info`` (including +:c:func:`!_PyErr_ChainStackItem` no longer normalizes ``exc_info`` (including setting the traceback on the exception instance) because ``exc_info`` is always normalized. diff --git a/Misc/NEWS.d/3.11.0a4.rst b/Misc/NEWS.d/3.11.0a4.rst index a5ce7620016cc7..64e2f39ad9db18 100644 --- a/Misc/NEWS.d/3.11.0a4.rst +++ b/Misc/NEWS.d/3.11.0a4.rst @@ -258,7 +258,7 @@ instruction which performs the same operation, but without the loop. .. nonce: ADVaPT .. 
section: Core and Builtins -The code called from :c:func:`_PyErr_Display` was refactored to improve +The code called from :c:func:`!_PyErr_Display` was refactored to improve error handling. It now exits immediately upon an unrecoverable error. .. diff --git a/Misc/NEWS.d/3.11.0b1.rst b/Misc/NEWS.d/3.11.0b1.rst index c35e8e2c1caf07..a035d0f5addbf2 100644 --- a/Misc/NEWS.d/3.11.0b1.rst +++ b/Misc/NEWS.d/3.11.0b1.rst @@ -285,7 +285,7 @@ macros. .. nonce: 11YXHQ .. section: Core and Builtins -Add a new :c:func:`_PyFrame_IsEntryFrame` API function, to check if a +Add a new :c:func:`!_PyFrame_IsEntryFrame` API function, to check if a :c:type:`PyFrameObject` is an entry frame. Patch by Pablo Galindo. .. diff --git a/Misc/NEWS.d/3.12.0a1.rst b/Misc/NEWS.d/3.12.0a1.rst index 84d9d4e017609d..77a34124fb39e6 100644 --- a/Misc/NEWS.d/3.12.0a1.rst +++ b/Misc/NEWS.d/3.12.0a1.rst @@ -102,7 +102,7 @@ well as generator expressions. .. section: Core and Builtins Added unicode check for ``name`` attribute of ``spec`` argument passed in -:func:`_imp.create_builtin` function. +:func:`!_imp.create_builtin` function. .. @@ -483,7 +483,7 @@ Fix case of undefined behavior in ceval.c .. nonce: AfCi36 .. section: Core and Builtins -Convert :mod:`_functools` to argument clinic. +Convert :mod:`!_functools` to argument clinic. .. @@ -492,7 +492,7 @@ Convert :mod:`_functools` to argument clinic. .. nonce: wky0Fc .. section: Core and Builtins -Do not expose ``KeyWrapper`` in :mod:`_functools`. +Do not expose ``KeyWrapper`` in :mod:`!_functools`. .. @@ -1731,7 +1731,7 @@ tracing functions implemented in C. .. nonce: lenv9h .. section: Core and Builtins -:meth:`_warnings.warn_explicit` is ported to Argument Clinic. +:meth:`!_warnings.warn_explicit` is ported to Argument Clinic. .. @@ -3142,8 +3142,8 @@ test.test_codecs.EncodedFileTest`` instead. .. nonce: VhS1eS .. section: Library -Made :class:`_struct.Struct` GC-tracked in order to fix a reference leak in -the :mod:`_struct` module. +Made :class:`!_struct.Struct` GC-tracked in order to fix a reference leak in +the :mod:`!_struct` module. .. @@ -3258,7 +3258,7 @@ on the main thread Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python 3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) -function is a built-in function. Since Python 3.10, :func:`_pyio.open` is +function is a built-in function. Since Python 3.10, :func:`!_pyio.open` is also a static method. Patch by Victor Stinner. .. @@ -5610,7 +5610,7 @@ Accept os.PathLike for the argument to winsound.PlaySound Support native Windows case-insensitive path comparisons by using ``LCMapStringEx`` instead of :func:`str.lower` in :func:`ntpath.normcase`. -Add ``LCMapStringEx`` to the :mod:`_winapi` module. +Add ``LCMapStringEx`` to the :mod:`!_winapi` module. .. diff --git a/Misc/NEWS.d/3.12.0a2.rst b/Misc/NEWS.d/3.12.0a2.rst index 88d84ad93b35b5..3626f8b1e20809 100644 --- a/Misc/NEWS.d/3.12.0a2.rst +++ b/Misc/NEWS.d/3.12.0a2.rst @@ -527,7 +527,7 @@ Stinner. .. nonce: Ai2KDh .. section: Library -Now :mod:`_pyio` is consistent with :mod:`_io` in raising ``ValueError`` +Now :mod:`!_pyio` is consistent with :mod:`!_io` in raising ``ValueError`` when executing methods over closed buffers. .. @@ -537,7 +537,7 @@ when executing methods over closed buffers. .. nonce: 0v8iyw .. section: Library -Clean up refleak on failed module initialisation in :mod:`_zoneinfo` +Clean up refleak on failed module initialisation in :mod:`!_zoneinfo` .. 
@@ -546,7 +546,7 @@ Clean up refleak on failed module initialisation in :mod:`_zoneinfo` .. nonce: qc_KHr .. section: Library -Clean up refleaks on failed module initialisation in :mod:`_pickle` +Clean up refleaks on failed module initialisation in :mod:`!_pickle` .. @@ -555,7 +555,7 @@ Clean up refleaks on failed module initialisation in :mod:`_pickle` .. nonce: LBl79O .. section: Library -Clean up refleak on failed module initialisation in :mod:`_io`. +Clean up refleak on failed module initialisation in :mod:`!_io`. .. diff --git a/Misc/NEWS.d/3.12.0a3.rst b/Misc/NEWS.d/3.12.0a3.rst index 07593998d80891..f6a4dc75d456f4 100644 --- a/Misc/NEWS.d/3.12.0a3.rst +++ b/Misc/NEWS.d/3.12.0a3.rst @@ -70,7 +70,7 @@ Fix bug where compiler crashes on an if expression with an empty body block. .. nonce: DcKoBJ .. section: Core and Builtins -Fix a reference bug in :func:`_imp.create_builtin()` after the creation of +Fix a reference bug in :func:`!_imp.create_builtin` after the creation of the first sub-interpreter for modules ``builtins`` and ``sys``. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/3.12.0a4.rst b/Misc/NEWS.d/3.12.0a4.rst index d7af30f6c09b2b..53e1688b802bae 100644 --- a/Misc/NEWS.d/3.12.0a4.rst +++ b/Misc/NEWS.d/3.12.0a4.rst @@ -241,7 +241,7 @@ are now always dumped, even if switched off. Improve ``BUILD_LIST`` opcode so that it works similarly to the ``BUILD_TUPLE`` opcode, by stealing references from the stack rather than repeatedly using stack operations to set list elements. Implementation -details are in a new private API :c:func:`_PyList_FromArraySteal`. +details are in a new private API :c:func:`!_PyList_FromArraySteal`. .. diff --git a/Misc/NEWS.d/3.12.0b1.rst b/Misc/NEWS.d/3.12.0b1.rst index 9f3095b224233e..7126e08a20c7fd 100644 --- a/Misc/NEWS.d/3.12.0b1.rst +++ b/Misc/NEWS.d/3.12.0b1.rst @@ -1828,7 +1828,7 @@ is relative. .. nonce: 511Tbh .. section: Library -Convert private :meth:`_posixsubprocess.fork_exec` to use Argument Clinic. +Convert private :meth:`!_posixsubprocess.fork_exec` to use Argument Clinic. .. diff --git a/Misc/NEWS.d/3.13.0a1.rst b/Misc/NEWS.d/3.13.0a1.rst index 9a321f779c24ff..0ba61b43411792 100644 --- a/Misc/NEWS.d/3.13.0a1.rst +++ b/Misc/NEWS.d/3.13.0a1.rst @@ -2888,9 +2888,9 @@ documented and were not intended to be used externally. .. nonce: vMbmj_ .. section: Library -:data:`opcode.ENABLE_SPECIALIZATION` (which was added in 3.12 but never +:data:`!opcode.ENABLE_SPECIALIZATION` (which was added in 3.12 but never documented or intended for external usage) is moved to -:data:`_opcode.ENABLE_SPECIALIZATION` where tests can access it. +:data:`!_opcode.ENABLE_SPECIALIZATION` where tests can access it. .. @@ -3053,7 +3053,7 @@ Donghee Na. .. nonce: U9nD_B .. section: Library -Optimize :meth:`_PollLikeSelector.select` for many iteration case. +Optimize :meth:`!_PollLikeSelector.select` for many iteration case. .. @@ -3173,7 +3173,7 @@ Disable tab completion in multiline mode of :mod:`pdb` .. nonce: pYSwMj .. section: Library -Expose opcode metadata through :mod:`_opcode`. +Expose opcode metadata through :mod:`!_opcode`. .. @@ -3735,7 +3735,7 @@ overwritten. .. nonce: _sZilh .. section: Library -Fix bugs in :mod:`_ctypes` where exceptions could end up being overwritten. +Fix bugs in :mod:`!_ctypes` where exceptions could end up being overwritten. .. 
diff --git a/Misc/NEWS.d/3.13.0a2.rst b/Misc/NEWS.d/3.13.0a2.rst index c6b2b1b263ffab..f4a637bf624d03 100644 --- a/Misc/NEWS.d/3.13.0a2.rst +++ b/Misc/NEWS.d/3.13.0a2.rst @@ -777,7 +777,7 @@ Add error checking during :mod:`!_socket` module init. .. nonce: urFYtn .. section: Library -Fix :mod:`_blake2` not checking for errors when initializing. +Fix :mod:`!_blake2` not checking for errors when initializing. .. diff --git a/Misc/NEWS.d/3.13.0a3.rst b/Misc/NEWS.d/3.13.0a3.rst index 2c660192dcd5b3..29fbe00efef76d 100644 --- a/Misc/NEWS.d/3.13.0a3.rst +++ b/Misc/NEWS.d/3.13.0a3.rst @@ -449,8 +449,8 @@ well-formed for surrogateescape encoding. Patch by Sidney Markowitz. .. nonce: N8E1zw .. section: Core and Builtins -Use the object's actual class name in :meth:`_io.FileIO.__repr__`, -:meth:`_io._WindowsConsoleIO` and :meth:`_io.TextIOWrapper.__repr__`, to +Use the object's actual class name in :meth:`!_io.FileIO.__repr__`, +:meth:`!_io._WindowsConsoleIO` and :meth:`!_io.TextIOWrapper.__repr__`, to make these methods subclass friendly. .. diff --git a/Misc/NEWS.d/3.13.0a5.rst b/Misc/NEWS.d/3.13.0a5.rst index 6d74c6bc5c4d55..d8cc88c8756a17 100644 --- a/Misc/NEWS.d/3.13.0a5.rst +++ b/Misc/NEWS.d/3.13.0a5.rst @@ -541,7 +541,7 @@ descriptors in :meth:`inspect.Signature.from_callable`. .. nonce: sGMKr0 .. section: Library -Isolate :mod:`_lsprof` (apply :pep:`687`). +Isolate :mod:`!_lsprof` (apply :pep:`687`). .. @@ -773,8 +773,8 @@ combination with unicode encoding. .. section: Library Fix :func:`io.BufferedReader.tell`, :func:`io.BufferedReader.seek`, -:func:`_pyio.BufferedReader.tell`, :func:`io.BufferedRandom.tell`, -:func:`io.BufferedRandom.seek` and :func:`_pyio.BufferedRandom.tell` being +:func:`!_pyio.BufferedReader.tell`, :func:`io.BufferedRandom.tell`, +:func:`io.BufferedRandom.seek` and :func:`!_pyio.BufferedRandom.tell` being able to return negative offsets. .. diff --git a/Misc/NEWS.d/3.13.0a6.rst b/Misc/NEWS.d/3.13.0a6.rst index fff29083e0dab7..0cdbb8232250d7 100644 --- a/Misc/NEWS.d/3.13.0a6.rst +++ b/Misc/NEWS.d/3.13.0a6.rst @@ -550,7 +550,7 @@ or DuplicateOptionError. .. nonce: PBiRQB .. section: Library -:class:`_io.WindowsConsoleIO` now emit a warning if a boolean value is +:class:`!_io.WindowsConsoleIO` now emit a warning if a boolean value is passed as a filedescriptor argument. .. diff --git a/Misc/NEWS.d/3.13.0b1.rst b/Misc/NEWS.d/3.13.0b1.rst index ab5f24fe345af9..831ba623765df7 100644 --- a/Misc/NEWS.d/3.13.0b1.rst +++ b/Misc/NEWS.d/3.13.0b1.rst @@ -666,7 +666,7 @@ by :pep:`738`. .. section: Library Allow to specify the signature of custom callable instances of extension -type by the :attr:`__text_signature__` attribute. Specify signatures of +type by the ``__text_signature__`` attribute. Specify signatures of :class:`operator.attrgetter`, :class:`operator.itemgetter`, and :class:`operator.methodcaller` instances. @@ -687,10 +687,10 @@ padding is not detected when no padding is necessary. .. nonce: 5N2Xcy .. section: Library -Add the :class:`!PhotoImage` methods :meth:`~tkinter.PhotoImage.read` to -read an image from a file and :meth:`~tkinter.PhotoImage.data` to get the +Add the :class:`!PhotoImage` methods :meth:`!read` to +read an image from a file and :meth:`!data` to get the image data. Add *background* and *grayscale* parameters to -:class:`!PhotoImage` method :meth:`~tkinter.PhotoImage.write`. +:class:`!PhotoImage` method :meth:`!write`. .. @@ -855,7 +855,7 @@ is used to bind indexed, nameless placeholders. See also :gh:`100668`. .. nonce: RstWg- .. 
section: Library -Fix TypeError in :func:`email.Message.get_payload` when the charset is +Fix TypeError in :func:`email.message.Message.get_payload` when the charset is :rfc:`2231` encoded. .. @@ -953,7 +953,7 @@ Speed up :meth:`pathlib.Path.walk` by working with strings internally. .. nonce: oxIUEI .. section: Library -Change the new multi-separator support in :meth:`asyncio.Stream.readuntil` +Change the new multi-separator support in :meth:`asyncio.StreamReader.readuntil` to only accept tuples of separators rather than arbitrary iterables. .. @@ -1260,7 +1260,7 @@ Support opcode events in :mod:`bdb` .. nonce: YoI8TV .. section: Library -:mod:`ncurses`: fixed a crash that could occur on macOS 13 or earlier when +:mod:`!ncurses`: fixed a crash that could occur on macOS 13 or earlier when Python was built with Apple Xcode 15's SDK. .. @@ -1347,13 +1347,13 @@ urllib. .. nonce: du4UKW .. section: Library -Setting the :mod:`!tkinter` module global :data:`~tkinter.wantobject` to ``2`` +Setting the :mod:`!tkinter` module global :data:`!wantobjects` to ``2`` before creating the :class:`~tkinter.Tk` object or call the -:meth:`~tkinter.Tk.wantobject` method of the :class:`!Tk` object with argument +:meth:`!wantobjects` method of the :class:`!Tk` object with argument ``2`` makes now arguments to callbacks registered in the :mod:`tkinter` module to be passed as various Python objects (``int``, ``float``, ``bytes``, ``tuple``), depending on their internal representation in Tcl, instead of always ``str``. -:data:`!tkinter.wantobject` is now set to ``2`` by default. +:data:`!tkinter.wantobjects` is now set to ``2`` by default. .. diff --git a/Misc/NEWS.d/3.5.0a1.rst b/Misc/NEWS.d/3.5.0a1.rst index 442ab62fee8185..35f340f503df18 100644 --- a/Misc/NEWS.d/3.5.0a1.rst +++ b/Misc/NEWS.d/3.5.0a1.rst @@ -3447,7 +3447,8 @@ tkinter.ttk now works when default root window is not set. .. nonce: FE_PII .. section: Library -_tkinter.create() now creates tkapp object with wantobject=1 by default. +``_tkinter.create()`` now creates ``tkapp`` object with ``wantobjects=1`` by +default. .. diff --git a/Misc/NEWS.d/3.6.0a1.rst b/Misc/NEWS.d/3.6.0a1.rst index 5c9a6e5d64b469..803c9fc5925fa6 100644 --- a/Misc/NEWS.d/3.6.0a1.rst +++ b/Misc/NEWS.d/3.6.0a1.rst @@ -1484,9 +1484,9 @@ on UNIX signals (SIGSEGV, SIGFPE, SIGABRT). .. nonce: RWN1jR .. section: Library -Add C functions :c:func:`_PyTraceMalloc_Track` and -:c:func:`_PyTraceMalloc_Untrack` to track memory blocks using the -:mod:`tracemalloc` module. Add :c:func:`_PyTraceMalloc_GetTraceback` to get +Add C functions :c:func:`!_PyTraceMalloc_Track` and +:c:func:`!_PyTraceMalloc_Untrack` to track memory blocks using the +:mod:`tracemalloc` module. Add :c:func:`!_PyTraceMalloc_GetTraceback` to get the traceback of an object. .. diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst index 9decc4034d6b87..35b9e7fca27a7b 100644 --- a/Misc/NEWS.d/3.8.0a1.rst +++ b/Misc/NEWS.d/3.8.0a1.rst @@ -2519,7 +2519,7 @@ non-Windows systems. .. nonce: dQS1ng .. section: Library -Fix incorrect parsing of :class:`_io.IncrementalNewlineDecoder`'s +Fix incorrect parsing of :class:`io.IncrementalNewlineDecoder`'s *translate* argument. .. @@ -8051,7 +8051,7 @@ Update macOS 10.9+ installer to Tcl/Tk 8.6.8. .. nonce: K6jCVG .. section: macOS -In :mod:`_scproxy`, drop the GIL when calling into ``SystemConfiguration`` +In :mod:`!_scproxy`, drop the GIL when calling into ``SystemConfiguration`` to avoid deadlocks. .. 
diff --git a/Misc/NEWS.d/3.8.0a4.rst b/Misc/NEWS.d/3.8.0a4.rst index 7bf0de1210935b..edce71b2555a89 100644 --- a/Misc/NEWS.d/3.8.0a4.rst +++ b/Misc/NEWS.d/3.8.0a4.rst @@ -945,7 +945,7 @@ P. Hemsley. .. nonce: __FTq9 .. section: Tests -Add a new :mod:`_testinternalcapi` module to test the internal C API. +Add a new :mod:`!_testinternalcapi` module to test the internal C API. .. @@ -1383,7 +1383,7 @@ Since Python 3.7.0, calling :c:func:`Py_DecodeLocale` before coerced and/or if the UTF-8 Mode is enabled by the user configuration. The LC_CTYPE coercion and UTF-8 Mode are now disabled by default to fix the mojibake issue. They must now be enabled explicitly (opt-in) using the new -:c:func:`_Py_PreInitialize` API with ``_PyPreConfig``. +:c:func:`!_Py_PreInitialize` API with ``_PyPreConfig``. .. diff --git a/Misc/NEWS.d/3.8.0b1.rst b/Misc/NEWS.d/3.8.0b1.rst index 4174ab8fac6192..fc4e3a9bd887fb 100644 --- a/Misc/NEWS.d/3.8.0b1.rst +++ b/Misc/NEWS.d/3.8.0b1.rst @@ -600,7 +600,7 @@ default. .. nonce: sLULGQ .. section: Library -Fix destructor :class:`_pyio.BytesIO` and :class:`_pyio.TextIOWrapper`: +Fix destructor :class:`!_pyio.BytesIO` and :class:`!_pyio.TextIOWrapper`: initialize their ``_buffer`` attribute as soon as possible (in the class body), because it's used by ``__del__()`` which calls ``close()``. diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst index a38b93e4b76d17..b0f63c3b9c3537 100644 --- a/Misc/NEWS.d/3.9.0a1.rst +++ b/Misc/NEWS.d/3.9.0a1.rst @@ -1384,7 +1384,7 @@ Nested subclasses of :class:`typing.NamedTuple` are now pickleable. .. nonce: hwrPN7 .. section: Library -Prevent :exc:`KeyError` thrown by :func:`_encoded_words.decode` when given +Prevent :exc:`KeyError` thrown by :func:`!_encoded_words.decode` when given an encoded-word with invalid content-type encoding from propagating all the way to :func:`email.message.get`. @@ -1395,7 +1395,7 @@ way to :func:`email.message.get`. .. nonce: S6Klvm .. section: Library -Deprecated the ``split()`` method in :class:`_tkinter.TkappType` in favour +Deprecated the ``split()`` method in :class:`!_tkinter.TkappType` in favour of the ``splitlist()`` method which has more consistent and predictable behavior. @@ -3013,7 +3013,7 @@ thread was still running. .. section: Library Allow pure Python implementation of :mod:`pickle` to work even when the C -:mod:`_pickle` module is unavailable. +:mod:`!_pickle` module is unavailable. .. @@ -3064,8 +3064,8 @@ internal tasks weak set is changed by another thread during iteration. .. nonce: ADqCkq .. section: Library -:class:`_pyio.IOBase` destructor now does nothing if getting the ``closed`` -attribute fails to better mimic :class:`_io.IOBase` finalizer. +:class:`!_pyio.IOBase` destructor now does nothing if getting the ``closed`` +attribute fails to better mimic :class:`!_io.IOBase` finalizer. .. @@ -4993,7 +4993,7 @@ Make :const:`winreg.REG_MULTI_SZ` support zero-length strings. .. section: Windows Replace use of :c:func:`strcasecmp` for the system function -:c:func:`_stricmp`. Patch by Minmin Gong. +:c:func:`!_stricmp`. Patch by Minmin Gong. .. @@ -5696,8 +5696,8 @@ Add :c:func:`PyConfig_SetWideStringList` function. .. section: C API Add fast functions for calling methods: -:c:func:`_PyObject_VectorcallMethod`, :c:func:`_PyObject_CallMethodNoArgs` -and :c:func:`_PyObject_CallMethodOneArg`. +:c:func:`!_PyObject_VectorcallMethod`, :c:func:`!_PyObject_CallMethodNoArgs` +and :c:func:`!_PyObject_CallMethodOneArg`. .. 
diff --git a/Misc/NEWS.d/3.9.0a6.rst b/Misc/NEWS.d/3.9.0a6.rst index b7ea1051c314f2..4ba4cfe818c2d0 100644 --- a/Misc/NEWS.d/3.9.0a6.rst +++ b/Misc/NEWS.d/3.9.0a6.rst @@ -111,7 +111,7 @@ str.decode(). .. nonce: m15TTX .. section: Core and Builtins -Fix possible refleaks in :mod:`_json`, memo of PyScannerObject should be +Fix possible refleaks in :mod:`!_json`, memo of PyScannerObject should be traversed. .. @@ -666,8 +666,8 @@ for _main_thread, instead of a _DummyThread instance. .. nonce: VTq_8s .. section: Library -Add a private ``_at_fork_reinit()`` method to :class:`_thread.Lock`, -:class:`_thread.RLock`, :class:`threading.RLock` and +Add a private ``_at_fork_reinit()`` method to :class:`!_thread.Lock`, +:class:`!_thread.RLock`, :class:`threading.RLock` and :class:`threading.Condition` classes: reinitialize the lock at fork in the child process, reset the lock to the unlocked state. Rename also the private ``_reset_internal_locks()`` method of :class:`threading.Event` to diff --git a/Misc/NEWS.d/next/Build/2024-07-02-20-16-09.gh-issue-121103.TMef9j.rst b/Misc/NEWS.d/next/Build/2024-07-02-20-16-09.gh-issue-121103.TMef9j.rst new file mode 100644 index 00000000000000..4bc8c6de0b7733 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2024-07-02-20-16-09.gh-issue-121103.TMef9j.rst @@ -0,0 +1,3 @@ +On POSIX systems, excluding macOS framework installs, the lib directory +for the free-threaded build now includes a "t" suffix to avoid conflicts +with a co-located default build installation. diff --git a/Misc/NEWS.d/next/Build/2024-07-08-01-11-54.gh-issue-121467.3qWRQj.rst b/Misc/NEWS.d/next/Build/2024-07-08-01-11-54.gh-issue-121467.3qWRQj.rst new file mode 100644 index 00000000000000..a2238475546eaa --- /dev/null +++ b/Misc/NEWS.d/next/Build/2024-07-08-01-11-54.gh-issue-121467.3qWRQj.rst @@ -0,0 +1 @@ +Fix a Makefile bug that prevented mimalloc header files from being installed. diff --git a/Misc/NEWS.d/next/Build/2024-07-08-14-01-17.gh-issue-121487.ekHmpR.rst b/Misc/NEWS.d/next/Build/2024-07-08-14-01-17.gh-issue-121487.ekHmpR.rst new file mode 100644 index 00000000000000..e30d4dcdbfe779 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2024-07-08-14-01-17.gh-issue-121487.ekHmpR.rst @@ -0,0 +1 @@ +Fix deprecation warning for ATOMIC_VAR_INIT in mimalloc. diff --git a/Misc/NEWS.d/next/C API/2024-07-02-11-03-40.gh-issue-112136.f3fiY8.rst b/Misc/NEWS.d/next/C API/2024-07-02-11-03-40.gh-issue-112136.f3fiY8.rst new file mode 100644 index 00000000000000..a240b4e852c4d1 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2024-07-02-11-03-40.gh-issue-112136.f3fiY8.rst @@ -0,0 +1,3 @@ +Restore the private ``_PyArg_Parser`` structure and the private +``_PyArg_ParseTupleAndKeywordsFast()`` function, previously removed in Python +3.13 alpha 1. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2024-07-09-15-55-20.gh-issue-89364.yYYroI.rst b/Misc/NEWS.d/next/C_API/2024-07-09-15-55-20.gh-issue-89364.yYYroI.rst new file mode 100644 index 00000000000000..b82e78446e4e87 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2024-07-09-15-55-20.gh-issue-89364.yYYroI.rst @@ -0,0 +1,3 @@ +Export the :c:func:`PySignal_SetWakeupFd` function. Previously, the function +was documented but it couldn't be used in 3rd party code. Patch by Victor +Stinner. 
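The first Build entry above gives free-threaded installs a ``t``-suffixed lib directory. A quick, hedged way to observe the layout from Python on a POSIX build; ``sys.abiflags`` and the example path shown are assumptions that depend on the platform and configure options:

    import sys
    import sysconfig

    # On a free-threaded (PEP 703) build the ABI flags include "t" and, with
    # this change, the stdlib directory carries the same suffix (for example
    # .../lib/python3.14t).  A default build prints empty flags and no suffix.
    print(sys.abiflags)
    print(sysconfig.get_path("stdlib"))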
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-05-06-26-04.gh-issue- b/Misc/NEWS.d/next/Core and Builtins/2024-06-05-06-26-04.gh-issue- deleted file mode 100644 index 29f06d43c3598c..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2024-06-05-06-26-04.gh-issue- +++ /dev/null @@ -1 +0,0 @@ -Support Linux perf profiler to see Python calls on RISC-V architecture diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-12-12-29-45.gh-issue-120400.lZYHVS.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-12-12-29-45.gh-issue-120400.lZYHVS.rst deleted file mode 100644 index 8c86d4750e39a8..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2024-06-12-12-29-45.gh-issue-120400.lZYHVS.rst +++ /dev/null @@ -1 +0,0 @@ -Support Linux perf profiler to see Python calls on RISC-V architecture. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-19-19-54-35.gh-issue-120754.uF29sj.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-19-19-54-35.gh-issue-120754.uF29sj.rst new file mode 100644 index 00000000000000..46481d8f31aaba --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-06-19-19-54-35.gh-issue-120754.uF29sj.rst @@ -0,0 +1 @@ +Reduce the number of system calls invoked when reading a whole file (e.g. ``open('a.txt').read()``). For a sample program that reads the contents of the 400+ ``.rst`` files in the cpython repository ``Doc`` folder, there is an over 10% reduction in system call count. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-28-18-34-49.gh-issue-119726.Fjv_Ab.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-28-18-34-49.gh-issue-119726.Fjv_Ab.rst new file mode 100644 index 00000000000000..cf5d61450aa3ae --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-06-28-18-34-49.gh-issue-119726.Fjv_Ab.rst @@ -0,0 +1,2 @@ +Optimize code layout for calls to C functions from the JIT on AArch64. +Patch by Diego Russo. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-30-03-48-10.gh-issue-121149.lLBMKe.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-30-03-48-10.gh-issue-121149.lLBMKe.rst new file mode 100644 index 00000000000000..38d618f06090fd --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-06-30-03-48-10.gh-issue-121149.lLBMKe.rst @@ -0,0 +1,2 @@ +Added specialization for summation of complex numbers; this also improves the accuracy +of the builtin :func:`sum` for such inputs. Patch by Sergey B Kirpichev. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-04-23-38-30.gh-issue-121368.m3EF9E.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-04-23-38-30.gh-issue-121368.m3EF9E.rst new file mode 100644 index 00000000000000..3df5b216cbc0af --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-04-23-38-30.gh-issue-121368.m3EF9E.rst @@ -0,0 +1,3 @@ +Fix race condition in ``_PyType_Lookup`` in the free-threaded build due to +a missing memory fence. This could lead to ``_PyType_Lookup`` returning +incorrect results on arm64.
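For the summation entry above, the specialization is purely an internal fast path; the visible behavior of the builtin :func:`sum` with complex inputs stays the same, as in this small sketch (the example values are arbitrary):

    values = [1 + 1j, 2 + 2j, 3 + 3j]

    # sum() already accepts complex values; the change above adds a
    # specialized, more accurate accumulation path for this case.
    print(sum(values))      # (6+6j)
    print(sum(values, 0j))  # an explicit complex start value behaves the same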
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-05-11-29-27.gh-issue-121288.lYKYYP.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-05-11-29-27.gh-issue-121288.lYKYYP.rst new file mode 100644 index 00000000000000..bd3e20b5658562 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-05-11-29-27.gh-issue-121288.lYKYYP.rst @@ -0,0 +1,5 @@ +:exc:`ValueError` messages for :meth:`!list.index()`, :meth:`!range.index()`, +:meth:`!deque.index()`, :meth:`!deque.remove()` and +:meth:`!ShareableList.index()` no longer contain the repr of the searched +value (which can be arbitrarily large) and are consistent with error messages +for other :meth:`!index()` and :meth:`!remove()` methods. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-08-02-24-55.gh-issue-121439.jDHod3.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-08-02-24-55.gh-issue-121439.jDHod3.rst new file mode 100644 index 00000000000000..361f9fc71186c6 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-08-02-24-55.gh-issue-121439.jDHod3.rst @@ -0,0 +1 @@ +Allow tuples of length 20 in the freelist to be reused. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-08-10-31-08.gh-issue-121012.M5hHk-.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-08-10-31-08.gh-issue-121012.M5hHk-.rst new file mode 100644 index 00000000000000..7b04eb68b03752 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-08-10-31-08.gh-issue-121012.M5hHk-.rst @@ -0,0 +1,2 @@ +Tier 2 execution now ensures that list iterators remain exhausted once they +become exhausted. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-10-15-43-54.gh-issue-117482.5WYaXR.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-10-15-43-54.gh-issue-117482.5WYaXR.rst new file mode 100644 index 00000000000000..ec1e7327b77f19 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-10-15-43-54.gh-issue-117482.5WYaXR.rst @@ -0,0 +1,2 @@ +Unexpected slot wrappers are no longer created for builtin static types in +subinterpreters. diff --git a/Misc/NEWS.d/next/Library/2023-04-24-05-34-23.gh-issue-103194.GwBwWL.rst b/Misc/NEWS.d/next/Library/2023-04-24-05-34-23.gh-issue-103194.GwBwWL.rst index 3f70168b81069e..bc9187309c6a53 100644 --- a/Misc/NEWS.d/next/Library/2023-04-24-05-34-23.gh-issue-103194.GwBwWL.rst +++ b/Misc/NEWS.d/next/Library/2023-04-24-05-34-23.gh-issue-103194.GwBwWL.rst @@ -1,4 +1,4 @@ Prepare Tkinter for C API changes in Tcl 8.7/9.0 to avoid -:class:`_tkinter.Tcl_Obj` being unexpectedly returned +:class:`!_tkinter.Tcl_Obj` being unexpectedly returned instead of :class:`bool`, :class:`str`, :class:`bytearray`, or :class:`int`. diff --git a/Misc/NEWS.d/next/Library/2024-05-07-17-38-53.gh-issue-118714.XXKpVZ.rst b/Misc/NEWS.d/next/Library/2024-05-07-17-38-53.gh-issue-118714.XXKpVZ.rst new file mode 100644 index 00000000000000..f41baee303482a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-05-07-17-38-53.gh-issue-118714.XXKpVZ.rst @@ -0,0 +1,2 @@ +Allow ``restart`` in post-mortem debugging of :mod:`pdb`. Remove the restart message +when the user quits pdb from post-mortem mode. diff --git a/Misc/NEWS.d/next/Library/2024-05-08-18-33-07.gh-issue-118507.OCQsAY.rst b/Misc/NEWS.d/next/Library/2024-05-08-18-33-07.gh-issue-118507.OCQsAY.rst index de1462f0d24fce..67b1fea4f83cb4 100644 --- a/Misc/NEWS.d/next/Library/2024-05-08-18-33-07.gh-issue-118507.OCQsAY.rst +++ b/Misc/NEWS.d/next/Library/2024-05-08-18-33-07.gh-issue-118507.OCQsAY.rst @@ -1 +1,2 @@ +Fix :func:`os.path.isfile` on Windows for pipes.
Speedup :func:`os.path.isjunction` and :func:`os.path.lexists` on Windows with a native implementation. diff --git a/Misc/NEWS.d/next/Library/2024-06-07-10-10-32.gh-issue-117983.NeMR9n.rst b/Misc/NEWS.d/next/Library/2024-06-07-10-10-32.gh-issue-117983.NeMR9n.rst new file mode 100644 index 00000000000000..cca97f50a20496 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-07-10-10-32.gh-issue-117983.NeMR9n.rst @@ -0,0 +1,2 @@ +Defer the ``threading`` import in ``importlib.util`` until lazy loading is +used. diff --git a/Misc/NEWS.d/next/Library/2024-06-23-07-23-08.gh-issue-61103.ca_U_l.rst b/Misc/NEWS.d/next/Library/2024-06-23-07-23-08.gh-issue-61103.ca_U_l.rst index 7b11d8c303c2f6..890eb62010eb33 100644 --- a/Misc/NEWS.d/next/Library/2024-06-23-07-23-08.gh-issue-61103.ca_U_l.rst +++ b/Misc/NEWS.d/next/Library/2024-06-23-07-23-08.gh-issue-61103.ca_U_l.rst @@ -1,3 +1,5 @@ -Support :c:expr:`double complex` C type in :mod:`ctypes` via -:class:`~ctypes.c_double_complex` if compiler has C11 complex -arithmetic. Patch by Sergey B Kirpichev. +Support :c:expr:`float complex`, :c:expr:`double complex` and +:c:expr:`long double complex` C types in :mod:`ctypes` as +:class:`~ctypes.c_float_complex`, :class:`~ctypes.c_double_complex` and +:class:`~ctypes.c_longdouble_complex` if the compiler has C11 complex arithmetic. +Patch by Sergey B Kirpichev. diff --git a/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst b/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst index eac5bab3e9fe6d..346a89879cad41 100644 --- a/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst +++ b/Misc/NEWS.d/next/Library/2024-06-26-03-04-24.gh-issue-121018.clVSc4.rst @@ -1,3 +1,3 @@ -Fixed issues where :meth:`!argparse.ArgumentParser.parses_args` did not honor +Fixed issues where :meth:`!argparse.ArgumentParser.parse_args` did not honor ``exit_on_error=False``. Based on patch by Ben Hsing. diff --git a/Misc/NEWS.d/next/Library/2024-06-27-12-27-52.gh-issue-121027.D4K1OX.rst b/Misc/NEWS.d/next/Library/2024-06-27-12-27-52.gh-issue-121027.D4K1OX.rst new file mode 100644 index 00000000000000..a450726d9afed9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-27-12-27-52.gh-issue-121027.D4K1OX.rst @@ -0,0 +1 @@ +Make the :class:`functools.partial` object a method descriptor. diff --git a/Misc/NEWS.d/next/Library/2024-06-29-15-21-12.gh-issue-121141.4evD6q.rst b/Misc/NEWS.d/next/Library/2024-06-29-15-21-12.gh-issue-121141.4evD6q.rst new file mode 100644 index 00000000000000..f2dc621050ff4b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-29-15-21-12.gh-issue-121141.4evD6q.rst @@ -0,0 +1 @@ +Add support for :func:`copy.replace` to AST nodes. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-06-29-19-30-15.gh-issue-121163.SJKDFq.rst b/Misc/NEWS.d/next/Library/2024-06-29-19-30-15.gh-issue-121163.SJKDFq.rst index 029838030278a6..50f945ab9f1436 100644 --- a/Misc/NEWS.d/next/Library/2024-06-29-19-30-15.gh-issue-121163.SJKDFq.rst +++ b/Misc/NEWS.d/next/Library/2024-06-29-19-30-15.gh-issue-121163.SJKDFq.rst @@ -1,3 +1,2 @@ Add support for ``all`` as an valid ``action`` for :func:`warnings.simplefilter` -and :func:`warnings.filterswarnings`. - +and :func:`warnings.filterwarnings`. 
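The ctypes entry above (gh-61103) is wired up further down in this diff (``Modules/_ctypes/cfield.c`` and ``callproc.c``). From Python the new classes round-trip ordinary :class:`complex` values through their ``value`` attribute; a minimal sketch, hedged because the types exist only when CPython and libffi were built with C11 complex support:

    import ctypes

    # Guard on availability: these classes are compiled in only on builds
    # with C11 complex arithmetic (see the entry above).
    if hasattr(ctypes, "c_double_complex"):
        z = ctypes.c_double_complex(1.5 + 2.5j)
        print(type(z).__name__, z.value)  # c_double_complex (1.5+2.5j)
    else:
        print("this build lacks C11 complex support")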
diff --git a/Misc/NEWS.d/next/Library/2024-07-01-11-23-18.gh-issue-121210.cD0zfn.rst b/Misc/NEWS.d/next/Library/2024-07-01-11-23-18.gh-issue-121210.cD0zfn.rst new file mode 100644 index 00000000000000..55d5b221bf0765 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-01-11-23-18.gh-issue-121210.cD0zfn.rst @@ -0,0 +1,2 @@ +Handle AST nodes with missing runtime fields or attributes in +:func:`ast.compare`. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-07-02-11-34-06.gh-issue-121245.sSkDAr.rst b/Misc/NEWS.d/next/Library/2024-07-02-11-34-06.gh-issue-121245.sSkDAr.rst new file mode 100644 index 00000000000000..6e9dec2545166f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-02-11-34-06.gh-issue-121245.sSkDAr.rst @@ -0,0 +1,2 @@ +Fix a bug in the handling of the command history of the new :term:`REPL` that caused +the history file to be wiped at REPL exit. diff --git a/Misc/NEWS.d/next/Library/2024-07-03-07-25-21.gh-issue-121332.Iz6FEq.rst b/Misc/NEWS.d/next/Library/2024-07-03-07-25-21.gh-issue-121332.Iz6FEq.rst new file mode 100644 index 00000000000000..480f27e05953a6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-03-07-25-21.gh-issue-121332.Iz6FEq.rst @@ -0,0 +1,4 @@ +Fix constructor of :mod:`ast` nodes with custom ``_attributes``. Previously, +passing custom attributes would raise a :py:exc:`DeprecationWarning`. Passing +arguments to the constructor that are not in ``_fields`` or ``_attributes`` +remains deprecated. Patch by Jelle Zijlstra. diff --git a/Misc/NEWS.d/next/Library/2024-07-04-17-36-03.gh-issue-59110.IlI9Fz.rst b/Misc/NEWS.d/next/Library/2024-07-04-17-36-03.gh-issue-59110.IlI9Fz.rst new file mode 100644 index 00000000000000..b8e3ee0720cfe6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-04-17-36-03.gh-issue-59110.IlI9Fz.rst @@ -0,0 +1,2 @@ +:mod:`zipimport` now supports namespace packages when no directory entry +exists. diff --git a/Misc/NEWS.d/next/Library/2024-07-06-16-08-39.gh-issue-119169.o0YymL.rst b/Misc/NEWS.d/next/Library/2024-07-06-16-08-39.gh-issue-119169.o0YymL.rst new file mode 100644 index 00000000000000..5d9b50d452a9cd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-06-16-08-39.gh-issue-119169.o0YymL.rst @@ -0,0 +1 @@ +Slightly speed up :func:`os.walk` by simplifying exception handling. diff --git a/Misc/NEWS.d/next/Library/2024-07-06-23-39-38.gh-issue-121450.vGqb3c.rst b/Misc/NEWS.d/next/Library/2024-07-06-23-39-38.gh-issue-121450.vGqb3c.rst new file mode 100644 index 00000000000000..4a65fb737f025b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-06-23-39-38.gh-issue-121450.vGqb3c.rst @@ -0,0 +1,4 @@ +Hard-coded breakpoints (:func:`breakpoint` and :func:`pdb.set_trace()`) now +reuse the most recent ``Pdb`` instance that calls ``Pdb.set_trace()``, +instead of creating a new one each time. As a result, all the instance-specific +data like ``display`` and ``commands`` are preserved across hard-coded breakpoints. diff --git a/Misc/NEWS.d/next/Tests/2024-07-04-15-10-29.gh-issue-121084.qxcd5d.rst b/Misc/NEWS.d/next/Tests/2024-07-04-15-10-29.gh-issue-121084.qxcd5d.rst new file mode 100644 index 00000000000000..b91ea8acfadbf1 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-07-04-15-10-29.gh-issue-121084.qxcd5d.rst @@ -0,0 +1,3 @@ +Fix test_typing random leaks. Clear typing ABC caches when running tests for +refleaks (``-R`` option): call ``_abc_caches_clear()`` on typing abstract +classes and their subclasses. Patch by Victor Stinner.
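The :func:`ast.compare` entry at the top of this group refers to a fairly new helper for structural comparison of ASTs; a minimal sketch, assuming an interpreter recent enough to ship it (the exact signature and keyword options are not shown here):

    import ast

    # ast.compare() walks both trees and compares node types and fields; the
    # fix above makes it tolerate nodes with missing runtime fields/attributes.
    a = ast.parse("x + 1")
    b = ast.parse("x + 1")
    c = ast.parse("x + 2")
    print(ast.compare(a, b))  # True
    print(ast.compare(a, c))  # False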
diff --git a/Modules/Setup.bootstrap.in b/Modules/Setup.bootstrap.in index aa4e60e272653b..4dcc0f55176d0e 100644 --- a/Modules/Setup.bootstrap.in +++ b/Modules/Setup.bootstrap.in @@ -30,6 +30,7 @@ _weakref _weakref.c _abc _abc.c _functools _functoolsmodule.c _locale _localemodule.c +_opcode _opcode.c _operator _operator.c _stat _stat.c _symtable symtablemodule.c diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index 78b979698fcd75..dfc75077650df8 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -36,7 +36,6 @@ @MODULE__HEAPQ_TRUE@_heapq _heapqmodule.c @MODULE__JSON_TRUE@_json _json.c @MODULE__LSPROF_TRUE@_lsprof _lsprof.c rotatingtree.c -@MODULE__OPCODE_TRUE@_opcode _opcode.c @MODULE__PICKLE_TRUE@_pickle _pickle.c @MODULE__QUEUE_TRUE@_queue _queuemodule.c @MODULE__RANDOM_TRUE@_random _randommodule.c diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index 0bc61db4117c5d..fbfed59995c21e 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -1293,7 +1293,7 @@ deque_index_impl(dequeobject *deque, PyObject *v, Py_ssize_t start, index = 0; } } - PyErr_Format(PyExc_ValueError, "%R is not in deque", v); + PyErr_SetString(PyExc_ValueError, "deque.index(x): x not in deque"); return NULL; } @@ -1462,7 +1462,7 @@ deque_remove_impl(dequeobject *deque, PyObject *value) } } if (i == n) { - PyErr_Format(PyExc_ValueError, "%R is not in deque", value); + PyErr_SetString(PyExc_ValueError, "deque.remove(x): x not in deque"); return NULL; } rv = deque_del_item(deque, i); diff --git a/Modules/_complex.h b/Modules/_complex.h index 1c1d1c8cae51b9..28d4a32794b97c 100644 --- a/Modules/_complex.h +++ b/Modules/_complex.h @@ -21,6 +21,8 @@ #if !defined(CMPLX) # if defined(__clang__) && __has_builtin(__builtin_complex) # define CMPLX(x, y) __builtin_complex ((double) (x), (double) (y)) +# define CMPLXF(x, y) __builtin_complex ((float) (x), (float) (y)) +# define CMPLXL(x, y) __builtin_complex ((long double) (x), (long double) (y)) # else static inline double complex CMPLX(double real, double imag) @@ -30,5 +32,23 @@ CMPLX(double real, double imag) ((double *)(&z))[1] = imag; return z; } + +static inline float complex +CMPLXF(float real, float imag) +{ + float complex z; + ((float *)(&z))[0] = real; + ((float *)(&z))[1] = imag; + return z; +} + +static inline long double complex +CMPLXL(long double real, long double imag) +{ + long double complex z; + ((long double *)(&z))[0] = real; + ((long double *)(&z))[1] = imag; + return z; +} # endif #endif diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index 3647361b13a52c..db58f33511c166 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -1751,7 +1751,7 @@ class _ctypes.c_void_p "PyObject *" "clinic_state_sub()->PyCSimpleType_Type" /*[clinic end generated code: output=da39a3ee5e6b4b0d input=dd4d9646c56f43a9]*/ #if defined(Py_HAVE_C_COMPLEX) && defined(FFI_TARGET_HAS_COMPLEX_TYPE) -static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdCfuzZqQPXOv?g"; +static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdCEFfuzZqQPXOv?g"; #else static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdfuzZqQPXOv?g"; #endif @@ -2234,12 +2234,13 @@ PyCSimpleType_init(PyObject *self, PyObject *args, PyObject *kwds) stginfo->ffi_type_pointer = *fmt->pffi_type; } else { + const size_t els_size = sizeof(fmt->pffi_type->elements); stginfo->ffi_type_pointer.size = fmt->pffi_type->size; stginfo->ffi_type_pointer.alignment = fmt->pffi_type->alignment; stginfo->ffi_type_pointer.type = 
fmt->pffi_type->type; - stginfo->ffi_type_pointer.elements = PyMem_Malloc(2 * sizeof(ffi_type)); + stginfo->ffi_type_pointer.elements = PyMem_Malloc(els_size); memcpy(stginfo->ffi_type_pointer.elements, - fmt->pffi_type->elements, 2 * sizeof(ffi_type)); + fmt->pffi_type->elements, els_size); } stginfo->align = fmt->pffi_type->alignment; stginfo->length = 0; diff --git a/Modules/_ctypes/_ctypes_test.c b/Modules/_ctypes/_ctypes_test.c index cbc8f8b0b453af..b8e613fd669d1b 100644 --- a/Modules/_ctypes/_ctypes_test.c +++ b/Modules/_ctypes/_ctypes_test.c @@ -454,6 +454,16 @@ EXPORT(double complex) my_csqrt(double complex a) { return csqrt(a); } + +EXPORT(float complex) my_csqrtf(float complex a) +{ + return csqrtf(a); +} + +EXPORT(long double complex) my_csqrtl(long double complex a) +{ + return csqrtl(a); +} #endif EXPORT(void) my_qsort(void *base, size_t num, size_t width, int(*compare)(const void*, const void*)) diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index a83fa19af32402..fd89d9c67b3fc0 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -657,6 +657,8 @@ union result { void *p; #if defined(Py_HAVE_C_COMPLEX) && defined(FFI_TARGET_HAS_COMPLEX_TYPE) double complex C; + float complex E; + long double complex F; #endif }; diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index 40b72d83d16aeb..2c1fb9b862e12d 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -1112,6 +1112,50 @@ C_get(void *ptr, Py_ssize_t size) memcpy(&x, ptr, sizeof(x)); return PyComplex_FromDoubles(creal(x), cimag(x)); } + +static PyObject * +E_set(void *ptr, PyObject *value, Py_ssize_t size) +{ + Py_complex c = PyComplex_AsCComplex(value); + + if (c.real == -1 && PyErr_Occurred()) { + return NULL; + } + float complex x = CMPLXF((float)c.real, (float)c.imag); + memcpy(ptr, &x, sizeof(x)); + _RET(value); +} + +static PyObject * +E_get(void *ptr, Py_ssize_t size) +{ + float complex x; + + memcpy(&x, ptr, sizeof(x)); + return PyComplex_FromDoubles(crealf(x), cimagf(x)); +} + +static PyObject * +F_set(void *ptr, PyObject *value, Py_ssize_t size) +{ + Py_complex c = PyComplex_AsCComplex(value); + + if (c.real == -1 && PyErr_Occurred()) { + return NULL; + } + long double complex x = CMPLXL(c.real, c.imag); + memcpy(ptr, &x, sizeof(x)); + _RET(value); +} + +static PyObject * +F_get(void *ptr, Py_ssize_t size) +{ + long double complex x; + + memcpy(&x, ptr, sizeof(x)); + return PyComplex_FromDoubles((double)creall(x), (double)cimagl(x)); +} #endif static PyObject * @@ -1621,6 +1665,8 @@ static struct fielddesc formattable[] = { { 'd', d_set, d_get, NULL, d_set_sw, d_get_sw}, #if defined(Py_HAVE_C_COMPLEX) && defined(FFI_TARGET_HAS_COMPLEX_TYPE) { 'C', C_set, C_get, NULL}, + { 'E', E_set, E_get, NULL}, + { 'F', F_set, F_get, NULL}, #endif { 'g', g_set, g_get, NULL}, { 'f', f_set, f_get, NULL, f_set_sw, f_get_sw}, @@ -1674,6 +1720,8 @@ _ctypes_init_fielddesc(void) case 'd': fd->pffi_type = &ffi_type_double; break; #if defined(Py_HAVE_C_COMPLEX) && defined(FFI_TARGET_HAS_COMPLEX_TYPE) case 'C': fd->pffi_type = &ffi_type_complex_double; break; + case 'E': fd->pffi_type = &ffi_type_complex_float; break; + case 'F': fd->pffi_type = &ffi_type_complex_longdouble; break; #endif case 'g': fd->pffi_type = &ffi_type_longdouble; break; case 'f': fd->pffi_type = &ffi_type_float; break; diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index 5ba5eb3851a690..a794cfe86b5f42 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ 
-401,6 +401,8 @@ struct tagPyCArgObject { void *p; #if defined(Py_HAVE_C_COMPLEX) && defined(FFI_TARGET_HAS_COMPLEX_TYPE) double complex C; + float complex E; + long double complex F; #endif } value; PyObject *obj; diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index 564c271915959a..64766b474514bf 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -203,14 +203,7 @@ partial_descr_get(PyObject *self, PyObject *obj, PyObject *type) if (obj == Py_None || obj == NULL) { return Py_NewRef(self); } - if (PyErr_WarnEx(PyExc_FutureWarning, - "functools.partial will be a method descriptor in " - "future Python versions; wrap it in staticmethod() " - "if you want to preserve the old behavior", 1) < 0) - { - return NULL; - } - return Py_NewRef(self); + return PyMethod_New(self, obj); } /* Merging keyword arguments using the vectorcall convention is messy, so diff --git a/Modules/_interpchannelsmodule.c b/Modules/_interpchannelsmodule.c index ff8dacf5bd1ad0..f0447475c49116 100644 --- a/Modules/_interpchannelsmodule.c +++ b/Modules/_interpchannelsmodule.c @@ -2615,10 +2615,10 @@ _get_current_channelend_type(int end) } if (cls == NULL) { // Force the module to be loaded, to register the type. - PyObject *highlevel = PyImport_ImportModule("interpreters.channel"); + PyObject *highlevel = PyImport_ImportModule("interpreters.channels"); if (highlevel == NULL) { PyErr_Clear(); - highlevel = PyImport_ImportModule("test.support.interpreters.channel"); + highlevel = PyImport_ImportModule("test.support.interpreters.channels"); if (highlevel == NULL) { return NULL; } @@ -2977,7 +2977,7 @@ channelsmod_send(PyObject *self, PyObject *args, PyObject *kwds) } PyDoc_STRVAR(channelsmod_send_doc, -"channel_send(cid, obj, blocking=True)\n\ +"channel_send(cid, obj, *, blocking=True, timeout=None)\n\ \n\ Add the object's data to the channel's queue.\n\ By default this waits for the object to be received."); @@ -3027,7 +3027,7 @@ channelsmod_send_buffer(PyObject *self, PyObject *args, PyObject *kwds) } PyDoc_STRVAR(channelsmod_send_buffer_doc, -"channel_send_buffer(cid, obj, blocking=True)\n\ +"channel_send_buffer(cid, obj, *, blocking=True, timeout=None)\n\ \n\ Add the object's buffer to the channel's queue.\n\ By default this waits for the object to be received."); diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index b5129ffcbffdcf..5d9d87d6118a75 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -54,6 +54,9 @@ # define SMALLCHUNK BUFSIZ #endif +/* Size at which a buffer is considered "large" and behavior should change to + avoid excessive memory allocation */ +#define LARGE_BUFFER_CUTOFF_SIZE 65536 /*[clinic input] module _io @@ -72,6 +75,7 @@ typedef struct { unsigned int closefd : 1; char finalizing; unsigned int blksize; + Py_off_t estimated_size; PyObject *weakreflist; PyObject *dict; } fileio; @@ -196,6 +200,7 @@ fileio_new(PyTypeObject *type, PyObject *args, PyObject *kwds) self->appending = 0; self->seekable = -1; self->blksize = 0; + self->estimated_size = -1; self->closefd = 1; self->weakreflist = NULL; } @@ -482,6 +487,9 @@ _io_FileIO___init___impl(fileio *self, PyObject *nameobj, const char *mode, if (fdfstat.st_blksize > 1) self->blksize = fdfstat.st_blksize; #endif /* HAVE_STRUCT_STAT_ST_BLKSIZE */ + if (fdfstat.st_size < PY_SSIZE_T_MAX) { + self->estimated_size = (Py_off_t)fdfstat.st_size; + } } #if defined(MS_WINDOWS) || defined(__CYGWIN__) @@ -684,7 +692,7 @@ new_buffersize(fileio *self, size_t currentsize) giving us amortized linear-time 
behavior. For bigger sizes, use a less-than-double growth factor to avoid excessive allocation. */ assert(currentsize <= PY_SSIZE_T_MAX); - if (currentsize > 65536) + if (currentsize > LARGE_BUFFER_CUTOFF_SIZE) addend = currentsize >> 3; else addend = 256 + currentsize; @@ -707,43 +715,56 @@ static PyObject * _io_FileIO_readall_impl(fileio *self) /*[clinic end generated code: output=faa0292b213b4022 input=dbdc137f55602834]*/ { - struct _Py_stat_struct status; Py_off_t pos, end; PyObject *result; Py_ssize_t bytes_read = 0; Py_ssize_t n; size_t bufsize; - int fstat_result; - if (self->fd < 0) + if (self->fd < 0) { return err_closed(); + } - Py_BEGIN_ALLOW_THREADS - _Py_BEGIN_SUPPRESS_IPH -#ifdef MS_WINDOWS - pos = _lseeki64(self->fd, 0L, SEEK_CUR); -#else - pos = lseek(self->fd, 0L, SEEK_CUR); -#endif - _Py_END_SUPPRESS_IPH - fstat_result = _Py_fstat_noraise(self->fd, &status); - Py_END_ALLOW_THREADS - - if (fstat_result == 0) - end = status.st_size; - else - end = (Py_off_t)-1; - - if (end > 0 && end >= pos && pos >= 0 && end - pos < PY_SSIZE_T_MAX) { + end = self->estimated_size; + if (end <= 0) { + /* Use a default size and resize as needed. */ + bufsize = SMALLCHUNK; + } + else { /* This is probably a real file, so we try to allocate a buffer one byte larger than the rest of the file. If the calculation is right then we should get EOF without having to enlarge the buffer. */ - bufsize = (size_t)(end - pos + 1); - } else { - bufsize = SMALLCHUNK; + if (end > _PY_READ_MAX - 1) { + bufsize = _PY_READ_MAX; + } + else { + bufsize = (size_t)end + 1; + } + + /* While a lot of code does open().read() to get the whole contents + of a file it is possible a caller seeks/reads a ways into the file + then calls readall() to get the rest, which would result in allocating + more than required. Guard against that for larger files where we expect + the I/O time to dominate anyways while keeping small files fast. */ + if (bufsize > LARGE_BUFFER_CUTOFF_SIZE) { + Py_BEGIN_ALLOW_THREADS + _Py_BEGIN_SUPPRESS_IPH +#ifdef MS_WINDOWS + pos = _lseeki64(self->fd, 0L, SEEK_CUR); +#else + pos = lseek(self->fd, 0L, SEEK_CUR); +#endif + _Py_END_SUPPRESS_IPH + Py_END_ALLOW_THREADS + + if (end >= pos && pos >= 0 && (end - pos) < (_PY_READ_MAX - 1)) { + bufsize = (size_t)(end - pos) + 1; + } + } } + result = PyBytes_FromStringAndSize(NULL, bufsize); if (result == NULL) return NULL; @@ -783,7 +804,6 @@ _io_FileIO_readall_impl(fileio *self) return NULL; } bytes_read += n; - pos += n; } if (PyBytes_GET_SIZE(result) > bytes_read) { @@ -1074,6 +1094,12 @@ _io_FileIO_truncate_impl(fileio *self, PyTypeObject *cls, PyObject *posobj) return NULL; } + /* Sometimes a large file is truncated. While estimated_size is used as a + estimate, that it is much larger than the actual size can result in a + significant over allocation and sometimes a MemoryError / running out of + memory. 
*/ + self->estimated_size = pos; + return posobj; } #endif /* HAVE_FTRUNCATE */ diff --git a/Modules/_opcode.c b/Modules/_opcode.c index 67643641bea861..dc93063aee7e54 100644 --- a/Modules/_opcode.c +++ b/Modules/_opcode.c @@ -10,6 +10,8 @@ #include "pycore_compile.h" #include "pycore_intrinsics.h" #include "pycore_optimizer.h" // _Py_GetExecutor() +#include "pycore_opcode_metadata.h" // IS_VALID_OPCODE, OPCODE_HAS_*, etc +#include "pycore_opcode_utils.h" /*[clinic input] module _opcode @@ -81,7 +83,7 @@ static int _opcode_is_valid_impl(PyObject *module, int opcode) /*[clinic end generated code: output=b0d918ea1d073f65 input=fe23e0aa194ddae0]*/ { - return _PyCompile_OpcodeIsValid(opcode); + return IS_VALID_OPCODE(opcode); } /*[clinic input] @@ -97,8 +99,7 @@ static int _opcode_has_arg_impl(PyObject *module, int opcode) /*[clinic end generated code: output=7a062d3b2dcc0815 input=93d878ba6361db5f]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasArg(opcode); + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_ARG(opcode); } /*[clinic input] @@ -114,8 +115,7 @@ static int _opcode_has_const_impl(PyObject *module, int opcode) /*[clinic end generated code: output=c646d5027c634120 input=a6999e4cf13f9410]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasConst(opcode); + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_CONST(opcode); } /*[clinic input] @@ -131,8 +131,7 @@ static int _opcode_has_name_impl(PyObject *module, int opcode) /*[clinic end generated code: output=b49a83555c2fa517 input=448aa5e4bcc947ba]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasName(opcode); + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_NAME(opcode); } /*[clinic input] @@ -148,9 +147,7 @@ static int _opcode_has_jump_impl(PyObject *module, int opcode) /*[clinic end generated code: output=e9c583c669f1c46a input=35f711274357a0c3]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasJump(opcode); - + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_JUMP(opcode); } /*[clinic input] @@ -171,9 +168,7 @@ static int _opcode_has_free_impl(PyObject *module, int opcode) /*[clinic end generated code: output=d81ae4d79af0ee26 input=117dcd5c19c1139b]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasFree(opcode); - + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_FREE(opcode); } /*[clinic input] @@ -189,8 +184,7 @@ static int _opcode_has_local_impl(PyObject *module, int opcode) /*[clinic end generated code: output=da5a8616b7a5097b input=9a798ee24aaef49d]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasLocal(opcode); + return IS_VALID_OPCODE(opcode) && OPCODE_HAS_LOCAL(opcode); } /*[clinic input] @@ -206,8 +200,7 @@ static int _opcode_has_exc_impl(PyObject *module, int opcode) /*[clinic end generated code: output=41b68dff0ec82a52 input=db0e4bdb9bf13fa5]*/ { - return _PyCompile_OpcodeIsValid(opcode) && - _PyCompile_OpcodeHasExc(opcode); + return IS_VALID_OPCODE(opcode) && IS_BLOCK_PUSH_OPCODE(opcode); } /*[clinic input] @@ -424,7 +417,7 @@ opcode_functions[] = { {NULL, NULL, 0, NULL} }; -int +static int _opcode_exec(PyObject *m) { if (PyModule_AddIntMacro(m, ENABLE_SPECIALIZATION) < 0) { return -1; diff --git a/Modules/_testcapi/pyatomic.c b/Modules/_testcapi/pyatomic.c index 4f72844535ebd6..850de6f9c3366b 100644 --- a/Modules/_testcapi/pyatomic.c +++ b/Modules/_testcapi/pyatomic.c @@ -125,6 +125,7 @@ test_atomic_fences(PyObject *self, PyObject *obj) { // Just make sure that the fences compile. 
We are not // testing any synchronizing ordering. _Py_atomic_fence_seq_cst(); + _Py_atomic_fence_acquire(); _Py_atomic_fence_release(); Py_RETURN_NONE; } diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h index dc7d3fb814396d..0ccbf63b688f1b 100644 --- a/Modules/clinic/selectmodule.c.h +++ b/Modules/clinic/selectmodule.c.h @@ -6,6 +6,7 @@ preserve # include "pycore_gc.h" // PyGC_Head # include "pycore_runtime.h" // _Py_ID() #endif +#include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() #include "pycore_long.h" // _PyLong_UnsignedShort_Converter() #include "pycore_modsupport.h" // _PyArg_CheckPositional() @@ -110,7 +111,9 @@ select_poll_register(pollObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_poll_register_impl(self, fd, eventmask); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -155,7 +158,9 @@ select_poll_modify(pollObject *self, PyObject *const *args, Py_ssize_t nargs) if (!_PyLong_UnsignedShort_Converter(args[1], &eventmask)) { goto exit; } + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_poll_modify_impl(self, fd, eventmask); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -187,7 +192,9 @@ select_poll_unregister(pollObject *self, PyObject *arg) if (fd < 0) { goto exit; } + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_poll_unregister_impl(self, fd); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -230,7 +237,9 @@ select_poll_poll(pollObject *self, PyObject *const *args, Py_ssize_t nargs) } timeout_obj = args[0]; skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_poll_poll_impl(self, timeout_obj); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -281,7 +290,9 @@ select_devpoll_register(devpollObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_devpoll_register_impl(self, fd, eventmask); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -332,7 +343,9 @@ select_devpoll_modify(devpollObject *self, PyObject *const *args, Py_ssize_t nar goto exit; } skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_devpoll_modify_impl(self, fd, eventmask); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -364,7 +377,9 @@ select_devpoll_unregister(devpollObject *self, PyObject *arg) if (fd < 0) { goto exit; } + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_devpoll_unregister_impl(self, fd); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -407,7 +422,9 @@ select_devpoll_poll(devpollObject *self, PyObject *const *args, Py_ssize_t nargs } timeout_obj = args[0]; skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = select_devpoll_poll_impl(self, timeout_obj); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -434,7 +451,13 @@ select_devpoll_close_impl(devpollObject *self); static PyObject * select_devpoll_close(devpollObject *self, PyObject *Py_UNUSED(ignored)) { - return select_devpoll_close_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = select_devpoll_close_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } #endif /* (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) && defined(HAVE_SYS_DEVPOLL_H) */ @@ -456,7 +479,13 @@ select_devpoll_fileno_impl(devpollObject *self); static PyObject * select_devpoll_fileno(devpollObject *self, PyObject *Py_UNUSED(ignored)) { - return 
select_devpoll_fileno_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = select_devpoll_fileno_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } #endif /* (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) && defined(HAVE_SYS_DEVPOLL_H) */ @@ -615,7 +644,13 @@ select_epoll_close_impl(pyEpoll_Object *self); static PyObject * select_epoll_close(pyEpoll_Object *self, PyObject *Py_UNUSED(ignored)) { - return select_epoll_close_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = select_epoll_close_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } #endif /* defined(HAVE_EPOLL) */ @@ -1108,7 +1143,13 @@ select_kqueue_close_impl(kqueue_queue_Object *self); static PyObject * select_kqueue_close(kqueue_queue_Object *self, PyObject *Py_UNUSED(ignored)) { - return select_kqueue_close_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = select_kqueue_close_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } #endif /* defined(HAVE_KQUEUE) */ @@ -1319,4 +1360,4 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize #ifndef SELECT_KQUEUE_CONTROL_METHODDEF #define SELECT_KQUEUE_CONTROL_METHODDEF #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ -/*[clinic end generated code: output=4fc17ae9b6cfdc86 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f31e724f492225b1 input=a9049054013a1b77]*/ diff --git a/Modules/getpath.c b/Modules/getpath.c index abed139028244a..d0128b20faeeae 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -951,6 +951,11 @@ _PyConfig_InitPathConfig(PyConfig *config, int compute_path_config) !wchar_to_dict(dict, "executable_dir", NULL) || !wchar_to_dict(dict, "py_setpath", _PyPathConfig_GetGlobalModuleSearchPath()) || !funcs_to_dict(dict, config->pathconfig_warnings) || +#ifdef Py_GIL_DISABLED + !decode_to_dict(dict, "ABI_THREAD", "t") || +#else + !decode_to_dict(dict, "ABI_THREAD", "") || +#endif #ifndef MS_WINDOWS PyDict_SetItemString(dict, "winreg", Py_None) < 0 || #endif diff --git a/Modules/getpath.py b/Modules/getpath.py index bc7053224aaf16..1f1bfcb4f64dd4 100644 --- a/Modules/getpath.py +++ b/Modules/getpath.py @@ -40,6 +40,7 @@ # EXE_SUFFIX -- [in, opt] '.exe' on Windows/Cygwin/similar # VERSION_MAJOR -- [in] sys.version_info.major # VERSION_MINOR -- [in] sys.version_info.minor +# ABI_THREAD -- [in] either 't' for free-threaded builds or '' # PYWINVER -- [in] the Windows platform-specific version (e.g. 
3.8-32) # ** Values read from the environment ** @@ -172,17 +173,18 @@ # ****************************************************************************** platlibdir = config.get('platlibdir') or PLATLIBDIR +ABI_THREAD = ABI_THREAD or '' if os_name == 'posix' or os_name == 'darwin': BUILDDIR_TXT = 'pybuilddir.txt' BUILD_LANDMARK = 'Modules/Setup.local' DEFAULT_PROGRAM_NAME = f'python{VERSION_MAJOR}' - STDLIB_SUBDIR = f'{platlibdir}/python{VERSION_MAJOR}.{VERSION_MINOR}' + STDLIB_SUBDIR = f'{platlibdir}/python{VERSION_MAJOR}.{VERSION_MINOR}{ABI_THREAD}' STDLIB_LANDMARKS = [f'{STDLIB_SUBDIR}/os.py', f'{STDLIB_SUBDIR}/os.pyc'] - PLATSTDLIB_LANDMARK = f'{platlibdir}/python{VERSION_MAJOR}.{VERSION_MINOR}/lib-dynload' + PLATSTDLIB_LANDMARK = f'{platlibdir}/python{VERSION_MAJOR}.{VERSION_MINOR}{ABI_THREAD}/lib-dynload' BUILDSTDLIB_LANDMARKS = ['Lib/os.py'] VENV_LANDMARK = 'pyvenv.cfg' - ZIP_LANDMARK = f'{platlibdir}/python{VERSION_MAJOR}{VERSION_MINOR}.zip' + ZIP_LANDMARK = f'{platlibdir}/python{VERSION_MAJOR}{VERSION_MINOR}{ABI_THREAD}.zip' DELIM = ':' SEP = '/' diff --git a/Modules/rotatingtree.c b/Modules/rotatingtree.c index 217e495b3d2a9d..5910e25bed6389 100644 --- a/Modules/rotatingtree.c +++ b/Modules/rotatingtree.c @@ -1,9 +1,4 @@ -#ifndef Py_BUILD_CORE_BUILTIN -# define Py_BUILD_CORE_MODULE 1 -#endif - #include "Python.h" -#include "pycore_lock.h" #include "rotatingtree.h" #define KEY_LOWER_THAN(key1, key2) ((char*)(key1) < (char*)(key2)) diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 3eaee22c652c28..0a5b5a703a5aa1 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -473,6 +473,7 @@ update_ufd_array(pollObject *self) } /*[clinic input] +@critical_section select.poll.register fd: fildes @@ -486,7 +487,7 @@ Register a file descriptor with the polling object. static PyObject * select_poll_register_impl(pollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=0dc7173c800a4a65 input=34e16cfb28d3c900]*/ +/*[clinic end generated code: output=0dc7173c800a4a65 input=c475e029ce6c2830]*/ { PyObject *key, *value; int err; @@ -514,6 +515,7 @@ select_poll_register_impl(pollObject *self, int fd, unsigned short eventmask) /*[clinic input] +@critical_section select.poll.modify fd: fildes @@ -528,7 +530,7 @@ Modify an already registered file descriptor. static PyObject * select_poll_modify_impl(pollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=1a7b88bf079eff17 input=a8e383df075c32cf]*/ +/*[clinic end generated code: output=1a7b88bf079eff17 input=38c9db5346711872]*/ { PyObject *key, *value; int err; @@ -566,6 +568,7 @@ select_poll_modify_impl(pollObject *self, int fd, unsigned short eventmask) /*[clinic input] +@critical_section select.poll.unregister fd: fildes @@ -576,7 +579,7 @@ Remove a file descriptor being tracked by the polling object. static PyObject * select_poll_unregister_impl(pollObject *self, int fd) -/*[clinic end generated code: output=8c9f42e75e7d291b input=4b4fccc1040e79cb]*/ +/*[clinic end generated code: output=8c9f42e75e7d291b input=ae6315d7f5243704]*/ { PyObject *key; @@ -599,6 +602,7 @@ select_poll_unregister_impl(pollObject *self, int fd) } /*[clinic input] +@critical_section select.poll.poll timeout as timeout_obj: object = None @@ -614,7 +618,7 @@ report, as a list of (fd, event) 2-tuples. 
static PyObject * select_poll_poll_impl(pollObject *self, PyObject *timeout_obj) -/*[clinic end generated code: output=876e837d193ed7e4 input=c2f6953ec45e5622]*/ +/*[clinic end generated code: output=876e837d193ed7e4 input=54310631457efdec]*/ { PyObject *result_list = NULL; int poll_result, i, j; @@ -857,6 +861,7 @@ internal_devpoll_register(devpollObject *self, int fd, } /*[clinic input] +@critical_section select.devpoll.register fd: fildes @@ -872,12 +877,13 @@ Register a file descriptor with the polling object. static PyObject * select_devpoll_register_impl(devpollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=6e07fe8b74abba0c input=22006fabe9567522]*/ +/*[clinic end generated code: output=6e07fe8b74abba0c input=8d48bd2653a61c42]*/ { return internal_devpoll_register(self, fd, eventmask, 0); } /*[clinic input] +@critical_section select.devpoll.modify fd: fildes @@ -893,12 +899,13 @@ Modify a possible already registered file descriptor. static PyObject * select_devpoll_modify_impl(devpollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=bc2e6d23aaff98b4 input=09fa335db7cdc09e]*/ +/*[clinic end generated code: output=bc2e6d23aaff98b4 input=773b37e9abca2460]*/ { return internal_devpoll_register(self, fd, eventmask, 1); } /*[clinic input] +@critical_section select.devpoll.unregister fd: fildes @@ -909,7 +916,7 @@ Remove a file descriptor being tracked by the polling object. static PyObject * select_devpoll_unregister_impl(devpollObject *self, int fd) -/*[clinic end generated code: output=95519ffa0c7d43fe input=b4ea42a4442fd467]*/ +/*[clinic end generated code: output=95519ffa0c7d43fe input=6052d368368d4d05]*/ { if (self->fd_devpoll < 0) return devpoll_err_closed(); @@ -926,6 +933,7 @@ select_devpoll_unregister_impl(devpollObject *self, int fd) } /*[clinic input] +@critical_section select.devpoll.poll timeout as timeout_obj: object = None The maximum time to wait in milliseconds, or else None (or a negative @@ -940,7 +948,7 @@ report, as a list of (fd, event) 2-tuples. static PyObject * select_devpoll_poll_impl(devpollObject *self, PyObject *timeout_obj) -/*[clinic end generated code: output=2654e5457cca0b3c input=3c3f0a355ec2bedb]*/ +/*[clinic end generated code: output=2654e5457cca0b3c input=fe7a3f6dcbc118c5]*/ { struct dvpoll dvp; PyObject *result_list = NULL; @@ -1059,6 +1067,7 @@ devpoll_internal_close(devpollObject *self) } /*[clinic input] +@critical_section select.devpoll.close Close the devpoll file descriptor. @@ -1068,7 +1077,7 @@ Further operations on the devpoll object will raise an exception. static PyObject * select_devpoll_close_impl(devpollObject *self) -/*[clinic end generated code: output=26b355bd6429f21b input=6273c30f5560a99b]*/ +/*[clinic end generated code: output=26b355bd6429f21b input=408fde21a377ccfb]*/ { errno = devpoll_internal_close(self); if (errno < 0) { @@ -1088,6 +1097,7 @@ devpoll_get_closed(devpollObject *self, void *Py_UNUSED(ignored)) } /*[clinic input] +@critical_section select.devpoll.fileno Return the file descriptor. @@ -1095,7 +1105,7 @@ Return the file descriptor. 
static PyObject * select_devpoll_fileno_impl(devpollObject *self) -/*[clinic end generated code: output=26920929f8d292f4 input=ef15331ebde6c368]*/ +/*[clinic end generated code: output=26920929f8d292f4 input=8c9db2efa1ade538]*/ { if (self->fd_devpoll < 0) return devpoll_err_closed(); @@ -1378,6 +1388,7 @@ pyepoll_dealloc(pyEpoll_Object *self) } /*[clinic input] +@critical_section select.epoll.close Close the epoll control file descriptor. @@ -1387,7 +1398,7 @@ Further operations on the epoll object will raise an exception. static PyObject * select_epoll_close_impl(pyEpoll_Object *self) -/*[clinic end generated code: output=ee2144c446a1a435 input=ca6c66ba5a736bfd]*/ +/*[clinic end generated code: output=ee2144c446a1a435 input=f626a769192e1dbe]*/ { errno = pyepoll_internal_close(self); if (errno < 0) { @@ -2023,10 +2034,8 @@ kqueue_tracking_init(PyObject *module) { } static int -kqueue_tracking_add(_selectstate *state, kqueue_queue_Object *self) { - if (!state->kqueue_tracking_initialized) { - kqueue_tracking_init(PyType_GetModule(Py_TYPE(self))); - } +kqueue_tracking_add_lock_held(_selectstate *state, kqueue_queue_Object *self) +{ assert(self->kqfd >= 0); _kqueue_list_item *item = PyMem_New(_kqueue_list_item, 1); if (item == NULL) { @@ -2039,8 +2048,23 @@ kqueue_tracking_add(_selectstate *state, kqueue_queue_Object *self) { return 0; } +static int +kqueue_tracking_add(_selectstate *state, kqueue_queue_Object *self) +{ + int ret; + PyObject *module = PyType_GetModule(Py_TYPE(self)); + Py_BEGIN_CRITICAL_SECTION(module); + if (!state->kqueue_tracking_initialized) { + kqueue_tracking_init(module); + } + ret = kqueue_tracking_add_lock_held(state, self); + Py_END_CRITICAL_SECTION(); + return ret; +} + static void -kqueue_tracking_remove(_selectstate *state, kqueue_queue_Object *self) { +kqueue_tracking_remove_lock_held(_selectstate *state, kqueue_queue_Object *self) +{ _kqueue_list *listptr = &state->kqueue_open_list; while (*listptr != NULL) { _kqueue_list_item *item = *listptr; @@ -2056,6 +2080,14 @@ kqueue_tracking_remove(_selectstate *state, kqueue_queue_Object *self) { assert(0); } +static void +kqueue_tracking_remove(_selectstate *state, kqueue_queue_Object *self) +{ + Py_BEGIN_CRITICAL_SECTION(PyType_GetModule(Py_TYPE(self))); + kqueue_tracking_remove_lock_held(state, self); + Py_END_CRITICAL_SECTION(); +} + static int kqueue_queue_internal_close(kqueue_queue_Object *self) { @@ -2150,6 +2182,7 @@ kqueue_queue_finalize(kqueue_queue_Object *self) } /*[clinic input] +@critical_section select.kqueue.close Close the kqueue control file descriptor. @@ -2159,7 +2192,7 @@ Further operations on the kqueue object will raise an exception. 
static PyObject * select_kqueue_close_impl(kqueue_queue_Object *self) -/*[clinic end generated code: output=d1c7df0b407a4bc1 input=0b12d95430e0634c]*/ +/*[clinic end generated code: output=d1c7df0b407a4bc1 input=6d763c858b17b690]*/ { errno = kqueue_queue_internal_close(self); if (errno < 0) { diff --git a/Objects/bytes_methods.c b/Objects/bytes_methods.c index 55252406578774..c239ae18a593e3 100644 --- a/Objects/bytes_methods.c +++ b/Objects/bytes_methods.c @@ -432,19 +432,24 @@ parse_args_finds_byte(const char *function_name, PyObject **subobj, char *byte) } /* helper macro to fixup start/end slice values */ -#define ADJUST_INDICES(start, end, len) \ - if (end > len) \ - end = len; \ - else if (end < 0) { \ - end += len; \ - if (end < 0) \ - end = 0; \ - } \ - if (start < 0) { \ - start += len; \ - if (start < 0) \ - start = 0; \ - } +#define ADJUST_INDICES(start, end, len) \ + do { \ + if (end > len) { \ + end = len; \ + } \ + else if (end < 0) { \ + end += len; \ + if (end < 0) { \ + end = 0; \ + } \ + } \ + if (start < 0) { \ + start += len; \ + if (start < 0) { \ + start = 0; \ + } \ + } \ + } while (0) Py_LOCAL_INLINE(Py_ssize_t) find_internal(const char *str, Py_ssize_t len, diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 2b11a01595b0bc..149e552af3a729 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -158,6 +158,10 @@ ASSERT_DICT_LOCKED(PyObject *op) if (!_PyInterpreterState_GET()->stoptheworld.world_stopped) { \ ASSERT_DICT_LOCKED(op); \ } +#define ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(op) \ + if (!_PyInterpreterState_GET()->stoptheworld.world_stopped) { \ + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op); \ + } #define IS_DICT_SHARED(mp) _PyObject_GC_IS_SHARED(mp) #define SET_DICT_SHARED(mp) _PyObject_GC_SET_SHARED(mp) @@ -165,16 +169,15 @@ ASSERT_DICT_LOCKED(PyObject *op) #define STORE_INDEX(keys, size, idx, value) _Py_atomic_store_int##size##_relaxed(&((int##size##_t*)keys->dk_indices)[idx], (int##size##_t)value); #define ASSERT_OWNED_OR_SHARED(mp) \ assert(_Py_IsOwnedByCurrentThread((PyObject *)mp) || IS_DICT_SHARED(mp)); -#define LOAD_KEYS_NENTRIES(d) #define LOCK_KEYS_IF_SPLIT(keys, kind) \ if (kind == DICT_KEYS_SPLIT) { \ - LOCK_KEYS(dk); \ + LOCK_KEYS(keys); \ } #define UNLOCK_KEYS_IF_SPLIT(keys, kind) \ if (kind == DICT_KEYS_SPLIT) { \ - UNLOCK_KEYS(dk); \ + UNLOCK_KEYS(keys); \ } static inline Py_ssize_t @@ -208,7 +211,7 @@ set_values(PyDictObject *mp, PyDictValues *values) #define INCREF_KEYS(dk) _Py_atomic_add_ssize(&dk->dk_refcnt, 1) // Dec refs the keys object, giving the previous value #define DECREF_KEYS(dk) _Py_atomic_add_ssize(&dk->dk_refcnt, -1) -#define LOAD_KEYS_NENTIRES(keys) _Py_atomic_load_ssize_relaxed(&keys->dk_nentries) +#define LOAD_KEYS_NENTRIES(keys) _Py_atomic_load_ssize_relaxed(&keys->dk_nentries) #define INCREF_KEYS_FT(dk) dictkeys_incref(dk) #define DECREF_KEYS_FT(dk, shared) dictkeys_decref(_PyInterpreterState_GET(), dk, shared) @@ -227,6 +230,7 @@ static inline void split_keys_entry_added(PyDictKeysObject *keys) #define ASSERT_DICT_LOCKED(op) #define ASSERT_WORLD_STOPPED_OR_DICT_LOCKED(op) +#define ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(op) #define LOCK_KEYS(keys) #define UNLOCK_KEYS(keys) #define ASSERT_KEYS_LOCKED(keys) @@ -234,7 +238,7 @@ static inline void split_keys_entry_added(PyDictKeysObject *keys) #define STORE_SHARED_KEY(key, value) key = value #define INCREF_KEYS(dk) dk->dk_refcnt++ #define DECREF_KEYS(dk) dk->dk_refcnt-- -#define LOAD_KEYS_NENTIRES(keys) keys->dk_nentries +#define LOAD_KEYS_NENTRIES(keys) 
keys->dk_nentries #define INCREF_KEYS_FT(dk) #define DECREF_KEYS_FT(dk, shared) #define LOCK_KEYS_IF_SPLIT(keys, kind) @@ -689,10 +693,15 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) int splitted = _PyDict_HasSplitTable(mp); Py_ssize_t usable = USABLE_FRACTION(DK_SIZE(keys)); + // In the free-threaded build, shared keys may be concurrently modified, + // so use atomic loads. + Py_ssize_t dk_usable = FT_ATOMIC_LOAD_SSIZE_ACQUIRE(keys->dk_usable); + Py_ssize_t dk_nentries = FT_ATOMIC_LOAD_SSIZE_ACQUIRE(keys->dk_nentries); + CHECK(0 <= mp->ma_used && mp->ma_used <= usable); - CHECK(0 <= keys->dk_usable && keys->dk_usable <= usable); - CHECK(0 <= keys->dk_nentries && keys->dk_nentries <= usable); - CHECK(keys->dk_usable + keys->dk_nentries <= usable); + CHECK(0 <= dk_usable && dk_usable <= usable); + CHECK(0 <= dk_nentries && dk_nentries <= usable); + CHECK(dk_usable + dk_nentries <= usable); if (!splitted) { /* combined table */ @@ -709,6 +718,7 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) } if (check_content) { + LOCK_KEYS_IF_SPLIT(keys, keys->dk_kind); for (Py_ssize_t i=0; i < DK_SIZE(keys); i++) { Py_ssize_t ix = dictkeys_get_index(keys, i); CHECK(DKIX_DUMMY <= ix && ix <= usable); @@ -764,6 +774,7 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) CHECK(mp->ma_values->values[index] != NULL); } } + UNLOCK_KEYS_IF_SPLIT(keys, keys->dk_kind); } return 1; @@ -4032,7 +4043,7 @@ dict_equal_lock_held(PyDictObject *a, PyDictObject *b) /* can't be equal if # of entries differ */ return 0; /* Same # of entries -- check all of 'em. Exit early on any diff. */ - for (i = 0; i < LOAD_KEYS_NENTIRES(a->ma_keys); i++) { + for (i = 0; i < LOAD_KEYS_NENTRIES(a->ma_keys); i++) { PyObject *key, *aval; Py_hash_t hash; if (DK_IS_UNICODE(a->ma_keys)) { @@ -6667,10 +6678,10 @@ make_dict_from_instance_attributes(PyInterpreterState *interp, return res; } -static PyDictObject * -materialize_managed_dict_lock_held(PyObject *obj) +PyDictObject * +_PyObject_MaterializeManagedDict_LockHeld(PyObject *obj) { - _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(obj); + ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(obj); PyDictValues *values = _PyObject_InlineValues(obj); PyInterpreterState *interp = _PyInterpreterState_GET(); @@ -6699,7 +6710,7 @@ _PyObject_MaterializeManagedDict(PyObject *obj) goto exit; } #endif - dict = materialize_managed_dict_lock_held(obj); + dict = _PyObject_MaterializeManagedDict_LockHeld(obj); #ifdef Py_GIL_DISABLED exit: @@ -7132,7 +7143,7 @@ PyObject_ClearManagedDict(PyObject *obj) int _PyDict_DetachFromObject(PyDictObject *mp, PyObject *obj) { - _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(obj); + ASSERT_WORLD_STOPPED_OR_OBJ_LOCKED(obj); assert(_PyObject_ManagedDictPointer(obj)->dict == mp); assert(_PyObject_InlineValuesConsistencyCheck(obj)); diff --git a/Objects/listobject.c b/Objects/listobject.c index 9eae9626f7c1f1..f29f58dc25be04 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -3244,7 +3244,7 @@ list_index_impl(PyListObject *self, PyObject *value, Py_ssize_t start, else if (cmp < 0) return NULL; } - PyErr_Format(PyExc_ValueError, "%R is not in list", value); + PyErr_SetString(PyExc_ValueError, "list.index(x): x not in list"); return NULL; } diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index d033e2bad1891a..a6a71802ef8e01 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -386,8 +386,16 @@ _PyMem_ArenaFree(void *Py_UNUSED(ctx), void *ptr, ) { #ifdef MS_WINDOWS + /* Unlike free(), VirtualFree() does not special-case NULL to noop. 
*/ + if (ptr == NULL) { + return; + } VirtualFree(ptr, 0, MEM_RELEASE); #elif defined(ARENAS_USE_MMAP) + /* Unlike free(), munmap() does not special-case NULL to noop. */ + if (ptr == NULL) { + return; + } munmap(ptr, size); #else free(ptr); diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index d5db48c143324f..9727b4f47b53a1 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -655,7 +655,7 @@ range_index(rangeobject *r, PyObject *ob) } /* object is not in the range */ - PyErr_Format(PyExc_ValueError, "%R is not in range", ob); + PyErr_SetString(PyExc_ValueError, "range.index(x): x not in range"); return NULL; } diff --git a/Objects/setobject.c b/Objects/setobject.c index eb0c404bf6b8e0..5d7ad395d08c90 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -184,14 +184,14 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) found_unused_or_dummy: if (freeslot == NULL) goto found_unused; - so->used++; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used + 1); freeslot->key = key; freeslot->hash = hash; return 0; found_unused: so->fill++; - so->used++; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used + 1); entry->key = key; entry->hash = hash; if ((size_t)so->fill*5 < mask*3) @@ -357,7 +357,7 @@ set_discard_entry(PySetObject *so, PyObject *key, Py_hash_t hash) old_key = entry->key; entry->key = dummy; entry->hash = -1; - so->used--; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used - 1); Py_DECREF(old_key); return DISCARD_FOUND; } @@ -397,7 +397,7 @@ set_empty_to_minsize(PySetObject *so) { memset(so->smalltable, 0, sizeof(so->smalltable)); so->fill = 0; - so->used = 0; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, 0); so->mask = PySet_MINSIZE - 1; so->table = so->smalltable; so->hash = -1; @@ -615,7 +615,7 @@ set_merge_lock_held(PySetObject *so, PyObject *otherset) } } so->fill = other->fill; - so->used = other->used; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, other->used); return 0; } @@ -624,7 +624,7 @@ set_merge_lock_held(PySetObject *so, PyObject *otherset) setentry *newtable = so->table; size_t newmask = (size_t)so->mask; so->fill = other->used; - so->used = other->used; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, other->used); for (i = other->mask + 1; i > 0 ; i--, other_entry++) { key = other_entry->key; if (key != NULL && key != dummy) { @@ -678,7 +678,7 @@ set_pop_impl(PySetObject *so) key = entry->key; entry->key = dummy; entry->hash = -1; - so->used--; + FT_ATOMIC_STORE_SSIZE_RELAXED(so->used, so->used - 1); so->finger = entry - so->table + 1; /* next place to start */ return key; } @@ -1173,7 +1173,9 @@ set_swap_bodies(PySetObject *a, PySetObject *b) Py_hash_t h; t = a->fill; a->fill = b->fill; b->fill = t; - t = a->used; a->used = b->used; b->used = t; + t = a->used; + FT_ATOMIC_STORE_SSIZE_RELAXED(a->used, b->used); + FT_ATOMIC_STORE_SSIZE_RELAXED(b->used, t); t = a->mask; a->mask = b->mask; b->mask = t; u = a->table; diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 5ae1ee9a89af84..3704d095a977ea 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -390,6 +390,27 @@ _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) return (PyObject *)tuple; } +PyObject * +_PyTuple_FromStackRefSteal(const _PyStackRef *src, Py_ssize_t n) +{ + if (n == 0) { + return tuple_get_empty(); + } + PyTupleObject *tuple = tuple_alloc(n); + if (tuple == NULL) { + for (Py_ssize_t i = 0; i < n; i++) { + PyStackRef_CLOSE(src[i]); + } + return NULL; + } + PyObject **dst = tuple->ob_item; + for (Py_ssize_t i = 0; i < n; i++) { + dst[i] = 
PyStackRef_AsPyObjectSteal(src[i]); + } + _PyObject_GC_TRACK(tuple); + return (PyObject *)tuple; +} + PyObject * _PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n) { @@ -1132,7 +1153,7 @@ maybe_freelist_pop(Py_ssize_t size) return NULL; } assert(size > 0); - if (size < PyTuple_MAXSAVESIZE) { + if (size <= PyTuple_MAXSAVESIZE) { Py_ssize_t index = size - 1; PyTupleObject *op = TUPLE_FREELIST.items[index]; if (op != NULL) { diff --git a/Objects/typeobject.c b/Objects/typeobject.c index b042e64a188d9d..7d01b680605a38 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -314,6 +314,16 @@ managed_static_type_state_clear(PyInterpreterState *interp, PyTypeObject *self, } } +static PyTypeObject * +managed_static_type_get_def(PyTypeObject *self, int isbuiltin) +{ + size_t index = managed_static_type_index_get(self); + size_t full_index = isbuiltin + ? index + : index + _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES; + return &_PyRuntime.types.managed_static.types[full_index].def; +} + // Also see _PyStaticType_InitBuiltin() and _PyStaticType_FiniBuiltin(). /* end static builtin helpers */ @@ -5387,7 +5397,7 @@ _PyType_LookupRef(PyTypeObject *type, PyObject *name) #ifdef Py_GIL_DISABLED // synchronize-with other writing threads by doing an acquire load on the sequence while (1) { - int sequence = _PySeqLock_BeginRead(&entry->sequence); + uint32_t sequence = _PySeqLock_BeginRead(&entry->sequence); uint32_t entry_version = _Py_atomic_load_uint32_relaxed(&entry->version); uint32_t type_version = _Py_atomic_load_uint32_acquire(&type->tp_version_tag); if (entry_version == type_version && @@ -5840,7 +5850,6 @@ fini_static_type(PyInterpreterState *interp, PyTypeObject *type, _PyStaticType_ClearWeakRefs(interp, type); managed_static_type_state_clear(interp, type, isbuiltin, final); - /* We leave _Py_TPFLAGS_STATIC_BUILTIN set on tp_flags. */ } void @@ -6540,28 +6549,11 @@ compatible_for_assignment(PyTypeObject* oldto, PyTypeObject* newto, const char* return 0; } -static int -object_set_class(PyObject *self, PyObject *value, void *closure) -{ - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, - "can't delete __class__ attribute"); - return -1; - } - if (!PyType_Check(value)) { - PyErr_Format(PyExc_TypeError, - "__class__ must be set to a class, not '%s' object", - Py_TYPE(value)->tp_name); - return -1; - } - PyTypeObject *newto = (PyTypeObject *)value; - - if (PySys_Audit("object.__setattr__", "OsO", - self, "__class__", value) < 0) { - return -1; - } +static int +object_set_class_world_stopped(PyObject *self, PyTypeObject *newto) +{ PyTypeObject *oldto = Py_TYPE(self); /* In versions of CPython prior to 3.5, the code in @@ -6627,39 +6619,66 @@ object_set_class(PyObject *self, PyObject *value, void *closure) /* Changing the class will change the implicit dict keys, * so we must materialize the dictionary first. */ if (oldto->tp_flags & Py_TPFLAGS_INLINE_VALUES) { - PyDictObject *dict = _PyObject_MaterializeManagedDict(self); + PyDictObject *dict = _PyObject_GetManagedDict(self); if (dict == NULL) { - return -1; + dict = _PyObject_MaterializeManagedDict_LockHeld(self); + if (dict == NULL) { + return -1; + } } - bool error = false; - - Py_BEGIN_CRITICAL_SECTION2(self, dict); - - // If we raced after materialization and replaced the dict - // then the materialized dict should no longer have the - // inline values in which case detach is a nop. 
- assert(_PyObject_GetManagedDict(self) == dict || - dict->ma_values != _PyObject_InlineValues(self)); + assert(_PyObject_GetManagedDict(self) == dict); if (_PyDict_DetachFromObject(dict, self) < 0) { - error = true; - } - - Py_END_CRITICAL_SECTION2(); - if (error) { return -1; } + } if (newto->tp_flags & Py_TPFLAGS_HEAPTYPE) { Py_INCREF(newto); } - Py_BEGIN_CRITICAL_SECTION(self); - // The real Py_TYPE(self) (`oldto`) may have changed from - // underneath us in another thread, so we re-fetch it here. - oldto = Py_TYPE(self); + Py_SET_TYPE(self, newto); - Py_END_CRITICAL_SECTION(); + + return 0; + } + else { + return -1; + } +} + +static int +object_set_class(PyObject *self, PyObject *value, void *closure) +{ + + if (value == NULL) { + PyErr_SetString(PyExc_TypeError, + "can't delete __class__ attribute"); + return -1; + } + if (!PyType_Check(value)) { + PyErr_Format(PyExc_TypeError, + "__class__ must be set to a class, not '%s' object", + Py_TYPE(value)->tp_name); + return -1; + } + PyTypeObject *newto = (PyTypeObject *)value; + + if (PySys_Audit("object.__setattr__", "OsO", + self, "__class__", value) < 0) { + return -1; + } + +#ifdef Py_GIL_DISABLED + PyInterpreterState *interp = _PyInterpreterState_GET(); + _PyEval_StopTheWorld(interp); +#endif + PyTypeObject *oldto = Py_TYPE(self); + int res = object_set_class_world_stopped(self, newto); +#ifdef Py_GIL_DISABLED + _PyEval_StartTheWorld(interp); +#endif + if (res == 0) { if (oldto->tp_flags & Py_TPFLAGS_HEAPTYPE) { Py_DECREF(oldto); } @@ -6667,9 +6686,7 @@ object_set_class(PyObject *self, PyObject *value, void *closure) RARE_EVENT_INC(set_class); return 0; } - else { - return -1; - } + return res; } static PyGetSetDef object_getsets[] = { @@ -7842,7 +7859,7 @@ inherit_slots(PyTypeObject *type, PyTypeObject *base) return 0; } -static int add_operators(PyTypeObject *); +static int add_operators(PyTypeObject *, PyTypeObject *); static int add_tp_new_wrapper(PyTypeObject *type); #define COLLECTION_FLAGS (Py_TPFLAGS_SEQUENCE | Py_TPFLAGS_MAPPING) @@ -8007,10 +8024,10 @@ type_dict_set_doc(PyTypeObject *type) static int -type_ready_fill_dict(PyTypeObject *type) +type_ready_fill_dict(PyTypeObject *type, PyTypeObject *def) { /* Add type-specific descriptors to tp_dict */ - if (add_operators(type) < 0) { + if (add_operators(type, def) < 0) { return -1; } if (type_add_methods(type) < 0) { @@ -8329,7 +8346,7 @@ type_ready_post_checks(PyTypeObject *type) static int -type_ready(PyTypeObject *type, int initial) +type_ready(PyTypeObject *type, PyTypeObject *def, int initial) { ASSERT_TYPE_LOCK_HELD(); @@ -8368,7 +8385,7 @@ type_ready(PyTypeObject *type, int initial) if (type_ready_set_new(type, initial) < 0) { goto error; } - if (type_ready_fill_dict(type) < 0) { + if (type_ready_fill_dict(type, def) < 0) { goto error; } if (initial) { @@ -8395,7 +8412,7 @@ type_ready(PyTypeObject *type, int initial) } /* All done -- set the ready flag */ - type->tp_flags = type->tp_flags | Py_TPFLAGS_READY; + type->tp_flags |= Py_TPFLAGS_READY; stop_readying(type); assert(_PyType_CheckConsistency(type)); @@ -8425,7 +8442,7 @@ PyType_Ready(PyTypeObject *type) int res; BEGIN_TYPE_LOCK(); if (!(type->tp_flags & Py_TPFLAGS_READY)) { - res = type_ready(type, 1); + res = type_ready(type, NULL, 1); } else { res = 0; assert(_PyType_CheckConsistency(type)); @@ -8461,14 +8478,20 @@ init_static_type(PyInterpreterState *interp, PyTypeObject *self, managed_static_type_state_init(interp, self, isbuiltin, initial); + PyTypeObject *def = managed_static_type_get_def(self, isbuiltin); 
+ if (initial) { + memcpy(def, self, sizeof(PyTypeObject)); + } + int res; BEGIN_TYPE_LOCK(); - res = type_ready(self, initial); + res = type_ready(self, def, initial); END_TYPE_LOCK(); if (res < 0) { _PyStaticType_ClearWeakRefs(interp, self); managed_static_type_state_clear(interp, self, isbuiltin, initial); } + return res; } @@ -11056,17 +11079,22 @@ recurse_down_subclasses(PyTypeObject *type, PyObject *attr_name, infinite recursion here.) */ static int -add_operators(PyTypeObject *type) +add_operators(PyTypeObject *type, PyTypeObject *def) { PyObject *dict = lookup_tp_dict(type); pytype_slotdef *p; PyObject *descr; void **ptr; + assert(def == NULL || (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); + if (def == NULL) { + def = type; + } + for (p = slotdefs; p->name; p++) { if (p->wrapper == NULL) continue; - ptr = slotptr(type, p->offset); + ptr = slotptr(def, p->offset); if (!ptr || !*ptr) continue; int r = PyDict_Contains(dict, p->name_strobj); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 9738442ab962b0..394ea888fc9231 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -9315,19 +9315,24 @@ _PyUnicode_TransformDecimalAndSpaceToASCII(PyObject *unicode) /* --- Helpers ------------------------------------------------------------ */ /* helper macro to fixup start/end slice values */ -#define ADJUST_INDICES(start, end, len) \ - if (end > len) \ - end = len; \ - else if (end < 0) { \ - end += len; \ - if (end < 0) \ - end = 0; \ - } \ - if (start < 0) { \ - start += len; \ - if (start < 0) \ - start = 0; \ - } +#define ADJUST_INDICES(start, end, len) \ + do { \ + if (end > len) { \ + end = len; \ + } \ + else if (end < 0) { \ + end += len; \ + if (end < 0) { \ + end = 0; \ + } \ + } \ + if (start < 0) { \ + start += len; \ + if (start < 0) { \ + start = 0; \ + } \ + } \ + } while (0) static Py_ssize_t any_find_slice(PyObject* s1, PyObject* s2, diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 3378ed54203f18..f36fcb8caece33 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -163,6 +163,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 742d88d9e1fa7a..a1b43addf9e36a 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -432,6 +432,9 @@ Include\cpython + + Include\cpython + Include\cpython diff --git a/PCbuild/regen.targets b/PCbuild/regen.targets index 4aa14ed1fad9eb..416241d9d0df10 100644 --- a/PCbuild/regen.targets +++ b/PCbuild/regen.targets @@ -90,23 +90,23 @@ Inputs="@(_CasesSources)" Outputs="@(_CasesOutputs)" DependsOnTargets="FindPythonForBuild"> - - - - - - - - - diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index e338656a5b1eb9..e6867f138a5ccb 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -880,7 +880,7 @@ def visitModule(self, mod): Py_ssize_t i, numfields = 0; int res = -1; - PyObject *key, *value, *fields, *remaining_fields = NULL; + PyObject *key, *value, *fields, *attributes = NULL, *remaining_fields = NULL; if (PyObject_GetOptionalAttr((PyObject*)Py_TYPE(self), state->_fields, &fields) < 0) { goto cleanup; } @@ -947,22 +947,32 @@ def visitModule(self, mod): goto cleanup; } } - else if ( - PyUnicode_CompareWithASCIIString(key, "lineno") != 0 && - PyUnicode_CompareWithASCIIString(key, "col_offset") != 0 && - PyUnicode_CompareWithASCIIString(key, "end_lineno") != 0 && - PyUnicode_CompareWithASCIIString(key, "end_col_offset") != 0 - ) { - if (PyErr_WarnFormat( - PyExc_DeprecationWarning, 1, - 
"%.400s.__init__ got an unexpected keyword argument '%U'. " - "Support for arbitrary keyword arguments is deprecated " - "and will be removed in Python 3.15.", - Py_TYPE(self)->tp_name, key - ) < 0) { + else { + // Lazily initialize "attributes" + if (attributes == NULL) { + attributes = PyObject_GetAttr((PyObject*)Py_TYPE(self), state->_attributes); + if (attributes == NULL) { + res = -1; + goto cleanup; + } + } + int contains = PySequence_Contains(attributes, key); + if (contains == -1) { res = -1; goto cleanup; } + else if (contains == 0) { + if (PyErr_WarnFormat( + PyExc_DeprecationWarning, 1, + "%.400s.__init__ got an unexpected keyword argument '%U'. " + "Support for arbitrary keyword arguments is deprecated " + "and will be removed in Python 3.15.", + Py_TYPE(self)->tp_name, key + ) < 0) { + res = -1; + goto cleanup; + } + } } res = PyObject_SetAttr(self, key, value); if (res < 0) { @@ -1045,6 +1055,7 @@ def visitModule(self, mod): Py_DECREF(field_types); } cleanup: + Py_XDECREF(attributes); Py_XDECREF(fields); Py_XDECREF(remaining_fields); return res; @@ -1132,6 +1143,279 @@ def visitModule(self, mod): return result; } +/* + * Perform the following validations: + * + * - All keyword arguments are known 'fields' or 'attributes'. + * - No field or attribute would be left unfilled after copy.replace(). + * + * On success, this returns 1. Otherwise, set a TypeError + * exception and returns -1 (no exception is set if some + * other internal errors occur). + * + * Parameters + * + * self The AST node instance. + * dict The AST node instance dictionary (self.__dict__). + * fields The list of fields (self._fields). + * attributes The list of attributes (self._attributes). + * kwargs Keyword arguments passed to ast_type_replace(). + * + * The 'dict', 'fields', 'attributes' and 'kwargs' arguments can be NULL. + * + * Note: this function can be removed in 3.15 since the verification + * will be done inside the constructor. + */ +static inline int +ast_type_replace_check(PyObject *self, + PyObject *dict, + PyObject *fields, + PyObject *attributes, + PyObject *kwargs) +{ + // While it is possible to make some fast paths that would avoid + // allocating objects on the stack, this would cost us readability. + // For instance, if 'fields' and 'attributes' are both empty, and + // 'kwargs' is not empty, we could raise a TypeError immediately. + PyObject *expecting = PySet_New(fields); + if (expecting == NULL) { + return -1; + } + if (attributes) { + if (_PySet_Update(expecting, attributes) < 0) { + Py_DECREF(expecting); + return -1; + } + } + // Any keyword argument that is neither a field nor attribute is rejected. + // We first need to check whether a keyword argument is accepted or not. + // If all keyword arguments are accepted, we compute the required fields + // and attributes. A field or attribute is not needed if: + // + // 1) it is given in 'kwargs', or + // 2) it already exists on 'self'. 
+ if (kwargs) { + Py_ssize_t pos = 0; + PyObject *key, *value; + while (PyDict_Next(kwargs, &pos, &key, &value)) { + int rc = PySet_Discard(expecting, key); + if (rc < 0) { + Py_DECREF(expecting); + return -1; + } + if (rc == 0) { + PyErr_Format(PyExc_TypeError, + "%.400s.__replace__ got an unexpected keyword " + "argument '%U'.", Py_TYPE(self)->tp_name, key); + Py_DECREF(expecting); + return -1; + } + } + } + // check that the remaining fields or attributes would be filled + if (dict) { + Py_ssize_t pos = 0; + PyObject *key, *value; + while (PyDict_Next(dict, &pos, &key, &value)) { + // Mark fields or attributes that are found on the instance + // as non-mandatory. If they are not given in 'kwargs', they + // will be shallow-copied; otherwise, they would be replaced + // (not in this function). + if (PySet_Discard(expecting, key) < 0) { + Py_DECREF(expecting); + return -1; + } + } + if (attributes) { + // Some attributes may or may not be present at runtime. + // In particular, now that we checked whether 'kwargs' + // is correct or not, we allow any attribute to be missing. + // + // Note that fields must still be entirely determined when + // calling the constructor later. + PyObject *unused = PyObject_CallMethodOneArg(expecting, + &_Py_ID(difference_update), + attributes); + if (unused == NULL) { + Py_DECREF(expecting); + return -1; + } + Py_DECREF(unused); + } + } + // Now 'expecting' contains the fields or attributes + // that would not be filled inside ast_type_replace(). + Py_ssize_t m = PySet_GET_SIZE(expecting); + if (m > 0) { + PyObject *names = PyList_New(m); + if (names == NULL) { + Py_DECREF(expecting); + return -1; + } + Py_ssize_t i = 0, pos = 0; + PyObject *item; + Py_hash_t hash; + while (_PySet_NextEntry(expecting, &pos, &item, &hash)) { + PyObject *name = PyObject_Repr(item); + if (name == NULL) { + Py_DECREF(expecting); + Py_DECREF(names); + return -1; + } + // steal the reference 'name' + PyList_SET_ITEM(names, i++, name); + } + Py_DECREF(expecting); + if (PyList_Sort(names) < 0) { + Py_DECREF(names); + return -1; + } + PyObject *sep = PyUnicode_FromString(", "); + if (sep == NULL) { + Py_DECREF(names); + return -1; + } + PyObject *str_names = PyUnicode_Join(sep, names); + Py_DECREF(sep); + Py_DECREF(names); + if (str_names == NULL) { + return -1; + } + PyErr_Format(PyExc_TypeError, + "%.400s.__replace__ missing %ld keyword argument%s: %U.", + Py_TYPE(self)->tp_name, m, m == 1 ? "" : "s", str_names); + Py_DECREF(str_names); + return -1; + } + else { + Py_DECREF(expecting); + return 1; + } +} + +/* + * Python equivalent: + * + * for key in keys: + * if hasattr(self, key): + * payload[key] = getattr(self, key) + * + * The 'keys' argument is a sequence corresponding to + * the '_fields' or the '_attributes' of an AST node. + * + * This returns -1 if an error occurs and 0 otherwise. + * + * Parameters + * + * payload A dictionary to fill. + * keys A sequence of keys or NULL for an empty sequence. + * dict The AST node instance dictionary (must not be NULL).
+ */ +static inline int +ast_type_replace_update_payload(PyObject *payload, + PyObject *keys, + PyObject *dict) +{ + assert(dict != NULL); + if (keys == NULL) { + return 0; + } + Py_ssize_t n = PySequence_Size(keys); + if (n == -1) { + return -1; + } + for (Py_ssize_t i = 0; i < n; i++) { + PyObject *key = PySequence_GetItem(keys, i); + if (key == NULL) { + return -1; + } + PyObject *value; + if (PyDict_GetItemRef(dict, key, &value) < 0) { + Py_DECREF(key); + return -1; + } + if (value == NULL) { + Py_DECREF(key); + // If a field or attribute is not present at runtime, it should + // be explicitly given in 'kwargs'. If not, the constructor will + // issue a warning (which becomes an error in 3.15). + continue; + } + int rc = PyDict_SetItem(payload, key, value); + Py_DECREF(key); + Py_DECREF(value); + if (rc < 0) { + return -1; + } + } + return 0; +} + +/* copy.replace() support (shallow copy) */ +static PyObject * +ast_type_replace(PyObject *self, PyObject *args, PyObject *kwargs) +{ + if (!_PyArg_NoPositional("__replace__", args)) { + return NULL; + } + + struct ast_state *state = get_ast_state(); + if (state == NULL) { + return NULL; + } + + PyObject *result = NULL; + // known AST class fields and attributes + PyObject *fields = NULL, *attributes = NULL; + // current instance dictionary + PyObject *dict = NULL; + // constructor positional and keyword arguments + PyObject *empty_tuple = NULL, *payload = NULL; + + PyObject *type = (PyObject *)Py_TYPE(self); + if (PyObject_GetOptionalAttr(type, state->_fields, &fields) < 0) { + goto cleanup; + } + if (PyObject_GetOptionalAttr(type, state->_attributes, &attributes) < 0) { + goto cleanup; + } + if (PyObject_GetOptionalAttr(self, state->__dict__, &dict) < 0) { + goto cleanup; + } + if (ast_type_replace_check(self, dict, fields, attributes, kwargs) < 0) { + goto cleanup; + } + empty_tuple = PyTuple_New(0); + if (empty_tuple == NULL) { + goto cleanup; + } + payload = PyDict_New(); + if (payload == NULL) { + goto cleanup; + } + if (dict) { // in case __dict__ is missing (for some obscure reason) + // copy the instance's fields (possibly NULL) + if (ast_type_replace_update_payload(payload, fields, dict) < 0) { + goto cleanup; + } + // copy the instance's attributes (possibly NULL) + if (ast_type_replace_update_payload(payload, attributes, dict) < 0) { + goto cleanup; + } + } + if (kwargs && PyDict_Update(payload, kwargs) < 0) { + goto cleanup; + } + result = PyObject_Call(type, empty_tuple, payload); +cleanup: + Py_XDECREF(payload); + Py_XDECREF(empty_tuple); + Py_XDECREF(dict); + Py_XDECREF(attributes); + Py_XDECREF(fields); + return result; +} + static PyMemberDef ast_type_members[] = { {"__dictoffset__", Py_T_PYSSIZET, offsetof(AST_object, dict), Py_READONLY}, {NULL} /* Sentinel */ @@ -1139,6 +1423,10 @@ def visitModule(self, mod): static PyMethodDef ast_type_methods[] = { {"__reduce__", ast_type_reduce, METH_NOARGS, NULL}, + {"__replace__", _PyCFunction_CAST(ast_type_replace), METH_VARARGS | METH_KEYWORDS, + PyDoc_STR("__replace__($self, /, **fields)\\n--\\n\\n" + "Return a copy of the AST node with new values " + "for the specified fields.")}, {NULL} }; @@ -1773,7 +2061,9 @@ def generate_module_def(mod, metadata, f, internal_h): #include "pycore_ceval.h" // _Py_EnterRecursiveCall #include "pycore_lock.h" // _PyOnceFlag #include "pycore_interp.h" // _PyInterpreterState.ast + #include "pycore_modsupport.h" // _PyArg_NoPositional() #include "pycore_pystate.h" // _PyInterpreterState_GET() + #include "pycore_setobject.h" // _PySet_NextEntry(), 
_PySet_Update() #include "pycore_unionobject.h" // _Py_union_type_or #include "structmember.h" #include diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 01ffea1869350b..4d0db457a8b172 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -6,7 +6,9 @@ #include "pycore_ceval.h" // _Py_EnterRecursiveCall #include "pycore_lock.h" // _PyOnceFlag #include "pycore_interp.h" // _PyInterpreterState.ast +#include "pycore_modsupport.h" // _PyArg_NoPositional() #include "pycore_pystate.h" // _PyInterpreterState_GET() +#include "pycore_setobject.h" // _PySet_NextEntry(), _PySet_Update() #include "pycore_unionobject.h" // _Py_union_type_or #include "structmember.h" #include @@ -5079,7 +5081,7 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) Py_ssize_t i, numfields = 0; int res = -1; - PyObject *key, *value, *fields, *remaining_fields = NULL; + PyObject *key, *value, *fields, *attributes = NULL, *remaining_fields = NULL; if (PyObject_GetOptionalAttr((PyObject*)Py_TYPE(self), state->_fields, &fields) < 0) { goto cleanup; } @@ -5146,22 +5148,32 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) goto cleanup; } } - else if ( - PyUnicode_CompareWithASCIIString(key, "lineno") != 0 && - PyUnicode_CompareWithASCIIString(key, "col_offset") != 0 && - PyUnicode_CompareWithASCIIString(key, "end_lineno") != 0 && - PyUnicode_CompareWithASCIIString(key, "end_col_offset") != 0 - ) { - if (PyErr_WarnFormat( - PyExc_DeprecationWarning, 1, - "%.400s.__init__ got an unexpected keyword argument '%U'. " - "Support for arbitrary keyword arguments is deprecated " - "and will be removed in Python 3.15.", - Py_TYPE(self)->tp_name, key - ) < 0) { + else { + // Lazily initialize "attributes" + if (attributes == NULL) { + attributes = PyObject_GetAttr((PyObject*)Py_TYPE(self), state->_attributes); + if (attributes == NULL) { + res = -1; + goto cleanup; + } + } + int contains = PySequence_Contains(attributes, key); + if (contains == -1) { res = -1; goto cleanup; } + else if (contains == 0) { + if (PyErr_WarnFormat( + PyExc_DeprecationWarning, 1, + "%.400s.__init__ got an unexpected keyword argument '%U'. " + "Support for arbitrary keyword arguments is deprecated " + "and will be removed in Python 3.15.", + Py_TYPE(self)->tp_name, key + ) < 0) { + res = -1; + goto cleanup; + } + } } res = PyObject_SetAttr(self, key, value); if (res < 0) { @@ -5244,6 +5256,7 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) Py_DECREF(field_types); } cleanup: + Py_XDECREF(attributes); Py_XDECREF(fields); Py_XDECREF(remaining_fields); return res; @@ -5331,6 +5344,279 @@ ast_type_reduce(PyObject *self, PyObject *unused) return result; } +/* + * Perform the following validations: + * + * - All keyword arguments are known 'fields' or 'attributes'. + * - No field or attribute would be left unfilled after copy.replace(). + * + * On success, this returns 1. Otherwise, set a TypeError + * exception and returns -1 (no exception is set if some + * other internal errors occur). + * + * Parameters + * + * self The AST node instance. + * dict The AST node instance dictionary (self.__dict__). + * fields The list of fields (self._fields). + * attributes The list of attributes (self._attributes). + * kwargs Keyword arguments passed to ast_type_replace(). + * + * The 'dict', 'fields', 'attributes' and 'kwargs' arguments can be NULL. + * + * Note: this function can be removed in 3.15 since the verification + * will be done inside the constructor. 
+ */ +static inline int +ast_type_replace_check(PyObject *self, + PyObject *dict, + PyObject *fields, + PyObject *attributes, + PyObject *kwargs) +{ + // While it is possible to make some fast paths that would avoid + // allocating objects on the stack, this would cost us readability. + // For instance, if 'fields' and 'attributes' are both empty, and + // 'kwargs' is not empty, we could raise a TypeError immediately. + PyObject *expecting = PySet_New(fields); + if (expecting == NULL) { + return -1; + } + if (attributes) { + if (_PySet_Update(expecting, attributes) < 0) { + Py_DECREF(expecting); + return -1; + } + } + // Any keyword argument that is neither a field nor attribute is rejected. + // We first need to check whether a keyword argument is accepted or not. + // If all keyword arguments are accepted, we compute the required fields + // and attributes. A field or attribute is not needed if: + // + // 1) it is given in 'kwargs', or + // 2) it already exists on 'self'. + if (kwargs) { + Py_ssize_t pos = 0; + PyObject *key, *value; + while (PyDict_Next(kwargs, &pos, &key, &value)) { + int rc = PySet_Discard(expecting, key); + if (rc < 0) { + Py_DECREF(expecting); + return -1; + } + if (rc == 0) { + PyErr_Format(PyExc_TypeError, + "%.400s.__replace__ got an unexpected keyword " + "argument '%U'.", Py_TYPE(self)->tp_name, key); + Py_DECREF(expecting); + return -1; + } + } + } + // check that the remaining fields or attributes would be filled + if (dict) { + Py_ssize_t pos = 0; + PyObject *key, *value; + while (PyDict_Next(dict, &pos, &key, &value)) { + // Mark fields or attributes that are found on the instance + // as non-mandatory. If they are not given in 'kwargs', they + // will be shallow-copied; otherwise, they would be replaced + // (not in this function). + if (PySet_Discard(expecting, key) < 0) { + Py_DECREF(expecting); + return -1; + } + } + if (attributes) { + // Some attributes may or may not be present at runtime. + // In particular, now that we checked whether 'kwargs' + // is correct or not, we allow any attribute to be missing. + // + // Note that fields must still be entirely determined when + // calling the constructor later. + PyObject *unused = PyObject_CallMethodOneArg(expecting, + &_Py_ID(difference_update), + attributes); + if (unused == NULL) { + Py_DECREF(expecting); + return -1; + } + Py_DECREF(unused); + } + } + // Now 'expecting' contains the fields or attributes + // that would not be filled inside ast_type_replace(). + Py_ssize_t m = PySet_GET_SIZE(expecting); + if (m > 0) { + PyObject *names = PyList_New(m); + if (names == NULL) { + Py_DECREF(expecting); + return -1; + } + Py_ssize_t i = 0, pos = 0; + PyObject *item; + Py_hash_t hash; + while (_PySet_NextEntry(expecting, &pos, &item, &hash)) { + PyObject *name = PyObject_Repr(item); + if (name == NULL) { + Py_DECREF(expecting); + Py_DECREF(names); + return -1; + } + // steal the reference 'name' + PyList_SET_ITEM(names, i++, name); + } + Py_DECREF(expecting); + if (PyList_Sort(names) < 0) { + Py_DECREF(names); + return -1; + } + PyObject *sep = PyUnicode_FromString(", "); + if (sep == NULL) { + Py_DECREF(names); + return -1; + } + PyObject *str_names = PyUnicode_Join(sep, names); + Py_DECREF(sep); + Py_DECREF(names); + if (str_names == NULL) { + return -1; + } + PyErr_Format(PyExc_TypeError, + "%.400s.__replace__ missing %ld keyword argument%s: %U.", + Py_TYPE(self)->tp_name, m, m == 1 ?
"" : "s", str_names); + Py_DECREF(str_names); + return -1; + } + else { + Py_DECREF(expecting); + return 1; + } +} + +/* + * Python equivalent: + * + * for key in keys: + * if hasattr(self, key): + * payload[key] = getattr(self, key) + * + * The 'keys' argument is a sequence corresponding to + * the '_fields' or the '_attributes' of an AST node. + * + * This returns -1 if an error occurs and 0 otherwise. + * + * Parameters + * + * payload A dictionary to fill. + * keys A sequence of keys or NULL for an empty sequence. + * dict The AST node instance dictionary (must not be NULL). + */ +static inline int +ast_type_replace_update_payload(PyObject *payload, + PyObject *keys, + PyObject *dict) +{ + assert(dict != NULL); + if (keys == NULL) { + return 0; + } + Py_ssize_t n = PySequence_Size(keys); + if (n == -1) { + return -1; + } + for (Py_ssize_t i = 0; i < n; i++) { + PyObject *key = PySequence_GetItem(keys, i); + if (key == NULL) { + return -1; + } + PyObject *value; + if (PyDict_GetItemRef(dict, key, &value) < 0) { + Py_DECREF(key); + return -1; + } + if (value == NULL) { + Py_DECREF(key); + // If a field or attribute is not present at runtime, it should + // be explicitly given in 'kwargs'. If not, the constructor will + // issue a warning (which becomes an error in 3.15). + continue; + } + int rc = PyDict_SetItem(payload, key, value); + Py_DECREF(key); + Py_DECREF(value); + if (rc < 0) { + return -1; + } + } + return 0; +} + +/* copy.replace() support (shallow copy) */ +static PyObject * +ast_type_replace(PyObject *self, PyObject *args, PyObject *kwargs) +{ + if (!_PyArg_NoPositional("__replace__", args)) { + return NULL; + } + + struct ast_state *state = get_ast_state(); + if (state == NULL) { + return NULL; + } + + PyObject *result = NULL; + // known AST class fields and attributes + PyObject *fields = NULL, *attributes = NULL; + // current instance dictionary + PyObject *dict = NULL; + // constructor positional and keyword arguments + PyObject *empty_tuple = NULL, *payload = NULL; + + PyObject *type = (PyObject *)Py_TYPE(self); + if (PyObject_GetOptionalAttr(type, state->_fields, &fields) < 0) { + goto cleanup; + } + if (PyObject_GetOptionalAttr(type, state->_attributes, &attributes) < 0) { + goto cleanup; + } + if (PyObject_GetOptionalAttr(self, state->__dict__, &dict) < 0) { + goto cleanup; + } + if (ast_type_replace_check(self, dict, fields, attributes, kwargs) < 0) { + goto cleanup; + } + empty_tuple = PyTuple_New(0); + if (empty_tuple == NULL) { + goto cleanup; + } + payload = PyDict_New(); + if (payload == NULL) { + goto cleanup; + } + if (dict) { // in case __dict__ is missing (for some obscure reason) + // copy the instance's fields (possibly NULL) + if (ast_type_replace_update_payload(payload, fields, dict) < 0) { + goto cleanup; + } + // copy the instance's attributes (possibly NULL) + if (ast_type_replace_update_payload(payload, attributes, dict) < 0) { + goto cleanup; + } + } + if (kwargs && PyDict_Update(payload, kwargs) < 0) { + goto cleanup; + } + result = PyObject_Call(type, empty_tuple, payload); +cleanup: + Py_XDECREF(payload); + Py_XDECREF(empty_tuple); + Py_XDECREF(dict); + Py_XDECREF(attributes); + Py_XDECREF(fields); + return result; +} + static PyMemberDef ast_type_members[] = { {"__dictoffset__", Py_T_PYSSIZET, offsetof(AST_object, dict), Py_READONLY}, {NULL} /* Sentinel */ @@ -5338,6 +5624,10 @@ static PyMemberDef ast_type_members[] = { static PyMethodDef ast_type_methods[] = { {"__reduce__", ast_type_reduce, METH_NOARGS, NULL}, + {"__replace__", 
_PyCFunction_CAST(ast_type_replace), METH_VARARGS | METH_KEYWORDS, + PyDoc_STR("__replace__($self, /, **fields)\n--\n\n" + "Return a copy of the AST node with new values " + "for the specified fields.")}, {NULL} }; diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 6e50623cafa4ed..a5b45e358d9efb 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2516,6 +2516,49 @@ Without arguments, equivalent to locals().\n\ With an argument, equivalent to object.__dict__."); +/* Improved Kahan–Babuška algorithm by Arnold Neumaier + Neumaier, A. (1974), Rundungsfehleranalyse einiger Verfahren + zur Summation endlicher Summen. Z. angew. Math. Mech., + 54: 39-51. https://doi.org/10.1002/zamm.19740540106 + https://en.wikipedia.org/wiki/Kahan_summation_algorithm#Further_enhancements + */ + +typedef struct { + double hi; /* high-order bits for a running sum */ + double lo; /* a running compensation for lost low-order bits */ +} CompensatedSum; + +static inline CompensatedSum +cs_from_double(double x) +{ + return (CompensatedSum) {x}; +} + +static inline CompensatedSum +cs_add(CompensatedSum total, double x) +{ + double t = total.hi + x; + if (fabs(total.hi) >= fabs(x)) { + total.lo += (total.hi - t) + x; + } + else { + total.lo += (x - t) + total.hi; + } + return (CompensatedSum) {t, total.lo}; +} + +static inline double +cs_to_double(CompensatedSum total) +{ + /* Avoid losing the sign on a negative result, + and don't let adding the compensation convert + an infinite or overflowed sum to a NaN. */ + if (total.lo && isfinite(total.lo)) { + return total.hi + total.lo; + } + return total.hi; +} + /*[clinic input] sum as builtin_sum @@ -2628,8 +2671,7 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) } if (PyFloat_CheckExact(result)) { - double f_result = PyFloat_AS_DOUBLE(result); - double c = 0.0; + CompensatedSum re_sum = cs_from_double(PyFloat_AS_DOUBLE(result)); Py_SETREF(result, NULL); while(result == NULL) { item = PyIter_Next(iter); @@ -2637,28 +2679,10 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) Py_DECREF(iter); if (PyErr_Occurred()) return NULL; - /* Avoid losing the sign on a negative result, - and don't let adding the compensation convert - an infinite or overflowed sum to a NaN. */ - if (c && isfinite(c)) { - f_result += c; - } - return PyFloat_FromDouble(f_result); + return PyFloat_FromDouble(cs_to_double(re_sum)); } if (PyFloat_CheckExact(item)) { - // Improved Kahan–Babuška algorithm by Arnold Neumaier - // Neumaier, A. (1974), Rundungsfehleranalyse einiger Verfahren - // zur Summation endlicher Summen. Z. angew. Math. Mech., - // 54: 39-51. 
https://doi.org/10.1002/zamm.19740540106 - // https://en.wikipedia.org/wiki/Kahan_summation_algorithm#Further_enhancements - double x = PyFloat_AS_DOUBLE(item); - double t = f_result + x; - if (fabs(f_result) >= fabs(x)) { - c += (f_result - t) + x; - } else { - c += (x - t) + f_result; - } - f_result = t; + re_sum = cs_add(re_sum, PyFloat_AS_DOUBLE(item)); _Py_DECREF_SPECIALIZED(item, _PyFloat_ExactDealloc); continue; } @@ -2667,15 +2691,70 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) int overflow; value = PyLong_AsLongAndOverflow(item, &overflow); if (!overflow) { - f_result += (double)value; + re_sum.hi += (double)value; + Py_DECREF(item); + continue; + } + } + result = PyFloat_FromDouble(cs_to_double(re_sum)); + if (result == NULL) { + Py_DECREF(item); + Py_DECREF(iter); + return NULL; + } + temp = PyNumber_Add(result, item); + Py_DECREF(result); + Py_DECREF(item); + result = temp; + if (result == NULL) { + Py_DECREF(iter); + return NULL; + } + } + } + + if (PyComplex_CheckExact(result)) { + Py_complex z = PyComplex_AsCComplex(result); + CompensatedSum re_sum = cs_from_double(z.real); + CompensatedSum im_sum = cs_from_double(z.imag); + Py_SETREF(result, NULL); + while (result == NULL) { + item = PyIter_Next(iter); + if (item == NULL) { + Py_DECREF(iter); + if (PyErr_Occurred()) { + return NULL; + } + return PyComplex_FromDoubles(cs_to_double(re_sum), + cs_to_double(im_sum)); + } + if (PyComplex_CheckExact(item)) { + z = PyComplex_AsCComplex(item); + re_sum = cs_add(re_sum, z.real); + im_sum = cs_add(im_sum, z.imag); + Py_DECREF(item); + continue; + } + if (PyLong_Check(item)) { + long value; + int overflow; + value = PyLong_AsLongAndOverflow(item, &overflow); + if (!overflow) { + re_sum.hi += (double)value; + im_sum.hi += 0.0; Py_DECREF(item); continue; } } - if (c && isfinite(c)) { - f_result += c; + if (PyFloat_Check(item)) { + double value = PyFloat_AS_DOUBLE(item); + re_sum.hi += value; + im_sum.hi += 0.0; + _Py_DECREF_SPECIALIZED(item, _PyFloat_ExactDealloc); + continue; } - result = PyFloat_FromDouble(f_result); + result = PyComplex_FromDoubles(cs_to_double(re_sum), + cs_to_double(im_sum)); if (result == NULL) { Py_DECREF(item); Py_DECREF(iter); diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 343481e9313de4..84241c64ffae88 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -1780,13 +1780,7 @@ dummy_func( } inst(BUILD_TUPLE, (values[oparg] -- tup)) { - STACKREFS_TO_PYOBJECTS(values, oparg, values_o); - if (CONVERSION_FAILED(values_o)) { - DECREF_INPUTS(); - ERROR_IF(true, error); - } - PyObject *tup_o = _PyTuple_FromArraySteal(values_o, oparg); - STACKREFS_TO_PYOBJECTS_CLEANUP(values_o); + PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); ERROR_IF(tup_o == NULL, error); tup = PyStackRef_FromPyObjectSteal(tup_o); } @@ -2973,7 +2967,10 @@ dummy_func( assert(Py_TYPE(iter_o) == &PyListIter_Type); PyListObject *seq = it->it_seq; EXIT_IF(seq == NULL); - EXIT_IF((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)); + if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) { + it->it_index = -1; + EXIT_IF(1); + } } op(_ITER_NEXT_LIST, (iter -- iter, next)) { diff --git a/Python/ceval.c b/Python/ceval.c index a71244676f3029..d8bc830f8e80c1 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -730,15 +730,6 @@ _PyObjectArray_Free(PyObject **array, PyObject **scratch) * so consume 3 units of C stack */ #define PY_EVAL_C_STACK_UNITS 2 -#if defined(_MSC_VER) && defined(_Py_USING_PGO) -/* gh-111786: _PyEval_EvalFrameDefault is too 
large to optimize for speed with - PGO on MSVC. Disable that optimization temporarily. If this is fixed - upstream, we should gate this on the version of MSVC. - */ -# pragma optimize("t", off) -/* This setting is reversed below following _PyEval_EvalFrameDefault */ -#endif - PyObject* _Py_HOT_FUNCTION _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) { @@ -1158,7 +1149,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int # pragma GCC diagnostic pop #elif defined(_MSC_VER) /* MS_WINDOWS */ # pragma warning(pop) -# pragma optimize("", on) #endif static void @@ -1500,13 +1490,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func, u = (PyObject *)&_Py_SINGLETON(tuple_empty); } else { - assert(args != NULL); - STACKREFS_TO_PYOBJECTS((_PyStackRef *)args, argcount, args_o); - if (args_o == NULL) { - goto fail_pre_positional; - } - u = _PyTuple_FromArraySteal((args_o + n), argcount - n); - STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); + u = _PyTuple_FromStackRefSteal(args + n, argcount - n); } if (u == NULL) { goto fail_post_positional; diff --git a/Python/compile.c b/Python/compile.c index 69de0ec2996e00..4190b141324b38 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -71,20 +71,29 @@ ((C)->c_flags.cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \ && ((C)->u->u_ste->ste_type == ModuleBlock)) +struct compiler; + +typedef _PyInstruction instruction; +typedef _PyInstructionSequence instr_sequence; + +static instr_sequence *compiler_instr_sequence(struct compiler *c); +static int compiler_future_features(struct compiler *c); +static struct symtable *compiler_symtable(struct compiler *c); +static PySTEntryObject *compiler_symtable_entry(struct compiler *c); + +#define INSTR_SEQUENCE(C) compiler_instr_sequence(C) +#define FUTURE_FEATURES(C) compiler_future_features(C) +#define SYMTABLE(C) compiler_symtable(C) +#define SYMTABLE_ENTRY(C) compiler_symtable_entry(C) + typedef _Py_SourceLocation location; typedef struct _PyCfgBuilder cfg_builder; +static PyObject *compiler_maybe_mangle(struct compiler *c, PyObject *name); + #define LOCATION(LNO, END_LNO, COL, END_COL) \ ((const _Py_SourceLocation){(LNO), (END_LNO), (COL), (END_COL)}) -/* Return true if loc1 starts after loc2 ends. */ -static inline bool -location_is_after(location loc1, location loc2) { - return (loc1.lineno > loc2.end_lineno) || - ((loc1.lineno == loc2.end_lineno) && - (loc1.col_offset > loc2.end_col_offset)); -} - #define LOC(x) SRC_LOCATION_FROM_AST(x) typedef _PyJumpTargetLabel jump_target_label; @@ -137,12 +146,6 @@ enum { }; -typedef _PyInstruction instruction; -typedef _PyInstructionSequence instr_sequence; - -#define INITIAL_INSTR_SEQUENCE_SIZE 100 -#define INITIAL_INSTR_SEQUENCE_LABELS_MAP_SIZE 10 - static const int compare_masks[] = { [Py_LT] = COMPARISON_LESS_THAN, [Py_LE] = COMPARISON_LESS_THAN | COMPARISON_EQUALS, @@ -262,8 +265,6 @@ struct compiler { */ }; -#define INSTR_SEQUENCE(C) ((C)->u->u_instr_sequence) - typedef struct { // A list of strings corresponding to name captures. 
It is used to track: @@ -321,7 +322,6 @@ static int compiler_call_helper(struct compiler *c, location loc, asdl_keyword_seq *keywords); static int compiler_try_except(struct compiler *, stmt_ty); static int compiler_try_star_except(struct compiler *, stmt_ty); -static int compiler_set_qualname(struct compiler *); static int compiler_sync_comprehension_generator( struct compiler *c, location loc, @@ -562,8 +562,8 @@ compiler_unit_free(struct compiler_unit *u) PyMem_Free(u); } -static struct compiler_unit * -get_class_compiler_unit(struct compiler *c) +static int +compiler_add_static_attribute_to_class(struct compiler *c, PyObject *attr) { Py_ssize_t stack_size = PyList_GET_SIZE(c->c_stack); for (Py_ssize_t i = stack_size - 1; i >= 0; i--) { @@ -572,10 +572,12 @@ get_class_compiler_unit(struct compiler *c) capsule, CAPSULE_NAME); assert(u); if (u->u_scope_type == COMPILER_SCOPE_CLASS) { - return u; + assert(u->u_static_attributes); + RETURN_IF_ERROR(PySet_Add(u->u_static_attributes, attr)); + break; } } - return NULL; + return SUCCESS; } static int @@ -663,54 +665,6 @@ compiler_set_qualname(struct compiler *c) return SUCCESS; } -int -_PyCompile_OpcodeIsValid(int opcode) -{ - return IS_VALID_OPCODE(opcode); -} - -int -_PyCompile_OpcodeHasArg(int opcode) -{ - return OPCODE_HAS_ARG(opcode); -} - -int -_PyCompile_OpcodeHasConst(int opcode) -{ - return OPCODE_HAS_CONST(opcode); -} - -int -_PyCompile_OpcodeHasName(int opcode) -{ - return OPCODE_HAS_NAME(opcode); -} - -int -_PyCompile_OpcodeHasJump(int opcode) -{ - return OPCODE_HAS_JUMP(opcode); -} - -int -_PyCompile_OpcodeHasFree(int opcode) -{ - return OPCODE_HAS_FREE(opcode); -} - -int -_PyCompile_OpcodeHasLocal(int opcode) -{ - return OPCODE_HAS_LOCAL(opcode); -} - -int -_PyCompile_OpcodeHasExc(int opcode) -{ - return IS_BLOCK_PUSH_OPCODE(opcode); -} - static int codegen_addop_noarg(instr_sequence *seq, int opcode, location loc) { @@ -745,9 +699,11 @@ dict_add_o(PyObject *dict, PyObject *o) return arg; } -// Merge const *o* recursively and return constant key object. +/* Merge const *o* and return constant key object. + * If recursive, insert all elements if o is a tuple or frozen set. + */ static PyObject* -merge_consts_recursive(PyObject *const_cache, PyObject *o) +const_cache_insert(PyObject *const_cache, PyObject *o, bool recursive) { assert(PyDict_CheckExact(const_cache)); // None and Ellipsis are immortal objects, and key is the singleton. @@ -771,6 +727,10 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) } Py_DECREF(t); + if (!recursive) { + return key; + } + // We registered o in const_cache. // When o is a tuple or frozenset, we want to merge its // items too. 
@@ -778,7 +738,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) Py_ssize_t len = PyTuple_GET_SIZE(o); for (Py_ssize_t i = 0; i < len; i++) { PyObject *item = PyTuple_GET_ITEM(o, i); - PyObject *u = merge_consts_recursive(const_cache, item); + PyObject *u = const_cache_insert(const_cache, item, recursive); if (u == NULL) { Py_DECREF(key); return NULL; @@ -820,7 +780,7 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) PyObject *item; Py_hash_t hash; while (_PySet_NextEntry(o, &pos, &item, &hash)) { - PyObject *k = merge_consts_recursive(const_cache, item); + PyObject *k = const_cache_insert(const_cache, item, recursive); if (k == NULL) { Py_DECREF(tuple); Py_DECREF(key); @@ -854,39 +814,44 @@ merge_consts_recursive(PyObject *const_cache, PyObject *o) return key; } +static PyObject* +merge_consts_recursive(PyObject *const_cache, PyObject *o) +{ + return const_cache_insert(const_cache, o, true); +} + static Py_ssize_t -compiler_add_const(PyObject *const_cache, struct compiler_unit *u, PyObject *o) +compiler_add_const(struct compiler *c, PyObject *o) { - assert(PyDict_CheckExact(const_cache)); - PyObject *key = merge_consts_recursive(const_cache, o); + PyObject *key = merge_consts_recursive(c->c_const_cache, o); if (key == NULL) { return ERROR; } - Py_ssize_t arg = dict_add_o(u->u_metadata.u_consts, key); + Py_ssize_t arg = dict_add_o(c->u->u_metadata.u_consts, key); Py_DECREF(key); return arg; } static int -compiler_addop_load_const(PyObject *const_cache, struct compiler_unit *u, location loc, PyObject *o) +compiler_addop_load_const(struct compiler *c, location loc, PyObject *o) { - Py_ssize_t arg = compiler_add_const(const_cache, u, o); + Py_ssize_t arg = compiler_add_const(c, o); if (arg < 0) { return ERROR; } - return codegen_addop_i(u->u_instr_sequence, LOAD_CONST, arg, loc); + return codegen_addop_i(INSTR_SEQUENCE(c), LOAD_CONST, arg, loc); } static int -compiler_addop_o(struct compiler_unit *u, location loc, +compiler_addop_o(struct compiler *c, location loc, int opcode, PyObject *dict, PyObject *o) { Py_ssize_t arg = dict_add_o(dict, o); if (arg < 0) { return ERROR; } - return codegen_addop_i(u->u_instr_sequence, opcode, arg, loc); + return codegen_addop_i(INSTR_SEQUENCE(c), opcode, arg, loc); } #define LOAD_METHOD -1 @@ -895,10 +860,10 @@ compiler_addop_o(struct compiler_unit *u, location loc, #define LOAD_ZERO_SUPER_METHOD -4 static int -compiler_addop_name(struct compiler_unit *u, location loc, +compiler_addop_name(struct compiler *c, location loc, int opcode, PyObject *dict, PyObject *o) { - PyObject *mangled = _Py_MaybeMangle(u->u_private, u->u_ste, o); + PyObject *mangled = compiler_maybe_mangle(c, o); if (!mangled) { return ERROR; } @@ -933,7 +898,7 @@ compiler_addop_name(struct compiler_unit *u, location loc, arg <<= 2; arg |= 1; } - return codegen_addop_i(u->u_instr_sequence, opcode, arg, loc); + return codegen_addop_i(INSTR_SEQUENCE(c), opcode, arg, loc); } /* Add an opcode with an integer argument */ @@ -976,7 +941,7 @@ codegen_addop_j(instr_sequence *seq, location loc, #define ADDOP_IN_SCOPE(C, LOC, OP) RETURN_IF_ERROR_IN_SCOPE((C), codegen_addop_noarg(INSTR_SEQUENCE(C), (OP), (LOC))) #define ADDOP_LOAD_CONST(C, LOC, O) \ - RETURN_IF_ERROR(compiler_addop_load_const((C)->c_const_cache, (C)->u, (LOC), (O))) + RETURN_IF_ERROR(compiler_addop_load_const((C), (LOC), (O))) /* Same as ADDOP_LOAD_CONST, but steals a reference. 
*/ #define ADDOP_LOAD_CONST_NEW(C, LOC, O) { \ @@ -984,7 +949,7 @@ codegen_addop_j(instr_sequence *seq, location loc, if (__new_const == NULL) { \ return ERROR; \ } \ - if (compiler_addop_load_const((C)->c_const_cache, (C)->u, (LOC), __new_const) < 0) { \ + if (compiler_addop_load_const((C), (LOC), __new_const) < 0) { \ Py_DECREF(__new_const); \ return ERROR; \ } \ @@ -993,7 +958,7 @@ codegen_addop_j(instr_sequence *seq, location loc, #define ADDOP_N(C, LOC, OP, O, TYPE) { \ assert(!OPCODE_HAS_CONST(OP)); /* use ADDOP_LOAD_CONST_NEW */ \ - if (compiler_addop_o((C)->u, (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O)) < 0) { \ + if (compiler_addop_o((C), (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O)) < 0) { \ Py_DECREF((O)); \ return ERROR; \ } \ @@ -1001,7 +966,7 @@ codegen_addop_j(instr_sequence *seq, location loc, } #define ADDOP_NAME(C, LOC, OP, O, TYPE) \ - RETURN_IF_ERROR(compiler_addop_name((C)->u, (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O))) + RETURN_IF_ERROR(compiler_addop_name((C), (LOC), (OP), (C)->u->u_metadata.u_ ## TYPE, (O))) #define ADDOP_I(C, LOC, OP, O) \ RETURN_IF_ERROR(codegen_addop_i(INSTR_SEQUENCE(C), (OP), (O), (LOC))) @@ -1060,8 +1025,8 @@ codegen_addop_j(instr_sequence *seq, location loc, static int -compiler_enter_scope(struct compiler *c, identifier name, - int scope_type, void *key, int lineno) +compiler_enter_scope(struct compiler *c, identifier name, int scope_type, + void *key, int lineno, PyObject *private) { location loc = LOCATION(lineno, lineno, 0, 0); @@ -1140,7 +1105,6 @@ compiler_enter_scope(struct compiler *c, identifier name, return ERROR; } - u->u_private = NULL; u->u_deferred_annotations = NULL; if (scope_type == COMPILER_SCOPE_CLASS) { u->u_static_attributes = PySet_New(0); @@ -1154,6 +1118,10 @@ compiler_enter_scope(struct compiler *c, identifier name, } u->u_instr_sequence = (instr_sequence*)_PyInstructionSequence_New(); + if (!u->u_instr_sequence) { + compiler_unit_free(u); + return ERROR; + } /* Push the old compiler_unit on the stack. */ if (c->u) { @@ -1164,8 +1132,13 @@ compiler_enter_scope(struct compiler *c, identifier name, return ERROR; } Py_DECREF(capsule); - u->u_private = Py_XNewRef(c->u->u_private); + if (private == NULL) { + private = c->u->u_private; + } } + + u->u_private = Py_XNewRef(private); + c->u = u; c->c_nestlevel++; @@ -1444,7 +1417,7 @@ compiler_setup_annotations_scope(struct compiler *c, location loc, void *key, PyObject *name) { if (compiler_enter_scope(c, name, COMPILER_SCOPE_ANNOTATIONS, - key, loc.lineno) == -1) { + key, loc.lineno, NULL) == -1) { return ERROR; } c->u->u_metadata.u_posonlyargcount = 1; @@ -1498,7 +1471,7 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) /* If from __future__ import annotations is active, * every annotated class and module should have __annotations__. * Else __annotate__ is created when necessary. */ - if ((c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) && c->u->u_ste->ste_annotations_used) { + if ((FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS) && SYMTABLE_ENTRY(c)->ste_annotations_used) { ADDOP(c, loc, SETUP_ANNOTATIONS); } if (!asdl_seq_LEN(stmts)) { @@ -1530,7 +1503,7 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) // If there are annotations and the future import is not on, we // collect the annotations in a separate pass and generate an // __annotate__ function. See PEP 649. 
- if (!(c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) && + if (!(FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS) && c->u->u_deferred_annotations != NULL) { // It's possible that ste_annotations_block is set but @@ -1538,11 +1511,12 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts) // set if there are only non-simple annotations (i.e., annotations // for attributes, subscripts, or parenthesized names). However, the // reverse should not be possible. - assert(c->u->u_ste->ste_annotation_block != NULL); + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + assert(ste->ste_annotation_block != NULL); PyObject *deferred_anno = Py_NewRef(c->u->u_deferred_annotations); - void *key = (void *)((uintptr_t)c->u->u_ste->ste_id + 1); + void *key = (void *)((uintptr_t)ste->ste_id + 1); if (compiler_setup_annotations_scope(c, loc, key, - c->u->u_ste->ste_annotation_block->ste_name) == -1) { + ste->ste_annotation_block->ste_name) == -1) { Py_DECREF(deferred_anno); return ERROR; } @@ -1605,7 +1579,7 @@ compiler_enter_anonymous_scope(struct compiler* c, mod_ty mod) _Py_DECLARE_STR(anon_module, ""); RETURN_IF_ERROR( compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE, - mod, 1)); + mod, 1, NULL)); return SUCCESS; } @@ -1626,13 +1600,8 @@ compiler_mod(struct compiler *c, mod_ty mod) return co; } -/* The test for LOCAL must come before the test for FREE in order to - handle classes where name is both local and free. The local var is - a method and the free var is a free var referenced within a method. -*/ - static int -get_ref_type(struct compiler *c, PyObject *name) +compiler_get_ref_type(struct compiler *c, PyObject *name) { int scope; if (c->u->u_scope_type == COMPILER_SCOPE_CLASS && @@ -1640,15 +1609,16 @@ get_ref_type(struct compiler *c, PyObject *name) _PyUnicode_EqualToASCIIString(name, "__classdict__"))) { return CELL; } - scope = _PyST_GetScope(c->u->u_ste, name); + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + scope = _PyST_GetScope(ste, name); if (scope == 0) { PyErr_Format(PyExc_SystemError, "_PyST_GetScope(name=%R) failed: " "unknown scope in unit %S (%R); " "symbols: %R; locals: %R; globals: %R", name, - c->u->u_metadata.u_name, c->u->u_ste->ste_id, - c->u->u_ste->ste_symbols, c->u->u_metadata.u_varnames, c->u->u_metadata.u_names); + c->u->u_metadata.u_name, ste->ste_id, + ste->ste_symbols, c->u->u_metadata.u_varnames, c->u->u_metadata.u_names); return ERROR; } return scope; @@ -1682,7 +1652,7 @@ compiler_make_closure(struct compiler *c, location loc, class. It should be handled by the closure, as well as by the normal name lookup logic. 
*/ - int reftype = get_ref_type(c, name); + int reftype = compiler_get_ref_type(c, name); if (reftype == -1) { return ERROR; } @@ -1778,7 +1748,7 @@ compiler_kwonlydefaults(struct compiler *c, location loc, arg_ty arg = asdl_seq_GET(kwonlyargs, i); expr_ty default_ = asdl_seq_GET(kw_defaults, i); if (default_) { - PyObject *mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, arg->arg); + PyObject *mangled = compiler_maybe_mangle(c, arg->arg); if (!mangled) { goto error; } @@ -1835,14 +1805,14 @@ compiler_argannotation(struct compiler *c, identifier id, if (!annotation) { return SUCCESS; } - PyObject *mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, id); + PyObject *mangled = compiler_maybe_mangle(c, id); if (!mangled) { return ERROR; } ADDOP_LOAD_CONST(c, loc, mangled); Py_DECREF(mangled); - if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { + if (FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS) { VISIT(c, annexpr, annotation); } else { @@ -1924,7 +1894,7 @@ compiler_annotations(struct compiler *c, location loc, Py_ssize_t annotations_len = 0; PySTEntryObject *ste; - if (_PySymtable_LookupOptional(c->c_st, args, &ste) < 0) { + if (_PySymtable_LookupOptional(SYMTABLE(c), args, &ste) < 0) { return ERROR; } assert(ste != NULL); @@ -2060,7 +2030,7 @@ compiler_type_param_bound_or_default(struct compiler *c, expr_ty e, bool allow_starred) { if (compiler_enter_scope(c, name, COMPILER_SCOPE_ANNOTATIONS, - key, e->lineno) == -1) { + key, e->lineno, NULL) == -1) { return ERROR; } if (allow_starred && e->kind == Starred_kind) { @@ -2205,7 +2175,7 @@ compiler_function_body(struct compiler *c, stmt_ty s, int is_async, Py_ssize_t f } RETURN_IF_ERROR( - compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno)); + compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno, NULL)); Py_ssize_t first_instr = 0; PyObject *docstring = _PyAST_GetDocString(body); @@ -2223,7 +2193,7 @@ compiler_function_body(struct compiler *c, stmt_ty s, int is_async, Py_ssize_t f docstring = NULL; } } - if (compiler_add_const(c->c_const_cache, c->u, docstring ? docstring : Py_None) < 0) { + if (compiler_add_const(c, docstring ? docstring : Py_None) < 0) { Py_XDECREF(docstring); compiler_exit_scope(c); return ERROR; @@ -2236,7 +2206,8 @@ compiler_function_body(struct compiler *c, stmt_ty s, int is_async, Py_ssize_t f NEW_JUMP_TARGET_LABEL(c, start); USE_LABEL(c, start); - bool add_stopiteration_handler = c->u->u_ste->ste_coroutine || c->u->u_ste->ste_generator; + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + bool add_stopiteration_handler = ste->ste_coroutine || ste->ste_generator; if (add_stopiteration_handler) { /* wrap_in_stopiteration_handler will push a block, so we need to account for that */ RETURN_IF_ERROR( @@ -2332,7 +2303,7 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) return ERROR; } if (compiler_enter_scope(c, type_params_name, COMPILER_SCOPE_ANNOTATIONS, - (void *)type_params, firstlineno) == -1) { + (void *)type_params, firstlineno, NULL) == -1) { Py_DECREF(type_params_name); return ERROR; } @@ -2415,12 +2386,10 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) /* 1. 
compile the class body into a code object */ RETURN_IF_ERROR( - compiler_enter_scope(c, s->v.ClassDef.name, - COMPILER_SCOPE_CLASS, (void *)s, firstlineno)); + compiler_enter_scope(c, s->v.ClassDef.name, COMPILER_SCOPE_CLASS, + (void *)s, firstlineno, s->v.ClassDef.name)); location loc = LOCATION(firstlineno, firstlineno, 0, 0); - /* use the class name for name mangling */ - Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name)); /* load (global) __name__ ... */ if (compiler_nameop(c, loc, &_Py_ID(__name__), Load) < 0) { compiler_exit_scope(c); @@ -2449,14 +2418,14 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) return ERROR; } } - if (c->u->u_ste->ste_needs_classdict) { + if (SYMTABLE_ENTRY(c)->ste_needs_classdict) { ADDOP(c, loc, LOAD_LOCALS); // We can't use compiler_nameop here because we need to generate a // STORE_DEREF in a class namespace, and compiler_nameop() won't do // that by default. PyObject *cellvars = c->u->u_metadata.u_cellvars; - if (compiler_addop_o(c->u, loc, STORE_DEREF, cellvars, + if (compiler_addop_o(c, loc, STORE_DEREF, cellvars, &_Py_ID(__classdict__)) < 0) { compiler_exit_scope(c); return ERROR; @@ -2481,7 +2450,7 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) } /* The following code is artificial */ /* Set __classdictcell__ if necessary */ - if (c->u->u_ste->ste_needs_classdict) { + if (SYMTABLE_ENTRY(c)->ste_needs_classdict) { /* Store __classdictcell__ into class namespace */ int i = compiler_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__classdict__)); if (i < 0) { @@ -2495,7 +2464,7 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) } } /* Return __classcell__ if it is referenced, otherwise return None */ - if (c->u->u_ste->ste_needs_class_closure) { + if (SYMTABLE_ENTRY(c)->ste_needs_class_closure) { /* Store __classcell__ into class namespace & return it */ int i = compiler_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__class__)); if (i < 0) { @@ -2566,12 +2535,11 @@ compiler_class(struct compiler *c, stmt_ty s) return ERROR; } if (compiler_enter_scope(c, type_params_name, COMPILER_SCOPE_ANNOTATIONS, - (void *)type_params, firstlineno) == -1) { + (void *)type_params, firstlineno, s->v.ClassDef.name) == -1) { Py_DECREF(type_params_name); return ERROR; } Py_DECREF(type_params_name); - Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name)); RETURN_IF_ERROR_IN_SCOPE(c, compiler_type_params(c, type_params)); _Py_DECLARE_STR(type_params, ".type_params"); RETURN_IF_ERROR_IN_SCOPE(c, compiler_nameop(c, loc, &_Py_STR(type_params), Store)); @@ -2651,10 +2619,10 @@ compiler_typealias_body(struct compiler *c, stmt_ty s) location loc = LOC(s); PyObject *name = s->v.TypeAlias.name->v.Name.id; RETURN_IF_ERROR( - compiler_enter_scope(c, name, COMPILER_SCOPE_FUNCTION, s, loc.lineno)); + compiler_enter_scope(c, name, COMPILER_SCOPE_FUNCTION, s, loc.lineno, NULL)); /* Make None the first constant, so the evaluate function can't have a docstring. 
*/ - RETURN_IF_ERROR(compiler_add_const(c->c_const_cache, c->u, Py_None)); + RETURN_IF_ERROR(compiler_add_const(c, Py_None)); VISIT_IN_SCOPE(c, expr, s->v.TypeAlias.value); ADDOP_IN_SCOPE(c, loc, RETURN_VALUE); PyCodeObject *co = optimize_and_assemble(c, 0); @@ -2686,13 +2654,13 @@ compiler_typealias(struct compiler *c, stmt_ty s) return ERROR; } if (compiler_enter_scope(c, type_params_name, COMPILER_SCOPE_ANNOTATIONS, - (void *)type_params, loc.lineno) == -1) { + (void *)type_params, loc.lineno, NULL) == -1) { Py_DECREF(type_params_name); return ERROR; } Py_DECREF(type_params_name); RETURN_IF_ERROR_IN_SCOPE( - c, compiler_addop_load_const(c->c_const_cache, c->u, loc, name) + c, compiler_addop_load_const(c, loc, name) ); RETURN_IF_ERROR_IN_SCOPE(c, compiler_type_params(c, type_params)); } @@ -2955,17 +2923,17 @@ compiler_lambda(struct compiler *c, expr_ty e) _Py_DECLARE_STR(anon_lambda, ""); RETURN_IF_ERROR( compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA, - (void *)e, e->lineno)); + (void *)e, e->lineno, NULL)); /* Make None the first constant, so the lambda can't have a docstring. */ - RETURN_IF_ERROR(compiler_add_const(c->c_const_cache, c->u, Py_None)); + RETURN_IF_ERROR(compiler_add_const(c, Py_None)); c->u->u_metadata.u_argcount = asdl_seq_LEN(args->args); c->u->u_metadata.u_posonlyargcount = asdl_seq_LEN(args->posonlyargs); c->u->u_metadata.u_kwonlyargcount = asdl_seq_LEN(args->kwonlyargs); VISIT_IN_SCOPE(c, expr, e->v.Lambda.body); - if (c->u->u_ste->ste_generator) { + if (SYMTABLE_ENTRY(c)->ste_generator) { co = optimize_and_assemble(c, 0); } else { @@ -3066,11 +3034,6 @@ static int compiler_async_for(struct compiler *c, stmt_ty s) { location loc = LOC(s); - if (IS_TOP_LEVEL_AWAIT(c)){ - c->u->u_ste->ste_coroutine = 1; - } else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION) { - return compiler_error(c, loc, "'async for' outside async function"); - } NEW_JUMP_TARGET_LABEL(c, start); NEW_JUMP_TARGET_LABEL(c, except); @@ -3146,12 +3109,12 @@ compiler_return(struct compiler *c, stmt_ty s) location loc = LOC(s); int preserve_tos = ((s->v.Return.value != NULL) && (s->v.Return.value->kind != Constant_kind)); - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + if (!_PyST_IsFunctionLike(ste)) { return compiler_error(c, loc, "'return' outside function"); } - if (s->v.Return.value != NULL && - c->u->u_ste->ste_coroutine && c->u->u_ste->ste_generator) - { + if (s->v.Return.value != NULL && ste->ste_coroutine && ste->ste_generator) { return compiler_error(c, loc, "'return' with value in async generator"); } @@ -3847,14 +3810,6 @@ compiler_from_import(struct compiler *c, stmt_ty s) PyTuple_SET_ITEM(names, i, Py_NewRef(alias->name)); } - if (location_is_after(LOC(s), c->c_future.ff_location) && - s->v.ImportFrom.module && s->v.ImportFrom.level == 0 && - _PyUnicode_EqualToASCIIString(s->v.ImportFrom.module, "__future__")) - { - Py_DECREF(names); - return compiler_error(c, LOC(s), "from __future__ imports must occur " - "at the beginning of the file"); - } ADDOP_LOAD_CONST_NEW(c, LOC(s), names); if (s->v.ImportFrom.module) { @@ -4109,7 +4064,8 @@ addop_binary(struct compiler *c, location loc, operator_ty binop, static int addop_yield(struct compiler *c, location loc) { - if (c->u->u_ste->ste_generator && c->u->u_ste->ste_coroutine) { + PySTEntryObject *ste = SYMTABLE_ENTRY(c); + if (ste->ste_generator && ste->ste_coroutine) { ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_ASYNC_GEN_WRAP); } ADDOP_I(c, loc, YIELD_VALUE, 0); @@ 
-4136,14 +4092,14 @@ compiler_nameop(struct compiler *c, location loc, return ERROR; } - mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, name); + mangled = compiler_maybe_mangle(c, name); if (!mangled) { return ERROR; } op = 0; optype = OP_NAME; - scope = _PyST_GetScope(c->u->u_ste, mangled); + scope = _PyST_GetScope(SYMTABLE_ENTRY(c), mangled); switch (scope) { case FREE: dict = c->u->u_metadata.u_freevars; @@ -4154,7 +4110,7 @@ compiler_nameop(struct compiler *c, location loc, optype = OP_DEREF; break; case LOCAL: - if (_PyST_IsFunctionLike(c->u->u_ste)) { + if (_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { optype = OP_FAST; } else { @@ -4170,7 +4126,7 @@ compiler_nameop(struct compiler *c, location loc, } break; case GLOBAL_IMPLICIT: - if (_PyST_IsFunctionLike(c->u->u_ste)) + if (_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) optype = OP_GLOBAL; break; case GLOBAL_EXPLICIT: @@ -4188,17 +4144,17 @@ compiler_nameop(struct compiler *c, location loc, case OP_DEREF: switch (ctx) { case Load: - if (c->u->u_ste->ste_type == ClassBlock && !c->u->u_in_inlined_comp) { + if (SYMTABLE_ENTRY(c)->ste_type == ClassBlock && !c->u->u_in_inlined_comp) { op = LOAD_FROM_DICT_OR_DEREF; // First load the locals if (codegen_addop_noarg(INSTR_SEQUENCE(c), LOAD_LOCALS, loc) < 0) { goto error; } } - else if (c->u->u_ste->ste_can_see_class_scope) { + else if (SYMTABLE_ENTRY(c)->ste_can_see_class_scope) { op = LOAD_FROM_DICT_OR_DEREF; // First load the classdict - if (compiler_addop_o(c->u, loc, LOAD_DEREF, + if (compiler_addop_o(c, loc, LOAD_DEREF, c->u->u_metadata.u_freevars, &_Py_ID(__classdict__)) < 0) { goto error; } @@ -4222,10 +4178,10 @@ compiler_nameop(struct compiler *c, location loc, case OP_GLOBAL: switch (ctx) { case Load: - if (c->u->u_ste->ste_can_see_class_scope && scope == GLOBAL_IMPLICIT) { + if (SYMTABLE_ENTRY(c)->ste_can_see_class_scope && scope == GLOBAL_IMPLICIT) { op = LOAD_FROM_DICT_OR_GLOBALS; // First load the classdict - if (compiler_addop_o(c->u, loc, LOAD_DEREF, + if (compiler_addop_o(c, loc, LOAD_DEREF, c->u->u_metadata.u_freevars, &_Py_ID(__classdict__)) < 0) { goto error; } @@ -4240,7 +4196,7 @@ compiler_nameop(struct compiler *c, location loc, case OP_NAME: switch (ctx) { case Load: - op = (c->u->u_ste->ste_type == ClassBlock + op = (SYMTABLE_ENTRY(c)->ste_type == ClassBlock && c->u->u_in_inlined_comp) ? 
LOAD_GLOBAL : LOAD_NAME; @@ -4745,7 +4701,7 @@ is_import_originated(struct compiler *c, expr_ty e) return 0; } - long flags = _PyST_GetSymbol(c->c_st->st_top, e->v.Name.id); + long flags = _PyST_GetSymbol(SYMTABLE(c)->st_top, e->v.Name.id); return flags & DEF_IMPORT; } @@ -4764,11 +4720,11 @@ can_optimize_super_call(struct compiler *c, expr_ty attr) PyObject *super_name = e->v.Call.func->v.Name.id; // detect statically-visible shadowing of 'super' name - int scope = _PyST_GetScope(c->u->u_ste, super_name); + int scope = _PyST_GetScope(SYMTABLE_ENTRY(c), super_name); if (scope != GLOBAL_IMPLICIT) { return 0; } - scope = _PyST_GetScope(c->c_st->st_top, super_name); + scope = _PyST_GetScope(SYMTABLE(c)->st_top, super_name); if (scope != 0) { return 0; } @@ -4796,7 +4752,7 @@ can_optimize_super_call(struct compiler *c, expr_ty attr) return 0; } // __class__ cell should be available - if (get_ref_type(c, &_Py_ID(__class__)) == FREE) { + if (compiler_get_ref_type(c, &_Py_ID(__class__)) == FREE) { return 1; } return 0; @@ -4818,7 +4774,7 @@ load_args_for_super(struct compiler *c, expr_ty e) { // load __class__ cell PyObject *name = &_Py_ID(__class__); - assert(get_ref_type(c, name) == FREE); + assert(compiler_get_ref_type(c, name) == FREE); RETURN_IF_ERROR(compiler_nameop(c, loc, name, Load)); // load self (first argument) @@ -5490,7 +5446,7 @@ push_inlined_comprehension_state(struct compiler *c, location loc, PySTEntryObject *entry, inlined_comprehension_state *state) { - int in_class_block = (c->u->u_ste->ste_type == ClassBlock) && !c->u->u_in_inlined_comp; + int in_class_block = (SYMTABLE_ENTRY(c)->ste_type == ClassBlock) && !c->u->u_in_inlined_comp; c->u->u_in_inlined_comp++; // iterate over names bound in the comprehension and ensure we isolate // them from the outer scope as needed @@ -5500,7 +5456,7 @@ push_inlined_comprehension_state(struct compiler *c, location loc, assert(PyLong_Check(v)); long symbol = PyLong_AS_LONG(v); long scope = (symbol >> SCOPE_OFFSET) & SCOPE_MASK; - PyObject *outv = PyDict_GetItemWithError(c->u->u_ste->ste_symbols, k); + PyObject *outv = PyDict_GetItemWithError(SYMTABLE_ENTRY(c)->ste_symbols, k); if (outv == NULL) { if (PyErr_Occurred()) { return ERROR; @@ -5529,7 +5485,7 @@ push_inlined_comprehension_state(struct compiler *c, location loc, // the outer version; we'll restore it after running the // comprehension Py_INCREF(outv); - if (PyDict_SetItem(c->u->u_ste->ste_symbols, k, v) < 0) { + if (PyDict_SetItem(SYMTABLE_ENTRY(c)->ste_symbols, k, v) < 0) { Py_DECREF(outv); return ERROR; } @@ -5542,7 +5498,7 @@ push_inlined_comprehension_state(struct compiler *c, location loc, // locals handling for names bound in comprehension (DEF_LOCAL | // DEF_NONLOCAL occurs in assignment expression to nonlocal) if ((symbol & DEF_LOCAL && !(symbol & DEF_NONLOCAL)) || in_class_block) { - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + if (!_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { // non-function scope: override this name to use fast locals PyObject *orig; if (PyDict_GetItemRef(c->u->u_metadata.u_fasthidden, k, &orig) < 0) { @@ -5644,7 +5600,7 @@ pop_inlined_comprehension_state(struct compiler *c, location loc, Py_ssize_t pos = 0; if (state.temp_symbols) { while (PyDict_Next(state.temp_symbols, &pos, &k, &v)) { - if (PyDict_SetItem(c->u->u_ste->ste_symbols, k, v)) { + if (PyDict_SetItem(SYMTABLE_ENTRY(c)->ste_symbols, k, v)) { return ERROR; } } @@ -5713,7 +5669,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, comprehension_ty outermost; int scope_type = 
c->u->u_scope_type; int is_top_level_await = IS_TOP_LEVEL_AWAIT(c); - PySTEntryObject *entry = _PySymtable_Lookup(c->c_st, (void *)e); + PySTEntryObject *entry = _PySymtable_Lookup(SYMTABLE(c), (void *)e); if (entry == NULL) { goto error; } @@ -5733,7 +5689,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, } else { if (compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION, - (void *)e, e->lineno) < 0) + (void *)e, e->lineno, NULL) < 0) { goto error; } @@ -5797,9 +5753,6 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, co = optimize_and_assemble(c, 1); compiler_exit_scope(c); - if (is_top_level_await && is_async_generator){ - c->u->u_ste->ste_coroutine = 1; - } if (co == NULL) { goto error; } @@ -5941,11 +5894,6 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos) withitem_ty item = asdl_seq_GET(s->v.AsyncWith.items, pos); assert(s->kind == AsyncWith_kind); - if (IS_TOP_LEVEL_AWAIT(c)){ - c->u->u_ste->ste_coroutine = 1; - } else if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION){ - return compiler_error(c, loc, "'async with' outside async function"); - } NEW_JUMP_TARGET_LABEL(c, block); NEW_JUMP_TARGET_LABEL(c, final); @@ -6158,7 +6106,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e) case DictComp_kind: return compiler_dictcomp(c, e); case Yield_kind: - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + if (!_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { return compiler_error(c, loc, "'yield' outside function"); } if (e->v.Yield.value) { @@ -6170,7 +6118,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e) ADDOP_YIELD(c, loc); break; case YieldFrom_kind: - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + if (!_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { return compiler_error(c, loc, "'yield' outside function"); } if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) { @@ -6183,7 +6131,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e) break; case Await_kind: if (!IS_TOP_LEVEL_AWAIT(c)){ - if (!_PyST_IsFunctionLike(c->u->u_ste)) { + if (!_PyST_IsFunctionLike(SYMTABLE_ENTRY(c))) { return compiler_error(c, loc, "'await' outside function"); } @@ -6223,13 +6171,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e) if (e->v.Attribute.value->kind == Name_kind && _PyUnicode_EqualToASCIIString(e->v.Attribute.value->v.Name.id, "self")) { - struct compiler_unit *class_u = get_class_compiler_unit(c); - if (class_u != NULL) { - assert(class_u->u_scope_type == COMPILER_SCOPE_CLASS); - assert(class_u->u_static_attributes); - RETURN_IF_ERROR( - PySet_Add(class_u->u_static_attributes, e->v.Attribute.attr)); - } + RETURN_IF_ERROR(compiler_add_static_attribute_to_class(c, e->v.Attribute.attr)); } VISIT(c, expr, e->v.Attribute.value); loc = LOC(e); @@ -6376,7 +6318,7 @@ check_annotation(struct compiler *c, stmt_ty s) { /* Annotations of complex targets does not produce anything under annotations future */ - if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) { + if (FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS) { return SUCCESS; } @@ -6423,7 +6365,7 @@ compiler_annassign(struct compiler *c, stmt_ty s) { location loc = LOC(s); expr_ty targ = s->v.AnnAssign.target; - bool future_annotations = c->c_future.ff_features & CO_FUTURE_ANNOTATIONS; + bool future_annotations = FUTURE_FEATURES(c) & CO_FUTURE_ANNOTATIONS; PyObject *mangled; assert(s->kind == AnnAssign_kind); @@ -6445,7 +6387,7 @@ compiler_annassign(struct compiler *c, stmt_ty s) if (future_annotations) { VISIT(c, annexpr, s->v.AnnAssign.annotation); ADDOP_NAME(c, loc, LOAD_NAME, 
&_Py_ID(__annotations__), names); - mangled = _Py_MaybeMangle(c->u->u_private, c->u->u_ste, targ->v.Name.id); + mangled = compiler_maybe_mangle(c, targ->v.Name.id); ADDOP_LOAD_CONST_NEW(c, loc, mangled); ADDOP(c, loc, STORE_SUBSCR); } @@ -7487,19 +7429,47 @@ consts_dict_keys_inorder(PyObject *dict) return consts; } +static PyObject * +compiler_maybe_mangle(struct compiler *c, PyObject *name) +{ + return _Py_MaybeMangle(c->u->u_private, c->u->u_ste, name); +} + +static instr_sequence * +compiler_instr_sequence(struct compiler *c) +{ + return c->u->u_instr_sequence; +} + +static int +compiler_future_features(struct compiler *c) +{ + return c->c_future.ff_features; +} + +static struct symtable * +compiler_symtable(struct compiler *c) +{ + return c->c_st; +} + +static PySTEntryObject * +compiler_symtable_entry(struct compiler *c) +{ + return c->u->u_ste; +} + static int compute_code_flags(struct compiler *c) { - PySTEntryObject *ste = c->u->u_ste; + PySTEntryObject *ste = SYMTABLE_ENTRY(c); int flags = 0; - if (_PyST_IsFunctionLike(c->u->u_ste)) { + if (_PyST_IsFunctionLike(ste)) { flags |= CO_NEWLOCALS | CO_OPTIMIZED; if (ste->ste_nested) flags |= CO_NESTED; if (ste->ste_generator && !ste->ste_coroutine) flags |= CO_GENERATOR; - if (!ste->ste_generator && ste->ste_coroutine) - flags |= CO_COROUTINE; if (ste->ste_generator && ste->ste_coroutine) flags |= CO_ASYNC_GENERATOR; if (ste->ste_varargs) @@ -7508,49 +7478,33 @@ compute_code_flags(struct compiler *c) flags |= CO_VARKEYWORDS; } - /* (Only) inherit compilerflags in PyCF_MASK */ - flags |= (c->c_flags.cf_flags & PyCF_MASK); - - if ((IS_TOP_LEVEL_AWAIT(c)) && - ste->ste_coroutine && - !ste->ste_generator) { + if (ste->ste_coroutine && !ste->ste_generator) { + assert (IS_TOP_LEVEL_AWAIT(c) || _PyST_IsFunctionLike(ste)); flags |= CO_COROUTINE; } + /* (Only) inherit compilerflags in PyCF_MASK */ + flags |= (c->c_flags.cf_flags & PyCF_MASK); + return flags; } -// Merge *obj* with constant cache. -// Unlike merge_consts_recursive(), this function doesn't work recursively. +// Merge *obj* with constant cache, without recursion. int _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj) { - assert(PyDict_CheckExact(const_cache)); - PyObject *key = _PyCode_ConstantKey(*obj); + PyObject *key = const_cache_insert(const_cache, *obj, false); if (key == NULL) { return ERROR; } - - PyObject *t; - int res = PyDict_SetDefaultRef(const_cache, key, key, &t); - Py_DECREF(key); - if (res < 0) { - return ERROR; - } - if (res == 0) { // inserted: obj is new constant. 
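_PyCompile_ConstCacheMergeOne() above now goes through const_cache_insert(const_cache, obj, false) instead of probing the dict itself. Stripped of recursion and of the real _PyCode_ConstantKey(), the merge amounts to a setdefault on a key that captures both type and value; a rough Python sketch follows (the (type, value) key is a deliberate simplification, the real key also distinguishes 0.0 from -0.0, nested tuples, frozensets, and so on):

    def const_cache_merge_one(const_cache, obj):
        key = (type(obj), obj)                   # stand-in for _PyCode_ConstantKey()
        return const_cache.setdefault(key, obj)  # PyDict_SetDefaultRef() analogue

    cache = {}
    a = const_cache_merge_one(cache, tuple([1, 2]))
    b = const_cache_merge_one(cache, tuple([1, 2]))  # equal but distinct tuple
    assert a is b   # the later caller reuses the object already in the cache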
- Py_DECREF(t); - return SUCCESS; - } - - if (PyTuple_CheckExact(t)) { - PyObject *item = PyTuple_GET_ITEM(t, 1); + if (PyTuple_CheckExact(key)) { + PyObject *item = PyTuple_GET_ITEM(key, 1); Py_SETREF(*obj, Py_NewRef(item)); - Py_DECREF(t); + Py_DECREF(key); } else { - Py_SETREF(*obj, t); + Py_SETREF(*obj, key); } - return SUCCESS; } diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index d70a57a9a8ffbe..8f6bc75b528d9b 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -1816,15 +1816,7 @@ _PyStackRef tup; oparg = CURRENT_OPARG(); values = &stack_pointer[-oparg]; - STACKREFS_TO_PYOBJECTS(values, oparg, values_o); - if (CONVERSION_FAILED(values_o)) { - for (int _i = oparg; --_i >= 0;) { - PyStackRef_CLOSE(values[_i]); - } - if (true) JUMP_TO_ERROR(); - } - PyObject *tup_o = _PyTuple_FromArraySteal(values_o, oparg); - STACKREFS_TO_PYOBJECTS_CLEANUP(values_o); + PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); if (tup_o == NULL) JUMP_TO_ERROR(); tup = PyStackRef_FromPyObjectSteal(tup_o); stack_pointer[-oparg] = tup; @@ -3063,8 +3055,11 @@ JUMP_TO_JUMP_TARGET(); } if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); + it->it_index = -1; + if (1) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } } break; } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 32b22aff14a768..61057221291c0a 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -769,15 +769,7 @@ _PyStackRef *values; _PyStackRef tup; values = &stack_pointer[-oparg]; - STACKREFS_TO_PYOBJECTS(values, oparg, values_o); - if (CONVERSION_FAILED(values_o)) { - for (int _i = oparg; --_i >= 0;) { - PyStackRef_CLOSE(values[_i]); - } - if (true) { stack_pointer += -oparg; goto error; } - } - PyObject *tup_o = _PyTuple_FromArraySteal(values_o, oparg); - STACKREFS_TO_PYOBJECTS_CLEANUP(values_o); + PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); if (tup_o == NULL) { stack_pointer += -oparg; goto error; } tup = PyStackRef_FromPyObjectSteal(tup_o); stack_pointer[-oparg] = tup; diff --git a/Python/import.c b/Python/import.c index 20ad10020044df..40b7feac001d6e 100644 --- a/Python/import.c +++ b/Python/import.c @@ -1532,6 +1532,35 @@ switch_to_main_interpreter(PyThreadState *tstate) return main_tstate; } +static void +switch_back_from_main_interpreter(PyThreadState *tstate, + PyThreadState *main_tstate, + PyObject *tempobj) +{ + assert(main_tstate == PyThreadState_GET()); + assert(_Py_IsMainInterpreter(main_tstate->interp)); + assert(tstate->interp != main_tstate->interp); + + /* Handle any exceptions, which we cannot propagate directly + * to the subinterpreter. */ + if (PyErr_Occurred()) { + if (PyErr_ExceptionMatches(PyExc_MemoryError)) { + /* We trust it will be caught again soon. */ + PyErr_Clear(); + } + else { + /* Printing the exception should be sufficient. */ + PyErr_PrintEx(0); + } + } + + Py_XDECREF(tempobj); + + PyThreadState_Clear(main_tstate); + (void)PyThreadState_Swap(tstate); + PyThreadState_Delete(main_tstate); +} + static PyObject * get_core_module_dict(PyInterpreterState *interp, PyObject *name, PyObject *path) @@ -2027,27 +2056,10 @@ import_run_extension(PyThreadState *tstate, PyModInitFunction p0, /* Switch back to the subinterpreter. */ if (switched) { assert(main_tstate != tstate); - - /* Handle any exceptions, which we cannot propagate directly - * to the subinterpreter. 
*/ - if (PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_MemoryError)) { - /* We trust it will be caught again soon. */ - PyErr_Clear(); - } - else { - /* Printing the exception should be sufficient. */ - PyErr_PrintEx(0); - } - } - + switch_back_from_main_interpreter(tstate, main_tstate, mod); /* Any module we got from the init function will have to be * reloaded in the subinterpreter. */ - Py_CLEAR(mod); - - PyThreadState_Clear(main_tstate); - (void)PyThreadState_Swap(tstate); - PyThreadState_Delete(main_tstate); + mod = NULL; } /*****************************************************************/ @@ -2141,9 +2153,21 @@ clear_singlephase_extension(PyInterpreterState *interp, } } + /* We must use the main interpreter to clean up the cache. + * See the note in import_run_extension(). */ + PyThreadState *tstate = PyThreadState_GET(); + PyThreadState *main_tstate = switch_to_main_interpreter(tstate); + if (main_tstate == NULL) { + return -1; + } + /* Clear the cached module def. */ _extensions_cache_delete(path, name); + if (main_tstate != tstate) { + switch_back_from_main_interpreter(tstate, main_tstate, NULL); + } + return 0; } diff --git a/Python/lock.c b/Python/lock.c index 7c6a5175e88ff1..57675fe1873fa2 100644 --- a/Python/lock.c +++ b/Python/lock.c @@ -514,6 +514,7 @@ void _PySeqLock_LockWrite(_PySeqLock *seqlock) } else if (_Py_atomic_compare_exchange_uint32(&seqlock->sequence, &prev, prev + 1)) { // We've locked the cache + _Py_atomic_fence_release(); break; } else { @@ -547,28 +548,31 @@ uint32_t _PySeqLock_BeginRead(_PySeqLock *seqlock) return sequence; } -uint32_t _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous) +int _PySeqLock_EndRead(_PySeqLock *seqlock, uint32_t previous) { - // Synchronize again and validate that the entry hasn't been updated - // while we were readying the values. - if (_Py_atomic_load_uint32_acquire(&seqlock->sequence) == previous) { + // gh-121368: We need an explicit acquire fence here to ensure that + // this load of the sequence number is not reordered before any loads + // within the read lock. + _Py_atomic_fence_acquire(); + + if (_Py_atomic_load_uint32_relaxed(&seqlock->sequence) == previous) { return 1; - } + } - _Py_yield(); - return 0; + _Py_yield(); + return 0; } -uint32_t _PySeqLock_AfterFork(_PySeqLock *seqlock) +int _PySeqLock_AfterFork(_PySeqLock *seqlock) { // Synchronize again and validate that the entry hasn't been updated // while we were readying the values. 
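The lock.c hunk above adds an explicit release fence once the writer wins the compare-exchange and replaces the acquire load in _PySeqLock_EndRead() with an explicit acquire fence (gh-121368), so loads performed during the optimistic read cannot be reordered past the final sequence check. The fences themselves have no Python spelling, but the retry protocol is easy to model; the class below is a toy, not the C API:

    import threading

    class SeqLock:
        # Toy model of _PySeqLock: the counter is odd while a write is in
        # progress; readers retry if it was odd or changed during the read.
        def __init__(self):
            self.sequence = 0
            self._writer = threading.Lock()

        def locked_write(self, update):
            with self._writer:
                self.sequence += 1       # odd: update in progress
                try:
                    update()
                finally:
                    self.sequence += 1   # even again: update published

        def begin_read(self):
            while True:
                seq = self.sequence
                if seq % 2 == 0:         # i.e. not SEQLOCK_IS_UPDATING()
                    return seq

        def end_read(self, previous):
            # _PySeqLock_EndRead(): the read is valid only if the sequence
            # did not move underneath us.
            return self.sequence == previous

    lock, shared = SeqLock(), {"value": 0}

    def read_snapshot():
        while True:
            seq = lock.begin_read()
            snapshot = dict(shared)      # the unsynchronized read
            if lock.end_read(seq):
                return snapshot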
- if (SEQLOCK_IS_UPDATING(seqlock->sequence)) { + if (SEQLOCK_IS_UPDATING(seqlock->sequence)) { seqlock->sequence = 0; return 1; - } + } - return 0; + return 0; } #undef PyMutex_Lock diff --git a/Python/symtable.c b/Python/symtable.c index 2e56ea6e830846..10103dbc2582a2 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -70,17 +70,21 @@ #define DUPLICATE_TYPE_PARAM \ "duplicate type parameter '%U'" +#define ASYNC_WITH_OUTISDE_ASYNC_FUNC \ +"'async with' outside async function" -#define LOCATION(x) \ - (x)->lineno, (x)->col_offset, (x)->end_lineno, (x)->end_col_offset +#define ASYNC_FOR_OUTISDE_ASYNC_FUNC \ +"'async for' outside async function" -#define ST_LOCATION(x) \ - (x)->ste_lineno, (x)->ste_col_offset, (x)->ste_end_lineno, (x)->ste_end_col_offset +#define LOCATION(x) SRC_LOCATION_FROM_AST(x) + +#define SET_ERROR_LOCATION(FNAME, L) \ + PyErr_RangedSyntaxLocationObject((FNAME), \ + (L).lineno, (L).col_offset + 1, (L).end_lineno, (L).end_col_offset + 1) static PySTEntryObject * ste_new(struct symtable *st, identifier name, _Py_block_ty block, - void *key, int lineno, int col_offset, - int end_lineno, int end_col_offset) + void *key, _Py_SourceLocation loc) { PySTEntryObject *ste = NULL; PyObject *k = NULL; @@ -112,13 +116,8 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block, ste->ste_free = 0; ste->ste_varargs = 0; ste->ste_varkeywords = 0; - ste->ste_opt_lineno = 0; - ste->ste_opt_col_offset = 0; ste->ste_annotations_used = 0; - ste->ste_lineno = lineno; - ste->ste_col_offset = col_offset; - ste->ste_end_lineno = end_lineno; - ste->ste_end_col_offset = end_col_offset; + ste->ste_loc = loc; if (st->st_cur != NULL && (st->st_cur->ste_nested || @@ -158,7 +157,7 @@ static PyObject * ste_repr(PySTEntryObject *ste) { return PyUnicode_FromFormat("", - ste->ste_name, ste->ste_id, ste->ste_lineno); + ste->ste_name, ste->ste_id, ste->ste_loc.lineno); } static void @@ -186,7 +185,7 @@ static PyMemberDef ste_memberlist[] = { {"children", _Py_T_OBJECT, OFF(ste_children), Py_READONLY}, {"nested", Py_T_INT, OFF(ste_nested), Py_READONLY}, {"type", Py_T_INT, OFF(ste_type), Py_READONLY}, - {"lineno", Py_T_INT, OFF(ste_lineno), Py_READONLY}, + {"lineno", Py_T_INT, OFF(ste_loc.lineno), Py_READONLY}, {NULL} }; @@ -233,9 +232,7 @@ PyTypeObject PySTEntry_Type = { static int symtable_analyze(struct symtable *st); static int symtable_enter_block(struct symtable *st, identifier name, - _Py_block_ty block, void *ast, - int lineno, int col_offset, - int end_lineno, int end_col_offset); + _Py_block_ty block, void *ast, _Py_SourceLocation loc); static int symtable_exit_block(struct symtable *st); static int symtable_visit_stmt(struct symtable *st, stmt_ty s); static int symtable_visit_expr(struct symtable *st, expr_ty s); @@ -259,6 +256,7 @@ static int symtable_visit_withitem(struct symtable *st, withitem_ty item); static int symtable_visit_match_case(struct symtable *st, match_case_ty m); static int symtable_visit_pattern(struct symtable *st, pattern_ty s); static int symtable_raise_if_annotation_block(struct symtable *st, const char *, expr_ty); +static int symtable_raise_if_not_coroutine(struct symtable *st, const char *msg, _Py_SourceLocation loc); static int symtable_raise_if_comprehension_block(struct symtable *st, expr_ty); /* For debugging purposes only */ @@ -311,8 +309,8 @@ static void _dump_symtable(PySTEntryObject* ste, PyObject* prefix) ste->ste_comp_iter_target ? " comp_iter_target" : "", ste->ste_can_see_class_scope ? 
" can_see_class_scope" : "", prefix, - ste->ste_lineno, - ste->ste_col_offset, + ste->ste_loc.lineno, + ste->ste_loc.col_offset, prefix ); assert(msg != NULL); @@ -424,7 +422,9 @@ _PySymtable_Build(mod_ty mod, PyObject *filename, _PyFutureFeatures *future) st->recursion_limit = Py_C_RECURSION_LIMIT; /* Make the initial symbol information gathering pass */ - if (!symtable_enter_block(st, &_Py_ID(top), ModuleBlock, (void *)mod, 0, 0, 0, 0)) { + + _Py_SourceLocation loc0 = {0, 0, 0, 0}; + if (!symtable_enter_block(st, &_Py_ID(top), ModuleBlock, (void *)mod, loc0)) { _PySymtable_Free(st); return NULL; } @@ -1379,11 +1379,9 @@ symtable_enter_existing_block(struct symtable *st, PySTEntryObject* ste) static int symtable_enter_block(struct symtable *st, identifier name, _Py_block_ty block, - void *ast, int lineno, int col_offset, - int end_lineno, int end_col_offset) + void *ast, _Py_SourceLocation loc) { - PySTEntryObject *ste = ste_new(st, name, block, ast, - lineno, col_offset, end_lineno, end_col_offset); + PySTEntryObject *ste = ste_new(st, name, block, ast, loc); if (ste == NULL) return 0; int result = symtable_enter_existing_block(st, ste); @@ -1410,7 +1408,7 @@ symtable_lookup(struct symtable *st, PyObject *name) static int symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _symtable_entry *ste, - int lineno, int col_offset, int end_lineno, int end_col_offset) + _Py_SourceLocation loc) { PyObject *o; PyObject *dict; @@ -1425,16 +1423,12 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s if ((flag & DEF_PARAM) && (val & DEF_PARAM)) { /* Is it better to use 'mangled' or 'name' here? */ PyErr_Format(PyExc_SyntaxError, DUPLICATE_ARGUMENT, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - lineno, col_offset + 1, - end_lineno, end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, loc); goto error; } if ((flag & DEF_TYPE_PARAM) && (val & DEF_TYPE_PARAM)) { PyErr_Format(PyExc_SyntaxError, DUPLICATE_TYPE_PARAM, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - lineno, col_offset + 1, - end_lineno, end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, loc); goto error; } val |= flag; @@ -1454,9 +1448,7 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s if (val & (DEF_GLOBAL | DEF_NONLOCAL)) { PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_INNER_LOOP_CONFLICT, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - lineno, col_offset + 1, - end_lineno, end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, loc); goto error; } val |= DEF_COMP_ITER; @@ -1501,33 +1493,28 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s } static int -symtable_add_def(struct symtable *st, PyObject *name, int flag, - int lineno, int col_offset, int end_lineno, int end_col_offset) +symtable_add_def(struct symtable *st, PyObject *name, int flag, _Py_SourceLocation loc) { if ((flag & DEF_TYPE_PARAM) && st->st_cur->ste_mangled_names != NULL) { if(PySet_Add(st->st_cur->ste_mangled_names, name) < 0) { return 0; } } - return symtable_add_def_helper(st, name, flag, st->st_cur, - lineno, col_offset, end_lineno, end_col_offset); + return symtable_add_def_helper(st, name, flag, st->st_cur, loc); } static int symtable_enter_type_param_block(struct symtable *st, identifier name, void *ast, int has_defaults, int has_kwdefaults, - enum _stmt_kind kind, - int lineno, int col_offset, - int end_lineno, int end_col_offset) + enum _stmt_kind kind, _Py_SourceLocation loc) { 
_Py_block_ty current_type = st->st_cur->ste_type; - if(!symtable_enter_block(st, name, TypeParametersBlock, ast, lineno, - col_offset, end_lineno, end_col_offset)) { + if(!symtable_enter_block(st, name, TypeParametersBlock, ast, loc)) { return 0; } if (current_type == ClassBlock) { st->st_cur->ste_can_see_class_scope = 1; - if (!symtable_add_def(st, &_Py_ID(__classdict__), USE, lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_ID(__classdict__), USE, loc)) { return 0; } } @@ -1535,36 +1522,30 @@ symtable_enter_type_param_block(struct symtable *st, identifier name, _Py_DECLARE_STR(type_params, ".type_params"); // It gets "set" when we create the type params tuple and // "used" when we build up the bases. - if (!symtable_add_def(st, &_Py_STR(type_params), DEF_LOCAL, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(type_params), DEF_LOCAL, loc)) { return 0; } - if (!symtable_add_def(st, &_Py_STR(type_params), USE, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(type_params), USE, loc)) { return 0; } // This is used for setting the generic base _Py_DECLARE_STR(generic_base, ".generic_base"); - if (!symtable_add_def(st, &_Py_STR(generic_base), DEF_LOCAL, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(generic_base), DEF_LOCAL, loc)) { return 0; } - if (!symtable_add_def(st, &_Py_STR(generic_base), USE, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(generic_base), USE, loc)) { return 0; } } if (has_defaults) { _Py_DECLARE_STR(defaults, ".defaults"); - if (!symtable_add_def(st, &_Py_STR(defaults), DEF_PARAM, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(defaults), DEF_PARAM, loc)) { return 0; } } if (has_kwdefaults) { _Py_DECLARE_STR(kwdefaults, ".kwdefaults"); - if (!symtable_add_def(st, &_Py_STR(kwdefaults), DEF_PARAM, - lineno, col_offset, end_lineno, end_col_offset)) { + if (!symtable_add_def(st, &_Py_STR(kwdefaults), DEF_PARAM, loc)) { return 0; } } @@ -1627,8 +1608,7 @@ symtable_enter_type_param_block(struct symtable *st, identifier name, } while(0) static int -symtable_record_directive(struct symtable *st, identifier name, int lineno, - int col_offset, int end_lineno, int end_col_offset) +symtable_record_directive(struct symtable *st, identifier name, _Py_SourceLocation loc) { PyObject *data, *mangled; int res; @@ -1640,7 +1620,8 @@ symtable_record_directive(struct symtable *st, identifier name, int lineno, mangled = _Py_MaybeMangle(st->st_private, st->st_cur, name); if (!mangled) return 0; - data = Py_BuildValue("(Niiii)", mangled, lineno, col_offset, end_lineno, end_col_offset); + data = Py_BuildValue("(Niiii)", mangled, loc.lineno, loc.col_offset, + loc.end_lineno, loc.end_col_offset); if (!data) return 0; res = PyList_Append(st->st_cur->ste_directives, data); @@ -1660,6 +1641,35 @@ has_kwonlydefaults(asdl_arg_seq *kwonlyargs, asdl_expr_seq *kw_defaults) return 0; } +static int +check_import_from(struct symtable *st, stmt_ty s) +{ + assert(s->kind == ImportFrom_kind); + _Py_SourceLocation fut = st->st_future->ff_location; + if (s->v.ImportFrom.module && s->v.ImportFrom.level == 0 && + _PyUnicode_EqualToASCIIString(s->v.ImportFrom.module, "__future__") && + ((s->lineno > fut.lineno) || + ((s->lineno == fut.end_lineno) && (s->col_offset > fut.end_col_offset)))) + { + PyErr_SetString(PyExc_SyntaxError, + "from __future__ imports must occur " + "at 
the beginning of the file"); + SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); + return 0; + } + return 1; +} + +static void +maybe_set_ste_coroutine_for_module(struct symtable *st, stmt_ty s) +{ + if ((st->st_future->ff_features & PyCF_ALLOW_TOP_LEVEL_AWAIT) && + (st->st_cur->ste_type == ModuleBlock)) + { + st->st_cur->ste_coroutine = 1; + } +} + static int symtable_visit_stmt(struct symtable *st, stmt_ty s) { @@ -1741,9 +1751,9 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) VISIT_SEQ(st, expr, s->v.ClassDef.bases); VISIT_SEQ(st, keyword, s->v.ClassDef.keywords); if (!symtable_enter_block(st, s->v.ClassDef.name, ClassBlock, - (void *)s, s->lineno, s->col_offset, - s->end_lineno, s->end_col_offset)) + (void *)s, LOCATION(s))) { VISIT_QUIT(st, 0); + } st->st_private = s->v.ClassDef.name; if (asdl_seq_LEN(s->v.ClassDef.type_params) > 0) { if (!symtable_add_def(st, &_Py_ID(__type_params__), @@ -1783,8 +1793,9 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) VISIT_SEQ(st, type_param, s->v.TypeAlias.type_params); } if (!symtable_enter_block(st, name, TypeAliasBlock, - (void *)s, LOCATION(s))) + (void *)s, LOCATION(s))) { VISIT_QUIT(st, 0); + } st->st_cur->ste_can_see_class_scope = is_in_class; if (is_in_class && !symtable_add_def(st, &_Py_ID(__classdict__), USE, LOCATION(s->v.TypeAlias.value))) { VISIT_QUIT(st, 0); @@ -1825,11 +1836,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) PyErr_Format(PyExc_SyntaxError, cur & DEF_GLOBAL ? GLOBAL_ANNOT : NONLOCAL_ANNOT, e_name->v.Name.id); - PyErr_RangedSyntaxLocationObject(st->st_filename, - s->lineno, - s->col_offset + 1, - s->end_lineno, - s->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); VISIT_QUIT(st, 0); } if (s->v.AnnAssign.simple && @@ -1914,6 +1921,9 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) break; case ImportFrom_kind: VISIT_SEQ(st, alias, s->v.ImportFrom.names); + if (!check_import_from(st, s)) { + VISIT_QUIT(st, 0); + } break; case Global_kind: { Py_ssize_t i; @@ -1936,18 +1946,15 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) } PyErr_Format(PyExc_SyntaxError, msg, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - s->lineno, - s->col_offset + 1, - s->end_lineno, - s->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); VISIT_QUIT(st, 0); } - if (!symtable_add_def(st, name, DEF_GLOBAL, LOCATION(s))) + if (!symtable_add_def(st, name, DEF_GLOBAL, LOCATION(s))) { VISIT_QUIT(st, 0); - if (!symtable_record_directive(st, name, s->lineno, s->col_offset, - s->end_lineno, s->end_col_offset)) + } + if (!symtable_record_directive(st, name, LOCATION(s))) { VISIT_QUIT(st, 0); + } } break; } @@ -1971,18 +1978,14 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) msg = NONLOCAL_AFTER_ASSIGN; } PyErr_Format(PyExc_SyntaxError, msg, name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - s->lineno, - s->col_offset + 1, - s->end_lineno, - s->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); VISIT_QUIT(st, 0); } if (!symtable_add_def(st, name, DEF_NONLOCAL, LOCATION(s))) VISIT_QUIT(st, 0); - if (!symtable_record_directive(st, name, s->lineno, s->col_offset, - s->end_lineno, s->end_col_offset)) + if (!symtable_record_directive(st, name, LOCATION(s))) { VISIT_QUIT(st, 0); + } } break; } @@ -2050,10 +2053,18 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) break; } case AsyncWith_kind: + maybe_set_ste_coroutine_for_module(st, s); + if (!symtable_raise_if_not_coroutine(st, ASYNC_WITH_OUTISDE_ASYNC_FUNC, LOCATION(s))) { + 
VISIT_QUIT(st, 0); + } VISIT_SEQ(st, withitem, s->v.AsyncWith.items); VISIT_SEQ(st, stmt, s->v.AsyncWith.body); break; case AsyncFor_kind: + maybe_set_ste_coroutine_for_module(st, s); + if (!symtable_raise_if_not_coroutine(st, ASYNC_FOR_OUTISDE_ASYNC_FUNC, LOCATION(s))) { + VISIT_QUIT(st, 0); + } VISIT(st, expr, s->v.AsyncFor.target); VISIT(st, expr, s->v.AsyncFor.iter); VISIT_SEQ(st, stmt, s->v.AsyncFor.body); @@ -2088,11 +2099,7 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) if ((target_in_scope & DEF_COMP_ITER) && (target_in_scope & DEF_LOCAL)) { PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_CONFLICT, target_name); - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, - e->col_offset + 1, - e->end_lineno, - e->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); VISIT_QUIT(st, 0); } continue; @@ -2105,20 +2112,24 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e))) VISIT_QUIT(st, 0); } else { - if (!symtable_add_def(st, target_name, DEF_NONLOCAL, LOCATION(e))) + if (!symtable_add_def(st, target_name, DEF_NONLOCAL, LOCATION(e))) { VISIT_QUIT(st, 0); + } } - if (!symtable_record_directive(st, target_name, LOCATION(e))) + if (!symtable_record_directive(st, target_name, LOCATION(e))) { VISIT_QUIT(st, 0); + } return symtable_add_def_helper(st, target_name, DEF_LOCAL, ste, LOCATION(e)); } /* If we find a ModuleBlock entry, add as GLOBAL */ if (ste->ste_type == ModuleBlock) { - if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e))) + if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e))) { VISIT_QUIT(st, 0); - if (!symtable_record_directive(st, target_name, LOCATION(e))) + } + if (!symtable_record_directive(st, target_name, LOCATION(e))) { VISIT_QUIT(st, 0); + } return symtable_add_def_helper(st, target_name, DEF_GLOBAL, ste, LOCATION(e)); } @@ -2143,11 +2154,7 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) default: Py_UNREACHABLE(); } - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, - e->col_offset + 1, - e->end_lineno, - e->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); VISIT_QUIT(st, 0); } } @@ -2165,11 +2172,7 @@ symtable_handle_namedexpr(struct symtable *st, expr_ty e) if (st->st_cur->ste_comp_iter_expr > 0) { /* Assignment isn't allowed in a comprehension iterable expression */ PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_ITER_EXPR); - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, - e->col_offset + 1, - e->end_lineno, - e->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); return 0; } if (st->st_cur->ste_comprehension) { @@ -2214,10 +2217,9 @@ symtable_visit_expr(struct symtable *st, expr_ty e) if (e->v.Lambda.args->kw_defaults) VISIT_SEQ_WITH_NULL(st, expr, e->v.Lambda.args->kw_defaults); if (!symtable_enter_block(st, &_Py_ID(lambda), - FunctionBlock, (void *)e, - e->lineno, e->col_offset, - e->end_lineno, e->end_col_offset)) + FunctionBlock, (void *)e, LOCATION(e))) { VISIT_QUIT(st, 0); + } VISIT(st, arguments, e->v.Lambda.args); VISIT(st, expr, e->v.Lambda.body); if (!symtable_exit_block(st)) @@ -2349,8 +2351,9 @@ symtable_visit_type_param_bound_or_default( { if (e) { int is_in_class = st->st_cur->ste_can_see_class_scope; - if (!symtable_enter_block(st, name, TypeVariableBlock, key, LOCATION(e))) + if (!symtable_enter_block(st, name, TypeVariableBlock, key, LOCATION(e))) { return 0; + } st->st_cur->ste_can_see_class_scope = 
is_in_class; if (is_in_class && !symtable_add_def(st, &_Py_ID(__classdict__), USE, LOCATION(e))) { @@ -2483,7 +2486,7 @@ symtable_implicit_arg(struct symtable *st, int pos) PyObject *id = PyUnicode_FromFormat(".%d", pos); if (id == NULL) return 0; - if (!symtable_add_def(st, id, DEF_PARAM, ST_LOCATION(st->st_cur))) { + if (!symtable_add_def(st, id, DEF_PARAM, st->st_cur->ste_loc)) { Py_DECREF(id); return 0; } @@ -2704,14 +2707,8 @@ symtable_visit_alias(struct symtable *st, alias_ty a) } else { if (st->st_cur->ste_type != ModuleBlock) { - int lineno = a->lineno; - int col_offset = a->col_offset; - int end_lineno = a->end_lineno; - int end_col_offset = a->end_col_offset; PyErr_SetString(PyExc_SyntaxError, IMPORT_STAR_WARNING); - PyErr_RangedSyntaxLocationObject(st->st_filename, - lineno, col_offset + 1, - end_lineno, end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(a)); Py_DECREF(store_name); return 0; } @@ -2760,9 +2757,7 @@ symtable_handle_comprehension(struct symtable *st, expr_ty e, st->st_cur->ste_comp_iter_expr--; /* Create comprehension scope for the rest */ if (!scope_name || - !symtable_enter_block(st, scope_name, FunctionBlock, (void *)e, - e->lineno, e->col_offset, - e->end_lineno, e->end_col_offset)) { + !symtable_enter_block(st, scope_name, FunctionBlock, (void *)e, LOCATION(e))) { return 0; } switch(e->kind) { @@ -2866,11 +2861,7 @@ symtable_raise_if_annotation_block(struct symtable *st, const char *name, expr_t else return 1; - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, - e->col_offset + 1, - e->end_lineno, - e->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); return 0; } @@ -2882,12 +2873,20 @@ symtable_raise_if_comprehension_block(struct symtable *st, expr_ty e) { (type == SetComprehension) ? "'yield' inside set comprehension" : (type == DictComprehension) ? 
"'yield' inside dict comprehension" : "'yield' inside generator expression"); - PyErr_RangedSyntaxLocationObject(st->st_filename, - e->lineno, e->col_offset + 1, - e->end_lineno, e->end_col_offset + 1); + SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); VISIT_QUIT(st, 0); } +static int +symtable_raise_if_not_coroutine(struct symtable *st, const char *msg, _Py_SourceLocation loc) { + if (!st->st_cur->ste_coroutine) { + PyErr_SetString(PyExc_SyntaxError, msg); + SET_ERROR_LOCATION(st->st_filename, loc); + return 0; + } + return 1; +} + struct symtable * _Py_SymtableStringObjectFlags(const char *str, PyObject *filename, int start, PyCompilerFlags *flags) diff --git a/Python/tracemalloc.c b/Python/tracemalloc.c index fee7dd0e56d96d..e58b60ddd5e484 100644 --- a/Python/tracemalloc.c +++ b/Python/tracemalloc.c @@ -838,7 +838,7 @@ _PyTraceMalloc_Init(void) tracemalloc_tracebacks = hashtable_new(hashtable_hash_traceback, hashtable_compare_traceback, - NULL, raw_free); + raw_free, NULL); tracemalloc_traces = tracemalloc_create_traces_table(); tracemalloc_domains = tracemalloc_create_domains_table(); diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index 6b1af1b59f14d8..ec365bad3992d5 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -106,13 +106,15 @@ class StackItem: def __str__(self) -> str: cond = f" if ({self.condition})" if self.condition else "" - size = f"[{self.size}]" if self.size != "1" else "" + size = f"[{self.size}]" if self.size else "" type = "" if self.type is None else f"{self.type} " return f"{type}{self.name}{size}{cond} {self.peek}" def is_array(self) -> bool: - return self.type == "_PyStackRef *" + return self.size != "" + def get_size(self) -> str: + return self.size if self.size else "1" @dataclass class StackEffect: @@ -293,7 +295,7 @@ def convert_stack_item(item: parser.StackEffect, replace_op_arg_1: str | None) - if replace_op_arg_1 and OPARG_AND_1.match(item.cond): cond = replace_op_arg_1 return StackItem( - item.name, item.type, cond, (item.size or "1") + item.name, item.type, cond, item.size ) def analyze_stack(op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | None = None) -> StackEffect: @@ -431,6 +433,7 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "CONVERSION_FAILED", "_PyList_FromArraySteal", "_PyTuple_FromArraySteal", + "_PyTuple_FromStackRefSteal", ) ESCAPING_FUNCTIONS = ( diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index e4e0c9b658c19d..9314bb9e79687f 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -5,9 +5,10 @@ Instruction, Uop, Properties, + StackItem, ) from cwriter import CWriter -from typing import Callable, Mapping, TextIO, Iterator +from typing import Callable, Mapping, TextIO, Iterator, Tuple from lexer import Token from stack import Stack @@ -24,6 +25,15 @@ def root_relative_path(filename: str) -> str: return filename +def type_and_null(var: StackItem) -> Tuple[str, str]: + if var.type: + return var.type, "NULL" + elif var.is_array(): + return "_PyStackRef *", "NULL" + else: + return "_PyStackRef", "PyStackRef_NULL" + + def write_header( generator: str, sources: list[str], outfile: TextIO, comment: str = "//" ) -> None: @@ -126,7 +136,7 @@ def replace_decrefs( for var in uop.stack.inputs: if var.name == "unused" or var.name == "null" or var.peek: continue - if var.size != "1": + if var.size: out.emit(f"for (int _i = {var.size}; --_i >= 0;) 
{{\n") out.emit(f"PyStackRef_CLOSE({var.name}[_i]);\n") out.emit("}\n") diff --git a/Tools/cases_generator/parsing.py b/Tools/cases_generator/parsing.py index 0bd4229e2beaf2..8957838f7a90a1 100644 --- a/Tools/cases_generator/parsing.py +++ b/Tools/cases_generator/parsing.py @@ -285,7 +285,6 @@ def stack_effect(self) -> StackEffect | None: if not (size := self.expression()): raise self.make_syntax_error("Expected expression") self.require(lx.RBRACKET) - type_text = "_PyStackRef *" size_text = size.text.strip() return StackEffect(tkn.text, type_text, cond_text, size_text) return None diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index c0e1278e519143..ebe62df537f15f 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -28,14 +28,15 @@ def var_size(var: StackItem) -> str: if var.condition == "0": return "0" elif var.condition == "1": - return var.size - elif var.condition == "oparg & 1" and var.size == "1": + return var.get_size() + elif var.condition == "oparg & 1" and not var.size: return f"({var.condition})" else: - return f"(({var.condition}) ? {var.size} : 0)" - else: + return f"(({var.condition}) ? {var.get_size()} : 0)" + elif var.size: return var.size - + else: + return "1" @dataclass class StackOffset: diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py index c9dce1d5f1804e..85be673b1c396c 100644 --- a/Tools/cases_generator/tier1_generator.py +++ b/Tools/cases_generator/tier1_generator.py @@ -13,12 +13,14 @@ analyze_files, Skip, analysis_error, + StackItem, ) from generators_common import ( DEFAULT_INPUT, ROOT, write_header, emit_tokens, + type_and_null, ) from cwriter import CWriter from typing import TextIO @@ -38,19 +40,16 @@ def declare_variables(inst: Instruction, out: CWriter) -> None: for var in reversed(uop.stack.inputs): if var.name not in variables: variables.add(var.name) - type, null = (var.type, "NULL") if var.type else ("_PyStackRef", "PyStackRef_NULL") + type, null = type_and_null(var) space = " " if type[-1].isalnum() else "" if var.condition: out.emit(f"{type}{space}{var.name} = {null};\n") else: - if var.is_array(): - out.emit(f"{var.type}{space}{var.name};\n") - else: - out.emit(f"{type}{space}{var.name};\n") + out.emit(f"{type}{space}{var.name};\n") for var in uop.stack.outputs: if var.name not in variables: variables.add(var.name) - type, null = (var.type, "NULL") if var.type else ("_PyStackRef", "PyStackRef_NULL") + type, null = type_and_null(var) space = " " if type[-1].isalnum() else "" if var.condition: out.emit(f"{type}{space}{var.name} = {null};\n") diff --git a/Tools/cases_generator/tier2_generator.py b/Tools/cases_generator/tier2_generator.py index f3769bd31c295d..7a69aa6e121fa7 100644 --- a/Tools/cases_generator/tier2_generator.py +++ b/Tools/cases_generator/tier2_generator.py @@ -20,6 +20,7 @@ emit_tokens, emit_to, REPLACEMENT_FUNCTIONS, + type_and_null, ) from cwriter import CWriter from typing import TextIO, Iterator @@ -35,7 +36,7 @@ def declare_variable( if var.name in variables: return variables.add(var.name) - type, null = (var.type, "NULL") if var.type else ("_PyStackRef", "PyStackRef_NULL") + type, null = type_and_null(var) space = " " if type[-1].isalnum() else "" if var.condition: out.emit(f"{type}{space}{var.name} = {null};\n") diff --git a/Tools/jit/_stencils.py b/Tools/jit/_stencils.py index 68eb1d13394170..1c6a9edb39840d 100644 --- a/Tools/jit/_stencils.py +++ b/Tools/jit/_stencils.py @@ -184,7 +184,7 @@ def pad(self, alignment: int) 
-> None: self.disassembly.append(f"{offset:x}: {' '.join(['00'] * padding)}") self.body.extend([0] * padding) - def emit_aarch64_trampoline(self, hole: Hole, alignment: int) -> None: + def emit_aarch64_trampoline(self, hole: Hole, alignment: int) -> Hole: """Even with the large code model, AArch64 Linux insists on 28-bit jumps.""" assert hole.symbol is not None reuse_trampoline = hole.symbol in self.trampolines @@ -194,14 +194,10 @@ def emit_aarch64_trampoline(self, hole: Hole, alignment: int) -> None: else: self.pad(alignment) base = len(self.body) - where = slice(hole.offset, hole.offset + 4) - instruction = int.from_bytes(self.body[where], sys.byteorder) - instruction &= 0xFC000000 - instruction |= ((base - hole.offset) >> 2) & 0x03FFFFFF - self.body[where] = instruction.to_bytes(4, sys.byteorder) + new_hole = hole.replace(addend=base, symbol=None, value=HoleValue.DATA) if reuse_trampoline: - return + return new_hole self.disassembly += [ f"{base + 4 * 0:x}: 58000048 ldr x8, 8", @@ -219,6 +215,7 @@ def emit_aarch64_trampoline(self, hole: Hole, alignment: int) -> None: self.body.extend(code) self.holes.append(hole.replace(offset=base + 8, kind="R_AARCH64_ABS64")) self.trampolines[hole.symbol] = base + return new_hole def remove_jump(self, *, alignment: int = 1) -> None: """Remove a zero-length continuation jump, if it exists.""" @@ -294,8 +291,9 @@ def process_relocations(self, *, alignment: int = 1) -> None: in {"R_AARCH64_CALL26", "R_AARCH64_JUMP26", "ARM64_RELOC_BRANCH26"} and hole.value is HoleValue.ZERO ): - self.code.emit_aarch64_trampoline(hole, alignment) + new_hole = self.data.emit_aarch64_trampoline(hole, alignment) self.code.holes.remove(hole) + self.code.holes.append(new_hole) self.code.remove_jump(alignment=alignment) self.code.pad(alignment) self.data.pad(8) diff --git a/Tools/requirements-hypothesis.txt b/Tools/requirements-hypothesis.txt index 9d5a18c881bf36..ab3f39ac6ee087 100644 --- a/Tools/requirements-hypothesis.txt +++ b/Tools/requirements-hypothesis.txt @@ -1,4 +1,4 @@ # Requirements file for hypothesis that # we use to run our property-based tests in CI. -hypothesis==6.100.2 +hypothesis==6.104.2 diff --git a/Tools/tsan/suppressions_free_threading.txt b/Tools/tsan/suppressions_free_threading.txt index 534a0cedb743dd..0955387dfb8370 100644 --- a/Tools/tsan/suppressions_free_threading.txt +++ b/Tools/tsan/suppressions_free_threading.txt @@ -23,25 +23,12 @@ race:free_threadstate # These warnings trigger directly in a CPython function. 
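
For context on the Tools/cases_generator hunks above: a stack item's `size` is now the empty string for ordinary single-slot items, and the new `type_and_null()` helper centralises how a C declaration is derived from a stack item. The following standalone Python sketch uses simplified stand-in classes (not the real analyzer types) to show the intended mapping; the sample items are invented.

# Simplified stand-ins for StackItem/type_and_null; not the real generator code.
from dataclasses import dataclass

@dataclass
class Item:
    name: str
    type: str = ""   # explicit C type, if the DSL supplied one
    size: str = ""   # "" == one stack slot; non-empty == array of that length

    def is_array(self) -> bool:
        return self.size != ""

    def get_size(self) -> str:
        return self.size if self.size else "1"

def type_and_null(var: Item) -> tuple[str, str]:
    if var.type:                        # explicit type wins
        return var.type, "NULL"
    elif var.is_array():                # array items decay to a pointer
        return "_PyStackRef *", "NULL"
    else:                               # plain stack slot
        return "_PyStackRef", "PyStackRef_NULL"

for item in (Item("value"), Item("args", size="oparg"), Item("counter", type="uint16_t")):
    ctype, null = type_and_null(item)
    print(f"{ctype} {item.name};  // conditional items would be initialised to {null}")
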
-race_top:_add_to_weak_set
-race_top:_in_weak_set
 race_top:_PyEval_EvalFrameDefault
 race_top:assign_version_tag
-race_top:insertdict
-race_top:lookup_tp_dict
 race_top:new_reference
-# https://gist.github.com/colesbury/d13d033f413b4ad07929d044bed86c35
-race_top:set_discard_entry
-race_top:_PyDict_CheckConsistency
-race_top:_Py_dict_lookup_threadsafe
 race_top:_multiprocessing_SemLock_acquire_impl
-race_top:dictiter_new
-race_top:dictresize
-race_top:insert_to_emptydict
-race_top:insertdict
 race_top:list_get_item_ref
 race_top:make_pending_calls
-race_top:set_add_entry
 race_top:_Py_slot_tp_getattr_hook
 race_top:add_threadstate
 race_top:dump_traceback
diff --git a/Tools/wasm/wasi.py b/Tools/wasm/wasi.py
index f69299fd662806..a14f58bdac0cb2 100644
--- a/Tools/wasm/wasi.py
+++ b/Tools/wasm/wasi.py
@@ -26,6 +26,9 @@
 LOCAL_SETUP = CHECKOUT / "Modules" / "Setup.local"
 LOCAL_SETUP_MARKER = "# Generated by Tools/wasm/wasi.py\n".encode("utf-8")
 
+WASMTIME_VAR_NAME = "WASMTIME"
+WASMTIME_HOST_RUNNER_VAR = f"{{{WASMTIME_VAR_NAME}}}"
+
 
 def updated_env(updates={}):
     """Create a new dict representing the environment to use.
@@ -215,11 +218,20 @@ def configure_wasi_python(context, working_dir):
 
     # Use PYTHONPATH to include sysconfig data which must be anchored to the
    # WASI guest's `/` directory.
-    host_runner = context.host_runner.format(GUEST_DIR="/",
-                                             HOST_DIR=CHECKOUT,
-                                             ENV_VAR_NAME="PYTHONPATH",
-                                             ENV_VAR_VALUE=f"/{sysconfig_data}",
-                                             PYTHON_WASM=working_dir / "python.wasm")
+    args = {"GUEST_DIR": "/",
+            "HOST_DIR": CHECKOUT,
+            "ENV_VAR_NAME": "PYTHONPATH",
+            "ENV_VAR_VALUE": f"/{sysconfig_data}",
+            "PYTHON_WASM": working_dir / "python.wasm"}
+    # Check dynamically for wasmtime in case it was specified manually via
+    # `--host-runner`.
+    if WASMTIME_HOST_RUNNER_VAR in context.host_runner:
+        if wasmtime := shutil.which("wasmtime"):
+            args[WASMTIME_VAR_NAME] = wasmtime
+        else:
+            raise FileNotFoundError("wasmtime not found; download from "
+                                    "https://github.com/bytecodealliance/wasmtime")
+    host_runner = context.host_runner.format_map(args)
     env_additions = {"CONFIG_SITE": config_site, "HOSTRUNNER": host_runner}
     build_python = os.fsdecode(build_python_path())
     # The path to `configure` MUST be relative, else `python.wasm` is unable
@@ -277,7 +289,7 @@ def clean_contents(context):
 
 
 def main():
-    default_host_runner = (f"{shutil.which('wasmtime')} run "
+    default_host_runner = (f"{WASMTIME_HOST_RUNNER_VAR} run "
                            # Make sure the stack size will work for a pydebug
                            # build.
                            # Use 16 MiB stack.
diff --git a/configure b/configure
index de016083dcf377..d4fd7d4fc9c954 100755
--- a/configure
+++ b/configure
@@ -795,8 +795,6 @@ MODULE__POSIXSUBPROCESS_FALSE
 MODULE__POSIXSUBPROCESS_TRUE
 MODULE__PICKLE_FALSE
 MODULE__PICKLE_TRUE
-MODULE__OPCODE_FALSE
-MODULE__OPCODE_TRUE
 MODULE__LSPROF_FALSE
 MODULE__LSPROF_TRUE
 MODULE__JSON_FALSE
@@ -930,6 +928,7 @@ DEF_MAKE_RULE
 DEF_MAKE_ALL_RULE
 JIT_STENCILS_H
 REGEN_JIT_COMMAND
+ABI_THREAD
 ABIFLAGS
 LN
 MKDIR_P
@@ -8166,7 +8165,9 @@
 
 # For calculating the .so ABI tag.
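
The Tools/wasm/wasi.py hunk above defers resolving the wasmtime binary until the host-runner template is actually rendered. A minimal sketch of that substitution, with an invented template and path (only the `{WASMTIME}` placeholder behaviour is taken from the patch):

# Rough sketch of the {WASMTIME} placeholder handling; template/path are made up.
import shutil

WASMTIME_VAR_NAME = "WASMTIME"
WASMTIME_HOST_RUNNER_VAR = f"{{{WASMTIME_VAR_NAME}}}"   # the literal string "{WASMTIME}"

template = f"{WASMTIME_HOST_RUNNER_VAR} run {{PYTHON_WASM}}"
args = {"PYTHON_WASM": "cross-build/python.wasm"}       # illustrative path only
if WASMTIME_HOST_RUNNER_VAR in template:                # only resolve wasmtime if the template asks for it
    wasmtime = shutil.which("wasmtime")
    if wasmtime is None:
        raise FileNotFoundError("wasmtime not found on PATH")
    args[WASMTIME_VAR_NAME] = wasmtime
print(template.format_map(args))                        # e.g. "/usr/bin/wasmtime run cross-build/python.wasm"
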
+ ABIFLAGS="" +ABI_THREAD="" # Check for --disable-gil # --disable-gil @@ -8196,6 +8197,7 @@ printf "%s\n" "#define Py_GIL_DISABLED 1" >>confdefs.h # Add "t" for "threaded" ABIFLAGS="${ABIFLAGS}t" + ABI_THREAD="t" fi # Check for --with-pydebug @@ -13296,8 +13298,6 @@ case $PLATFORM_TRIPLET in #( perf_trampoline=yes ;; #( aarch64-linux-gnu) : perf_trampoline=yes ;; #( - riscv64-linux-gnu) : - perf_trampoline=yes ;; #( *) : perf_trampoline=no ;; @@ -24746,11 +24746,11 @@ fi -BINLIBDEST='$(LIBDIR)/python$(VERSION)' +BINLIBDEST='$(LIBDIR)/python$(VERSION)$(ABI_THREAD)' # Check for --with-platlibdir -# /usr/$LIDIRNAME/python$VERSION +# /usr/$PLATLIBDIR/python$(VERSION)$(ABI_THREAD) PLATLIBDIR="lib" { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-platlibdir" >&5 @@ -24769,7 +24769,7 @@ then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } PLATLIBDIR="$withval" - BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)' + BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)$(ABI_THREAD)' else { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } @@ -24783,9 +24783,9 @@ fi if test x$PLATFORM_TRIPLET = x; then - LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}/config-${LDVERSION}" + LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}${ABI_THREAD}/config-${LDVERSION}" else - LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}" + LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}${ABI_THREAD}/config-${LDVERSION}-${PLATFORM_TRIPLET}" fi @@ -29237,28 +29237,6 @@ then : -fi - - - if test "$py_cv_module__opcode" != "n/a" -then : - py_cv_module__opcode=yes -fi - if test "$py_cv_module__opcode" = yes; then - MODULE__OPCODE_TRUE= - MODULE__OPCODE_FALSE='#' -else - MODULE__OPCODE_TRUE='#' - MODULE__OPCODE_FALSE= -fi - - as_fn_append MODULE_BLOCK "MODULE__OPCODE_STATE=$py_cv_module__opcode$as_nl" - if test "x$py_cv_module__opcode" = xyes -then : - - - - fi @@ -31790,10 +31768,6 @@ if test -z "${MODULE__LSPROF_TRUE}" && test -z "${MODULE__LSPROF_FALSE}"; then as_fn_error $? "conditional \"MODULE__LSPROF\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi -if test -z "${MODULE__OPCODE_TRUE}" && test -z "${MODULE__OPCODE_FALSE}"; then - as_fn_error $? "conditional \"MODULE__OPCODE\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi if test -z "${MODULE__PICKLE_TRUE}" && test -z "${MODULE__PICKLE_FALSE}"; then as_fn_error $? "conditional \"MODULE__PICKLE\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 diff --git a/configure.ac b/configure.ac index 8e70d9d1f7697d..190f1f4a286232 100644 --- a/configure.ac +++ b/configure.ac @@ -1741,7 +1741,9 @@ fi # For calculating the .so ABI tag. 
 AC_SUBST([ABIFLAGS])
+AC_SUBST([ABI_THREAD])
 ABIFLAGS=""
+ABI_THREAD=""
 
 # Check for --disable-gil
 # --disable-gil
@@ -1758,6 +1760,7 @@ then
             [Define if you want to disable the GIL])
   # Add "t" for "threaded"
   ABIFLAGS="${ABIFLAGS}t"
+  ABI_THREAD="t"
 fi
 
 # Check for --with-pydebug
@@ -3711,7 +3714,6 @@ AC_MSG_CHECKING([perf trampoline])
 AS_CASE([$PLATFORM_TRIPLET],
   [x86_64-linux-gnu], [perf_trampoline=yes],
   [aarch64-linux-gnu], [perf_trampoline=yes],
-  [riscv64-linux-gnu], [perf_trampoline=yes],
   [perf_trampoline=no]
 )
 AC_MSG_RESULT([$perf_trampoline])
@@ -6237,11 +6239,11 @@ fi
 
 AC_SUBST([BINLIBDEST])
-BINLIBDEST='$(LIBDIR)/python$(VERSION)'
+BINLIBDEST='$(LIBDIR)/python$(VERSION)$(ABI_THREAD)'
 
 
 # Check for --with-platlibdir
-# /usr/$LIDIRNAME/python$VERSION
+# /usr/$PLATLIBDIR/python$(VERSION)$(ABI_THREAD)
 AC_SUBST([PLATLIBDIR])
 PLATLIBDIR="lib"
 AC_MSG_CHECKING([for --with-platlibdir])
@@ -6260,7 +6262,7 @@ if test -n "$withval" -a "$withval" != yes -a "$withval" != no
 then
   AC_MSG_RESULT([yes])
   PLATLIBDIR="$withval"
-  BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)'
+  BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)$(ABI_THREAD)'
 else
   AC_MSG_RESULT([no])
 fi],
@@ -6270,9 +6272,9 @@ fi],
 
 dnl define LIBPL after ABIFLAGS and LDVERSION is defined.
 AC_SUBST([PY_ENABLE_SHARED])
 if test x$PLATFORM_TRIPLET = x; then
-  LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}/config-${LDVERSION}"
+  LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}${ABI_THREAD}/config-${LDVERSION}"
 else
-  LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
+  LIBPL='$(prefix)'"/${PLATLIBDIR}/python${VERSION}${ABI_THREAD}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
 fi
 AC_SUBST([LIBPL])
@@ -7692,7 +7694,6 @@ PY_STDLIB_MOD_SIMPLE([_csv])
 PY_STDLIB_MOD_SIMPLE([_heapq])
 PY_STDLIB_MOD_SIMPLE([_json])
 PY_STDLIB_MOD_SIMPLE([_lsprof])
-PY_STDLIB_MOD_SIMPLE([_opcode])
 PY_STDLIB_MOD_SIMPLE([_pickle])
 PY_STDLIB_MOD_SIMPLE([_posixsubprocess])
 PY_STDLIB_MOD_SIMPLE([_queue])
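
Once the configure.ac change above is in a built interpreter, the effect on the layout can be observed from Python itself; the printed values below are illustrative only (POSIX builds; a free-threaded build is assumed for the "t" suffix):

# Quick check of the resulting layout; output values are examples, not from this patch.
import sys
import sysconfig

print(sys.abiflags)                       # e.g. "t" on a free-threaded build, "td" with pydebug
print(sysconfig.get_path("stdlib"))       # e.g. /usr/lib/python3.14t
print(sysconfig.get_config_var("LIBPL"))  # e.g. .../python3.14t/config-3.14t-x86_64-linux-gnu
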